exports.id = 184;
exports.ids = [184];
exports.modules = {
/***/ 28231:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const GraphemeSplitter = __webpack_require__(30323);
const ANSI_SEQUENCE = /^(.*?)(\x1b\[[^m]+m|\x1b\]8;;.*?(\x1b\\|\u0007))/;
const splitter = new GraphemeSplitter();
module.exports = (orig, at = 0, until = orig.length) => {
// Because to do this we'd need to know the printable length of the string,
// which would require to do two passes (or would complexify the main one)
if (at < 0 || until < 0)
throw new RangeError(`Negative indices aren't supported by this implementation`);
const length = until - at;
let output = ``;
let skipped = 0;
let visible = 0;
while (orig.length > 0) {
const lookup = orig.match(ANSI_SEQUENCE) || [orig, orig, undefined];
let graphemes = splitter.splitGraphemes(lookup[1]);
const skipping = Math.min(at - skipped, graphemes.length);
graphemes = graphemes.slice(skipping);
const displaying = Math.min(length - visible, graphemes.length);
output += graphemes.slice(0, displaying).join(``);
skipped += skipping;
visible += displaying;
if (typeof lookup[2] !== `undefined`)
output += lookup[2];
orig = orig.slice(lookup[0].length);
}
return output;
};
/***/ }),
/***/ 54916:
/***/ ((__unused_webpack_module, exports) => {
// Seed content for a user-maintained .dcignore file: header comments only,
// leaving the rule list for the user to fill in. The template literal is
// runtime data written to disk, so its contents must not be altered.
exports.file = `# Write glob rules for ignored files.
# Check syntax on https://deepcode.freshdesk.com/support/solutions/articles/60000531055-how-can-i-ignore-files-or-directories-
# Check examples on https://github.com/github/gitignore
`;
/***/ }),
/***/ 78105:
/***/ ((__unused_webpack_module, exports) => {
// Default .dcignore content: the same header as the custom template,
// followed by a large catalogue of ignore rules grouped per ecosystem
// (each `# Name` line starts one group), apparently assembled from
// github/gitignore templates. The template literal is runtime data
// written to disk verbatim — do not edit its contents.
exports.file = `# Write glob rules for ignored files.
# Check syntax on https://deepcode.freshdesk.com/support/solutions/articles/60000531055-how-can-i-ignore-files-or-directories-
# Check examples on https://github.com/github/gitignore
# Hidden directories
.*/
# Julia
deps/downloads/
deps/usr/
docs/build/
docs/site/
# CakePHP
/vendor/*
/tmp/cache/models/*
!/tmp/cache/models/empty
/tmp/cache/persistent/*
!/tmp/cache/persistent/empty
/tmp/cache/views/*
!/tmp/cache/views/empty
/tmp/sessions/*
!/tmp/sessions/empty
/tmp/tests/*
!/tmp/tests/empty
/logs/*
!/logs/empty
/app/tmp/*
/vendors/*
# KiCad
*~
_autosave-*
fp-info-cache
# Dart
build/
doc/api/
# PlayFramework
bin/
/db
/lib/
/logs/
/modules
/project/project
/project/target
/target
tmp/
test-result
/dist/
# Zephir
ext/build/
ext/modules/
ext/Makefile*
ext/config*
ext/autom4te*
ext/install-sh
ext/missing
ext/mkinstalldirs
ext/libtool
# RhodesRhomobile
rholog-*
sim-*
bin/libs
bin/RhoBundle
bin/tmp
bin/target
# AppEngine
appengine-generated/
# Textpattern
rpc/
sites/site*/admin/
sites/site*/private/
sites/site*/public/admin/
sites/site*/public/setup/
sites/site*/public/theme/
textpattern/
# ExpressionEngine
images/avatars/
images/captchas/
images/smileys/
images/member_photos/
images/signature_attachments/
images/pm_attachments/
sized/
thumbs/
_thumbs/
*/expressionengine/cache/*
# CMake
CMakeFiles
CMakeScripts
Testing
Makefile
_deps
# Qt
Makefile*
*build-*
# Yeoman
node_modules/
bower_components/
build/
dist/
# ExtJs
build/
ext/
# R
/*.Rcheck/
*_cache/
/cache/
docs/
po/*~
# Python
__pycache__/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
MANIFEST
htmlcov/
cover/
instance/
docs/_build/
target/
profile_default/
__pypackages__/
celerybeat-schedule
env/
venv/
ENV/
env.bak/
venv.bak/
/site
cython_debug/
# Magento
/media/*
!/media/customer
/media/customer/*
!/media/dhl
/media/dhl/*
!/media/downloadable
/media/downloadable/*
!/media/xmlconnect
/media/xmlconnect/*
!/media/xmlconnect/custom
/media/xmlconnect/custom/*
!/media/xmlconnect/original
/media/xmlconnect/original/*
!/media/xmlconnect/system
/media/xmlconnect/system/*
/var/*
!/var/package
/var/package/*
# CodeIgniter
*/config/development
*/cache/*
application/logs/*
/vendor/
# Haskell
dist
dist-*
cabal-dev
# ArchLinuxPackages
pkg/
# Elm
elm-stuff
repl-temp-*
# Lithium
libraries/*
resources/tmp/*
# Erlang
rel/example_project
deps
_build/
_checkouts/
# ForceDotCom
Referenced Packages
# Plone
bin/
build/
develop-eggs/
downloads/
eggs/
fake-eggs/
parts/
dist/
var/
# AppceleratorTitanium
build/
# ChefCookbook
/cookbooks
bin/*
# Objective-C
xcuserdata/
build/
DerivedData/
Carthage/Build/
fastlane/test_output
iOSInjectionProject/
# Opa
_build
_tracks
opa-debug-js
# Smalltalk
/package-cache
/play-cache
/play-stash
/github-cache
# SeamGen
/bootstrap/data
/bootstrap/tmp
/classes/
/dist/
/exploded-archives/
/test-build/
/test-output/
/test-report/
/target/
# Xilinx
*_synth_*
*/*/bd/*/hdl
*/*/*/bd/*/hdl
*/*/bd/*/ip/*/*/
*/*/*/bd/*/ip/*/*/
hw_handoff
ipshared
# Strapi
Icon
*~
$RECYCLE.BIN/
*#
nbproject
lib-cov
pids
logs
results
build
node_modules
testApp
coverage
cypress/screenshots
cypress/videos
dist
packages/strapi-generate-new/files/public/
# Snap
parts/
prime/
stage/
# Logtalk
lgt_tmp/
logtalk_tester_logs/
logtalk_doclet_logs/
# SPFx
logs
node_modules
dist
lib
solution
temp
coverage
bin
obj
# Vue
docs/_book
test/
# NWjs
locales/
pnacl/
# Cordova
/platforms
# Bazel
/bazel-*
# AltiumDesigner
History
__Previews
Project\ Logs*
Project\ Outputs*
# ThinkPHP
/Application/Runtime/
# Jigsaw
build_*
# Bitrix
/bitrix/*
!/bitrix/templates
!/bitrix/components
/bitrix/components/bitrix
!/bitrix/gadgets
/bitrix/gadgets/bitrix
!/bitrix/php_interface/
/upload/
# CodeSniffer
/wpcs/*
# Pimcore
/pimcore
/website/var/assets/*
/website/var/backup/*
/website/var/cache/*
/website/var/classes/Object*
!/website/var/classes/objectbricks
/website/var/config/Geo*
/website/var/config/object/*
/website/var/config/portal/*
/website/var/config/sqlreport/*
/website/var/email/*
/website/var/recyclebin/*
/website/var/search/*
/website/var/system/*
/website/var/tmp/*
/website/var/versions/asset/*
/website/var/versions/document/*
/website/var/versions/object/*
/website/var/user-image/*
# Magento1
/media/*
!/media/customer
/media/customer/*
!/media/dhl
/media/dhl/*
!/media/downloadable
/media/downloadable/*
!/media/xmlconnect
/media/xmlconnect/*
!/media/xmlconnect/custom
/media/xmlconnect/custom/*
!/media/xmlconnect/original
/media/xmlconnect/original/*
!/media/xmlconnect/system
/media/xmlconnect/system/*
/var/*
!/var/package
/var/package/*
# Magento2
/sitemap
/pub/sitemap
/app/config_sandbox
/app/code/Magento/TestModule*
/pub/media/attribute/*
/pub/media/analytics/*
/pub/media/catalog/*
/pub/media/customer/*
/pub/media/downloadable/*
/pub/media/favicon/*
/pub/media/import/*
/pub/media/logo/*
/pub/media/theme/*
/pub/media/theme_customization/*
/pub/media/wysiwyg/*
/pub/media/tmp/*
/pub/media/captcha/*
/pub/static/*
/var/*
/vendor/*
/generated/*
# Drupal7
files/
sites/*/files
sites/*/private
sites/*/translations
/includes
/misc
/modules
/profiles
/scripts
/themes
# InforCMS
[Mm]odel/[Dd]eployment
!Model/Portal/*/SupportFiles/[Bb]in/
!Model/Portal/PortalTemplates/*/SupportFiles/[Bb]in
# Kentico
!CMS/CMSAdminControls/*/
!CMS/CMSModules/System/*/
!CMS/App_Data/CIRepository/**
CMS/App_Data/AzureCache
CMS/App_Data/AzureTemp
CMS/App_Data/CMSTemp
CMS/App_Data/Persistent
CMS/CMSSiteUtils/Export
CMS/CMSSiteUtils/Import
CMS/App_Data/CMSModules/SmartSearch/**
!CMS/App_Data/CMSModules/SmartSearch/*/
!CMS/App_Data/CMSModules/SmartSearch/_StopWords/**
!CMS/App_Data/CMSModules/SmartSearch/_Synonyms/**
CMS/App_Data/DancingGoat
CMS/App_Data/Templates/CommunitySite
CMS/App_Data/Templates/CorporateSite
CMS/App_Data/Templates/DancingGoat
CMS/App_Data/Templates/EcommerceSite
CMS/App_Data/Templates/IntranetPortal
CMS/App_Data/Templates/PersonalSite
CMS/App_Themes/CommunitySite
CMS/App_Themes/CorporateSite
CMS/App_Themes/EcommerceSite
CMS/App_Themes/IntranetPortal*
CMS/App_Themes/PersonalSite
CMS/CMSTemplates/CorporateSite
CMS/CommunitySite
CMS/CorporateSite
CMS/DancingGoat
CMS/EcommerceSite
CMS/IntranetPortal
CMS/PersonalSite
# ROS2
install/
log/
build/
qtcreator-*
*~
COLCON_IGNORE
AMENT_IGNORE
# Splunk
local
# Racket
compiled/
# JupyterNotebooks
profile_default/
# Nikola
cache/
output/
# Red
quick-test/runnable/
system/tests/source/units/auto-tests/
tests/source/units/auto-tests/
# AtmelStudio
[Dd]ebug/
[Rr]elease/
# IAR_EWARM
EWARM/**/Obj
EWARM/**/List
EWARM/**/Exe
EWARM/settings
# esp-idf
build/
sdkconfig
# Phoenix
/tmp
/node_modules
/assets/node_modules
/priv/static/
/installer/_build
/installer/tmp
/installer/doc
/installer/deps
# JBoss4
/server/all/data
/server/all/log
/server/all/tmp
/server/all/work
/server/default/data
/server/default/log
/server/default/tmp
/server/default/work
/server/minimal/data
/server/minimal/log
/server/minimal/tmp
/server/minimal/work
# JBoss6
/server/all/data
/server/all/log
/server/all/tmp
/server/all/work
/server/default/data
/server/default/log
/server/default/tmp
/server/default/work
/server/minimal/data
/server/minimal/log
/server/minimal/tmp
/server/minimal/work
/server/jbossweb-standalone/data
/server/jbossweb-standalone/log
/server/jbossweb-standalone/tmp
/server/jbossweb-standalone/work
/server/standard/data
/server/standard/log
/server/standard/tmp
/server/standard/work
# Hugo
/public/
/resources/_gen/
# Puppet
pkg/*
spec/fixtures/*
coverage/*
vendor/*
# Kotlin
hs_err_pid*
# Composer
/vendor/
# Android
bin/
gen/
out/
build/
proguard/
captures/
freeline/
fastlane/screenshots
fastlane/test_output
lint/intermediates/
lint/generated/
lint/outputs/
lint/tmp/
# Nim
nimcache/
nimblecache/
htmldocs/
# Actionscript
bin-debug/
bin-release/
[Oo]bj/
[Bb]in/
# Maven
target/
# Agda
MAlonzo/**
# Unity
/[Ll]ibrary/
/[Tt]emp/
/[Oo]bj/
/[Bb]uild/
/[Bb]uilds/
/[Ll]ogs/
/[Uu]ser[Ss]ettings/
/[Mm]emoryCaptures/
/[Aa]ssets/Plugins/Editor/JetBrains*
ExportedObj/
/[Aa]ssets/[Ss]treamingAssets/aa/*
# GWT
war/gwt_bree/
gwt-unitCache/
war/WEB-INF/deploy/
war/WEB-INF/classes/
www-test/
# VirtualEnv
[Bb]in
[Ii]nclude
[Ll]ib
[Ll]ib64
[Ll]ocal
[Ss]cripts
# SBT
dist/*
target/
lib_managed/
project/boot/
project/plugins/project/
# PSoCCreator
Debug/
Release/
Export/
*/codegentemp
*/Generated_Source
# TextMate
tmtags
# MonoDevelop
test-results/
# SublimeText
Package Control.cache/
Package Control.ca-certs/
# Dreamweaver
_notes
_compareTemp
configs/
# NetBeans
**/nbproject/private/
build/
nbbuild/
dist/
nbdist/
# Windows
$RECYCLE.BIN/
# MATLAB
helpsearch*/
slprj/
sccprj/
codegen/
octave-workspace
# Octave
helpsearch*/
slprj/
sccprj/
codegen/
octave-workspace
# FlexBuilder
bin/
bin-debug/
bin-release/
# Xcode
xcuserdata/
build/
DerivedData/
# Lazarus
backup/
lib/
*.app/
# CVS
/CVS/*
**/CVS/*
# Eclipse
bin/
tmp/
# Momentics
x86/
arm/
arm-p/
# Linux
*~
# Virtuoso
lvsRunDir/*
drcRunDir/*
# Emacs
*~
\#*\#
auto-save-list
tramp
*_archive
/eshell/history
/eshell/lastdir
/elpa/
/auto/
dist/
/server/
# EiffelStudio
EIFGENs
# macOS
Icon
Network Trash Folder
Temporary Items
# XilinxISE
iseconfig/
xlnx_auto_0_xdb/
xst/
_ngo/
_xmsgs/
# CodeKit
/min
# SynopsysVCS
simv
simv.daidir/
simv.db.dir/
simv.vdb/
urgReport/
DVEfiles/
# Tags
TAGS
!TAGS/
tags
!tags/
GTAGS
GRTAGS
GPATH
GSYMS
# ModelSim
[_@]*
wlf*
cov*/
transcript*
# JetBrains
cmake-build-*/
out/
# WebMethods
**/IntegrationServer/datastore/
**/IntegrationServer/db/
**/IntegrationServer/DocumentStore/
**/IntegrationServer/lib/
**/IntegrationServer/logs/
**/IntegrationServer/replicate/
**/IntegrationServer/sdk/
**/IntegrationServer/support/
**/IntegrationServer/update/
**/IntegrationServer/userFtpRoot/
**/IntegrationServer/web/
**/IntegrationServer/WmRepository4/
**/IntegrationServer/XAStore/
**/IntegrationServer/packages/Wm*/
# Vim
*~
tags
# Calabash
rerun/
reports/
screenshots/
test-servers/
vendor
# JDeveloper
temp/
classes/
deploy/
javadoc/
# Drupal
/sites/*/files
/sites/*/public
/sites/*/private
/sites/*/files-public
/sites/*/files-private
/sites/*/translations
/sites/*/tmp
/sites/*/cache
/sites/simpletest
/core
/vendor
# UnrealEngine
Binaries/*
Plugins/*/Binaries/*
Build/*
!Build/*/
Build/*/**
Saved/*
Intermediate/*
Plugins/*/Intermediate/*
DerivedDataCache/*
# Symfony
/app/cache/*
/app/logs/*
/app/spool/*
/var/cache/*
/var/logs/*
/var/sessions/*
/var/log/*
/bin/*
!bin/console
!bin/symfony_requirements
/vendor/
/web/bundles/
/web/uploads/
/build/
**/Entity/*~
# JBoss
jboss/server/all/tmp/**/*
jboss/server/all/data/**/*
jboss/server/all/work/**/*
jboss/server/default/tmp/**/*
jboss/server/default/data/**/*
jboss/server/default/work/**/*
jboss/server/minimal/tmp/**/*
jboss/server/minimal/data/**/*
jboss/server/minimal/work/**/*
# SugarCRM
/cache/*
/custom/history/
/custom/modulebuilder/
/custom/working/
/custom/modules/*/Ext/
/custom/application/Ext/
/upload/*
/upload_backup/
# Leiningen
/lib/
/classes/
/target/
/checkouts/
# OpenCart
download/
image/data/
image/cache/
system/cache/
system/logs/
system/storage/
vqmod/logs/*
vqmod/vqcache/*
# VVVV
bin/
# Gradle
**/build/
# FuelPHP
/fuel/vendor
/docs/
/fuel/app/logs/*/*/*
/fuel/app/cache/*/*
# Autotools
/ar-lib
/mdate-sh
/py-compile
/test-driver
/ylwrap
/compile
/configure
/depcomp
/install-sh
/missing
/stamp-h1
Makefile
# Delphi
__history/
__recovery/
modules/
# GitBook
node_modules
_book
# Prestashop
/cache/*
!/cache/push/activity
!/cache/push/trends
/download/*
/img/*
!/img/jquery-ui
!/img/scenes
/upload/*
/vendor/*
/docs/phpdoc-sf/
/admin-dev/autoupgrade/*
/admin-dev/backups/*
/admin-dev/import/*
/admin-dev/export/*
themes/*/cache/*
config/xml/*
config/themes/*
modules/*
override/*
themes/*/
!themes/classic
!themes/_core
!themes/_libraries
bower_components/
node_modules/
php-cs-fixer
translations/*
mails/*
!mails/themes/
!mails/_partials/
themes/default-bootstrap/lang/*
themes/default-bootstrap/mails/*
!themes/default-bootstrap/mails/en/
themes/default-bootstrap/modules/*/mails/*
!themes/default-bootstrap/modules/*/mails/en
/bin/
/app/Resources/translations/*
!/app/Resources/translations/default
/build/
/var/*
!/var/cache
/var/cache/*
!/var/logs
/var/logs/*
!/var/sessions
/var/sessions/*
/vendor/
/web/bundles/
# Xojo
Builds*
Debug*/Debug*\ Libs
# WordPress
!wp-content/
wp-content/*
!wp-content/mu-plugins/
!wp-content/plugins/
!wp-content/themes/
wp-content/themes/twenty*/
node_modules/
# ROS
devel/
logs/
build/
bin/
lib/
msg_gen/
srv_gen/
build_isolated/
devel_isolated/
/cfg/cpp/
qtcreator-*
/planning/cfg
/planning/docs
*~
CATKIN_IGNORE
# Scala
hs_err_pid*
# VisualStudio
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
Generated\ Files/
[Tt]est[Rr]esult*/
[Dd]ebugPS/
[Rr]eleasePS/
BenchmarkDotNet.Artifacts/
artifacts/
_Chutzpah*
ipch/
$tf/
_ReSharper*/
_TeamCity*
_NCrunch_*
nCrunchTemp_*
AutoTest.Net/
[Ee]xpress/
DocProject/buildhelp/
DocProject/Help/Html2
DocProject/Help/html
publish/
PublishScripts/
**/[Pp]ackages/*
!**/[Pp]ackages/build/
csx/
ecf/
rcf/
AppPackages/
BundleArtifacts/
!?*.[Cc]ache/
ClientBin/
~$*
*~
Generated_Code/
_UpgradeReport_Files/
Backup*/
ServiceFabricBackup/
FakesAssemblies/
node_modules/
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.Server/GeneratedArtifacts
_Pvt_Extensions
paket-files/
__pycache__/
OpenCover/
ASALocalRun/
healthchecksdb
MigrationBackup/
# Qooxdoo
cache
cache-downloads
inspector
api
# Concrete5
error_log
files/cache/*
files/tmp/*
/application/files/*
/updates/*
# Grails
/web-app/WEB-INF/classes
/test/reports
/logs
/plugins
/web-app/plugins
/target
# Stella
obj/
# Processing
applet
out
# Rails
/public/system
/coverage/
/spec/tmp
/log/*
/tmp/*
/vendor/bundle
/vendor/assets/bower_components
node_modules/
/public/packs
/public/packs-test
/public/assets
/storage/*
/public/uploads
# PureScript
bower_components
node_modules
output
# Laravel
/vendor/
node_modules/
app/storage/
public/storage
public/hot
public_html/storage
public_html/hot
# TwinCAT3
_Boot/
_CompileInfo/
_Libraries/
# Elixir
/_build
/cover
/deps
/doc
# TurboGears2
data/*
dist
build
# Godot
data_*/
# Java
hs_err_pid*
# Swift
xcuserdata/
build/
DerivedData/
Carthage/Build/
Dependencies/
fastlane/test_output
iOSInjectionProject/
# Lilypond
*~
# Joomla
/administrator/cache/*
/administrator/components/com_actionlogs/*
/administrator/components/com_admin/*
/administrator/components/com_ajax/*
/administrator/components/com_associations/*
/administrator/components/com_banners/*
/administrator/components/com_cache/*
/administrator/components/com_categories/*
/administrator/components/com_checkin/*
/administrator/components/com_config/*
/administrator/components/com_contact/*
/administrator/components/com_content/*
/administrator/components/com_contenthistory/*
/administrator/components/com_cpanel/*
/administrator/components/com_fields/*
/administrator/components/com_finder/*
/administrator/components/com_installer/*
/administrator/components/com_joomlaupdate/*
/administrator/components/com_languages/*
/administrator/components/com_login/*
/administrator/components/com_media/*
/administrator/components/com_menus/*
/administrator/components/com_messages/*
/administrator/components/com_modules/*
/administrator/components/com_newsfeeds/*
/administrator/components/com_plugins/*
/administrator/components/com_postinstall/*
/administrator/components/com_privacy/*
/administrator/components/com_redirect/*
/administrator/components/com_search/*
/administrator/components/com_tags/*
/administrator/components/com_templates/*
/administrator/components/com_users/*
/administrator/help/*
/administrator/includes/*
/administrator/language/overrides/*
/administrator/logs/*
/administrator/modules/mod_custom/*
/administrator/modules/mod_feed/*
/administrator/modules/mod_latest/*
/administrator/modules/mod_latestactions/*
/administrator/modules/mod_logged/*
/administrator/modules/mod_login/*
/administrator/modules/mod_menu/*
/administrator/modules/mod_multilangstatus/*
/administrator/modules/mod_online/*
/administrator/modules/mod_popular/*
/administrator/modules/mod_privacy_dashboard/*
/administrator/modules/mod_quickicon/*
/administrator/modules/mod_sampledata/*
/administrator/modules/mod_stats_admin/*
/administrator/modules/mod_status/*
/administrator/modules/mod_submenu/*
/administrator/modules/mod_title/*
/administrator/modules/mod_toolbar/*
/administrator/modules/mod_unread/*
/administrator/modules/mod_version/*
/administrator/templates/hathor/*
/administrator/templates/isis/*
/administrator/templates/system/*
/bin/*
/cache/*
/cli/*
/components/com_ajax/*
/components/com_banners/*
/components/com_config/*
/components/com_contact/*
/components/com_content/*
/components/com_contenthistory/*
/components/com_fields/*
/components/com_finder/*
/components/com_mailto/*
/components/com_media/*
/components/com_menus/*
/components/com_modules/*
/components/com_newsfeeds/*
/components/com_privacy/*
/components/com_search/*
/components/com_tags/*
/components/com_users/*
/components/com_wrapper/*
/images/banners/*
/images/headers/*
/images/sampledata/*
/images/joomla*
/includes/*
/installation/*
/language/overrides/*
/layouts/joomla/*
/layouts/libraries/*
/layouts/plugins/*
/libraries/cms/*
/libraries/fof/*
/libraries/idna_convert/*
/libraries/joomla/*
/libraries/legacy/*
/libraries/php-encryption/*
/libraries/phpass/*
/libraries/phpmailer/*
/libraries/phputf8/*
/libraries/simplepie/*
/libraries/vendor/*
/media/cms/*
/media/com_associations/*
/media/com_contact/*
/media/com_content/*
/media/com_contenthistory/*
/media/com_fields/*
/media/com_finder/*
/media/com_joomlaupdate/*
/media/com_menus/*
/media/com_modules/*
/media/com_wrapper/*
/media/contacts/*
/media/editors/*
/media/jui/*
/media/mailto/*
/media/media/*
/media/mod_languages/*
/media/mod_sampledata/*
/media/overrider/*
/media/plg_captcha_recaptcha/*
/media/plg_captcha_recaptcha_invisible/*
/media/plg_quickicon_extensionupdate/*
/media/plg_quickicon_joomlaupdate/*
/media/plg_quickicon_privacycheck/*
/media/plg_system_highlight/*
/media/plg_system_stats/*
/media/plg_twofactorauth_totp/*
/media/system/*
/modules/mod_articles_archive/*
/modules/mod_articles_categories/*
/modules/mod_articles_category/*
/modules/mod_articles_latest/*
/modules/mod_articles_news/*
/modules/mod_articles_popular/*
/modules/mod_banners/*
/modules/mod_breadcrumbs/*
/modules/mod_custom/*
/modules/mod_feed/*
/modules/mod_finder/*
/modules/mod_footer/*
/modules/mod_languages/*
/modules/mod_login/*
/modules/mod_menu/*
/modules/mod_random_image/*
/modules/mod_related_items/*
/modules/mod_search/*
/modules/mod_stats/*
/modules/mod_syndicate/*
/modules/mod_tags_popular/*
/modules/mod_tags_similar/*
/modules/mod_users_latest/*
/modules/mod_whosonline/*
/modules/mod_wrapper/*
/plugins/actionlog/joomla/*
/plugins/authentication/cookie/*
/plugins/authentication/example/*
/plugins/authentication/gmail/*
/plugins/authentication/joomla/*
/plugins/authentication/ldap/*
/plugins/captcha/recaptcha/*
/plugins/captcha/recaptcha_invisible/*
/plugins/content/confirmconsent/*
/plugins/content/contact/*
/plugins/content/emailcloak/*
/plugins/content/example/*
/plugins/content/fields/*
/plugins/content/finder/*
/plugins/content/geshi/*
/plugins/content/joomla/*
/plugins/content/loadmodule/*
/plugins/content/pagebreak/*
/plugins/content/pagenavigation/*
/plugins/content/vote/*
/plugins/editors/codemirror/*
/plugins/editors/none/*
/plugins/editors/tinymce/*
/plugins/editors-xtd/article/*
/plugins/editors-xtd/contact/*
/plugins/editors-xtd/fields/*
/plugins/editors-xtd/image/*
/plugins/editors-xtd/menu/*
/plugins/editors-xtd/module/*
/plugins/editors-xtd/pagebreak/*
/plugins/editors-xtd/readmore/*
/plugins/extension/example/*
/plugins/extension/joomla/*
/plugins/fields/calendar/*
/plugins/fields/checkboxes/*
/plugins/fields/color/*
/plugins/fields/editor/*
/plugins/fields/imagelist/*
/plugins/fields/integer/*
/plugins/fields/list/*
/plugins/fields/media/*
/plugins/fields/radio/*
/plugins/fields/repeatable/*
/plugins/fields/sql/*
/plugins/fields/text/*
/plugins/fields/textarea/*
/plugins/fields/url/*
/plugins/fields/user/*
/plugins/fields/usergrouplist/*
/plugins/finder/categories/*
/plugins/finder/contacts/*
/plugins/finder/content/*
/plugins/finder/newsfeeds/*
/plugins/finder/tags/*
/plugins/installer/folderinstaller/*
/plugins/installer/packageinstaller/*
/plugins/installer/urlinstaller/*
/plugins/privacy/actionlogs/*
/plugins/privacy/consents/*
/plugins/privacy/contact/*
/plugins/privacy/content/*
/plugins/privacy/message/*
/plugins/privacy/user/*
/plugins/quickicon/extensionupdate/*
/plugins/quickicon/joomlaupdate/*
/plugins/quickicon/phpversioncheck/*
/plugins/quickicon/privacycheck/*
/plugins/sampledata/blog/*
/plugins/search/categories/*
/plugins/search/contacts/*
/plugins/search/content/*
/plugins/search/newsfeeds/*
/plugins/search/tags/*
/plugins/search/weblinks/*
/plugins/system/actionlogs/*
/plugins/system/cache/*
/plugins/system/debug/*
/plugins/system/fields/*
/plugins/system/highlight/*
/plugins/system/languagecode/*
/plugins/system/languagefilter/*
/plugins/system/log/*
/plugins/system/logout/*
/plugins/system/logrotation/*
/plugins/system/p3p/*
/plugins/system/privacyconsent/*
/plugins/system/redirect/*
/plugins/system/remember/*
/plugins/system/sef/*
/plugins/system/sessiongc/*
/plugins/system/stats/*
/plugins/system/updatenotification/*
/plugins/twofactorauth/totp/*
/plugins/twofactorauth/yubikey/*
/plugins/user/contactcreator/*
/plugins/user/example/*
/plugins/user/joomla/*
/plugins/user/profile/*
/plugins/user/terms/*
/templates/beez3/*
/templates/protostar/*
/templates/system/*
/tmp/*
# SymphonyCMS
manifest/cache/
manifest/logs/
manifest/tmp/
symphony/
workspace/uploads/
# ZendFramework
vendor/
data/logs/
data/cache/
data/sessions/
data/tmp/
temp/
data/DoctrineORMModule/Proxy/
data/DoctrineORMModule/cache/
demos/
extras/documentation
# C
*.dSYM/
# Node
logs
pids
lib-cov
coverage
bower_components
build/Release
node_modules/
jspm_packages/
web_modules/
out
dist
# LemonStand
/config/*
/controllers/*
/init/*
/logs/*
/phproad/*
/temp/*
/uploaded/*
/installer_files/*
/modules/backend/*
/modules/blog/*
/modules/cms/*
/modules/core/*
/modules/session/*
/modules/shop/*
/modules/system/*
/modules/users/*
# Waf
waf-*-*/
waf3-*-*/
# TeX
latex.out/
*-gnuplottex-*
*-tikzDictionary
_minted*
sympy-plots-for-*.tex/
pythontex-files-*/
TSWLatexianTemp*
*~[0-9]*
# Rust
debug/
target/
# Yii
assets/*
protected/runtime/*
themes/classic/views/
# Mercury
Mercury/
# Scrivener
*/QuickLook/
# Clojure
/lib/
/classes/
/target/
/checkouts/
# Phalcon
/cache/
/config/development/
# Typo3
/fileadmin/user_upload/
/fileadmin/_temp_/
/fileadmin/_processed_/
/uploads/
/typo3conf/temp_CACHED*
/typo3conf/ENABLE_INSTALL_TOOL
/FIRST_INSTALL
/typo3
/Packages
/typo3temp/
# JENKINS_HOME
!/jobs
jobs/**
!jobs/**/
builds
indexing
jobs/**/*workspace
# MetaProgrammingSystem
classes_gen
source_gen
test_gen
# CraftCMS
/craft/storage/*
!/craft/storage/rebrand
# CFWheels
plugins/**/*
files
db/sql
javascripts/bundles
stylesheets/bundles
# OCaml
_build/
_opam/
# Ruby
/coverage/
/InstalledFiles
/pkg/
/spec/reports/
/test/tmp/
/test/version_tmp/
/tmp/
build/
build-iPhoneOS/
build-iPhoneSimulator/
/_yardoc/
/doc/
/rdoc/
/vendor/bundle
/lib/bundler/man/
# Perl
!Build/
cover_db/
_build/
Build
inc/
/blib/
/_eumm/
/Makefile
/pm_to_blib
# Elisp
*~
# Jekyll
_site/
# D
docs/
# Packer
packer_cache/
# Umbraco
**/App_Data/Logs/
**/App_Data/[Pp]review/
**/App_Data/TEMP/
**/App_Data/NuGetBackup/
!**/App_Data/[Pp]ackages/*
!**/[Uu]mbraco/[Dd]eveloper/[Pp]ackages/*
!**/[Uu]mbraco/[Vv]iews/[Pp]ackages/*
**/App_Data/cache/
# Kohana
application/cache/*
application/logs/*
# Nanoc
output/
tmp/nanoc/
`;
/***/ }),
/***/ 55086:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const { file: DefaultDCIgnore } = __webpack_require__(78105);
const { file: CustomDCIgnore } = __webpack_require__(54916);
module.exports = {
DefaultDCIgnore,
CustomDCIgnore,
};
/***/ }),
/***/ 98925:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
const f = __webpack_require__(98633)
const DateTime = global.Date
class Date extends DateTime {
constructor (value) {
super(value)
this.isDate = true
}
toISOString () {
return `${this.getUTCFullYear()}-${f(2, this.getUTCMonth() + 1)}-${f(2, this.getUTCDate())}`
}
}
module.exports = value => {
const date = new Date(value)
/* istanbul ignore if */
if (isNaN(date)) {
throw new TypeError('Invalid Datetime')
} else {
return date
}
}
/***/ }),
/***/ 58904:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
const f = __webpack_require__(98633)
class FloatingDateTime extends Date {
constructor (value) {
super(value + 'Z')
this.isFloating = true
}
toISOString () {
const date = `${this.getUTCFullYear()}-${f(2, this.getUTCMonth() + 1)}-${f(2, this.getUTCDate())}`
const time = `${f(2, this.getUTCHours())}:${f(2, this.getUTCMinutes())}:${f(2, this.getUTCSeconds())}.${f(3, this.getUTCMilliseconds())}`
return `${date}T${time}`
}
}
module.exports = value => {
const date = new FloatingDateTime(value)
/* istanbul ignore if */
if (isNaN(date)) {
throw new TypeError('Invalid Datetime')
} else {
return date
}
}
/***/ }),
/***/ 76114:
/***/ ((module) => {
"use strict";
module.exports = value => {
const date = new Date(value)
/* istanbul ignore if */
if (isNaN(date)) {
throw new TypeError('Invalid Datetime')
} else {
return date
}
}
/***/ }),
/***/ 99439:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
const f = __webpack_require__(98633)
class Time extends Date {
constructor (value) {
super(`0000-01-01T${value}Z`)
this.isTime = true
}
toISOString () {
return `${f(2, this.getUTCHours())}:${f(2, this.getUTCMinutes())}:${f(2, this.getUTCSeconds())}.${f(3, this.getUTCMilliseconds())}`
}
}
module.exports = value => {
const date = new Time(value)
/* istanbul ignore if */
if (isNaN(date)) {
throw new TypeError('Invalid Datetime')
} else {
return date
}
}
/***/ }),
/***/ 98633:
/***/ ((module) => {
"use strict";
module.exports = (d, num) => {
num = String(num)
while (num.length < d) num = '0' + num
return num
}
/***/ }),
/***/ 98818:
/***/ ((module) => {
"use strict";
// One past the highest valid Unicode code point (0x10FFFF): a sentinel
// "character" that cannot occur in input, used to signal end-of-input to
// state functions (see Parser#finish).
const ParserEND = 0x110000
// Error raised for parser-internal failures; Parser#error stamps it with
// the current line/col/pos before it escapes.
class ParserError extends Error {
  /* istanbul ignore next */
  constructor (msg, filename, linenumber) {
    super('[ParserError] ' + msg, filename, linenumber)
    this.name = 'ParserError'
    this.code = 'ParserError'
    if (Error.captureStackTrace) Error.captureStackTrace(this, ParserError)
  }
}
// One frame of the parser's state stack: the active state function
// (`parser`), a scratch buffer of consumed characters, the value handed
// back by a returning sub-state, and slots states may use for partial
// results.
class State {
  constructor (parser) {
    this.parser = parser
    this.buf = ''
    this.returned = null
    this.result = null
    this.resultTable = null
    this.resultArr = null
  }
}
// Generic character-at-a-time parser driver. Concrete parsers subclass it
// and implement state functions; `call`/`return` maintain a stack of State
// frames so states can delegate to sub-states and receive their result.
// Subclasses must override parseStart (the initial state).
class Parser {
  constructor () {
    this.pos = 0
    this.col = 0
    this.line = 0
    this.obj = {}
    this.ctx = this.obj
    this.stack = []
    this._buf = ''
    this.char = null
    this.ii = 0
    this.state = new State(this.parseStart)
  }
  // Feed a chunk of input. A state function returning `false` means "do not
  // advance": the loop re-runs the (possibly new) state on the same char.
  parse (str) {
    /* istanbul ignore next */
    if (str.length === 0 || str.length == null) return
    this._buf = String(str)
    this.ii = -1
    this.char = -1
    let getNext
    while (getNext === false || this.nextChar()) {
      getNext = this.runOne()
    }
    this._buf = null
  }
  // Advance to the next character, maintaining line/col bookkeeping
  // (col resets after a \n). `char` holds the full code point at `ii`.
  nextChar () {
    if (this.char === 0x0A) {
      ++this.line
      this.col = -1
    }
    ++this.ii
    this.char = this._buf.codePointAt(this.ii)
    ++this.pos
    ++this.col
    return this.haveBuffer()
  }
  haveBuffer () {
    return this.ii < this._buf.length
  }
  // Invoke the current state function, passing it the last sub-state result.
  runOne () {
    return this.state.parser.call(this, this.state.returned)
  }
  // Signal end-of-input: run states with the END sentinel until the state
  // stops changing, then drop internal references and return the built object.
  finish () {
    this.char = ParserEND
    let last
    do {
      last = this.state.parser
      this.runOne()
    } while (this.state.parser !== last)
    this.ctx = null
    this.state = null
    this._buf = null
    return this.obj
  }
  // Replace the current state's function (transition without a new frame).
  next (fn) {
    /* istanbul ignore next */
    if (typeof fn !== 'function') throw new ParserError('Tried to set state to non-existent state: ' + JSON.stringify(fn))
    this.state.parser = fn
  }
  // Transition and immediately re-run on the current character.
  goto (fn) {
    this.next(fn)
    return this.runOne()
  }
  // Push a fresh frame for sub-state `fn`; optionally set the state the
  // current frame resumes in once the sub-state returns.
  call (fn, returnWith) {
    if (returnWith) this.next(returnWith)
    this.stack.push(this.state)
    this.state = new State(fn)
  }
  callNow (fn, returnWith) {
    this.call(fn, returnWith)
    return this.runOne()
  }
  // Pop back to the caller frame, handing it `value` (defaults to the
  // returning frame's accumulated buffer).
  return (value) {
    /* istanbul ignore next */
    if (this.stack.length === 0) throw this.error(new ParserError('Stack underflow'))
    if (value === undefined) value = this.state.buf
    this.state = this.stack.pop()
    this.state.returned = value
  }
  returnNow (value) {
    this.return(value)
    return this.runOne()
  }
  // Append the current input unit to the state buffer. Note this indexes
  // by UTF-16 unit, so astral code points are appended across iterations.
  consume () {
    /* istanbul ignore next */
    if (this.char === ParserEND) throw this.error(new ParserError('Unexpected end-of-buffer'))
    this.state.buf += this._buf[this.ii]
  }
  // Annotate an error with the current source position.
  error (err) {
    err.line = this.line
    err.col = this.col
    err.pos = this.pos
    return err
  }
  /* istanbul ignore next */
  parseStart () {
    throw new ParserError('Must declare a parseStart method')
  }
}
Parser.END = ParserEND
Parser.Error = ParserError
module.exports = Parser
/***/ }),
/***/ 8676:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
/* eslint-disable no-new-wrappers, no-eval, camelcase, operator-linebreak */
// Default export: a TOML parser class built over the generic Parser state
// machine (module 98818). makeParserClass is a hoisted function declared
// later in this module (beyond this excerpt) and is also exposed so a
// parser can be built over a custom base class.
module.exports = makeParserClass(__webpack_require__(98818))
module.exports.makeParserClass = makeParserClass
// Error type for TOML syntax/semantic failures. The fromTOML flag marks it
// as originating here; `wrapped` carries an underlying cause when the error
// was converted from another Error via TomlError.wrap.
class TomlError extends Error {
  constructor (msg) {
    super(msg)
    this.name = 'TomlError'
    /* istanbul ignore next */
    if (Error.captureStackTrace) Error.captureStackTrace(this, TomlError)
    this.fromTOML = true
    this.wrapped = null
  }
}
// Convert an arbitrary Error into a TomlError, preserving its message and
// code and keeping the original reachable via `wrapped`.
TomlError.wrap = err => {
  const converted = new TomlError(err.message)
  converted.code = err.code
  converted.wrapped = err
  return converted
}
module.exports.TomlError = TomlError
// Specialized datetime factories for the four TOML temporal types.
const createDateTime = __webpack_require__(76114)
const createDateTimeFloat = __webpack_require__(58904)
const createDate = __webpack_require__(98925)
const createTime = __webpack_require__(99439)
// Code-point constants used by the tokenizer, named after Unicode
// character aliases (APOS = apostrophe, LOWBAR = underscore,
// LCUB/RCUB = curly braces, LSQB/RSQB = square brackets, BSOL = backslash).
const CTRL_I = 0x09
const CTRL_J = 0x0A
const CTRL_M = 0x0D
const CTRL_CHAR_BOUNDARY = 0x1F // the last non-character in the latin1 region of unicode, except DEL
const CHAR_SP = 0x20
const CHAR_QUOT = 0x22
const CHAR_NUM = 0x23
const CHAR_APOS = 0x27
const CHAR_PLUS = 0x2B
const CHAR_COMMA = 0x2C
const CHAR_HYPHEN = 0x2D
const CHAR_PERIOD = 0x2E
const CHAR_0 = 0x30
const CHAR_1 = 0x31
const CHAR_7 = 0x37
const CHAR_9 = 0x39
const CHAR_COLON = 0x3A
const CHAR_EQUALS = 0x3D
const CHAR_A = 0x41
const CHAR_E = 0x45
const CHAR_F = 0x46
const CHAR_T = 0x54
const CHAR_U = 0x55
const CHAR_Z = 0x5A
const CHAR_LOWBAR = 0x5F
const CHAR_a = 0x61
const CHAR_b = 0x62
const CHAR_e = 0x65
const CHAR_f = 0x66
const CHAR_i = 0x69
const CHAR_l = 0x6C
const CHAR_n = 0x6E
const CHAR_o = 0x6F
const CHAR_r = 0x72
const CHAR_s = 0x73
const CHAR_t = 0x74
const CHAR_u = 0x75
const CHAR_x = 0x78
const CHAR_z = 0x7A
const CHAR_LCUB = 0x7B
const CHAR_RCUB = 0x7D
const CHAR_LSQB = 0x5B
const CHAR_BSOL = 0x5C
const CHAR_RSQB = 0x5D
const CHAR_DEL = 0x7F
// UTF-16 surrogate range; code points here are not valid characters.
const SURROGATE_FIRST = 0xD800
const SURROGATE_LAST = 0xDFFF
// Maps the code point of the letter after a backslash escape to the
// character it denotes (e.g. \n -> LF, \" -> double quote).
const escapes = {
  [CHAR_b]: '\u0008',
  [CHAR_t]: '\u0009',
  [CHAR_n]: '\u000A',
  [CHAR_f]: '\u000C',
  [CHAR_r]: '\u000D',
  [CHAR_QUOT]: '\u0022',
  [CHAR_BSOL]: '\u005C'
}
// True when cp is the code point of an ASCII decimal digit (0-9).
function isDigit (cp) {
return !(cp < CHAR_0 || cp > CHAR_9)
}
// True when cp is an ASCII hex digit (0-9, A-F, a-f).
function isHexit (cp) {
if (cp >= CHAR_0 && cp <= CHAR_9) return true
if (cp >= CHAR_A && cp <= CHAR_F) return true
return cp >= CHAR_a && cp <= CHAR_f
}
// True when cp is a binary digit ('0' or '1').
function isBit (cp) {
return cp === CHAR_0 || cp === CHAR_1
}
// True when cp is an octal digit (0-7).
function isOctit (cp) {
return cp >= CHAR_0 && cp <= CHAR_7
}
// Characters that may legally start a key: bare-key characters
// (letters, digits, underscore, hyphen) plus either quote character.
function isAlphaNumQuoteHyphen (cp) {
if (isAlphaNumHyphen(cp)) return true
return cp === CHAR_APOS || cp === CHAR_QUOT
}
// Characters allowed inside a bare key: ASCII letters, digits,
// underscore and hyphen.
function isAlphaNumHyphen (cp) {
if (cp >= CHAR_A && cp <= CHAR_Z) return true
if (cp >= CHAR_a && cp <= CHAR_z) return true
if (isDigit(cp)) return true
return cp === CHAR_LOWBAR || cp === CHAR_HYPHEN
}
// Branding symbols used to mark parsed containers/numbers, plus cached
// Object helpers used by hasKey.
const _type = Symbol('type')
const _declared = Symbol('declared')
const hasOwnProperty = Object.prototype.hasOwnProperty
const defineProperty = Object.defineProperty
// Reusable plain-data-property descriptor for neutralizing '__proto__'
// keys (see hasKey below).
const descriptor = {configurable: true, enumerable: true, writable: true, value: undefined}
// Prototype-pollution-safe "does obj own key". If the key is
// '__proto__' and not yet present, an own data property named
// '__proto__' is pre-created so that a later plain assignment writes a
// normal property instead of replacing the object's prototype.
function hasKey (obj, key) {
if (hasOwnProperty.call(obj, key)) return true
if (key === '__proto__') defineProperty(obj, '__proto__', descriptor)
return false
}
// Brand, factory and predicate for TOML inline tables ({...} values).
const INLINE_TABLE = Symbol('inline-table')
function InlineTable () {
const table = {}
Object.defineProperties(table, {
[_type]: {value: INLINE_TABLE}
})
return table
}
function isInlineTable (obj) {
if (obj === null) return false
if (typeof obj !== 'object') return false
return obj[_type] === INLINE_TABLE
}
// Brand, factory and predicate for standard TOML tables. The writable
// _declared flag records whether the table was created by an explicit
// [header] (true) or implicitly via a dotted key (false).
const TABLE = Symbol('table')
function Table () {
const table = {}
Object.defineProperties(table, {
[_type]: {value: TABLE},
[_declared]: {value: false, writable: true}
})
return table
}
function isTable (obj) {
if (obj === null) return false
if (typeof obj !== 'object') return false
return obj[_type] === TABLE
}
// Brand, factory and predicate for TOML inline arrays ([...] values).
// _contentType remembers the element type the array was created with.
const _contentType = Symbol('content-type')
const INLINE_LIST = Symbol('inline-list')
function InlineList (type) {
const list = []
Object.defineProperties(list, {
[_type]: {value: INLINE_LIST},
[_contentType]: {value: type}
})
return list
}
function isInlineList (obj) {
if (obj === null) return false
if (typeof obj !== 'object') return false
return obj[_type] === INLINE_LIST
}
// Brand, factory and predicate for arrays-of-tables ([[header]] lists).
const LIST = Symbol('list')
function List () {
const list = []
Object.defineProperties(list, {
[_type]: {value: LIST}
})
return list
}
function isList (obj) {
if (obj === null) return false
if (typeof obj !== 'object') return false
return obj[_type] === LIST
}
// Fetch util.inspect.custom in an eval, to let bundlers not slurp in a
// util proxy — the literal require() call is hidden from static analysis.
let _custom
try {
const utilInspect = eval("require('util').inspect")
_custom = utilInspect.custom
} catch (_) {
/* eval require not available in transpiled bundle */
}
/* istanbul ignore next */
// Fall back to the legacy 'inspect' string key when the symbol is absent.
const _inspect = _custom || 'inspect'
// Wrapper for TOML integers outside the safe Number range, backed by a
// 64-bit BigInt. It mirrors the boxed-Number values made by Integer():
// branded with the INTEGER type symbol, with isNaN() reporting whether
// the value could be represented, and valueOf() yielding the BigInt.
class BoxedBigInt {
constructor (value) {
try {
// Constrain to the 64-bit signed range required for TOML integers.
this.value = global.BigInt.asIntN(64, value)
} catch (_) {
/* istanbul ignore next */
this.value = null
}
Object.defineProperty(this, _type, {value: INTEGER})
}
isNaN () {
return this.value === null
}
/* istanbul ignore next */
toString () {
return String(this.value)
}
/* istanbul ignore next */
[_inspect] () {
// Fixed: the template previously ended with "]}" (stray brace),
// unlike the matching `[Integer: ...]` / `[Float: ...]` strings.
return `[BigInt: ${this.toString()}]`
}
valueOf () {
return this.value
}
}
const INTEGER = Symbol('integer')
// Box a TOML integer. Safe integers become branded Number wrapper
// objects carrying a custom isNaN and inspect; values outside the safe
// range are delegated to BoxedBigInt (when the runtime has BigInt) so
// 64-bit precision is preserved.
function Integer (value) {
let num = Number(value)
// -0 is a float thing, not an int thing
if (Object.is(num, -0)) num = 0
/* istanbul ignore else */
if (global.BigInt && !Number.isSafeInteger(num)) {
return new BoxedBigInt(value)
} else {
/* istanbul ignore next */
// new Number is intentional here: the wrapper object can carry the
// type brand and helper methods a primitive cannot.
return Object.defineProperties(new Number(num), {
isNaN: {value: function () { return isNaN(this) }},
[_type]: {value: INTEGER},
[_inspect]: {value: () => `[Integer: ${value}]`}
})
}
}
// True when obj is a value boxed by Integer()/BoxedBigInt.
function isInteger (obj) {
if (obj === null) return false
if (typeof obj !== 'object') return false
return obj[_type] === INTEGER
}
// Box a TOML float as a branded Number wrapper object with a custom
// inspect, plus the matching predicate.
const FLOAT = Symbol('float')
function Float (value) {
/* istanbul ignore next */
const boxed = new Number(value)
return Object.defineProperties(boxed, {
[_type]: {value: FLOAT},
[_inspect]: {value: () => `[Float: ${value}]`}
})
}
function isFloat (obj) {
if (obj === null) return false
if (typeof obj !== 'object') return false
return obj[_type] === FLOAT
}
// Report the TOML-level type name of a parsed value: one of the branded
// container/number kinds, 'datetime' for Date instances, or the plain
// typeof result for primitives.
function tomlType (value) {
if (typeof value !== 'object') return typeof value
/* istanbul ignore if */
if (value === null) return 'null'
if (value instanceof Date) return 'datetime'
/* istanbul ignore else */
if (_type in value) {
switch (value[_type]) {
case INLINE_TABLE: return 'inline-table'
case INLINE_LIST: return 'inline-list'
/* istanbul ignore next */
case TABLE: return 'table'
/* istanbul ignore next */
case LIST: return 'list'
case FLOAT: return 'float'
case INTEGER: return 'integer'
}
}
return 'object'
}
function makeParserClass (Parser) {
class TOMLParser extends Parser {
constructor () {
super()
this.ctx = this.obj = Table()
}
/* MATCH HELPER */
atEndOfWord () {
return this.char === CHAR_NUM || this.char === CTRL_I || this.char === CHAR_SP || this.atEndOfLine()
}
atEndOfLine () {
return this.char === Parser.END || this.char === CTRL_J || this.char === CTRL_M
}
// Top-level dispatch: at the start of input decide between EOF, a
// [table]/[[list]] header, a comment, skippable whitespace/newlines, or
// a `key = value` assignment.
parseStart () {
if (this.char === Parser.END) {
return null
} else if (this.char === CHAR_LSQB) {
return this.call(this.parseTableOrList)
} else if (this.char === CHAR_NUM) {
return this.call(this.parseComment)
} else if (this.char === CTRL_J || this.char === CHAR_SP || this.char === CTRL_I || this.char === CTRL_M) {
return null
} else if (isAlphaNumQuoteHyphen(this.char)) {
return this.callNow(this.parseAssignStatement)
} else {
// NOTE(review): this.char is a numeric code point (it is compared
// against numbers above), so this message prints a number, not the
// character itself — confirm that is intended.
throw this.error(new TomlError(`Unknown character "${this.char}"`))
}
}
// HELPER, this strips any whitespace and comments to the end of the line
// then RETURNS. Last state in a production.
parseWhitespaceToEOL () {
if (this.char === CHAR_SP || this.char === CTRL_I || this.char === CTRL_M) {
return null
} else if (this.char === CHAR_NUM) {
return this.goto(this.parseComment)
} else if (this.char === Parser.END || this.char === CTRL_J) {
return this.return()
} else {
throw this.error(new TomlError('Unexpected character, expected only whitespace or comments till end of line'))
}
}
/* ASSIGNMENT: key = value */
parseAssignStatement () {
return this.callNow(this.parseAssign, this.recordAssignStatement)
}
// Commit a completed `key = value` statement into the current table,
// walking/creating intermediate tables for dotted keys and rejecting
// redefinition of an existing key.
recordAssignStatement (kv) {
let target = this.ctx
let finalKey = kv.key.pop()
for (let kw of kv.key) {
// Intermediate segments may pass through implicitly-created tables,
// but not explicitly declared ([header]) tables or non-table values.
if (hasKey(target, kw) && (!isTable(target[kw]) || target[kw][_declared])) {
throw this.error(new TomlError("Can't redefine existing key"))
}
target = target[kw] = target[kw] || Table()
}
if (hasKey(target, finalKey)) {
throw this.error(new TomlError("Can't redefine existing key"))
}
// unbox our numbers
if (isInteger(kv.value) || isFloat(kv.value)) {
target[finalKey] = kv.value.valueOf()
} else {
target[finalKey] = kv.value
}
return this.goto(this.parseWhitespaceToEOL)
}
/* ASSSIGNMENT expression, key = value possibly inside an inline table */
parseAssign () {
return this.callNow(this.parseKeyword, this.recordAssignKeyword)
}
recordAssignKeyword (key) {
if (this.state.resultTable) {
this.state.resultTable.push(key)
} else {
this.state.resultTable = [key]
}
return this.goto(this.parseAssignKeywordPreDot)
}
parseAssignKeywordPreDot () {
if (this.char === CHAR_PERIOD) {
return this.next(this.parseAssignKeywordPostDot)
} else if (this.char !== CHAR_SP && this.char !== CTRL_I) {
return this.goto(this.parseAssignEqual)
}
}
parseAssignKeywordPostDot () {
if (this.char !== CHAR_SP && this.char !== CTRL_I) {
return this.callNow(this.parseKeyword, this.recordAssignKeyword)
}
}
parseAssignEqual () {
if (this.char === CHAR_EQUALS) {
return this.next(this.parseAssignPreValue)
} else {
throw this.error(new TomlError('Invalid character, expected "="'))
}
}
parseAssignPreValue () {
if (this.char === CHAR_SP || this.char === CTRL_I) {
return null
} else {
return this.callNow(this.parseValue, this.recordAssignValue)
}
}
recordAssignValue (value) {
return this.returnNow({key: this.state.resultTable, value: value})
}
/* COMMENTS: #...eol */
parseComment () {
do {
if (this.char === Parser.END || this.char === CTRL_J) {
return this.return()
}
} while (this.nextChar())
}
/* TABLES AND LISTS, [foo] and [[foo]] */
parseTableOrList () {
if (this.char === CHAR_LSQB) {
this.next(this.parseList)
} else {
return this.goto(this.parseTable)
}
}
/* TABLE [foo.bar.baz] */
parseTable () {
this.ctx = this.obj
return this.goto(this.parseTableNext)
}
parseTableNext () {
if (this.char === CHAR_SP || this.char === CTRL_I) {
return null
} else {
return this.callNow(this.parseKeyword, this.parseTableMore)
}
}
parseTableMore (keyword) {
if (this.char === CHAR_SP || this.char === CTRL_I) {
return null
} else if (this.char === CHAR_RSQB) {
if (hasKey(this.ctx, keyword) && (!isTable(this.ctx[keyword]) || this.ctx[keyword][_declared])) {
throw this.error(new TomlError("Can't redefine existing key"))
} else {
this.ctx = this.ctx[keyword] = this.ctx[keyword] || Table()
this.ctx[_declared] = true
}
return this.next(this.parseWhitespaceToEOL)
} else if (this.char === CHAR_PERIOD) {
if (!hasKey(this.ctx, keyword)) {
this.ctx = this.ctx[keyword] = Table()
} else if (isTable(this.ctx[keyword])) {
this.ctx = this.ctx[keyword]
} else if (isList(this.ctx[keyword])) {
this.ctx = this.ctx[keyword][this.ctx[keyword].length - 1]
} else {
throw this.error(new TomlError("Can't redefine existing key"))
}
return this.next(this.parseTableNext)
} else {
throw this.error(new TomlError('Unexpected character, expected whitespace, . or ]'))
}
}
/* LIST [[a.b.c]] */
parseList () {
this.ctx = this.obj
return this.goto(this.parseListNext)
}
parseListNext () {
if (this.char === CHAR_SP || this.char === CTRL_I) {
return null
} else {
return this.callNow(this.parseKeyword, this.parseListMore)
}
}
parseListMore (keyword) {
if (this.char === CHAR_SP || this.char === CTRL_I) {
return null
} else if (this.char === CHAR_RSQB) {
if (!hasKey(this.ctx, keyword)) {
this.ctx[keyword] = List()
}
if (isInlineList(this.ctx[keyword])) {
throw this.error(new TomlError("Can't extend an inline array"))
} else if (isList(this.ctx[keyword])) {
const next = Table()
this.ctx[keyword].push(next)
this.ctx = next
} else {
throw this.error(new TomlError("Can't redefine an existing key"))
}
return this.next(this.parseListEnd)
} else if (this.char === CHAR_PERIOD) {
if (!hasKey(this.ctx, keyword)) {
this.ctx = this.ctx[keyword] = Table()
} else if (isInlineList(this.ctx[keyword])) {
throw this.error(new TomlError("Can't extend an inline array"))
} else if (isInlineTable(this.ctx[keyword])) {
throw this.error(new TomlError("Can't extend an inline table"))
} else if (isList(this.ctx[keyword])) {
this.ctx = this.ctx[keyword][this.ctx[keyword].length - 1]
} else if (isTable(this.ctx[keyword])) {
this.ctx = this.ctx[keyword]
} else {
throw this.error(new TomlError("Can't redefine an existing key"))
}
return this.next(this.parseListNext)
} else {
throw this.error(new TomlError('Unexpected character, expected whitespace, . or ]'))
}
}
parseListEnd (keyword) {
if (this.char === CHAR_RSQB) {
return this.next(this.parseWhitespaceToEOL)
} else {
throw this.error(new TomlError('Unexpected character, expected whitespace, . or ]'))
}
}
/* VALUE string, number, boolean, inline list, inline object */
// Dispatch on the first character of a value: quoted string, signed
// number, inf/nan, digit (number or datetime), boolean, inline array or
// inline table.
parseValue () {
if (this.char === Parser.END) {
throw this.error(new TomlError('Key without value'))
} else if (this.char === CHAR_QUOT) {
return this.next(this.parseDoubleString)
// NOTE: the bare `if` below behaves as `else if` — every earlier
// branch returns or throws.
} if (this.char === CHAR_APOS) {
return this.next(this.parseSingleString)
} else if (this.char === CHAR_HYPHEN || this.char === CHAR_PLUS) {
return this.goto(this.parseNumberSign)
} else if (this.char === CHAR_i) {
return this.next(this.parseInf)
} else if (this.char === CHAR_n) {
return this.next(this.parseNan)
} else if (isDigit(this.char)) {
return this.goto(this.parseNumberOrDateTime)
} else if (this.char === CHAR_t || this.char === CHAR_f) {
return this.goto(this.parseBoolean)
} else if (this.char === CHAR_LSQB) {
return this.call(this.parseInlineList, this.recordValue)
} else if (this.char === CHAR_LCUB) {
return this.call(this.parseInlineTable, this.recordValue)
} else {
throw this.error(new TomlError('Unexpected character, expecting string, number, datetime, boolean, inline array or inline table'))
}
}
recordValue (value) {
return this.returnNow(value)
}
parseInf () {
if (this.char === CHAR_n) {
return this.next(this.parseInf2)
} else {
throw this.error(new TomlError('Unexpected character, expected "inf", "+inf" or "-inf"'))
}
}
parseInf2 () {
if (this.char === CHAR_f) {
if (this.state.buf === '-') {
return this.return(-Infinity)
} else {
return this.return(Infinity)
}
} else {
throw this.error(new TomlError('Unexpected character, expected "inf", "+inf" or "-inf"'))
}
}
parseNan () {
if (this.char === CHAR_a) {
return this.next(this.parseNan2)
} else {
throw this.error(new TomlError('Unexpected character, expected "nan"'))
}
}
parseNan2 () {
if (this.char === CHAR_n) {
return this.return(NaN)
} else {
throw this.error(new TomlError('Unexpected character, expected "nan"'))
}
}
/* KEYS, barewords or basic, literal, or dotted */
parseKeyword () {
if (this.char === CHAR_QUOT) {
return this.next(this.parseBasicString)
} else if (this.char === CHAR_APOS) {
return this.next(this.parseLiteralString)
} else {
return this.goto(this.parseBareKey)
}
}
/* KEYS: barewords */
parseBareKey () {
do {
if (this.char === Parser.END) {
throw this.error(new TomlError('Key ended without value'))
} else if (isAlphaNumHyphen(this.char)) {
this.consume()
} else if (this.state.buf.length === 0) {
throw this.error(new TomlError('Empty bare keys are not allowed'))
} else {
return this.returnNow()
}
} while (this.nextChar())
}
/* STRINGS, single quoted (literal) */
parseSingleString () {
if (this.char === CHAR_APOS) {
return this.next(this.parseLiteralMultiStringMaybe)
} else {
return this.goto(this.parseLiteralString)
}
}
parseLiteralString () {
do {
if (this.char === CHAR_APOS) {
return this.return()
} else if (this.atEndOfLine()) {
throw this.error(new TomlError('Unterminated string'))
} else if (this.char === CHAR_DEL || (this.char <= CTRL_CHAR_BOUNDARY && this.char !== CTRL_I)) {
throw this.errorControlCharInString()
} else {
this.consume()
}
} while (this.nextChar())
}
parseLiteralMultiStringMaybe () {
if (this.char === CHAR_APOS) {
return this.next(this.parseLiteralMultiString)
} else {
return this.returnNow()
}
}
parseLiteralMultiString () {
if (this.char === CTRL_M) {
return null
} else if (this.char === CTRL_J) {
return this.next(this.parseLiteralMultiStringContent)
} else {
return this.goto(this.parseLiteralMultiStringContent)
}
}
parseLiteralMultiStringContent () {
do {
if (this.char === CHAR_APOS) {
return this.next(this.parseLiteralMultiEnd)
} else if (this.char === Parser.END) {
throw this.error(new TomlError('Unterminated multi-line string'))
} else if (this.char === CHAR_DEL || (this.char <= CTRL_CHAR_BOUNDARY && this.char !== CTRL_I && this.char !== CTRL_J && this.char !== CTRL_M)) {
throw this.errorControlCharInString()
} else {
this.consume()
}
} while (this.nextChar())
}
parseLiteralMultiEnd () {
if (this.char === CHAR_APOS) {
return this.next(this.parseLiteralMultiEnd2)
} else {
this.state.buf += "'"
return this.goto(this.parseLiteralMultiStringContent)
}
}
parseLiteralMultiEnd2 () {
if (this.char === CHAR_APOS) {
return this.return()
} else {
this.state.buf += "''"
return this.goto(this.parseLiteralMultiStringContent)
}
}
/* STRINGS double quoted */
parseDoubleString () {
if (this.char === CHAR_QUOT) {
return this.next(this.parseMultiStringMaybe)
} else {
return this.goto(this.parseBasicString)
}
}
parseBasicString () {
do {
if (this.char === CHAR_BSOL) {
return this.call(this.parseEscape, this.recordEscapeReplacement)
} else if (this.char === CHAR_QUOT) {
return this.return()
} else if (this.atEndOfLine()) {
throw this.error(new TomlError('Unterminated string'))
} else if (this.char === CHAR_DEL || (this.char <= CTRL_CHAR_BOUNDARY && this.char !== CTRL_I)) {
throw this.errorControlCharInString()
} else {
this.consume()
}
} while (this.nextChar())
}
recordEscapeReplacement (replacement) {
this.state.buf += replacement
return this.goto(this.parseBasicString)
}
parseMultiStringMaybe () {
if (this.char === CHAR_QUOT) {
return this.next(this.parseMultiString)
} else {
return this.returnNow()
}
}
parseMultiString () {
if (this.char === CTRL_M) {
return null
} else if (this.char === CTRL_J) {
return this.next(this.parseMultiStringContent)
} else {
return this.goto(this.parseMultiStringContent)
}
}
parseMultiStringContent () {
do {
if (this.char === CHAR_BSOL) {
return this.call(this.parseMultiEscape, this.recordMultiEscapeReplacement)
} else if (this.char === CHAR_QUOT) {
return this.next(this.parseMultiEnd)
} else if (this.char === Parser.END) {
throw this.error(new TomlError('Unterminated multi-line string'))
} else if (this.char === CHAR_DEL || (this.char <= CTRL_CHAR_BOUNDARY && this.char !== CTRL_I && this.char !== CTRL_J && this.char !== CTRL_M)) {
throw this.errorControlCharInString()
} else {
this.consume()
}
} while (this.nextChar())
}
errorControlCharInString () {
let displayCode = '\\u00'
if (this.char < 16) {
displayCode += '0'
}
displayCode += this.char.toString(16)
return this.error(new TomlError(`Control characters (codes < 0x1f and 0x7f) are not allowed in strings, use ${displayCode} instead`))
}
recordMultiEscapeReplacement (replacement) {
this.state.buf += replacement
return this.goto(this.parseMultiStringContent)
}
parseMultiEnd () {
if (this.char === CHAR_QUOT) {
return this.next(this.parseMultiEnd2)
} else {
this.state.buf += '"'
return this.goto(this.parseMultiStringContent)
}
}
parseMultiEnd2 () {
if (this.char === CHAR_QUOT) {
return this.return()
} else {
this.state.buf += '""'
return this.goto(this.parseMultiStringContent)
}
}
parseMultiEscape () {
if (this.char === CTRL_M || this.char === CTRL_J) {
return this.next(this.parseMultiTrim)
} else if (this.char === CHAR_SP || this.char === CTRL_I) {
return this.next(this.parsePreMultiTrim)
} else {
return this.goto(this.parseEscape)
}
}
parsePreMultiTrim () {
if (this.char === CHAR_SP || this.char === CTRL_I) {
return null
} else if (this.char === CTRL_M || this.char === CTRL_J) {
return this.next(this.parseMultiTrim)
} else {
throw this.error(new TomlError("Can't escape whitespace"))
}
}
parseMultiTrim () {
// explicitly whitespace here, END should follow the same path as chars
if (this.char === CTRL_J || this.char === CHAR_SP || this.char === CTRL_I || this.char === CTRL_M) {
return null
} else {
return this.returnNow()
}
}
parseEscape () {
if (this.char in escapes) {
return this.return(escapes[this.char])
} else if (this.char === CHAR_u) {
return this.call(this.parseSmallUnicode, this.parseUnicodeReturn)
} else if (this.char === CHAR_U) {
return this.call(this.parseLargeUnicode, this.parseUnicodeReturn)
} else {
throw this.error(new TomlError('Unknown escape character: ' + this.char))
}
}
parseUnicodeReturn (char) {
try {
const codePoint = parseInt(char, 16)
if (codePoint >= SURROGATE_FIRST && codePoint <= SURROGATE_LAST) {
throw this.error(new TomlError('Invalid unicode, character in range 0xD800 - 0xDFFF is reserved'))
}
return this.returnNow(String.fromCodePoint(codePoint))
} catch (err) {
throw this.error(TomlError.wrap(err))
}
}
parseSmallUnicode () {
if (!isHexit(this.char)) {
throw this.error(new TomlError('Invalid character in unicode sequence, expected hex'))
} else {
this.consume()
if (this.state.buf.length >= 4) return this.return()
}
}
parseLargeUnicode () {
if (!isHexit(this.char)) {
throw this.error(new TomlError('Invalid character in unicode sequence, expected hex'))
} else {
this.consume()
if (this.state.buf.length >= 8) return this.return()
}
}
/* NUMBERS */
parseNumberSign () {
this.consume()
return this.next(this.parseMaybeSignedInfOrNan)
}
parseMaybeSignedInfOrNan () {
if (this.char === CHAR_i) {
return this.next(this.parseInf)
} else if (this.char === CHAR_n) {
return this.next(this.parseNan)
} else {
return this.callNow(this.parseNoUnder, this.parseNumberIntegerStart)
}
}
parseNumberIntegerStart () {
if (this.char === CHAR_0) {
this.consume()
return this.next(this.parseNumberIntegerExponentOrDecimal)
} else {
return this.goto(this.parseNumberInteger)
}
}
parseNumberIntegerExponentOrDecimal () {
if (this.char === CHAR_PERIOD) {
this.consume()
return this.call(this.parseNoUnder, this.parseNumberFloat)
} else if (this.char === CHAR_E || this.char === CHAR_e) {
this.consume()
return this.next(this.parseNumberExponentSign)
} else {
return this.returnNow(Integer(this.state.buf))
}
}
parseNumberInteger () {
if (isDigit(this.char)) {
this.consume()
} else if (this.char === CHAR_LOWBAR) {
return this.call(this.parseNoUnder)
} else if (this.char === CHAR_E || this.char === CHAR_e) {
this.consume()
return this.next(this.parseNumberExponentSign)
} else if (this.char === CHAR_PERIOD) {
this.consume()
return this.call(this.parseNoUnder, this.parseNumberFloat)
} else {
const result = Integer(this.state.buf)
/* istanbul ignore if */
if (result.isNaN()) {
throw this.error(new TomlError('Invalid number'))
} else {
return this.returnNow(result)
}
}
}
parseNoUnder () {
if (this.char === CHAR_LOWBAR || this.char === CHAR_PERIOD || this.char === CHAR_E || this.char === CHAR_e) {
throw this.error(new TomlError('Unexpected character, expected digit'))
} else if (this.atEndOfWord()) {
throw this.error(new TomlError('Incomplete number'))
}
return this.returnNow()
}
parseNoUnderHexOctBinLiteral () {
if (this.char === CHAR_LOWBAR || this.char === CHAR_PERIOD) {
throw this.error(new TomlError('Unexpected character, expected digit'))
} else if (this.atEndOfWord()) {
throw this.error(new TomlError('Incomplete number'))
}
return this.returnNow()
}
parseNumberFloat () {
if (this.char === CHAR_LOWBAR) {
return this.call(this.parseNoUnder, this.parseNumberFloat)
} else if (isDigit(this.char)) {
this.consume()
} else if (this.char === CHAR_E || this.char === CHAR_e) {
this.consume()
return this.next(this.parseNumberExponentSign)
} else {
return this.returnNow(Float(this.state.buf))
}
}
parseNumberExponentSign () {
if (isDigit(this.char)) {
return this.goto(this.parseNumberExponent)
} else if (this.char === CHAR_HYPHEN || this.char === CHAR_PLUS) {
this.consume()
this.call(this.parseNoUnder, this.parseNumberExponent)
} else {
throw this.error(new TomlError('Unexpected character, expected -, + or digit'))
}
}
parseNumberExponent () {
if (isDigit(this.char)) {
this.consume()
} else if (this.char === CHAR_LOWBAR) {
return this.call(this.parseNoUnder)
} else {
return this.returnNow(Float(this.state.buf))
}
}
/* NUMBERS or DATETIMES */
parseNumberOrDateTime () {
if (this.char === CHAR_0) {
this.consume()
return this.next(this.parseNumberBaseOrDateTime)
} else {
return this.goto(this.parseNumberOrDateTimeOnly)
}
}
parseNumberOrDateTimeOnly () {
// note, if two zeros are in a row then it MUST be a date
if (this.char === CHAR_LOWBAR) {
return this.call(this.parseNoUnder, this.parseNumberInteger)
} else if (isDigit(this.char)) {
this.consume()
if (this.state.buf.length > 4) this.next(this.parseNumberInteger)
} else if (this.char === CHAR_E || this.char === CHAR_e) {
this.consume()
return this.next(this.parseNumberExponentSign)
} else if (this.char === CHAR_PERIOD) {
this.consume()
return this.call(this.parseNoUnder, this.parseNumberFloat)
} else if (this.char === CHAR_HYPHEN) {
return this.goto(this.parseDateTime)
} else if (this.char === CHAR_COLON) {
return this.goto(this.parseOnlyTimeHour)
} else {
return this.returnNow(Integer(this.state.buf))
}
}
parseDateTimeOnly () {
if (this.state.buf.length < 4) {
if (isDigit(this.char)) {
return this.consume()
} else if (this.char === CHAR_COLON) {
return this.goto(this.parseOnlyTimeHour)
} else {
throw this.error(new TomlError('Expected digit while parsing year part of a date'))
}
} else {
if (this.char === CHAR_HYPHEN) {
return this.goto(this.parseDateTime)
} else {
throw this.error(new TomlError('Expected hyphen (-) while parsing year part of date'))
}
}
}
parseNumberBaseOrDateTime () {
if (this.char === CHAR_b) {
this.consume()
return this.call(this.parseNoUnderHexOctBinLiteral, this.parseIntegerBin)
} else if (this.char === CHAR_o) {
this.consume()
return this.call(this.parseNoUnderHexOctBinLiteral, this.parseIntegerOct)
} else if (this.char === CHAR_x) {
this.consume()
return this.call(this.parseNoUnderHexOctBinLiteral, this.parseIntegerHex)
} else if (this.char === CHAR_PERIOD) {
return this.goto(this.parseNumberInteger)
} else if (isDigit(this.char)) {
return this.goto(this.parseDateTimeOnly)
} else {
return this.returnNow(Integer(this.state.buf))
}
}
parseIntegerHex () {
if (isHexit(this.char)) {
this.consume()
} else if (this.char === CHAR_LOWBAR) {
return this.call(this.parseNoUnderHexOctBinLiteral)
} else {
const result = Integer(this.state.buf)
/* istanbul ignore if */
if (result.isNaN()) {
throw this.error(new TomlError('Invalid number'))
} else {
return this.returnNow(result)
}
}
}
parseIntegerOct () {
if (isOctit(this.char)) {
this.consume()
} else if (this.char === CHAR_LOWBAR) {
return this.call(this.parseNoUnderHexOctBinLiteral)
} else {
const result = Integer(this.state.buf)
/* istanbul ignore if */
if (result.isNaN()) {
throw this.error(new TomlError('Invalid number'))
} else {
return this.returnNow(result)
}
}
}
parseIntegerBin () {
if (isBit(this.char)) {
this.consume()
} else if (this.char === CHAR_LOWBAR) {
return this.call(this.parseNoUnderHexOctBinLiteral)
} else {
const result = Integer(this.state.buf)
/* istanbul ignore if */
if (result.isNaN()) {
throw this.error(new TomlError('Invalid number'))
} else {
return this.returnNow(result)
}
}
}
/* DATETIME */
parseDateTime () {
// we enter here having just consumed the year and about to consume the hyphen
if (this.state.buf.length < 4) {
throw this.error(new TomlError('Years less than 1000 must be zero padded to four characters'))
}
this.state.result = this.state.buf
this.state.buf = ''
return this.next(this.parseDateMonth)
}
parseDateMonth () {
if (this.char === CHAR_HYPHEN) {
if (this.state.buf.length < 2) {
throw this.error(new TomlError('Months less than 10 must be zero padded to two characters'))
}
this.state.result += '-' + this.state.buf
this.state.buf = ''
return this.next(this.parseDateDay)
} else if (isDigit(this.char)) {
this.consume()
} else {
throw this.error(new TomlError('Incomplete datetime'))
}
}
parseDateDay () {
if (this.char === CHAR_T || this.char === CHAR_SP) {
if (this.state.buf.length < 2) {
throw this.error(new TomlError('Days less than 10 must be zero padded to two characters'))
}
this.state.result += '-' + this.state.buf
this.state.buf = ''
return this.next(this.parseStartTimeHour)
} else if (this.atEndOfWord()) {
return this.returnNow(createDate(this.state.result + '-' + this.state.buf))
} else if (isDigit(this.char)) {
this.consume()
} else {
throw this.error(new TomlError('Incomplete datetime'))
}
}
parseStartTimeHour () {
if (this.atEndOfWord()) {
return this.returnNow(createDate(this.state.result))
} else {
return this.goto(this.parseTimeHour)
}
}
parseTimeHour () {
if (this.char === CHAR_COLON) {
if (this.state.buf.length < 2) {
throw this.error(new TomlError('Hours less than 10 must be zero padded to two characters'))
}
this.state.result += 'T' + this.state.buf
this.state.buf = ''
return this.next(this.parseTimeMin)
} else if (isDigit(this.char)) {
this.consume()
} else {
throw this.error(new TomlError('Incomplete datetime'))
}
}
parseTimeMin () {
if (this.state.buf.length < 2 && isDigit(this.char)) {
this.consume()
} else if (this.state.buf.length === 2 && this.char === CHAR_COLON) {
this.state.result += ':' + this.state.buf
this.state.buf = ''
return this.next(this.parseTimeSec)
} else {
throw this.error(new TomlError('Incomplete datetime'))
}
}
parseTimeSec () {
if (isDigit(this.char)) {
this.consume()
if (this.state.buf.length === 2) {
this.state.result += ':' + this.state.buf
this.state.buf = ''
return this.next(this.parseTimeZoneOrFraction)
}
} else {
throw this.error(new TomlError('Incomplete datetime'))
}
}
parseOnlyTimeHour () {
/* istanbul ignore else */
if (this.char === CHAR_COLON) {
if (this.state.buf.length < 2) {
throw this.error(new TomlError('Hours less than 10 must be zero padded to two characters'))
}
this.state.result = this.state.buf
this.state.buf = ''
return this.next(this.parseOnlyTimeMin)
} else {
throw this.error(new TomlError('Incomplete time'))
}
}
parseOnlyTimeMin () {
if (this.state.buf.length < 2 && isDigit(this.char)) {
this.consume()
} else if (this.state.buf.length === 2 && this.char === CHAR_COLON) {
this.state.result += ':' + this.state.buf
this.state.buf = ''
return this.next(this.parseOnlyTimeSec)
} else {
throw this.error(new TomlError('Incomplete time'))
}
}
parseOnlyTimeSec () {
if (isDigit(this.char)) {
this.consume()
if (this.state.buf.length === 2) {
return this.next(this.parseOnlyTimeFractionMaybe)
}
} else {
throw this.error(new TomlError('Incomplete time'))
}
}
parseOnlyTimeFractionMaybe () {
this.state.result += ':' + this.state.buf
if (this.char === CHAR_PERIOD) {
this.state.buf = ''
this.next(this.parseOnlyTimeFraction)
} else {
return this.return(createTime(this.state.result))
}
}
parseOnlyTimeFraction () {
if (isDigit(this.char)) {
this.consume()
} else if (this.atEndOfWord()) {
if (this.state.buf.length === 0) throw this.error(new TomlError('Expected digit in milliseconds'))
return this.returnNow(createTime(this.state.result + '.' + this.state.buf))
} else {
throw this.error(new TomlError('Unexpected character in datetime, expected period (.), minus (-), plus (+) or Z'))
}
}
parseTimeZoneOrFraction () {
if (this.char === CHAR_PERIOD) {
this.consume()
this.next(this.parseDateTimeFraction)
} else if (this.char === CHAR_HYPHEN || this.char === CHAR_PLUS) {
this.consume()
this.next(this.parseTimeZoneHour)
} else if (this.char === CHAR_Z) {
this.consume()
return this.return(createDateTime(this.state.result + this.state.buf))
} else if (this.atEndOfWord()) {
return this.returnNow(createDateTimeFloat(this.state.result + this.state.buf))
} else {
throw this.error(new TomlError('Unexpected character in datetime, expected period (.), minus (-), plus (+) or Z'))
}
}
// Fractional seconds of a full datetime; afterwards the same zone choices
// as parseTimeZoneOrFraction apply.
parseDateTimeFraction () {
if (isDigit(this.char)) {
this.consume()
} else if (this.state.buf.length === 1) {
// buf holds only the period at this point: the fraction has no digits.
throw this.error(new TomlError('Expected digit in milliseconds'))
} else if (this.char === CHAR_HYPHEN || this.char === CHAR_PLUS) {
this.consume()
this.next(this.parseTimeZoneHour)
} else if (this.char === CHAR_Z) {
this.consume()
return this.return(createDateTime(this.state.result + this.state.buf))
} else if (this.atEndOfWord()) {
return this.returnNow(createDateTimeFloat(this.state.result + this.state.buf))
} else {
throw this.error(new TomlError('Unexpected character in datetime, expected period (.), minus (-), plus (+) or Z'))
}
}
// Two-digit hour of a numeric timezone offset (after the sign).
parseTimeZoneHour () {
if (isDigit(this.char)) {
this.consume()
// FIXME: No more regexps
// Advance once exactly two digits have been consumed.
if (/\d\d$/.test(this.state.buf)) return this.next(this.parseTimeZoneSep)
} else {
throw this.error(new TomlError('Unexpected character in datetime, expected digit'))
}
}
// The colon between offset hour and minute.
parseTimeZoneSep () {
if (this.char === CHAR_COLON) {
this.consume()
this.next(this.parseTimeZoneMin)
} else {
throw this.error(new TomlError('Unexpected character in datetime, expected colon'))
}
}
// Two-digit minute of the offset; completes the datetime value.
parseTimeZoneMin () {
if (isDigit(this.char)) {
this.consume()
if (/\d\d$/.test(this.state.buf)) return this.return(createDateTime(this.state.result + this.state.buf))
} else {
throw this.error(new TomlError('Unexpected character in datetime, expected digit'))
}
}
/* BOOLEAN */
// Booleans are validated one state per character: parseBoolean dispatches
// on 't'/'f', then parseTrue_r..e / parseFalse_a..e check each letter.
parseBoolean () {
/* istanbul ignore else */
if (this.char === CHAR_t) {
this.consume()
return this.next(this.parseTrue_r)
} else if (this.char === CHAR_f) {
this.consume()
return this.next(this.parseFalse_a)
}
}
// "t" seen; expect "r".
parseTrue_r () {
if (this.char === CHAR_r) {
this.consume()
return this.next(this.parseTrue_u)
} else {
throw this.error(new TomlError('Invalid boolean, expected true or false'))
}
}
// "tr" seen; expect "u".
parseTrue_u () {
if (this.char === CHAR_u) {
this.consume()
return this.next(this.parseTrue_e)
} else {
throw this.error(new TomlError('Invalid boolean, expected true or false'))
}
}
// "tru" seen; expect "e" and produce true (final char not consumed here;
// this.return hands it back to the enclosing state).
parseTrue_e () {
if (this.char === CHAR_e) {
return this.return(true)
} else {
throw this.error(new TomlError('Invalid boolean, expected true or false'))
}
}
// "f" seen; expect "a".
parseFalse_a () {
if (this.char === CHAR_a) {
this.consume()
return this.next(this.parseFalse_l)
} else {
throw this.error(new TomlError('Invalid boolean, expected true or false'))
}
}
// "fa" seen; expect "l".
parseFalse_l () {
if (this.char === CHAR_l) {
this.consume()
return this.next(this.parseFalse_s)
} else {
throw this.error(new TomlError('Invalid boolean, expected true or false'))
}
}
// "fal" seen; expect "s".
parseFalse_s () {
if (this.char === CHAR_s) {
this.consume()
return this.next(this.parseFalse_e)
} else {
throw this.error(new TomlError('Invalid boolean, expected true or false'))
}
}
// "fals" seen; expect "e" and produce false.
parseFalse_e () {
if (this.char === CHAR_e) {
return this.return(false)
} else {
throw this.error(new TomlError('Invalid boolean, expected true or false'))
}
}
/* INLINE LISTS */
// Inside [ ... ]: skip whitespace/newlines, allow comments, close on ],
// otherwise parse a value and record it.
parseInlineList () {
if (this.char === CHAR_SP || this.char === CTRL_I || this.char === CTRL_M || this.char === CTRL_J) {
return null
} else if (this.char === Parser.END) {
throw this.error(new TomlError('Unterminated inline array'))
} else if (this.char === CHAR_NUM) {
return this.call(this.parseComment)
} else if (this.char === CHAR_RSQB) {
// Empty list if no value was ever recorded.
return this.return(this.state.resultArr || InlineList())
} else {
return this.callNow(this.parseValue, this.recordInlineListValue)
}
}
// Append a parsed value, enforcing the homogeneous-array rule.
recordInlineListValue (value) {
if (this.state.resultArr) {
const listType = this.state.resultArr[_contentType]
const valueType = tomlType(value)
// All elements of an inline array must share a single type.
if (listType !== valueType) {
throw this.error(new TomlError(`Inline lists must be a single type, not a mix of ${listType} and ${valueType}`))
}
} else {
// The first element fixes the list's content type.
this.state.resultArr = InlineList(tomlType(value))
}
if (isFloat(value) || isInteger(value)) {
// unbox now that we've verified they're ok
this.state.resultArr.push(value.valueOf())
} else {
this.state.resultArr.push(value)
}
return this.goto(this.parseInlineListNext)
}
// After a value: whitespace, a comment, a comma (next value) or ].
parseInlineListNext () {
if (this.char === CHAR_SP || this.char === CTRL_I || this.char === CTRL_M || this.char === CTRL_J) {
return null
} else if (this.char === CHAR_NUM) {
return this.call(this.parseComment)
} else if (this.char === CHAR_COMMA) {
return this.next(this.parseInlineList)
} else if (this.char === CHAR_RSQB) {
// goto (not next): let parseInlineList see the close bracket itself.
return this.goto(this.parseInlineList)
} else {
throw this.error(new TomlError('Invalid character, expected whitespace, comma (,) or close bracket (])'))
}
}
/* INLINE TABLE */
parseInlineTable () {
if (this.char === CHAR_SP || this.char === CTRL_I) {
return null
} else if (this.char === Parser.END || this.char === CHAR_NUM || this.char === CTRL_J || this.char === CTRL_M) {
throw this.error(new TomlError('Unterminated inline array'))
} else if (this.char === CHAR_RCUB) {
return this.return(this.state.resultTable || InlineTable())
} else {
if (!this.state.resultTable) this.state.resultTable = InlineTable()
return this.callNow(this.parseAssign, this.recordInlineTableValue)
}
}
// Merge one (possibly dotted) key/value pair into the inline table,
// rejecting redefinition of already-declared keys.
recordInlineTableValue (kv) {
let target = this.state.resultTable
let finalKey = kv.key.pop()
// Walk/create intermediate tables for each dotted-key segment.
for (let kw of kv.key) {
if (hasKey(target, kw) && (!isTable(target[kw]) || target[kw][_declared])) {
throw this.error(new TomlError("Can't redefine existing key"))
}
target = target[kw] = target[kw] || Table()
}
if (hasKey(target, finalKey)) {
throw this.error(new TomlError("Can't redefine existing key"))
}
// Unbox wrapped numbers; other values are stored as-is.
if (isInteger(kv.value) || isFloat(kv.value)) {
target[finalKey] = kv.value.valueOf()
} else {
target[finalKey] = kv.value
}
return this.goto(this.parseInlineTableNext)
}
parseInlineTableNext () {
if (this.char === CHAR_SP || this.char === CTRL_I) {
return null
} else if (this.char === Parser.END || this.char === CHAR_NUM || this.char === CTRL_J || this.char === CTRL_M) {
throw this.error(new TomlError('Unterminated inline array'))
} else if (this.char === CHAR_COMMA) {
return this.next(this.parseInlineTable)
} else if (this.char === CHAR_RCUB) {
return this.goto(this.parseInlineTable)
} else {
throw this.error(new TomlError('Invalid character, expected whitespace, comma (,) or close bracket (])'))
}
}
}
return TOMLParser
}
/***/ }),
/***/ 22950:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
module.exports = parseAsync
const TOMLParser = __webpack_require__(8676)
const prettyError = __webpack_require__(22418)
// Parse `str` incrementally on the macrotask queue so large documents do
// not block the event loop. Resolves with the parsed table; parse errors
// are decorated with a source excerpt via prettyError before rejecting.
function parseAsync (str, opts) {
  if (!opts) opts = {}
  const blocksize = opts.blocksize || 40960
  const parser = new TOMLParser()
  return new Promise((resolve, reject) => {
    setImmediate(step, 0, resolve, reject)
  })
  function step (offset, resolve, reject) {
    if (offset >= str.length) {
      // All chunks fed: finalize the parse.
      try {
        return resolve(parser.finish())
      } catch (err) {
        return reject(prettyError(err, str))
      }
    }
    try {
      // Feed one chunk, then yield back to the event loop.
      parser.parse(str.slice(offset, offset + blocksize))
      setImmediate(step, offset + blocksize, resolve, reject)
    } catch (err) {
      reject(prettyError(err, str))
    }
  }
}
/***/ }),
/***/ 22418:
/***/ ((module) => {
"use strict";
module.exports = prettyError
// Decorate a parser error with a numbered source excerpt and a caret
// pointing at the offending column. Mutates err.message and returns err.
// Errors without position info pass through untouched.
function prettyError (err, buf) {
  /* istanbul ignore if */
  if (err.pos == null || err.line == null) return err
  // Header: human-readable (1-based) row/col plus the raw offset.
  let message = err.message + ` at row ${err.line + 1}, col ${err.col + 1}, pos ${err.pos}:\n`
  /* istanbul ignore else */
  if (buf && buf.split) {
    const sourceLines = buf.split(/\n/)
    // Width of the widest line number printed (context ends at err.line + 2).
    const gutterWidth = String(Math.min(sourceLines.length, err.line + 3)).length
    let gutterPad = ' '
    while (gutterPad.length < gutterWidth) gutterPad += ' '
    const firstRow = Math.max(0, err.line - 1)
    const lastRow = Math.min(sourceLines.length, err.line + 2)
    for (let row = firstRow; row < lastRow; ++row) {
      let label = String(row + 1)
      if (label.length < gutterWidth) label = ' ' + label
      if (err.line === row) {
        // The offending line gets a `>` marker and a caret underneath.
        message += label + '> ' + sourceLines[row] + '\n'
        message += gutterPad + ' '
        for (let col = 0; col < err.col; ++col) {
          message += ' '
        }
        message += '^\n'
      } else {
        message += label + ': ' + sourceLines[row] + '\n'
      }
    }
  }
  err.message = message + '\n'
  return err
}
/***/ }),
/***/ 6435:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
module.exports = parseStream
const stream = __webpack_require__(92413)
const TOMLParser = __webpack_require__(8676)
// Entry point: given a readable stream, drain it through a parser and
// return a Promise; with no argument, hand back a Transform to pipe through.
function parseStream (stm) {
  if (stm) {
    return parseReadable(stm)
  }
  return parseTransform(stm)
}
// Drain an existing Readable through the parser. The readable/ended/errored
// flags coordinate re-entrancy between 'readable', 'end' and 'error' events:
// finish() defers finalization if a read pass is still in progress.
function parseReadable (stm) {
const parser = new TOMLParser()
stm.setEncoding('utf8')
return new Promise((resolve, reject) => {
let readable
let ended = false
let errored = false
function finish () {
ended = true
// Mid-read: readNext's tail will call finish() again once drained.
if (readable) return
try {
resolve(parser.finish())
} catch (err) {
reject(err)
}
}
function error (err) {
errored = true
reject(err)
}
stm.once('end', finish)
stm.once('error', error)
readNext()
function readNext () {
readable = true
let data
// Consume everything currently buffered on the stream.
while ((data = stm.read()) !== null) {
try {
parser.parse(data)
} catch (err) {
return error(err)
}
}
readable = false
/* istanbul ignore if */
if (ended) return finish()
/* istanbul ignore if */
if (errored) return
stm.once('readable', readNext)
}
})
}
// Build an object-mode Transform that accumulates TOML text and pushes the
// parsed table once on flush. Parse errors surface as 'error' events while
// the callback is still invoked so the pipeline keeps moving.
function parseTransform () {
  const parser = new TOMLParser()
  return new stream.Transform({
    objectMode: true,
    transform: function (chunk, encoding, cb) {
      try {
        parser.parse(chunk.toString(encoding))
      } catch (err) {
        // Report but do not abort; cb() still runs below.
        this.emit('error', err)
      }
      cb()
    },
    flush: function (cb) {
      try {
        this.push(parser.finish())
      } catch (err) {
        this.emit('error', err)
      }
      cb()
    }
  })
}
/***/ }),
/***/ 56530:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
module.exports = parseString
const TOMLParser = __webpack_require__(8676)
const prettyError = __webpack_require__(22418)
// Synchronously parse a TOML string (or UTF-8 Buffer). Errors are
// decorated with a source excerpt via prettyError before rethrowing.
function parseString (str) {
  // Accept Buffers for convenience; decode as UTF-8 before parsing.
  const text = global.Buffer && global.Buffer.isBuffer(str) ? str.toString('utf8') : str
  const parser = new TOMLParser()
  try {
    parser.parse(text)
    return parser.finish()
  } catch (err) {
    throw prettyError(err, text)
  }
}
/***/ }),
/***/ 83512:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
module.exports = __webpack_require__(56530)
module.exports.async = __webpack_require__(22950)
module.exports.stream = __webpack_require__(6435)
module.exports.prettyError = __webpack_require__(22418)
/***/ }),
/***/ 36921:
/***/ ((module) => {
"use strict";
module.exports = stringify
module.exports.value = stringifyInline
// Serialize a plain table-like object to a TOML document string.
// Throws for top-level values that are not tables; a toJSON() hook is
// honored before classification.
function stringify (obj) {
  if (obj === null) throw typeError('null')
  if (obj === undefined) throw typeError('undefined')
  if (typeof obj !== 'object') throw typeError(typeof obj)
  if (typeof obj.toJSON === 'function') obj = obj.toJSON()
  // toJSON may itself have produced null/undefined.
  if (obj == null) return null
  const type = tomlType(obj)
  if (type !== 'table') throw typeError(type)
  return stringifyObject('', '', obj)
}
// Error factory for values stringify() cannot represent.
function typeError (type) {
  return new Error(`Can only stringify objects, not ${type}`)
}
// Error factory for arrays mixing incompatible element types.
function arrayOneTypeError () {
  return new Error("Array values can't have mixed types")
}
// Keys whose values can be emitted inline on the table's own lines.
function getInlineKeys (obj) {
  return Object.keys(obj).filter((key) => isInline(obj[key]))
}
// Keys whose values need their own [table] / [[array-of-tables]] section.
function getComplexKeys (obj) {
  return Object.keys(obj).filter((key) => !isInline(obj[key]))
}
// Shallow-normalize a container: invoke toJSON() on members that define it
// (except Date-likes, which the stringifier formats itself). A literal
// '__proto__' key is pre-seeded so it is copied as a plain own property.
function toJSON (obj) {
  let normalized
  if (Array.isArray(obj)) {
    normalized = []
  } else if (Object.prototype.hasOwnProperty.call(obj, '__proto__')) {
    normalized = {['__proto__']: undefined}
  } else {
    normalized = {}
  }
  for (const prop of Object.keys(obj)) {
    const member = obj[prop]
    const hasCustomJSON = member && typeof member.toJSON === 'function' && !('toISOString' in member)
    normalized[prop] = hasCustomJSON ? member.toJSON() : member
  }
  return normalized
}
// Emit the body of one table: inline key/value lines first, a blank
// separator, then nested tables / arrays-of-tables.
function stringifyObject (prefix, indent, obj) {
  obj = toJSON(obj)
  const inlineKeys = getInlineKeys(obj)
  const complexKeys = getComplexKeys(obj)
  const result = []
  const inlineIndent = indent || ''
  for (const key of inlineKeys) {
    const type = tomlType(obj[key])
    // undefined/null members are simply omitted from the output.
    if (type !== 'undefined' && type !== 'null') {
      result.push(inlineIndent + stringifyKey(key) + ' = ' + stringifyAnyInline(obj[key], true))
    }
  }
  if (result.length > 0) result.push('')
  // Nested sections indent one step further when this table is itself nested.
  const complexIndent = prefix && inlineKeys.length > 0 ? indent + ' ' : ''
  for (const key of complexKeys) {
    result.push(stringifyComplex(prefix, complexIndent, key, obj[key]))
  }
  return result.join('\n')
}
// Can this value be rendered on the key's own line (scalar, inline array,
// or empty table), as opposed to needing a [section] of its own?
function isInline (value) {
  const type = tomlType(value)
  if (type === 'array') {
    // Non-empty arrays of tables become [[sections]]; all else stays inline.
    return value.length === 0 || tomlType(value[0]) !== 'table'
  }
  if (type === 'table') {
    // Only empty tables are rendered inline.
    return Object.keys(value).length === 0
  }
  /* istanbul ignore next */
  const scalarTypes = ['undefined', 'null', 'integer', 'nan', 'float', 'boolean', 'string', 'datetime']
  return scalarTypes.indexOf(type) !== -1
}
// Classify a JS value into the TOML type vocabulary used by the stringifier.
// Note: -0 is deliberately treated as a float so it round-trips as -0.0.
function tomlType (value) {
  if (value === undefined) return 'undefined'
  if (value === null) return 'null'
  /* eslint-disable valid-typeof */
  if (typeof value === 'bigint' || (Number.isInteger(value) && !Object.is(value, -0))) return 'integer'
  if (typeof value === 'number') return 'float'
  if (typeof value === 'boolean') return 'boolean'
  if (typeof value === 'string') return 'string'
  // Date-likes expose toISOString; an invalid date is unrepresentable.
  if ('toISOString' in value) return isNaN(value) ? 'undefined' : 'datetime'
  return Array.isArray(value) ? 'array' : 'table'
}
// A bare key may be used when it is purely alphanumeric/dash/underscore;
// anything else must be quoted as a basic string.
function stringifyKey (key) {
  const keyStr = String(key)
  return /^[-A-Za-z0-9_]+$/.test(keyStr) ? keyStr : stringifyBasicString(keyStr)
}
// "..." basic string: backslash-escaped, with embedded double quotes escaped.
function stringifyBasicString (str) {
  return `"${escapeString(str).replace(/"/g, '\\"')}"`
}
// '...' literal string: rendered verbatim (caller ensures no quotes/controls).
function stringifyLiteralString (str) {
  return `'${str}'`
}
// Left-pad `str` with zeroes to at least `num` characters.
function numpad (num, str) {
  return str.length >= num ? str : '0'.repeat(num - str.length) + str
}
// Escape a string for a TOML basic string: backslashes first, then the
// named control escapes, then any remaining ASCII control characters
// (and DEL) as \uXXXX.
function escapeString (str) {
  return str.replace(/\\/g, '\\\\')
    .replace(/[\b]/g, '\\b')
    .replace(/\t/g, '\\t')
    .replace(/\n/g, '\\n')
    .replace(/\f/g, '\\f')
    .replace(/\r/g, '\\r')
    /* eslint-disable no-control-regex */
    // FIX: the global flag was missing, so only the FIRST control character
    // in a string was escaped — later ones leaked through unescaped,
    // producing invalid TOML.
    .replace(/([\u0000-\u001f\u007f])/g, c => '\\u' + numpad(4, c.codePointAt(0).toString(16)))
    /* eslint-enable no-control-regex */
}
// Render a """multi-line""" basic string. Each physical line is escaped
// separately so real newlines survive; a quote abutting the closing
// delimiter is followed by an escaped line-continuation to stay valid.
function stringifyMultilineString (str) {
  const lines = str.split(/\n/).map(line => {
    // A quote that would form part of a closing `"""` must be escaped.
    return escapeString(line).replace(/"(?="")/g, '\\"')
  })
  let escaped = lines.join('\n')
  if (escaped.slice(-1) === '"') escaped += '\\\n'
  return '"""\n' + escaped + '"""'
}
// Choose the best string flavor for a value, then render it inline.
// `multilineOk` permits """multi-line""" rendering for strings with newlines.
function stringifyAnyInline (value, multilineOk) {
  const type = tomlType(value)
  if (type !== 'string') return stringifyInline(value, type)
  if (multilineOk && /\n/.test(value)) return stringifyInline(value, 'string-multiline')
  // Contains double quotes but nothing needing escapes: literal form is cleaner.
  const preferLiteral = !/[\b\t\n\f\r']/.test(value) && /"/.test(value)
  return stringifyInline(value, preferLiteral ? 'string-literal' : 'string')
}
// Render a single inline value of the given (pre-computed) TOML type.
function stringifyInline (value, type) {
  /* istanbul ignore if */
  if (!type) type = tomlType(value)
  if (type === 'string-multiline') return stringifyMultilineString(value)
  if (type === 'string') return stringifyBasicString(value)
  if (type === 'string-literal') return stringifyLiteralString(value)
  if (type === 'integer') return stringifyInteger(value)
  if (type === 'float') return stringifyFloat(value)
  if (type === 'boolean') return stringifyBoolean(value)
  if (type === 'datetime') return stringifyDatetime(value)
  if (type === 'array') {
    // Drop members that have no TOML representation before rendering.
    return stringifyInlineArray(value.filter(member => {
      const memberType = tomlType(member)
      return memberType !== 'null' && memberType !== 'undefined' && memberType !== 'nan'
    }))
  }
  if (type === 'table') return stringifyInlineTable(value)
  /* istanbul ignore next */
  throw typeError(type)
}
// Render an integer with `_` separators every three digits (1_234_567).
function stringifyInteger (value) {
  /* eslint-disable security/detect-unsafe-regex */
  const digits = String(value)
  return digits.replace(/\B(?=(\d{3})+(?!\d))/g, '_')
}
// Render a float; special IEEE values get their dedicated TOML spellings,
// and a decimal part is always present ("5" -> "5.0").
function stringifyFloat (value) {
  if (value === Infinity) return 'inf'
  if (value === -Infinity) return '-inf'
  if (Object.is(value, NaN)) return 'nan'
  if (Object.is(value, -0)) return '-0.0'
  const [whole, fraction = '0'] = String(value).split('.')
  // The integral part is grouped with underscores like an integer.
  return stringifyInteger(whole) + '.' + fraction
}
// Render a boolean as TOML `true` / `false`.
function stringifyBoolean (value) {
  return `${value}`
}
// Render a date via its ISO-8601 form (TOML offset date-time, UTC).
function stringifyDatetime (value) {
  return value.toISOString()
}
// True for the two numeric TOML types.
function isNumber (type) {
  return type === 'integer' || type === 'float'
}
// Determine the common element type of an array: a single shared type,
// 'float' when integers and floats are mixed, otherwise 'mixed'.
function arrayType (values) {
  const firstType = tomlType(values[0])
  if (values.every(member => tomlType(member) === firstType)) return firstType
  // mixed integer/float, emit as floats
  if (values.every(member => isNumber(tomlType(member)))) return 'float'
  return 'mixed'
}
// Like arrayType, but heterogeneous arrays are an error.
function validateArray (values) {
  const type = arrayType(values)
  if (type === 'mixed') {
    throw arrayOneTypeError()
  }
  return type
}
// Render [ ... ]; wraps onto one element per line when the single-line
// form exceeds 60 characters or any element itself contains a newline.
function stringifyInlineArray (values) {
  values = toJSON(values)
  const type = validateArray(values)
  const rendered = values.map(member => stringifyInline(member, type))
  let body
  if (rendered.join(', ').length > 60 || /\n/.test(rendered.join(','))) {
    body = '\n ' + rendered.join(',\n ') + '\n'
  } else {
    body = ' ' + rendered.join(', ') + (rendered.length > 0 ? ' ' : '')
  }
  return '[' + body + ']'
}
// Render { k = v, ... } on a single line.
function stringifyInlineTable (value) {
  value = toJSON(value)
  const pairs = Object.keys(value).map(key => {
    return stringifyKey(key) + ' = ' + stringifyAnyInline(value[key], false)
  })
  return '{ ' + pairs.join(', ') + (pairs.length > 0 ? ' ' : '') + '}'
}
// Dispatch a non-inline member: array-of-tables vs sub-table.
function stringifyComplex (prefix, indent, key, value) {
  const valueType = tomlType(value)
  /* istanbul ignore else */
  if (valueType === 'array') {
    return stringifyArrayOfTables(prefix, indent, key, value)
  } else if (valueType === 'table') {
    return stringifyComplexTable(prefix, indent, key, value)
  }
  throw typeError(valueType)
}
// Emit one [[key]] section per element of an array of tables.
function stringifyArrayOfTables (prefix, indent, key, values) {
  values = toJSON(values)
  validateArray(values)
  const firstValueType = tomlType(values[0])
  /* istanbul ignore if */
  if (firstValueType !== 'table') throw typeError(firstValueType)
  const fullKey = prefix + stringifyKey(key)
  let result = ''
  for (const table of values) {
    if (result.length > 0) result += '\n'
    result += indent + '[[' + fullKey + ']]\n'
    result += stringifyObject(fullKey + '.', indent, table)
  }
  return result
}
// Emit a [key] header (only when the table has inline members) plus body.
function stringifyComplexTable (prefix, indent, key, value) {
  const fullKey = prefix + stringifyKey(key)
  let header = ''
  if (getInlineKeys(value).length > 0) {
    header = indent + '[' + fullKey + ']\n'
  }
  return header + stringifyObject(fullKey + '.', indent, value)
}
/***/ }),
/***/ 5022:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
exports.parse = __webpack_require__(83512)
exports.stringify = __webpack_require__(36921)
/***/ }),
/***/ 18185:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
const fs = __webpack_require__(35747);
// Default fs adapter for the scandir module: exactly the node:fs surface it uses.
exports.FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
stat: fs.stat,
lstatSync: fs.lstatSync,
statSync: fs.statSync,
readdir: fs.readdir,
readdirSync: fs.readdirSync
};
// Return the default adapter, or a shallow copy with caller-supplied overrides.
function createFileSystemAdapter(fsMethods) {
if (fsMethods === undefined) {
return exports.FILE_SYSTEM_ADAPTER;
}
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
}
exports.createFileSystemAdapter = createFileSystemAdapter;
/***/ }),
/***/ 91107:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0;
// Split the running Node version into its dotted components.
const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');
if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) {
throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`);
}
const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);
const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);
// fs.readdir's `withFileTypes` option landed in Node 10.10.
const SUPPORTED_MAJOR_VERSION = 10;
const SUPPORTED_MINOR_VERSION = 10;
const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION;
const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION;
/**
 * IS `true` for Node.js 10.10 and greater.
 */
exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR;
/***/ }),
/***/ 55923:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Settings = exports.scandirSync = exports.scandir = void 0;
const async = __webpack_require__(31411);
const sync = __webpack_require__(63953);
const settings_1 = __webpack_require__(36913);
exports.Settings = settings_1.default;
// Async directory listing; supports (path, cb) and (path, options, cb) forms.
function scandir(path, optionsOrSettingsOrCallback, callback) {
  if (typeof optionsOrSettingsOrCallback === 'function') {
    // Two-argument form: the middle argument is actually the callback.
    async.read(path, getSettings(), optionsOrSettingsOrCallback);
    return;
  }
  async.read(path, getSettings(optionsOrSettingsOrCallback), callback);
}
exports.scandir = scandir;
// Synchronous directory listing.
function scandirSync(path, optionsOrSettings) {
  return sync.read(path, getSettings(optionsOrSettings));
}
exports.scandirSync = scandirSync;
// Pass Settings instances through unchanged; wrap plain options objects.
function getSettings(settingsOrOptions = {}) {
  return settingsOrOptions instanceof settings_1.default
    ? settingsOrOptions
    : new settings_1.default(settingsOrOptions);
}
/***/ }),
/***/ 31411:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.readdir = exports.readdirWithFileTypes = exports.read = void 0;
const fsStat = __webpack_require__(66203);
const rpl = __webpack_require__(54595);
const constants_1 = __webpack_require__(91107);
const utils = __webpack_require__(66582);
const common = __webpack_require__(34587);
// Choose the fast dirent-based listing when Node supports it and the caller
// did not request full stats; otherwise fall back to stat-ing every entry.
function read(directory, settings, callback) {
if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
readdirWithFileTypes(directory, settings, callback);
return;
}
readdir(directory, settings, callback);
}
exports.read = read;
// Dirent-based async listing. When symlinks are followed, every entry is
// post-processed in parallel (run-parallel-limit style via rpl) so symlinked
// dirents describe the link target rather than the link itself.
function readdirWithFileTypes(directory, settings, callback) {
settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => {
if (readdirError !== null) {
callFailureCallback(callback, readdirError);
return;
}
const entries = dirents.map((dirent) => ({
dirent,
name: dirent.name,
path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
}));
if (!settings.followSymbolicLinks) {
// No symlink resolution requested: the raw dirents are the result.
callSuccessCallback(callback, entries);
return;
}
const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings));
rpl(tasks, (rplError, rplEntries) => {
if (rplError !== null) {
callFailureCallback(callback, rplError);
return;
}
callSuccessCallback(callback, rplEntries);
});
});
}
exports.readdirWithFileTypes = readdirWithFileTypes;
// Build an rpl task that finalizes one entry: non-symlink entries pass
// straight through; symlinked entries are stat()ed so the dirent reflects
// the link target. Broken links either propagate or keep the lstat dirent.
function makeRplTaskEntry(entry, settings) {
  return (done) => {
    if (!entry.dirent.isSymbolicLink()) {
      done(null, entry);
      return;
    }
    settings.fs.stat(entry.path, (statError, stats) => {
      if (statError !== null) {
        // Broken symlink: configurable between failing and best-effort.
        if (settings.throwErrorOnBrokenSymbolicLink) {
          done(statError);
          return;
        }
        done(null, entry);
        return;
      }
      entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);
      done(null, entry);
    });
  };
}
// Name-based async listing: readdir, then stat every entry in parallel and
// synthesize a dirent from each Stats. Used when withFileTypes is
// unavailable or the caller asked for `stats` on each entry.
function readdir(directory, settings, callback) {
settings.fs.readdir(directory, (readdirError, names) => {
if (readdirError !== null) {
callFailureCallback(callback, readdirError);
return;
}
const tasks = names.map((name) => {
const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator);
return (done) => {
fsStat.stat(path, settings.fsStatSettings, (error, stats) => {
if (error !== null) {
done(error);
return;
}
const entry = {
name,
path,
dirent: utils.fs.createDirentFromStats(name, stats)
};
// Attach raw stats only when explicitly requested.
if (settings.stats) {
entry.stats = stats;
}
done(null, entry);
});
};
});
rpl(tasks, (rplError, entries) => {
if (rplError !== null) {
callFailureCallback(callback, rplError);
return;
}
callSuccessCallback(callback, entries);
});
});
}
exports.readdir = readdir;
// Invoke a node-style callback in its failure form: (error).
function callFailureCallback(cb, err) {
  cb(err);
}
// Invoke a node-style callback in its success form: (null, result).
function callSuccessCallback(cb, res) {
  cb(null, res);
}
/***/ }),
/***/ 34587:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.joinPathSegments = void 0;
/**
 * Join a directory path and a child name, avoiding a doubled separator when
 * `a` already ends with one (root `/`, drive `C:/`, UNC `//?/C:/`).
 */
function joinPathSegments(a, b, separator) {
  const needsSeparator = !a.endsWith(separator);
  return needsSeparator ? a + separator + b : a + b;
}
exports.joinPathSegments = joinPathSegments;
/***/ }),
/***/ 63953:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.readdir = exports.readdirWithFileTypes = exports.read = void 0;
const fsStat = __webpack_require__(66203);
const constants_1 = __webpack_require__(91107);
const utils = __webpack_require__(66582);
const common = __webpack_require__(34587);
// Sync variant of read(): prefer the dirent-based listing when supported
// and stats were not requested; otherwise stat each entry.
function read(directory, settings) {
if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
return readdirWithFileTypes(directory, settings);
}
return readdir(directory, settings);
}
exports.read = read;
// Sync dirent-based listing; optionally re-stat symlinks so each dirent
// describes the link target instead of the link itself.
function readdirWithFileTypes(directory, settings) {
  const dirents = settings.fs.readdirSync(directory, { withFileTypes: true });
  return dirents.map((dirent) => {
    const entryPath = common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator);
    const entry = { dirent, name: dirent.name, path: entryPath };
    const shouldResolveLink = entry.dirent.isSymbolicLink() && settings.followSymbolicLinks;
    if (shouldResolveLink) {
      try {
        const stats = settings.fs.statSync(entry.path);
        entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);
      }
      catch (error) {
        // Broken symlink: keep the lstat dirent unless configured to throw.
        if (settings.throwErrorOnBrokenSymbolicLink) {
          throw error;
        }
      }
    }
    return entry;
  });
}
exports.readdirWithFileTypes = readdirWithFileTypes;
// Sync name-based listing: stat every entry and synthesize a dirent from
// its Stats; used when dirents are unavailable or `stats` was requested.
function readdir(directory, settings) {
  const names = settings.fs.readdirSync(directory);
  return names.map((name) => {
    const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator);
    const stats = fsStat.statSync(entryPath, settings.fsStatSettings);
    const entry = {
      name,
      path: entryPath,
      dirent: utils.fs.createDirentFromStats(name, stats)
    };
    // Attach raw stats only when explicitly requested.
    if (settings.stats) {
      entry.stats = stats;
    }
    return entry;
  });
}
exports.readdir = readdir;
/***/ }),
/***/ 36913:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const path = __webpack_require__(85622);
const fsStat = __webpack_require__(66203);
const fs = __webpack_require__(18185);
// Runtime options for scandir with defaults applied; also pre-builds the
// fs.stat settings used when symlinks are resolved.
class Settings {
  constructor(_options = {}) {
    this._options = _options;
    // Each option falls back to its documented default when null/undefined.
    this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);
    this.fs = fs.createFileSystemAdapter(this._options.fs);
    this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);
    this.stats = this._getValue(this._options.stats, false);
    this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
    // Derived settings for the nested fs.stat calls.
    this.fsStatSettings = new fsStat.Settings({
      followSymbolicLink: this.followSymbolicLinks,
      fs: this.fs,
      throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink
    });
  }
  // Nullish-coalescing fallback: `option ?? value`.
  _getValue(option, value) {
    return option === null || option === undefined ? value : option;
  }
}
exports.default = Settings;
/***/ }),
/***/ 70322:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.createDirentFromStats = void 0;
// Minimal fs.Dirent lookalike backed by a Stats object: every is* query
// delegates to the corresponding Stats method.
class DirentFromStats {
  constructor(name, stats) {
    this.name = name;
    const kinds = [
      'isBlockDevice',
      'isCharacterDevice',
      'isDirectory',
      'isFIFO',
      'isFile',
      'isSocket',
      'isSymbolicLink'
    ];
    // Bind so the methods stay valid when detached from the instance.
    for (const kind of kinds) {
      this[kind] = stats[kind].bind(stats);
    }
  }
}
// Factory used by the directory readers to normalize Stats into dirents.
function createDirentFromStats(name, stats) {
  return new DirentFromStats(name, stats);
}
exports.createDirentFromStats = createDirentFromStats;
/***/ }),
/***/ 66582:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.fs = void 0;
const fs = __webpack_require__(70322);
exports.fs = fs;
/***/ }),
/***/ 98980:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
const fs = __webpack_require__(35747);
// Default fs adapter for the stat module: only the four stat-family calls.
exports.FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
stat: fs.stat,
lstatSync: fs.lstatSync,
statSync: fs.statSync
};
// Return the default adapter, or a shallow copy with caller-supplied overrides.
function createFileSystemAdapter(fsMethods) {
if (fsMethods === undefined) {
return exports.FILE_SYSTEM_ADAPTER;
}
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
}
exports.createFileSystemAdapter = createFileSystemAdapter;
/***/ }),
/***/ 66203:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.statSync = exports.stat = exports.Settings = void 0;
const async = __webpack_require__(2654);
const sync = __webpack_require__(88946);
const settings_1 = __webpack_require__(18328);
exports.Settings = settings_1.default;
// Public async stat; supports (path, cb) and (path, optionsOrSettings, cb).
function stat(path, optionsOrSettingsOrCallback, callback) {
  if (typeof optionsOrSettingsOrCallback === 'function') {
    // Two-argument form: the middle argument is actually the callback.
    async.read(path, getSettings(), optionsOrSettingsOrCallback);
    return;
  }
  async.read(path, getSettings(optionsOrSettingsOrCallback), callback);
}
exports.stat = stat;
// Public sync stat.
function statSync(path, optionsOrSettings) {
  return sync.read(path, getSettings(optionsOrSettings));
}
exports.statSync = statSync;
// Pass Settings instances through unchanged; wrap plain options objects.
function getSettings(settingsOrOptions = {}) {
  return settingsOrOptions instanceof settings_1.default
    ? settingsOrOptions
    : new settings_1.default(settingsOrOptions);
}
/***/ }),
/***/ 2654:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.read = void 0;
// lstat the path; when it is a symlink and following is enabled, stat the
// target. Broken links fall back to the lstat result unless configured to
// fail. `markSymbolicLink` makes resolved stats still report as a symlink.
function read(path, settings, callback) {
  settings.fs.lstat(path, (lstatError, lstat) => {
    if (lstatError !== null) {
      callFailureCallback(callback, lstatError);
      return;
    }
    const followTarget = lstat.isSymbolicLink() && settings.followSymbolicLink;
    if (!followTarget) {
      callSuccessCallback(callback, lstat);
      return;
    }
    settings.fs.stat(path, (statError, stat) => {
      if (statError !== null) {
        // Broken symlink: propagate or degrade to the lstat result.
        if (settings.throwErrorOnBrokenSymbolicLink) {
          callFailureCallback(callback, statError);
          return;
        }
        callSuccessCallback(callback, lstat);
        return;
      }
      if (settings.markSymbolicLink) {
        stat.isSymbolicLink = () => true;
      }
      callSuccessCallback(callback, stat);
    });
  });
}
// Invoke a node-style callback in its failure form: (error).
function callFailureCallback(cb, err) {
  cb(err);
}
// Invoke a node-style callback in its success form: (null, result).
function callSuccessCallback(cb, res) {
  cb(null, res);
}
/***/ }),
/***/ 88946:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.read = void 0;
// Sync variant: lstat, optionally resolve symlink targets, and tolerate
// broken links according to the settings.
function read(path, settings) {
  const lstat = settings.fs.lstatSync(path);
  const followTarget = lstat.isSymbolicLink() && settings.followSymbolicLink;
  if (!followTarget) {
    return lstat;
  }
  try {
    const stat = settings.fs.statSync(path);
    if (settings.markSymbolicLink) {
      // Resolved stats should still identify as a symlink when asked.
      stat.isSymbolicLink = () => true;
    }
    return stat;
  }
  catch (error) {
    // Broken symlink: degrade to lstat unless configured to fail.
    if (settings.throwErrorOnBrokenSymbolicLink) {
      throw error;
    }
    return lstat;
  }
}
exports.read = read;
/***/ }),
/***/ 18328:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const fs = __webpack_require__(98980);
// Runtime options for the fs.stat wrappers, with defaults applied.
class Settings {
  constructor(_options = {}) {
    this._options = _options;
    // Each option falls back to its documented default when null/undefined.
    this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true);
    this.fs = fs.createFileSystemAdapter(this._options.fs);
    this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false);
    this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
  }
  // Nullish-coalescing fallback: `option ?? value`.
  _getValue(option, value) {
    return option === null || option === undefined ? value : option;
  }
}
exports.default = Settings;
/***/ }),
/***/ 45439:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0;
const async_1 = __webpack_require__(9346);
const stream_1 = __webpack_require__(40215);
const sync_1 = __webpack_require__(64078);
const settings_1 = __webpack_require__(48690);
exports.Settings = settings_1.default;
function walk(directory, optionsOrSettingsOrCallback, callback) {
if (typeof optionsOrSettingsOrCallback === 'function') {
new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback);
return;
}
new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback);
}
exports.walk = walk;
function walkSync(directory, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
const provider = new sync_1.default(directory, settings);
return provider.read();
}
exports.walkSync = walkSync;
// Streaming walk: returns an object-mode Readable of entries.
function walkStream(directory, optionsOrSettings) {
    return new stream_1.default(directory, getSettings(optionsOrSettings)).read();
}
exports.walkStream = walkStream;
// Accepts either a ready Settings instance or a plain options object.
function getSettings(settingsOrOptions = {}) {
    return settingsOrOptions instanceof settings_1.default
        ? settingsOrOptions
        : new settings_1.default(settingsOrOptions);
}
/***/ }),
/***/ 9346:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const async_1 = __webpack_require__(34714);
// Callback-style provider: buffers every entry emitted by the async reader
// and delivers the whole list (or the first error) via a Node-style callback.
class AsyncProvider {
    constructor(_root, _settings) {
        this._root = _root;
        this._settings = _settings;
        this._reader = new async_1.default(this._root, this._settings);
        this._storage = [];
    }
    read(callback) {
        this._reader.onError((error) => callFailureCallback(callback, error));
        this._reader.onEntry((entry) => {
            this._storage.push(entry);
        });
        this._reader.onEnd(() => callSuccessCallback(callback, this._storage));
        this._reader.read();
    }
}
exports.default = AsyncProvider;
// Invoke a Node-style callback in its error form: cb(error).
function callFailureCallback(cb, err) {
    cb(err);
}
// Invoke a Node-style callback in its success form: cb(null, result).
function callSuccessCallback(cb, result) {
    cb(null, result);
}
/***/ }),
/***/ 40215:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const stream_1 = __webpack_require__(92413);
const async_1 = __webpack_require__(34714);
// Stream provider: adapts the event-based async reader to an object-mode
// Readable. Destroying the stream tears down the underlying reader.
class StreamProvider {
    constructor(_root, _settings) {
        this._root = _root;
        this._settings = _settings;
        this._reader = new async_1.default(this._root, this._settings);
        this._stream = new stream_1.Readable({
            objectMode: true,
            read: () => { },
            destroy: () => {
                // Guard: destroy the reader at most once.
                if (!this._reader.isDestroyed) {
                    this._reader.destroy();
                }
            }
        });
    }
    read() {
        this._reader.onError((error) => this._stream.emit('error', error));
        this._reader.onEntry((entry) => this._stream.push(entry));
        // Null signals end-of-stream to Readable consumers.
        this._reader.onEnd(() => this._stream.push(null));
        this._reader.read();
        return this._stream;
    }
}
exports.default = StreamProvider;
/***/ }),
/***/ 64078:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const sync_1 = __webpack_require__(79555);
// Thin synchronous provider: delegates the whole walk to SyncReader.
class SyncProvider {
    constructor(_root, _settings) {
        this._root = _root;
        this._settings = _settings;
        this._reader = new sync_1.default(this._root, this._settings);
    }
    // Returns the full array of entries produced by the reader.
    read() {
        return this._reader.read();
    }
}
exports.default = SyncProvider;
/***/ }),
/***/ 34714:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const events_1 = __webpack_require__(28614);
const fsScandir = __webpack_require__(55923);
const fastq = __webpack_require__(10373);
const common = __webpack_require__(26865);
const reader_1 = __webpack_require__(85274);
// Asynchronous directory-tree reader. Directories are scanned through a
// fastq worker queue (concurrency taken from settings); results are delivered
// via an EventEmitter ('entry', 'error', 'end').
class AsyncReader extends reader_1.default {
constructor(_root, _settings) {
super(_root, _settings);
this._settings = _settings;
this._scandir = fsScandir.scandir;
this._emitter = new events_1.EventEmitter();
this._queue = fastq(this._worker.bind(this), this._settings.concurrency);
this._isFatalError = false;
this._isDestroyed = false;
// 'end' fires when the queue drains, unless a fatal error was already emitted.
this._queue.drain = () => {
if (!this._isFatalError) {
this._emitter.emit('end');
}
};
}
// Starts the walk from the root and returns the emitter for subscriptions.
// setImmediate lets callers attach listeners before any event can fire.
read() {
this._isFatalError = false;
this._isDestroyed = false;
setImmediate(() => {
this._pushToQueue(this._root, this._settings.basePath);
});
return this._emitter;
}
get isDestroyed() {
return this._isDestroyed;
}
// Stops the walk; throws if the reader was already destroyed.
destroy() {
if (this._isDestroyed) {
throw new Error('The reader is already destroyed');
}
this._isDestroyed = true;
this._queue.killAndDrain();
}
onEntry(callback) {
this._emitter.on('entry', callback);
}
onError(callback) {
this._emitter.once('error', callback);
}
onEnd(callback) {
this._emitter.once('end', callback);
}
_pushToQueue(directory, base) {
const queueItem = { directory, base };
this._queue.push(queueItem, (error) => {
if (error !== null) {
this._handleError(error);
}
});
}
// Queue worker: scans one directory and forwards each entry for handling.
_worker(item, done) {
this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => {
if (error !== null) {
done(error, undefined);
return;
}
for (const entry of entries) {
this._handleEntry(entry, item.base);
}
done(null, undefined);
});
}
// Emits 'error' once for the first fatal error; errors suppressed by
// settings.errorFilter are ignored.
_handleError(error) {
if (this._isDestroyed || !common.isFatalError(this._settings, error)) {
return;
}
this._isFatalError = true;
this._isDestroyed = true;
this._emitter.emit('error', error);
}
// Applies entry/deep filters and recurses into sub-directories. When
// basePath is undefined, entry.path keeps the real filesystem path.
_handleEntry(entry, base) {
if (this._isDestroyed || this._isFatalError) {
return;
}
const fullpath = entry.path;
if (base !== undefined) {
entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);
}
if (common.isAppliedFilter(this._settings.entryFilter, entry)) {
this._emitEntry(entry);
}
if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {
this._pushToQueue(fullpath, base === undefined ? undefined : entry.path);
}
}
_emitEntry(entry) {
this._emitter.emit('entry', entry);
}
}
exports.default = AsyncReader;
/***/ }),
/***/ 26865:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0;
// An error is fatal unless the user-supplied errorFilter suppresses it.
// With no filter configured (null), every error is fatal.
function isFatalError(settings, error) {
    const filter = settings.errorFilter;
    return filter === null ? true : !filter(error);
}
exports.isFatalError = isFatalError;
// A null filter accepts everything; otherwise defer to the predicate.
function isAppliedFilter(filter, value) {
    if (filter === null) {
        return true;
    }
    return filter(value);
}
exports.isAppliedFilter = isAppliedFilter;
// Normalize both '/' and '\' separators in a path to the configured one.
// split/join keeps the separator literal (no regex-replacement semantics).
function replacePathSegmentSeparator(filepath, separator) {
    const segments = filepath.split(/[/\\]/);
    return segments.join(separator);
}
exports.replacePathSegmentSeparator = replacePathSegmentSeparator;
// Join two path segments with `separator`, avoiding a doubled separator when
// the first segment is a root (`/`, `C:/`) or UNC prefix (`//?/C:/`) that
// already ends with it. An empty first segment yields the second unchanged.
function joinPathSegments(a, b, separator) {
    if (a === '') {
        return b;
    }
    return a.endsWith(separator) ? a + b : a + separator + b;
}
exports.joinPathSegments = joinPathSegments;
/***/ }),
/***/ 85274:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const common = __webpack_require__(26865);
// Base reader: stores settings and the root path with its separators
// normalized to settings.pathSegmentSeparator.
class Reader {
    constructor(_root, _settings) {
        this._settings = _settings;
        // Note: the compiled original first assigned the raw root and then
        // immediately overwrote it; only the normalized value is observable.
        this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator);
    }
}
exports.default = Reader;
/***/ }),
/***/ 79555:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const fsScandir = __webpack_require__(55923);
const common = __webpack_require__(26865);
const reader_1 = __webpack_require__(85274);
// Synchronous directory-tree reader. Pending directories live in a Set;
// because Set iteration visits items added during iteration, sub-directories
// discovered while walking are processed within the same _handleQueue pass.
class SyncReader extends reader_1.default {
constructor() {
super(...arguments);
this._scandir = fsScandir.scandirSync;
this._storage = [];
this._queue = new Set();
}
// Walks from the root and returns all accepted entries.
read() {
this._pushToQueue(this._root, this._settings.basePath);
this._handleQueue();
return this._storage;
}
_pushToQueue(directory, base) {
this._queue.add({ directory, base });
}
_handleQueue() {
for (const item of this._queue.values()) {
this._handleDirectory(item.directory, item.base);
}
}
_handleDirectory(directory, base) {
try {
const entries = this._scandir(directory, this._settings.fsScandirSettings);
for (const entry of entries) {
this._handleEntry(entry, base);
}
}
catch (error) {
this._handleError(error);
}
}
// Rethrows only errors that settings.errorFilter considers fatal.
_handleError(error) {
if (!common.isFatalError(this._settings, error)) {
return;
}
throw error;
}
// Applies entry/deep filters; queues sub-directories for the ongoing pass.
// When basePath is undefined, entry.path keeps the real filesystem path.
_handleEntry(entry, base) {
const fullpath = entry.path;
if (base !== undefined) {
entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);
}
if (common.isAppliedFilter(this._settings.entryFilter, entry)) {
this._pushToStorage(entry);
}
if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {
this._pushToQueue(fullpath, base === undefined ? undefined : entry.path);
}
}
_pushToStorage(entry) {
this._storage.push(entry);
}
}
exports.default = SyncReader;
/***/ }),
/***/ 48690:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const path = __webpack_require__(85622);
const fsScandir = __webpack_require__(55923);
// Settings for fs.walk: resolves walk-level defaults and builds the nested
// fs.scandir settings from the relevant raw options.
class Settings {
    constructor(_options = {}) {
        this._options = _options;
        this.basePath = this._getValue(this._options.basePath, undefined);
        this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY);
        this.deepFilter = this._getValue(this._options.deepFilter, null);
        this.entryFilter = this._getValue(this._options.entryFilter, null);
        this.errorFilter = this._getValue(this._options.errorFilter, null);
        this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);
        // Scandir-level options are forwarded verbatim (their own defaults apply).
        this.fsScandirSettings = new fsScandir.Settings({
            followSymbolicLinks: this._options.followSymbolicLinks,
            fs: this._options.fs,
            pathSegmentSeparator: this._options.pathSegmentSeparator,
            stats: this._options.stats,
            throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink
        });
    }
    // Nullish-coalescing fallback, compiled for pre-ES2020 targets.
    _getValue(option, value) {
        return option === null || option === void 0 ? value : option;
    }
}
exports.default = Settings;
/***/ }),
/***/ 80657:
/***/ ((module, exports) => {
"use strict";
///
///
///
Object.defineProperty(exports, "__esModule", ({ value: true }));
// Constructor names for every supported TypedArray variant.
const typedArrayTypeNames = [
    'Int8Array', 'Uint8Array', 'Uint8ClampedArray',
    'Int16Array', 'Uint16Array',
    'Int32Array', 'Uint32Array',
    'Float32Array', 'Float64Array',
    'BigInt64Array', 'BigUint64Array'
];
function isTypedArrayName(name) {
    return typedArrayTypeNames.includes(name);
}
// Object tag names (as produced by Object.prototype.toString) that the
// `is` classifier recognizes; includes every TypedArray name above.
const objectTypeNames = [
    'Function', 'Generator', 'AsyncGenerator', 'GeneratorFunction',
    'AsyncGeneratorFunction', 'AsyncFunction', 'Observable', 'Array',
    'Buffer', 'Object', 'RegExp', 'Date', 'Error', 'Map', 'Set',
    'WeakMap', 'WeakSet', 'ArrayBuffer', 'SharedArrayBuffer', 'DataView',
    'Promise', 'URL', 'HTMLElement',
    ...typedArrayTypeNames
];
function isObjectTypeName(name) {
    return objectTypeNames.includes(name);
}
// `typeof` results (plus 'null') that count as primitive type names.
const primitiveTypeNames = [
    'null', 'undefined', 'string', 'number', 'bigint', 'boolean', 'symbol'
];
function isPrimitiveTypeName(name) {
    return primitiveTypeNames.includes(name);
}
// Build a predicate that compares `typeof value` against a fixed tag.
// eslint-disable-next-line @typescript-eslint/ban-types
function isOfType(type) {
    return function (value) {
        return typeof value === type;
    };
}
const { toString } = Object.prototype;
// Extract the tag from '[object Tag]' and map it to a recognized type name.
const getObjectType = (value) => {
    const tag = toString.call(value).slice(8, -1);
    // Collapse concrete element tags (HTMLDivElement, ...) to 'HTMLElement'.
    if (/HTML\w+Element/.test(tag) && is.domElement(value)) {
        return 'HTMLElement';
    }
    return isObjectTypeName(tag) ? tag : undefined;
};
const isObjectOfType = (type) => (value) => getObjectType(value) === type;
// Classify a value: primitives return their typeof name ('Function' for
// functions, 'null' for null); objects are resolved via special-case
// predicates, then the toString tag, falling back to 'Object'.
function is(value) {
    if (value === null) {
        return 'null';
    }
    switch (typeof value) {
        case 'undefined':
        case 'string':
        case 'number':
        case 'boolean':
        case 'bigint':
        case 'symbol':
            return typeof value;
        case 'function':
            return 'Function';
        default:
    }
    // Order matters: observables, arrays and buffers before tag lookup.
    if (is.observable(value)) {
        return 'Observable';
    }
    if (is.array(value)) {
        return 'Array';
    }
    if (is.buffer(value)) {
        return 'Buffer';
    }
    const tagType = getObjectType(value);
    if (tagType) {
        return tagType;
    }
    if (value instanceof String || value instanceof Boolean || value instanceof Number) {
        throw new TypeError('Please don\'t use object wrappers for primitive types');
    }
    return 'Object';
}
// --- Primitive-type predicates ---
is.undefined = isOfType('undefined');
is.string = isOfType('string');
const isNumberType = isOfType('number');
// NaN is deliberately excluded from is.number.
is.number = (value) => isNumberType(value) && !is.nan(value);
is.bigint = isOfType('bigint');
// eslint-disable-next-line @typescript-eslint/ban-types
is.function_ = isOfType('function');
is.null_ = (value) => value === null;
// Heuristic: relies on `class` source text; transpiled classes may not match.
is.class_ = (value) => is.function_(value) && value.toString().startsWith('class ');
is.boolean = (value) => value === true || value === false;
is.symbol = isOfType('symbol');
is.numericString = (value) => is.string(value) && !is.emptyStringOrWhitespace(value) && !Number.isNaN(Number(value));
// --- Structural / collection predicates ---
// Optional assertion: when provided, every element must satisfy it.
is.array = (value, assertion) => {
    if (!Array.isArray(value)) {
        return false;
    }
    if (!is.function_(assertion)) {
        return true;
    }
    return value.every(assertion);
};
is.buffer = (value) => { var _a, _b, _c, _d; return (_d = (_c = (_b = (_a = value) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.isBuffer) === null || _c === void 0 ? void 0 : _c.call(_b, value)) !== null && _d !== void 0 ? _d : false; };
is.nullOrUndefined = (value) => is.null_(value) || is.undefined(value);
is.object = (value) => !is.null_(value) && (typeof value === 'object' || is.function_(value));
is.iterable = (value) => { var _a; return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a[Symbol.iterator]); };
is.asyncIterable = (value) => { var _a; return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a[Symbol.asyncIterator]); };
is.generator = (value) => is.iterable(value) && is.function_(value.next) && is.function_(value.throw);
is.asyncGenerator = (value) => is.asyncIterable(value) && is.function_(value.next) && is.function_(value.throw);
// --- Promise predicates: native tag or duck-typed then/catch ---
is.nativePromise = (value) => isObjectOfType('Promise')(value);
const hasPromiseAPI = (value) => {
    var _a, _b;
    return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a.then) &&
        is.function_((_b = value) === null || _b === void 0 ? void 0 : _b.catch);
};
is.promise = (value) => is.nativePromise(value) || hasPromiseAPI(value);
// --- Tag-based predicates (Object.prototype.toString) ---
is.generatorFunction = isObjectOfType('GeneratorFunction');
is.asyncGeneratorFunction = (value) => getObjectType(value) === 'AsyncGeneratorFunction';
is.asyncFunction = (value) => getObjectType(value) === 'AsyncFunction';
// eslint-disable-next-line no-prototype-builtins, @typescript-eslint/ban-types
is.boundFunction = (value) => is.function_(value) && !value.hasOwnProperty('prototype');
is.regExp = isObjectOfType('RegExp');
is.date = isObjectOfType('Date');
is.error = isObjectOfType('Error');
is.map = (value) => isObjectOfType('Map')(value);
is.set = (value) => isObjectOfType('Set')(value);
is.weakMap = (value) => isObjectOfType('WeakMap')(value);
is.weakSet = (value) => isObjectOfType('WeakSet')(value);
is.int8Array = isObjectOfType('Int8Array');
is.uint8Array = isObjectOfType('Uint8Array');
is.uint8ClampedArray = isObjectOfType('Uint8ClampedArray');
is.int16Array = isObjectOfType('Int16Array');
is.uint16Array = isObjectOfType('Uint16Array');
is.int32Array = isObjectOfType('Int32Array');
is.uint32Array = isObjectOfType('Uint32Array');
is.float32Array = isObjectOfType('Float32Array');
is.float64Array = isObjectOfType('Float64Array');
is.bigInt64Array = isObjectOfType('BigInt64Array');
is.bigUint64Array = isObjectOfType('BigUint64Array');
is.arrayBuffer = isObjectOfType('ArrayBuffer');
is.sharedArrayBuffer = isObjectOfType('SharedArrayBuffer');
is.dataView = isObjectOfType('DataView');
is.directInstanceOf = (instance, class_) => Object.getPrototypeOf(instance) === class_.prototype;
is.urlInstance = (value) => isObjectOfType('URL')(value);
is.urlString = (value) => {
    if (!is.string(value)) {
        return false;
    }
    try {
        new URL(value); // eslint-disable-line no-new
        return true;
    }
    catch (_a) {
        return false;
    }
};
// TODO: Use the `not` operator with a type guard here when it's available.
// Example: `is.truthy = (value: unknown): value is (not false | not 0 | not '' | not undefined | not null) => Boolean(value);`
is.truthy = (value) => Boolean(value);
// Example: `is.falsy = (value: unknown): value is (not true | 0 | '' | undefined | null) => Boolean(value);`
is.falsy = (value) => !value;
is.nan = (value) => Number.isNaN(value);
is.primitive = (value) => is.null_(value) || isPrimitiveTypeName(typeof value);
is.integer = (value) => Number.isInteger(value);
is.safeInteger = (value) => Number.isSafeInteger(value);
is.plainObject = (value) => {
    // From: https://github.com/sindresorhus/is-plain-obj/blob/main/index.js
    if (toString.call(value) !== '[object Object]') {
        return false;
    }
    const prototype = Object.getPrototypeOf(value);
    return prototype === null || prototype === Object.getPrototypeOf({});
};
is.typedArray = (value) => isTypedArrayName(getObjectType(value));
const isValidLength = (value) => is.safeInteger(value) && value >= 0;
is.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length);
// Single number range means [min(0, range), max(0, range)].
is.inRange = (value, range) => {
    if (is.number(range)) {
        return value >= Math.min(0, range) && value <= Math.max(range, 0);
    }
    if (is.array(range) && range.length === 2) {
        return value >= Math.min(...range) && value <= Math.max(...range);
    }
    throw new TypeError(`Invalid range: ${JSON.stringify(range)}`);
};
// --- DOM detection: duck-typing on common element properties ---
const NODE_TYPE_ELEMENT = 1;
const DOM_PROPERTIES_TO_CHECK = [
    'innerHTML',
    'ownerDocument',
    'style',
    'attributes',
    'nodeValue'
];
is.domElement = (value) => {
    return is.object(value) &&
        value.nodeType === NODE_TYPE_ELEMENT &&
        is.string(value.nodeName) &&
        !is.plainObject(value) &&
        DOM_PROPERTIES_TO_CHECK.every(property => property in value);
};
// --- Observable detection per the Symbol.observable proposal ---
is.observable = (value) => {
    var _a, _b, _c, _d;
    if (!value) {
        return false;
    }
    // eslint-disable-next-line no-use-extend-native/no-use-extend-native
    if (value === ((_b = (_a = value)[Symbol.observable]) === null || _b === void 0 ? void 0 : _b.call(_a))) {
        return true;
    }
    if (value === ((_d = (_c = value)['@@observable']) === null || _d === void 0 ? void 0 : _d.call(_c))) {
        return true;
    }
    return false;
};
is.nodeStream = (value) => is.object(value) && is.function_(value.pipe) && !is.observable(value);
is.infinite = (value) => value === Infinity || value === -Infinity;
const isAbsoluteMod2 = (remainder) => (value) => is.integer(value) && Math.abs(value % 2) === remainder;
is.evenInteger = isAbsoluteMod2(0);
is.oddInteger = isAbsoluteMod2(1);
// --- Emptiness predicates ---
is.emptyArray = (value) => is.array(value) && value.length === 0;
is.nonEmptyArray = (value) => is.array(value) && value.length > 0;
is.emptyString = (value) => is.string(value) && value.length === 0;
// TODO: Use `not ''` when the `not` operator is available.
is.nonEmptyString = (value) => is.string(value) && value.length > 0;
const isWhiteSpaceString = (value) => is.string(value) && !/\S/.test(value);
is.emptyStringOrWhitespace = (value) => is.emptyString(value) || isWhiteSpaceString(value);
is.emptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length === 0;
// TODO: Use `not` operator here to remove `Map` and `Set` from type guard:
// - https://github.com/Microsoft/TypeScript/pull/29317
is.nonEmptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length > 0;
is.emptySet = (value) => is.set(value) && value.size === 0;
is.nonEmptySet = (value) => is.set(value) && value.size > 0;
is.emptyMap = (value) => is.map(value) && value.size === 0;
is.nonEmptyMap = (value) => is.map(value) && value.size > 0;
// --- Shared driver for is.any / is.all ---
const predicateOnArray = (method, predicate, values) => {
    if (!is.function_(predicate)) {
        throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`);
    }
    if (values.length === 0) {
        throw new TypeError('Invalid number of values');
    }
    return method.call(values, predicate);
};
// is.any accepts a single predicate or an array of predicates.
is.any = (predicate, ...values) => {
    const predicates = is.array(predicate) ? predicate : [predicate];
    return predicates.some(singlePredicate => predicateOnArray(Array.prototype.some, singlePredicate, values));
};
is.all = (predicate, ...values) => predicateOnArray(Array.prototype.every, predicate, values);
// Throw a TypeError describing the received type(s) when `condition` fails.
// With options.multipleValues, `value` is an array and the message lists the
// distinct types of its elements.
const assertType = (condition, description, value, options = {}) => {
    if (condition) {
        return;
    }
    const { multipleValues } = options;
    const valuesMessage = multipleValues
        ? `received values of types ${[...new Set(value.map(singleValue => `\`${is(singleValue)}\``))].join(', ')}`
        : `received value of type \`${is(value)}\``;
    throw new TypeError(`Expected value which is \`${description}\`, ${valuesMessage}.`);
};
// Assertion counterparts of every predicate: each throws a TypeError with a
// descriptive message (via assertType) when the check fails.
exports.assert = {
// Unknowns.
undefined: (value) => assertType(is.undefined(value), 'undefined', value),
string: (value) => assertType(is.string(value), 'string', value),
number: (value) => assertType(is.number(value), 'number', value),
bigint: (value) => assertType(is.bigint(value), 'bigint', value),
// eslint-disable-next-line @typescript-eslint/ban-types
function_: (value) => assertType(is.function_(value), 'Function', value),
null_: (value) => assertType(is.null_(value), 'null', value),
class_: (value) => assertType(is.class_(value), "Class" /* class_ */, value),
boolean: (value) => assertType(is.boolean(value), 'boolean', value),
symbol: (value) => assertType(is.symbol(value), 'symbol', value),
numericString: (value) => assertType(is.numericString(value), "string with a number" /* numericString */, value),
// Also applies the element-wise assertion when one is provided.
array: (value, assertion) => {
const assert = assertType;
assert(is.array(value), 'Array', value);
if (assertion) {
value.forEach(assertion);
}
},
buffer: (value) => assertType(is.buffer(value), 'Buffer', value),
nullOrUndefined: (value) => assertType(is.nullOrUndefined(value), "null or undefined" /* nullOrUndefined */, value),
object: (value) => assertType(is.object(value), 'Object', value),
iterable: (value) => assertType(is.iterable(value), "Iterable" /* iterable */, value),
asyncIterable: (value) => assertType(is.asyncIterable(value), "AsyncIterable" /* asyncIterable */, value),
generator: (value) => assertType(is.generator(value), 'Generator', value),
asyncGenerator: (value) => assertType(is.asyncGenerator(value), 'AsyncGenerator', value),
nativePromise: (value) => assertType(is.nativePromise(value), "native Promise" /* nativePromise */, value),
promise: (value) => assertType(is.promise(value), 'Promise', value),
generatorFunction: (value) => assertType(is.generatorFunction(value), 'GeneratorFunction', value),
asyncGeneratorFunction: (value) => assertType(is.asyncGeneratorFunction(value), 'AsyncGeneratorFunction', value),
// eslint-disable-next-line @typescript-eslint/ban-types
asyncFunction: (value) => assertType(is.asyncFunction(value), 'AsyncFunction', value),
// eslint-disable-next-line @typescript-eslint/ban-types
boundFunction: (value) => assertType(is.boundFunction(value), 'Function', value),
regExp: (value) => assertType(is.regExp(value), 'RegExp', value),
date: (value) => assertType(is.date(value), 'Date', value),
error: (value) => assertType(is.error(value), 'Error', value),
map: (value) => assertType(is.map(value), 'Map', value),
set: (value) => assertType(is.set(value), 'Set', value),
weakMap: (value) => assertType(is.weakMap(value), 'WeakMap', value),
weakSet: (value) => assertType(is.weakSet(value), 'WeakSet', value),
int8Array: (value) => assertType(is.int8Array(value), 'Int8Array', value),
uint8Array: (value) => assertType(is.uint8Array(value), 'Uint8Array', value),
uint8ClampedArray: (value) => assertType(is.uint8ClampedArray(value), 'Uint8ClampedArray', value),
int16Array: (value) => assertType(is.int16Array(value), 'Int16Array', value),
uint16Array: (value) => assertType(is.uint16Array(value), 'Uint16Array', value),
int32Array: (value) => assertType(is.int32Array(value), 'Int32Array', value),
uint32Array: (value) => assertType(is.uint32Array(value), 'Uint32Array', value),
float32Array: (value) => assertType(is.float32Array(value), 'Float32Array', value),
float64Array: (value) => assertType(is.float64Array(value), 'Float64Array', value),
bigInt64Array: (value) => assertType(is.bigInt64Array(value), 'BigInt64Array', value),
bigUint64Array: (value) => assertType(is.bigUint64Array(value), 'BigUint64Array', value),
arrayBuffer: (value) => assertType(is.arrayBuffer(value), 'ArrayBuffer', value),
sharedArrayBuffer: (value) => assertType(is.sharedArrayBuffer(value), 'SharedArrayBuffer', value),
dataView: (value) => assertType(is.dataView(value), 'DataView', value),
urlInstance: (value) => assertType(is.urlInstance(value), 'URL', value),
urlString: (value) => assertType(is.urlString(value), "string with a URL" /* urlString */, value),
truthy: (value) => assertType(is.truthy(value), "truthy" /* truthy */, value),
falsy: (value) => assertType(is.falsy(value), "falsy" /* falsy */, value),
nan: (value) => assertType(is.nan(value), "NaN" /* nan */, value),
primitive: (value) => assertType(is.primitive(value), "primitive" /* primitive */, value),
integer: (value) => assertType(is.integer(value), "integer" /* integer */, value),
safeInteger: (value) => assertType(is.safeInteger(value), "integer" /* safeInteger */, value),
plainObject: (value) => assertType(is.plainObject(value), "plain object" /* plainObject */, value),
typedArray: (value) => assertType(is.typedArray(value), "TypedArray" /* typedArray */, value),
arrayLike: (value) => assertType(is.arrayLike(value), "array-like" /* arrayLike */, value),
domElement: (value) => assertType(is.domElement(value), "HTMLElement" /* domElement */, value),
observable: (value) => assertType(is.observable(value), 'Observable', value),
nodeStream: (value) => assertType(is.nodeStream(value), "Node.js Stream" /* nodeStream */, value),
infinite: (value) => assertType(is.infinite(value), "infinite number" /* infinite */, value),
emptyArray: (value) => assertType(is.emptyArray(value), "empty array" /* emptyArray */, value),
nonEmptyArray: (value) => assertType(is.nonEmptyArray(value), "non-empty array" /* nonEmptyArray */, value),
emptyString: (value) => assertType(is.emptyString(value), "empty string" /* emptyString */, value),
nonEmptyString: (value) => assertType(is.nonEmptyString(value), "non-empty string" /* nonEmptyString */, value),
emptyStringOrWhitespace: (value) => assertType(is.emptyStringOrWhitespace(value), "empty string or whitespace" /* emptyStringOrWhitespace */, value),
emptyObject: (value) => assertType(is.emptyObject(value), "empty object" /* emptyObject */, value),
nonEmptyObject: (value) => assertType(is.nonEmptyObject(value), "non-empty object" /* nonEmptyObject */, value),
emptySet: (value) => assertType(is.emptySet(value), "empty set" /* emptySet */, value),
nonEmptySet: (value) => assertType(is.nonEmptySet(value), "non-empty set" /* nonEmptySet */, value),
emptyMap: (value) => assertType(is.emptyMap(value), "empty map" /* emptyMap */, value),
nonEmptyMap: (value) => assertType(is.nonEmptyMap(value), "non-empty map" /* nonEmptyMap */, value),
// Numbers.
evenInteger: (value) => assertType(is.evenInteger(value), "even integer" /* evenInteger */, value),
oddInteger: (value) => assertType(is.oddInteger(value), "odd integer" /* oddInteger */, value),
// Two arguments.
directInstanceOf: (instance, class_) => assertType(is.directInstanceOf(instance, class_), "T" /* directInstanceOf */, instance),
inRange: (value, range) => assertType(is.inRange(value, range), "in range" /* inRange */, value),
// Variadic functions.
any: (predicate, ...values) => {
return assertType(is.any(predicate, ...values), "predicate returns truthy for any value" /* any */, values, { multipleValues: true });
},
all: (predicate, ...values) => assertType(is.all(predicate, ...values), "predicate returns truthy for all values" /* all */, values, { multipleValues: true })
};
// Some few keywords are reserved, but we'll populate them for Node.js users
// See https://github.com/Microsoft/TypeScript/issues/2536
Object.defineProperties(is, {
class: {
value: is.class_
},
function: {
value: is.function_
},
null: {
value: is.null_
}
});
Object.defineProperties(exports.assert, {
class: {
value: exports.assert.class_
},
function: {
value: exports.assert.function_
},
null: {
value: exports.assert.null_
}
});
exports.default = is;
// For CommonJS default export support
module.exports = is;
module.exports.default = is;
module.exports.assert = exports.assert;
/***/ }),
/***/ 65266:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.legacyCommon = exports.legacyMonitor = exports.legacyPlugin = void 0;
// Re-export the legacy cli-interface surface (plugin/monitor/common) as live getters.
const legacy_1 = __webpack_require__(83888);
Object.defineProperty(exports, "legacyPlugin", ({ enumerable: true, get: function () { return legacy_1.plugin; } }));
Object.defineProperty(exports, "legacyMonitor", ({ enumerable: true, get: function () { return legacy_1.monitor; } }));
Object.defineProperty(exports, "legacyCommon", ({ enumerable: true, get: function () { return legacy_1.common; } }));
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 78752:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
//# sourceMappingURL=common.js.map
/***/ }),
/***/ 83888:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.common = exports.monitor = exports.plugin = void 0;
// Aggregate the legacy plugin/monitor/common modules under one namespace.
const plugin = __webpack_require__(88904);
exports.plugin = plugin;
const monitor = __webpack_require__(65861);
exports.monitor = monitor;
const common = __webpack_require__(78752);
exports.common = common;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 65861:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
//# sourceMappingURL=monitor.js.map
/***/ }),
/***/ 88904:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.isMultiResult = exports.isMultiSubProject = exports.adaptSingleProjectPlugin = void 0;
// Wrap a single-project plugin so that a multi-sub-project scan request
// fails loudly instead of silently producing a single result.
function adaptSingleProjectPlugin(plugin) {
    const inspect = (root, targetFile, options) => {
        if (options && isMultiSubProject(options)) {
            const name = plugin.pluginName ? plugin.pluginName() : '[unknown]';
            throw new Error(`Plugin ${name} does not support scanning multiple sub-projects`);
        }
        return plugin.inspect(root, targetFile, options);
    };
    return { inspect };
}
exports.adaptSingleProjectPlugin = adaptSingleProjectPlugin;
// True-ish when the caller requested a scan of all sub-projects.
// Note: returns the raw option value (possibly undefined), not a boolean.
function isMultiSubProject(options) {
    const { allSubProjects } = options;
    return allSubProjects;
}
exports.isMultiSubProject = isMultiSubProject;
function isMultiResult(res) {
return !!res.scannedProjects;
}
exports.isMultiResult = isMultiResult;
//# sourceMappingURL=plugin.js.map
/***/ }),
/***/ 84537:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
// Public entry point: re-export the CocoaPods LockfileParser.
const lockfile_parser_1 = __webpack_require__(7023);
exports.LockfileParser = lockfile_parser_1.default;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 7023:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const fs = __webpack_require__(35747);
const path = __webpack_require__(85622);
const yaml = __webpack_require__(93320);
const dep_graph_1 = __webpack_require__(71479);
const utils_1 = __webpack_require__(43033);
class LockfileParser {
constructor(hash, rootPkgInfo) {
this.rootPkgInfo = undefined;
this.rootPkgInfo = rootPkgInfo;
this.internalData = hash;
}
static async readFile(lockfilePath) {
const rootName = path.basename(path.dirname(path.resolve(lockfilePath)));
return new Promise((resolve, reject) => {
fs.readFile(lockfilePath, { encoding: 'utf8' }, (err, fileContents) => {
if (err) {
reject(err);
}
try {
const parser = this.readContents(fileContents, {
name: rootName,
version: '0.0.0',
});
resolve(parser);
}
catch (err) {
reject(err);
}
});
});
}
static readFileSync(lockfilePath) {
const fileContents = fs.readFileSync(lockfilePath, 'utf8');
const rootName = path.basename(path.dirname(path.resolve(lockfilePath)));
return this.readContents(fileContents, {
name: rootName,
version: '0.0.0',
});
}
static readContents(contents, rootPkgInfo) {
return new LockfileParser(yaml.safeLoad(contents), rootPkgInfo);
}
// Builds a dependency graph from the lockfile's PODS and DEPENDENCIES
// sections. (this.pkgManager is presumably an accessor defined further down
// this class — outside this view; confirm before relying on it.)
toDepGraph() {
const builder = new dep_graph_1.DepGraphBuilder(this.pkgManager, this.rootPkgInfo);
const allDeps = {};
// Add all package nodes first, but collect dependencies
this.internalData.PODS.forEach((elem) => {
let pkgInfo;
let pkgDeps;
if (typeof elem === 'string') {
// When there are NO dependencies. This equals in yaml e.g.
// - Expecta (1.0.5)
pkgInfo = utils_1.pkgInfoFromSpecificationString(elem);
pkgDeps = [];
}
else {
// When there are dependencies. This equals in yaml e.g.
// - React/Core (0.59.2):
// - yoga (= 0.59.2.React)
const objKey = Object.keys(elem)[0];
pkgInfo = utils_1.pkgInfoFromSpecificationString(objKey);
pkgDeps = elem[objKey].map(utils_1.pkgInfoFromDependencyString);
}
const nodeId = this.nodeIdForPkgInfo(pkgInfo);
builder.addPkgNode(pkgInfo, nodeId, {
labels: this.nodeInfoLabelsForPod(pkgInfo.name),
});
allDeps[nodeId] = pkgDeps;
});
// Connect explicitly in the manifest (`Podfile`)
// declared dependencies to the root node.
this.internalData.DEPENDENCIES.map(utils_1.pkgInfoFromDependencyString).forEach((pkgInfo) => {
builder.connectDep(builder.rootNodeId, this.nodeIdForPkgInfo(pkgInfo));
});
// Now we can start to connect dependencies
Object.entries(allDeps).forEach(([nodeId, pkgDeps]) => pkgDeps.forEach((pkgInfo) => {
const depNodeId = this.nodeIdForPkgInfo(pkgInfo);
if (!allDeps[depNodeId]) {
// The pod is not a direct dependency of any targets of the integration,
// which can happen for platform-specific transitives, when their platform
// is not used in any target. (e.g. PromiseKit/UIKit is iOS-specific and is
// a transitive of PromiseKit, but won't be included for a macOS project.)
return;
}
builder.connectDep(nodeId, depNodeId);
}));
return builder.build();
}
/// CocoaPods guarantees that every pod is only present in one version,
/// so we can use just the pod name as node ID.
nodeIdForPkgInfo(pkgInfo) {
return pkgInfo.name;
}
/// Gathers relevant info from the lockfile and transform
/// them into the expected labels data structure.
nodeInfoLabelsForPod(podName) {
let nodeInfoLabels = {
checksum: this.checksumForPod(podName),
};
const repository = this.repositoryForPod(podName);
if (repository) {
nodeInfoLabels = Object.assign(Object.assign({}, nodeInfoLabels), { repository });
}
const externalSourceInfo = this.externalSourceInfoForPod(podName);
if (externalSourceInfo) {
nodeInfoLabels = Object.assign(Object.assign({}, nodeInfoLabels), { externalSourcePodspec: externalSourceInfo[':podspec'], externalSourcePath: externalSourceInfo[':path'], externalSourceGit: externalSourceInfo[':git'], externalSourceTag: externalSourceInfo[':tag'], externalSourceCommit: externalSourceInfo[':commit'], externalSourceBranch: externalSourceInfo[':branch'] });
}
const checkoutOptions = this.checkoutOptionsForPod(podName);
if (checkoutOptions) {
nodeInfoLabels = Object.assign(Object.assign({}, nodeInfoLabels), { checkoutOptionsPodspec: checkoutOptions[':podspec'], checkoutOptionsPath: checkoutOptions[':path'], checkoutOptionsGit: checkoutOptions[':git'], checkoutOptionsTag: checkoutOptions[':tag'], checkoutOptionsCommit: checkoutOptions[':commit'], checkoutOptionsBranch: checkoutOptions[':branch'] });
}
// Sanitize labels by removing null fields
// (as they don't survive a serialization/parse cycle and break tests)
Object.entries(nodeInfoLabels).forEach(([key, value]) => {
if (value === null || value === undefined) {
delete nodeInfoLabels[key];
}
});
return nodeInfoLabels;
}
/// The checksum of the pod.
checksumForPod(podName) {
const rootName = utils_1.rootSpecName(podName);
return this.internalData['SPEC CHECKSUMS'][rootName];
}
/// This can be either an URL or the local repository name.
repositoryForPod(podName) {
// Older Podfile.lock might not have this section yet.
const specRepos = this.internalData['SPEC REPOS'];
if (!specRepos) {
return undefined;
}
const rootName = utils_1.rootSpecName(podName);
const specRepoEntry = Object.entries(specRepos).find(([, deps]) => deps.includes(rootName));
if (specRepoEntry) {
return specRepoEntry[0];
}
return undefined;
}
/// Extracts the external source info for a given pod, if there is any.
externalSourceInfoForPod(podName) {
// Older Podfile.lock might not have this section yet.
const externalSources = this.internalData['EXTERNAL SOURCES'];
if (!externalSources) {
return undefined;
}
const externalSourceEntry = externalSources[utils_1.rootSpecName(podName)];
if (externalSourceEntry) {
return externalSourceEntry;
}
return undefined;
}
/// Extracts the checkout options for a given pod, if there is any.
checkoutOptionsForPod(podName) {
// Older Podfile.lock might not have this section yet.
const checkoutOptions = this.internalData['CHECKOUT OPTIONS'];
if (!checkoutOptions) {
return undefined;
}
const checkoutOptionsEntry = checkoutOptions[utils_1.rootSpecName(podName)];
if (checkoutOptionsEntry) {
return checkoutOptionsEntry;
}
return undefined;
}
get repositories() {
// Older Podfile.lock might not have this section yet.
const specRepos = this.internalData['SPEC REPOS'];
if (!specRepos) {
return [];
}
return Object.keys(specRepos).map((nameOrUrl) => {
return { alias: nameOrUrl };
});
}
get pkgManager() {
return {
name: 'cocoapods',
version: this.cocoapodsVersion,
repositories: this.repositories,
};
}
/// The CocoaPods version encoded in the lockfile which was used to
/// create this resolution.
get cocoapodsVersion() {
return this.internalData.COCOAPODS || 'unknown';
}
/// The checksum of the Podfile, which was used when resolving this integration.
/// - Note: this was not tracked by earlier versions of CocoaPods.
get podfileChecksum() {
return this.internalData['PODFILE CHECKSUM'];
}
}
exports.default = LockfileParser;
//# sourceMappingURL=lockfile-parser.js.map
/***/ }),
/***/ 43033:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
/// e.g. Adjust (4.17.1)
/// Adjust/Core (4.17.1)
/**
 * Parses a pod specification string, e.g. `Adjust (4.17.1)` or
 * `Adjust/Core (4.17.1)`, into `{ name, version }`.
 * The version part is optional; when absent, `version` is `undefined`.
 * @throws {Error} when the string does not look like a specification.
 */
function pkgInfoFromSpecificationString(stringRepresentation) {
    const parsed = /^((?:\s?[^\s(])+)(?: \((.+)\))?$/.exec(stringRepresentation);
    if (parsed === null) {
        throw new Error('Invalid string representation for a ' +
            `specification: \`${stringRepresentation}\`. ` +
            'The string representation should include the name and ' +
            'optionally the version of the Pod.');
    }
    const [, name, version] = parsed;
    return { name, version };
}
exports.pkgInfoFromSpecificationString = pkgInfoFromSpecificationString;
/// e.g. Expecta
/// ReactiveObjC (~> 2.0)
/// Pulley (from `https://github.com/l2succes/Pulley.git`, branch `master`)
/**
 * Parses a dependency requirement string, e.g. `Expecta`,
 * `ReactiveObjC (~> 2.0)`, or
 * `Pulley (from \`https://github.com/l2succes/Pulley.git\`, branch \`master\`)`.
 * External-source requirements (a "from `…`" clause) carry no resolvable
 * version, so only the name is returned for those.
 * @throws {Error} when the string cannot be parsed.
 */
function pkgInfoFromDependencyString(stringRepresentation) {
    const parsed = /^((?:\s?[^\s(])+)(?: \((.+)\))?$/.exec(stringRepresentation);
    if (parsed === null) {
        throw new Error('Invalid string representation for a ' +
            `dependency: \`${stringRepresentation}\`. ` +
            'The string representation should include the name and ' +
            'a requirement of which version of the Pod should be used.');
    }
    const [, name, requirement] = parsed;
    if (!requirement || /from `(.*)(`|')/.test(requirement)) {
        return { name };
    }
    return { name, version: requirement };
}
exports.pkgInfoFromDependencyString = pkgInfoFromDependencyString;
/// Returns the root spec name, if the given specification name
/// is a subspec or just the same name.
/**
 * Returns the root spec name for a (possibly sub-) specification,
 * e.g. `Adjust/Core` -> `Adjust`; names without a `/` pass through.
 */
function rootSpecName(specName) {
    const [root] = specName.split('/');
    return root;
}
exports.rootSpecName = rootSpecName;
//# sourceMappingURL=utils.js.map
/***/ }),
/***/ 54151:
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.extendAnalysis = exports.analyzeFolders = exports.analyzeBundle = void 0;
/* eslint-disable no-await-in-loop */
const lodash_omit_1 = __importDefault(__webpack_require__(76427));
const uuid_1 = __webpack_require__(42277);
const files_1 = __webpack_require__(32083);
const constants_1 = __webpack_require__(65765);
const http_1 = __webpack_require__(60776);
const bundles_1 = __webpack_require__(86639);
const emitter_1 = __webpack_require__(37544);
// Resolves after `duration` milliseconds; paces the analysis polling loop.
const sleep = (duration) => new Promise((resolve) => {
    setTimeout(resolve, duration);
});
// Polls the analysis endpoint until it reports a terminal status
// (`complete` or `failed`) or the transport errors out, emitting
// `analyseProgress` events along the way. Returns the raw response;
// callers inspect `.type` and `.value.status`.
async function pollAnalysis(options) {
let analysisResponse;
let analysisData;
// Emit an initial "waiting" event so consumers see progress immediately.
emitter_1.emitter.analyseProgress({
status: http_1.AnalysisStatus.waiting,
progress: 0,
});
// eslint-disable-next-line no-constant-condition
while (true) {
analysisResponse = await (0, http_1.getAnalysis)(options);
// Transport-level errors end the polling loop right away.
if (analysisResponse.type === 'error') {
return analysisResponse;
}
analysisData = analysisResponse.value;
// Non-terminal statuses (and `done`, an intermediate fetch state) only
// report progress; the loop keeps polling.
if (analysisData.status === http_1.AnalysisStatus.waiting ||
analysisData.status === http_1.AnalysisStatus.fetching ||
analysisData.status === http_1.AnalysisStatus.analyzing ||
analysisData.status === http_1.AnalysisStatus.done) {
// Report progress of fetching
emitter_1.emitter.analyseProgress(analysisData);
}
else if (analysisData.status === http_1.AnalysisStatus.complete) {
// Return data of analysis
return analysisResponse;
// deepcode ignore DuplicateIfBody: false positive it seems that interface is not taken into account
}
else if (analysisData.status === http_1.AnalysisStatus.failed) {
// Report failure of analysing
return analysisResponse;
}
// Back off before the next poll.
await sleep(constants_1.POLLING_INTERVAL);
}
}
/**
 * Polls the remote bundle for analysis results (emitting intermediate
 * progress events) and returns the final analysis payload.
 * @throws the transport error when polling fails, or an Error when the
 *         analysis itself reports a failed status.
 */
async function analyzeBundle(options) {
    // Call remote bundle for analysis results and emit intermediate progress
    const polled = await pollAnalysis(options);
    if (polled.type === 'error') {
        throw polled.error;
    }
    if (polled.value.status === http_1.AnalysisStatus.failed) {
        throw new Error('Analysis has failed');
    }
    return polled.value;
}
exports.analyzeBundle = analyzeBundle;
/**
 * Re-keys a legacy result `files` map from bundle-relative paths to
 * absolute paths resolved against `baseDir`; positions are kept as-is.
 */
function normalizeResultFiles(files, baseDir) {
    const normalized = {};
    for (const [relativePath, positions] of Object.entries(files)) {
        const absolutePath = (0, files_1.resolveBundleFilePath)(baseDir, relativePath);
        normalized[absolutePath] = positions;
    }
    return normalized;
}
// Runs a full folder analysis: ensures a request id, collects local files
// and uploads them as a remote bundle, then analyzes that bundle.
// Returns null when no supported files were found to bundle.
async function analyzeFolders(options) {
if (!options.connection.requestId) {
options.connection.requestId = (0, uuid_1.v4)();
}
const fileBundle = await (0, bundles_1.createBundleFromFolders)({
...options.connection,
...options.fileOptions,
languages: options.languages,
});
// A null bundle means nothing analyzable was found.
if (fileBundle === null)
return null;
// Analyze bundle
const analysisResults = await analyzeBundle({
bundleHash: fileBundle.bundleHash,
...options.connection,
...options.analysisOptions,
shard: (0, files_1.calcHash)(fileBundle.baseDir),
...(options.analysisContext ? { analysisContext: options.analysisContext } : {}),
});
if (analysisResults.type === 'legacy') {
// expand relative file names to absolute ones only for legacy results
analysisResults.files = normalizeResultFiles(analysisResults.files, fileBundle.baseDir);
}
return { fileBundle, analysisResults, ...options };
}
exports.analyzeFolders = analyzeFolders;
/**
 * Dispatches incremental result merging to the SARIF or legacy strategy
 * based on the new results' `type` discriminator.
 * @param baseDir - only used by the legacy merge (path normalization)
 */
function mergeBundleResults(oldAnalysisResults, newAnalysisResults, limitToFiles, removedFiles = [], baseDir) {
    // Strict equality: the discriminator is always a string literal.
    if (newAnalysisResults.type === 'sarif') {
        return mergeSarifResults(oldAnalysisResults, newAnalysisResults, limitToFiles, removedFiles);
    }
    return mergeLegacyResults(oldAnalysisResults, newAnalysisResults, limitToFiles, removedFiles, baseDir);
}
/**
 * Merges old SARIF results into fresh (partial) ones.
 * Keeps every new finding located in a changed file, then carries over old
 * findings whose locations (and, for removed files, code-flow locations)
 * were not touched, re-homing their rules into the new run's rule table.
 * Mutates and returns `newAnalysisResults`.
 */
function mergeSarifResults(oldAnalysisResults, newAnalysisResults, limitToFiles, removedFiles = []) {
    // Start from the new analysis results.
    // For each finding of the old analysis: if its location is not part of
    // limitToFiles or removedFiles (removedFiles must also be checked against
    // codeFlows), append the finding and, if needed, its rule.
    // A Set makes the repeated membership checks below O(1).
    const changedFiles = new Set([...limitToFiles, ...removedFiles]);
    const sarifResults = (newAnalysisResults.sarif.runs[0].results || []).filter(res => {
        var _a, _b, _c;
        // TODO: This should not be necessary in theory but, in case of two identical files,
        // Bundle Server returns the finding in both files even if limitToFiles only reports one
        const loc = (_c = (_b = (_a = res.locations) === null || _a === void 0 ? void 0 : _a[0].physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation) === null || _c === void 0 ? void 0 : _c.uri;
        return loc && changedFiles.has(loc);
    });
    const sarifRules = newAnalysisResults.sarif.runs[0].tool.driver.rules || [];
    const oldResults = oldAnalysisResults.sarif.runs[0].results || [];
    for (const res of oldResults) {
        // flatMap-equivalent reduce (kept for Node 10 compatibility):
        // collect every artifactLocation uri from the finding's locations.
        const locations = (res.locations || []).reduce((acc, loc) => {
            var _a, _b;
            if ((_b = (_a = loc.physicalLocation) === null || _a === void 0 ? void 0 : _a.artifactLocation) === null || _b === void 0 ? void 0 : _b.uri) {
                acc.push(loc.physicalLocation.artifactLocation.uri);
            }
            return acc;
        }, []);
        // Same flattening for every code-flow thread-flow location.
        const codeFlowLocations = (res.codeFlows || []).reduce((acc1, cf) => {
            acc1.push(...(cf.threadFlows || []).reduce((acc2, tf) => {
                acc2.push(...(tf.locations || []).reduce((acc3, loc) => {
                    var _a, _b, _c;
                    if ((_c = (_b = (_a = loc.location) === null || _a === void 0 ? void 0 : _a.physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation) === null || _c === void 0 ? void 0 : _c.uri) {
                        acc3.push(loc.location.physicalLocation.artifactLocation.uri);
                    }
                    return acc3;
                }, []));
                return acc2;
            }, []));
            return acc1;
        }, []);
        // Drop old findings that belong to changed files, or whose code flow
        // passes through a removed file.
        if (locations.some(loc => changedFiles.has(loc)) || codeFlowLocations.some(loc => removedFiles.includes(loc)))
            continue;
        let ruleIndex = sarifRules.findIndex(rule => rule.id === res.ruleId);
        // FIX: the previous truthiness check (`res.ruleIndex &&`) skipped a
        // legitimate ruleIndex of 0, leaving carried-over findings with
        // ruleIndex -1 even though their rule existed in the old run.
        if (ruleIndex === -1 &&
            typeof res.ruleIndex === 'number' &&
            res.ruleIndex >= 0 &&
            oldAnalysisResults.sarif.runs[0].tool.driver.rules &&
            oldAnalysisResults.sarif.runs[0].tool.driver.rules[res.ruleIndex]) {
            const newLength = sarifRules.push(oldAnalysisResults.sarif.runs[0].tool.driver.rules[res.ruleIndex]);
            ruleIndex = newLength - 1;
        }
        res.ruleIndex = ruleIndex;
        sarifResults.push(res);
    }
    newAnalysisResults.sarif.runs[0].results = sarifResults;
    newAnalysisResults.sarif.runs[0].tool.driver.rules = sarifRules;
    return newAnalysisResults;
}
/**
 * Shifts every key of a suggestions map up by `suggestionIndex + 1`, so
 * new suggestions can be appended after the existing ones without key
 * collisions. Returns a new object; the input is not mutated.
 */
const moveSuggestionIndexes = (suggestionIndex, suggestions) => {
    const shifted = {};
    for (const [key, suggestion] of Object.entries(suggestions)) {
        const newKey = `${parseInt(key, 10) + suggestionIndex + 1}`;
        shifted[newKey] = suggestion;
    }
    return shifted;
};
/**
 * Merges legacy (non-SARIF) analysis results: re-keys new files to
 * absolute paths, re-indexes new suggestions after the existing ones, and
 * drops old entries for files that changed or were removed.
 * Mutates `newAnalysisResults.files`; returns a merged copy.
 */
function mergeLegacyResults(oldAnalysisResults, newAnalysisResults, limitToFiles, removedFiles = [], baseDir) {
    // expand relative file names to absolute ones only for legacy results
    newAnalysisResults.files = normalizeResultFiles(newAnalysisResults.files, baseDir);
    // Determine the max suggestion index in our data.
    // FIX: the previous `Math.max(...) || -1` collapsed a legitimate max
    // index of 0 to -1 (0 is falsy), which made re-indexed new suggestions
    // collide with the existing key 0. `Math.max(-1, ...)` also yields -1
    // for an empty suggestions map, and non-numeric keys are filtered out.
    const oldIndexes = Object.keys(oldAnalysisResults.suggestions).map(i => parseInt(i, 10));
    const suggestionIndex = Math.max(-1, ...oldIndexes.filter(Number.isFinite));
    // Shift all new suggestions' indexes past the existing ones
    const newSuggestions = moveSuggestionIndexes(suggestionIndex, newAnalysisResults.suggestions);
    const suggestions = { ...oldAnalysisResults.suggestions, ...newSuggestions };
    // Apply the same shift to each file's per-suggestion entries.
    const newFiles = Object.entries(newAnalysisResults.files).reduce((obj, [fn, s]) => {
        obj[fn] = moveSuggestionIndexes(suggestionIndex, s);
        return obj;
    }, {});
    // expand relative file names to absolute ones only for legacy results
    const changedFiles = [...limitToFiles, ...removedFiles].map(path => (0, files_1.resolveBundleFilePath)(baseDir, path));
    // Old entries for changed/removed files are superseded by the new ones.
    const files = {
        ...(0, lodash_omit_1.default)(oldAnalysisResults.files, changedFiles),
        ...newFiles,
    };
    return {
        ...newAnalysisResults,
        files,
        suggestions,
    };
}
async function extendAnalysis(options) {
const { files, removedFiles } = await (0, files_1.prepareExtendingBundle)(options.fileBundle.baseDir, options.fileBundle.supportedFiles, options.fileBundle.fileIgnores, options.files, options.fileOptions.symlinksEnabled);
if (!files.length && !removedFiles.length) {
return null; // nothing to extend, just return null
}
// Extend remote bundle
const remoteBundle = await (0, bundles_1.remoteBundleFactory)({
...options.connection,
bundleHash: options.fileBundle.bundleHash,
baseDir: options.fileBundle.baseDir,
files,
removedFiles,
});
if (remoteBundle === null)
return null;
const fileBundle = {
...options.fileBundle,
...remoteBundle,
};
const limitToFiles = files.map(f => f.bundlePath);
let analysisResults = await analyzeBundle({
bundleHash: remoteBundle.bundleHash,
...options.connection,
...options.analysisOptions,
shard: (0, files_1.calcHash)(fileBundle.baseDir),
limitToFiles,
});
analysisResults = mergeBundleResults(options.analysisResults, analysisResults, limitToFiles, removedFiles, options.fileBundle.baseDir);
return { ...options, fileBundle, analysisResults };
}
exports.extendAnalysis = extendAnalysis;
//# sourceMappingURL=analysis.js.map
/***/ }),
/***/ 86639:
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.createBundleFromFolders = exports.remoteBundleFactory = exports.uploadRemoteBundle = void 0;
/* eslint-disable no-await-in-loop */
const lodash_pick_1 = __importDefault(__webpack_require__(25030));
const lodash_omit_1 = __importDefault(__webpack_require__(76427));
const p_map_1 = __importDefault(__webpack_require__(49503));
const files_1 = __webpack_require__(32083);
const http_1 = __webpack_require__(60776);
const constants_1 = __webpack_require__(65765);
const emitter_1 = __webpack_require__(37544);
// Async generator: creates (or extends) the remote bundle chunk by chunk,
// keeping each request under MAX_PAYLOAD, yielding every API response and
// emitting createBundleProgress along the way. Stops early after yielding
// an error response.
async function* prepareRemoteBundle(options) {
let response;
let { bundleHash } = options;
let cumulativeProgress = 0;
emitter_1.emitter.createBundleProgress(cumulativeProgress, options.files.length);
for (const chunkedFiles of (0, files_1.composeFilePayloads)(options.files, constants_1.MAX_PAYLOAD)) {
const apiParams = {
...(0, lodash_pick_1.default)(options, ['baseURL', 'sessionToken', 'source', 'removedFiles', 'requestId', 'base64Encoding']),
files: chunkedFiles.reduce((d, f) => {
// deepcode ignore PrototypePollution: FP this is an internal code
d[f.bundlePath] = f.hash;
return d;
}, {}),
};
// The first chunk creates the bundle; later chunks extend it by hash.
if (!bundleHash) {
// eslint-disable-next-line no-await-in-loop
response = await (0, http_1.createBundle)(apiParams);
}
else {
// eslint-disable-next-line no-await-in-loop
response = await (0, http_1.extendBundle)({ bundleHash, ...apiParams });
}
cumulativeProgress += chunkedFiles.length;
emitter_1.emitter.createBundleProgress(cumulativeProgress, options.files.length);
if (response.type === 'error') {
// TODO: process Error
yield response;
break;
}
bundleHash = response.value.bundleHash;
yield response;
}
}
/**
 * Splits files into payload-sized buckets and uploads them in parallel
 * (at most UPLOAD_CONCURRENCY concurrent requests).
 * Failed chunk uploads are deliberately not retried here: files still
 * missing afterwards are detected and re-uploaded by fullfillRemoteBundle's
 * check/upload loop.
 * @param options - connection params plus bundleHash and the files to upload
 */
async function uploadRemoteBundle(options) {
let uploadedFiles = 0;
emitter_1.emitter.uploadBundleProgress(0, options.files.length);
const apiParams = (0, lodash_pick_1.default)(options, ['baseURL', 'sessionToken', 'source', 'bundleHash', 'requestId', 'base64Encoding']);
const uploadFileChunks = async (bucketFiles) => {
// Note: we specifically create __new__ isolated bundles here to faster files upload
const resp = await (0, http_1.createBundle)({
...apiParams,
files: bucketFiles.reduce((d, f) => {
d[f.bundlePath] = (0, lodash_pick_1.default)(f, ['hash', 'content']);
return d;
}, {}),
});
// Only successful chunks advance the progress counter.
if (resp.type !== 'error') {
uploadedFiles += bucketFiles.length;
emitter_1.emitter.uploadBundleProgress(uploadedFiles, options.files.length);
}
};
const files = [];
for (const bucketFiles of (0, files_1.composeFilePayloads)(options.files, constants_1.MAX_PAYLOAD)) {
files.push(bucketFiles);
}
await (0, p_map_1.default)(files, async (task) => await uploadFileChunks(task), {
concurrency: constants_1.UPLOAD_CONCURRENCY,
});
}
exports.uploadRemoteBundle = uploadRemoteBundle;
// Fulfills a remote bundle by uploading only its missing files (split in
// chunks), then re-checking the bundle server-side; retries while files
// remain missing, up to maxAttempts (default MAX_UPLOAD_ATTEMPTS).
// (The "fullfill" name typo is kept: remoteBundleFactory calls it by name.)
async function fullfillRemoteBundle(options) {
let attempts = 0;
let { remoteBundle } = options;
const connectionOptions = (0, lodash_pick_1.default)(options, ['baseURL', 'sessionToken', 'source', 'requestId', 'base64Encoding']);
while (remoteBundle.missingFiles.length && attempts < (options.maxAttempts || constants_1.MAX_UPLOAD_ATTEMPTS)) {
const missingFiles = await (0, files_1.resolveBundleFiles)(options.baseDir, remoteBundle.missingFiles);
await uploadRemoteBundle({
...connectionOptions,
bundleHash: remoteBundle.bundleHash,
files: missingFiles,
});
// Re-check the bundle to learn which files are still missing.
const bundleResponse = await (0, http_1.checkBundle)({ ...connectionOptions, bundleHash: remoteBundle.bundleHash });
if (bundleResponse.type === 'error') {
throw new Error('Failed to get remote bundle');
}
// eslint-disable-next-line no-param-reassign
remoteBundle = bundleResponse.value;
attempts += 1;
}
return remoteBundle;
}
// Drives prepareRemoteBundle and fulfills each yielded bundle state by
// uploading its missing files. Throws on API errors or when files are
// still missing after all upload attempts. Returns the final remote
// bundle, or null when there was nothing to upload.
async function remoteBundleFactory(options) {
let remoteBundle = null;
const baseOptions = (0, lodash_pick_1.default)(options, ['baseURL', 'sessionToken', 'source', 'baseDir', 'requestId', 'base64Encoding']);
// baseDir is local-only; the create/extend API must not receive it.
const bundleFactory = prepareRemoteBundle((0, lodash_omit_1.default)(options, ['baseDir']));
for await (const response of bundleFactory) {
if (response.type === 'error') {
throw response.error;
}
remoteBundle = await fullfillRemoteBundle({ ...baseOptions, remoteBundle: response.value });
if (remoteBundle.missingFiles.length) {
throw new Error(`Failed to upload # files: ${remoteBundle.missingFiles.length}`);
}
}
return remoteBundle;
}
exports.remoteBundleFactory = remoteBundleFactory;
/**
 * Gets the supported file filters and tests baseURL for correctness and
 * availability. When a `languages` allow-list is provided, the extensions
 * reported by the API are intersected with it.
 * Emits `supportedFilesLoaded` (null first, then the resolved value).
 *
 * @param baseURL
 * @param source
 * @returns the supported-files descriptor used by the file collector
 */
async function getSupportedFiles(baseURL, source, requestId, languages) {
emitter_1.emitter.supportedFilesLoaded(null);
const resp = await (0, http_1.getFilters)(baseURL, source, undefined, requestId);
if (resp.type === 'error') {
throw resp.error;
}
const supportedFilesFromApi = resp.value;
// Given supported languages from the registry
if (languages) {
const supportedFiles = {};
supportedFiles.configFiles = supportedFilesFromApi.configFiles;
supportedFiles.extensions = languages;
// Verification: keep only extensions the API itself supports
supportedFiles.extensions = supportedFiles.extensions.filter(langExtension => supportedFilesFromApi.extensions.includes(langExtension));
emitter_1.emitter.supportedFilesLoaded(supportedFiles);
return supportedFiles;
}
emitter_1.emitter.supportedFilesLoaded(supportedFilesFromApi);
return supportedFilesFromApi;
}
/**
 * Creates a remote bundle from local folders and returns the bundle API
 * response enriched with local collection metadata, or null when no
 * supported files were found to upload.
 *
 * @param {CreateBundleFromFoldersOptions} options
 * @returns {Promise}
 */
async function createBundleFromFolders(options) {
const baseDir = (0, files_1.determineBaseDir)(options.paths);
const [supportedFiles, fileIgnores] = await Promise.all([
// Fetch supported files to save network traffic
getSupportedFiles(options.baseURL, options.source, options.requestId, options.languages),
// Scan for custom ignore rules
(0, files_1.collectIgnoreRules)(options.paths, options.symlinksEnabled, options.defaultFileIgnores),
]);
emitter_1.emitter.scanFilesProgress(0);
const bundleFiles = [];
const skippedOversizedFiles = [];
let totalFiles = 0;
const bundleFileCollector = (0, files_1.collectBundleFiles)({
...(0, lodash_pick_1.default)(options, ['paths', 'symlinksEnabled']),
baseDir,
fileIgnores,
supportedFiles,
});
// The collector yields plain strings for oversized (skipped) files and
// file objects for bundleable ones.
for await (const f of bundleFileCollector) {
typeof f == 'string' ? skippedOversizedFiles.push(f) : bundleFiles.push(f);
totalFiles += 1;
emitter_1.emitter.scanFilesProgress(totalFiles);
}
const bundleOptions = {
...(0, lodash_pick_1.default)(options, ['baseURL', 'sessionToken', 'source', 'requestId', 'base64Encoding']),
baseDir,
files: bundleFiles,
};
// Create remote bundle
if (!bundleFiles.length)
return null;
const remoteBundle = await remoteBundleFactory(bundleOptions);
if (remoteBundle === null)
return null;
return {
...remoteBundle,
baseDir,
supportedFiles,
fileIgnores,
skippedOversizedFiles,
};
}
exports.createBundleFromFolders = createBundleFromFolders;
//# sourceMappingURL=bundles.js.map
/***/ }),
/***/ 27234:
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.readJSON = exports.Cache = void 0;
//This is our own implementation of flat-cache without the use of flattened as we do not need cicular JSON support
//and the executable for flattened was broken
const path_1 = __importDefault(__webpack_require__(85622));
const fs_1 = __importDefault(__webpack_require__(35747));
// Minimal persistent key-value cache (an in-house flat-cache replacement;
// no circular-JSON support needed — see the module comment above).
class Cache {
// docId: file name of the cache entry; cacheDir: optional directory,
// defaulting to ../.cache/ relative to this module.
constructor(docId, cacheDir) {
this.visited = {};
this.persisted = {};
this.pathToFile = '';
this.pathToFile = cacheDir ? path_1.default.resolve(cacheDir, docId) : path_1.default.resolve(__dirname, '../.cache/', docId);
// Load previous state when a cache file already exists; corrupt files
// silently fall back to an empty object via tryParse.
if (fs_1.default.existsSync(this.pathToFile)) {
this.persisted = tryParse(this.pathToFile, {});
}
}
// Persists the cache to disk; unless noPrune, drops keys that were never
// touched during this session first.
save(noPrune = false) {
!noPrune && this.prune();
writeJSON(this.pathToFile, this.persisted);
}
// Reads a key, marking it as visited so prune() keeps it.
getKey(key) {
this.visited[key] = true;
return this.persisted[key];
}
// Writes a key, marking it as visited so prune() keeps it.
setKey(key, value) {
this.visited[key] = true;
this.persisted[key] = value;
}
// Drops every persisted entry that was neither read nor written.
prune() {
const obj = {};
const keys = Object.keys(this.visited);
// no keys visited for either get or set value
if (keys.length === 0) {
return;
}
keys.forEach(key => {
obj[key] = this.persisted[key];
});
this.visited = {};
this.persisted = obj;
}
}
exports.Cache = Cache;
// Serializes `data` as JSON to `filePath`, creating any missing parent
// directories first.
function writeJSON(filePath, data) {
    const parentDir = path_1.default.dirname(filePath);
    fs_1.default.mkdirSync(parentDir, { recursive: true });
    fs_1.default.writeFileSync(filePath, JSON.stringify(data));
}
// Reads and parses JSON from `filePath`, falling back to `defaultValue`
// on any read or parse failure (missing file, corrupt cache, …).
function tryParse(filePath, defaultValue) {
    try {
        return readJSON(filePath);
    }
    catch (ex) {
        return defaultValue;
    }
}
// Reads `filePath` as UTF-8 text and parses it as JSON.
function readJSON(filePath) {
    const raw = fs_1.default.readFileSync(filePath, {
        encoding: 'utf8',
    });
    return JSON.parse(raw);
}
exports.readJSON = readJSON;
//# sourceMappingURL=cache.js.map
/***/ }),
/***/ 65765:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DEFAULT_ERROR_MESSAGES = exports.NETWORK_ERRORS = exports.ErrorCodes = exports.DCIGNORE_DRAFTS = exports.IGNORE_FILES_NAMES = exports.IGNORES_DEFAULT = exports.REQUEST_RETRY_DELAY = exports.MAX_RETRY_ATTEMPTS = exports.POLLING_INTERVAL = exports.UPLOAD_CONCURRENCY = exports.MAX_UPLOAD_ATTEMPTS = exports.CACHE_KEY = exports.EXCLUDED_NAMES = exports.DOTSNYK_FILENAME = exports.DCIGNORE_FILENAME = exports.GITIGNORE_FILENAME = exports.GIT_FILENAME = exports.ENCODE_TYPE = exports.HASH_ALGORITHM = exports.MAX_FILE_SIZE = exports.MAX_PAYLOAD = void 0;
const dcignore_1 = __webpack_require__(55086);
// --- Payload and hashing limits ---
exports.MAX_PAYLOAD = 4 * 1024 * 1024;
exports.MAX_FILE_SIZE = 1024 * 1024;
exports.HASH_ALGORITHM = 'sha256';
exports.ENCODE_TYPE = 'hex';
// --- Well-known file and directory names ---
exports.GIT_FILENAME = '.git';
exports.GITIGNORE_FILENAME = '.gitignore';
exports.DCIGNORE_FILENAME = '.dcignore';
exports.DOTSNYK_FILENAME = '.snyk';
exports.EXCLUDED_NAMES = [exports.GIT_FILENAME, exports.GITIGNORE_FILENAME, exports.DCIGNORE_FILENAME];
exports.CACHE_KEY = '.dccache';
// --- Upload / polling tuning knobs ---
exports.MAX_UPLOAD_ATTEMPTS = 10;
exports.UPLOAD_CONCURRENCY = 2;
exports.POLLING_INTERVAL = 500;
exports.MAX_RETRY_ATTEMPTS = 10; // Request retries on network errors
exports.REQUEST_RETRY_DELAY = 5 * 1000; // delay between retries in milliseconds
// Glob patterns ignored by default, and ignore-rule file names recognised
// while scanning project trees.
exports.IGNORES_DEFAULT = [`**/${exports.GIT_FILENAME}/**`];
exports.IGNORE_FILES_NAMES = [exports.GITIGNORE_FILENAME, exports.DCIGNORE_FILENAME, exports.DOTSNYK_FILENAME];
// Template .dcignore contents (custom vs. default drafts).
exports.DCIGNORE_DRAFTS = {
custom: dcignore_1.CustomDCIgnore,
default: dcignore_1.DefaultDCIgnore,
};
// HTTP status codes the client maps to well-known error conditions.
// eslint-disable-next-line no-shadow
var ErrorCodes;
(function (ErrorCodes) {
ErrorCodes[ErrorCodes["loginInProgress"] = 304] = "loginInProgress";
ErrorCodes[ErrorCodes["badRequest"] = 400] = "badRequest";
ErrorCodes[ErrorCodes["unauthorizedUser"] = 401] = "unauthorizedUser";
ErrorCodes[ErrorCodes["unauthorizedBundleAccess"] = 403] = "unauthorizedBundleAccess";
ErrorCodes[ErrorCodes["notFound"] = 404] = "notFound";
ErrorCodes[ErrorCodes["bigPayload"] = 413] = "bigPayload";
ErrorCodes[ErrorCodes["connectionRefused"] = 421] = "connectionRefused";
ErrorCodes[ErrorCodes["dnsNotFound"] = 452] = "dnsNotFound";
ErrorCodes[ErrorCodes["serverError"] = 500] = "serverError";
ErrorCodes[ErrorCodes["badGateway"] = 502] = "badGateway";
ErrorCodes[ErrorCodes["serviceUnavailable"] = 503] = "serviceUnavailable";
ErrorCodes[ErrorCodes["timeout"] = 504] = "timeout";
})(ErrorCodes = exports.ErrorCodes || (exports.ErrorCodes = {}));
// Mapping of Node.js socket/DNS error codes to the ErrorCodes above.
exports.NETWORK_ERRORS = {
ETIMEDOUT: ErrorCodes.timeout,
ECONNREFUSED: ErrorCodes.connectionRefused,
ECONNRESET: ErrorCodes.connectionRefused,
ENETUNREACH: ErrorCodes.connectionRefused,
ENOTFOUND: ErrorCodes.dnsNotFound,
};
// User-facing fallback messages per error code.
exports.DEFAULT_ERROR_MESSAGES = {
[ErrorCodes.serverError]: 'Unexpected server error',
[ErrorCodes.badGateway]: 'Bad gateway',
[ErrorCodes.serviceUnavailable]: 'Service unavailable',
[ErrorCodes.timeout]: 'Timeout occured. Try again later.',
[ErrorCodes.dnsNotFound]: '[Connection issue] Could not resolve domain',
[ErrorCodes.connectionRefused]: '[Connection issue] Connection refused',
[ErrorCodes.loginInProgress]: 'Login has not been confirmed yet',
[ErrorCodes.badRequest]: 'Bad request',
[ErrorCodes.unauthorizedUser]: 'Missing, revoked or inactive token',
[ErrorCodes.unauthorizedBundleAccess]: 'Unauthorized access to requested bundle analysis',
[ErrorCodes.notFound]: 'Not found',
[ErrorCodes.bigPayload]: `Payload too large (max is ${exports.MAX_PAYLOAD}b)`,
};
//# sourceMappingURL=constants.js.map
/***/ }),
/***/ 37544:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.emitter = exports.EmitterDC = void 0;
const events_1 = __webpack_require__(28614);
// Names of all events EmitterDC can emit; also exposed as `emitter.events`.
// eslint-disable-next-line no-shadow
var CUSTOM_EVENTS;
(function (CUSTOM_EVENTS) {
CUSTOM_EVENTS["supportedFilesLoaded"] = "supportedFilesLoaded";
CUSTOM_EVENTS["scanFilesProgress"] = "scanFilesProgress";
CUSTOM_EVENTS["createBundleProgress"] = "createBundleProgress";
CUSTOM_EVENTS["uploadBundleProgress"] = "uploadBundleProgress";
CUSTOM_EVENTS["analyseProgress"] = "analyseProgress";
CUSTOM_EVENTS["apiRequestLog"] = "apiRequestLog";
CUSTOM_EVENTS["error"] = "error";
})(CUSTOM_EVENTS || (CUSTOM_EVENTS = {}));
// Typed facade over EventEmitter: one helper method per progress/error
// event so callers never spell event names by hand.
class EmitterDC extends events_1.EventEmitter {
constructor() {
super(...arguments);
// Expose the event-name map to subscribers.
this.events = CUSTOM_EVENTS;
}
// Fired with null while filters are loading, then with the resolved value.
supportedFilesLoaded(data) {
this.emit(CUSTOM_EVENTS.supportedFilesLoaded, data);
}
// Running count of locally scanned files.
scanFilesProgress(processed) {
this.emit(CUSTOM_EVENTS.scanFilesProgress, processed);
}
// Bundle creation progress: files sent so far out of total.
createBundleProgress(processed, total) {
this.emit(CUSTOM_EVENTS.createBundleProgress, processed, total);
}
// Upload progress: files uploaded so far out of total.
uploadBundleProgress(processed, total) {
this.emit(CUSTOM_EVENTS.uploadBundleProgress, processed, total);
}
// Forwards the latest analysis status/progress payload.
analyseProgress(data) {
this.emit(CUSTOM_EVENTS.analyseProgress, data);
}
sendError(error) {
this.emit(CUSTOM_EVENTS.error, error);
}
apiRequestLog(message) {
this.emit(CUSTOM_EVENTS.apiRequestLog, message);
}
}
exports.EmitterDC = EmitterDC;
// Shared singleton used across the analysis/bundle modules.
exports.emitter = new EmitterDC();
//# sourceMappingURL=emitter.js.map
/***/ }),
/***/ 32083:
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
"use strict";
// --- TypeScript module-interop helpers (emitted by tsc, inlined here) ---
// Re-exports property `k` of module `m` on `o` under name `k2`
// (getter-based when Object.create is available, plain copy otherwise).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
// Defines the `default` binding on a synthesized namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
// Emulates `import * as ns` for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
// Emulates a default import for CommonJS modules.
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.isMatch = exports.composeFilePayloads = exports.resolveBundleFilePath = exports.resolveBundleFiles = exports.getFileInfo = exports.calcHash = exports.getBundleFilePath = exports.prepareExtendingBundle = exports.collectBundleFiles = exports.determineBaseDir = exports.collectIgnoreRules = exports.getGlobPatterns = exports.parseFileIgnores = exports.notEmpty = void 0;
const nodePath = __importStar(__webpack_require__(85622));
const fs = __importStar(__webpack_require__(35747));
const fast_glob_1 = __importDefault(__webpack_require__(8381));
const multimatch_1 = __importDefault(__webpack_require__(23424));
const crypto_1 = __importDefault(__webpack_require__(76417));
const yaml_1 = __webpack_require__(6792);
const lodash_union_1 = __importDefault(__webpack_require__(96744));
const util_1 = __importDefault(__webpack_require__(31669));
const cache_1 = __webpack_require__(27234);
const constants_1 = __webpack_require__(65765);
// True on Windows, where the path separator is a backslash.
const isWindows = nodePath.sep === '\\';
// Promisified fs.lstat (lstat: report symlinks themselves, don't follow them).
const asyncLStat = util_1.default.promisify(fs.lstat);
// Stat a path, returning null when it cannot be stat'ed; permission and
// missing-file errors are logged, all other errors are swallowed silently.
const lStat = async (path) => {
let fileStats = null;
try {
// eslint-disable-next-line no-await-in-loop
fileStats = await asyncLStat(path);
}
catch (err) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
if (err.code === 'EACCES' || err.code === 'EPERM') {
console.log(`${path} is not accessible. Please check permissions and adjust .dcignore file to not even test this file`);
}
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
if (err.code === 'ENOENT') {
console.log(`no such file or directory: ${path}`);
}
}
return fileStats;
};
/**
 * Type guard: true when `value` is neither null nor undefined.
 * Falsy-but-present values (0, '', false) pass.
 */
function notEmpty(value) {
    // Loose `!= null` matches exactly null and undefined, nothing else.
    return value != null;
}
exports.notEmpty = notEmpty;
// multimatch options: also match dotfiles and bare basenames.
const multiMatchOptions = { matchBase: true, dot: true };
// Shared fast-glob options: absolute results, files only, tolerate fs errors.
const fgOptions = {
dot: true,
absolute: true,
baseNameMatch: true,
onlyFiles: true,
suppressErrors: true,
};
/**
 * Keep only the files that match the supported extension / config-file
 * glob patterns.
 */
function filterSupportedFiles(files, supportedFiles) {
    const patterns = getGlobPatterns(supportedFiles);
    return (0, multimatch_1.default)(files, patterns, multiMatchOptions);
}
function parseIgnoreRulesToGlobs(rules, baseDir) {
// Mappings from .gitignore format to glob format:
// `/foo/` => `/foo/**` (meaning: Ignore root (not sub) foo dir and its paths underneath.)
// `/foo` => `/foo/**`, `/foo` (meaning: Ignore root (not sub) file and dir and its paths underneath.)
// `foo/` => `**/foo/**` (meaning: Ignore (root/sub) foo dirs and their paths underneath.)
// `foo` => `**/foo/**`, `foo` (meaning: Ignore (root/sub) foo files and dirs and their paths underneath.)
return rules.reduce((results, rule) => {
let prefix = '';
if (rule.startsWith('!')) {
// eslint-disable-next-line no-param-reassign
rule = rule.substring(1);
prefix = '!';
}
const startingSlash = rule.startsWith('/');
const startingGlobstar = rule.startsWith('**');
const endingSlash = rule.endsWith('/');
const endingGlobstar = rule.endsWith('**');
if (startingSlash || startingGlobstar) {
// case `/foo/`, `/foo` => `{baseDir}/foo/**`
// case `**/foo/`, `**/foo` => `{baseDir}/**/foo/**`
if (!endingGlobstar)
results.push(prefix + nodePath.posix.join(baseDir, rule, '**'));
// case `/foo` => `{baseDir}/foo`
// case `**/foo` => `{baseDir}/**/foo`
// case `/foo/**` => `{baseDir}/foo/**`
// case `**/foo/**` => `{baseDir}/**/foo/**`
if (!endingSlash)
results.push(prefix + nodePath.posix.join(baseDir, rule));
}
else {
// case `foo/`, `foo` => `{baseDir}/**/foo/**`
if (!endingGlobstar)
results.push(prefix + nodePath.posix.join(baseDir, '**', rule, '**'));
// case `foo` => `{baseDir}/**/foo`
// case `foo/**` => `{baseDir}/**/foo/**`
if (!endingSlash)
results.push(prefix + nodePath.posix.join(baseDir, '**', rule));
}
return results;
}, []);
}
/**
 * Read an ignore file (.gitignore/.dcignore style, or a .snyk YAML policy)
 * and return its rules converted to glob patterns rooted at the file's
 * directory. Unreadable files yield an empty rule set.
 */
function parseFileIgnores(path) {
    let rules = [];
    const dirname = nodePath.dirname(path);
    try {
        const f = fs.readFileSync(path, { encoding: 'utf8' });
        if (path.includes(constants_1.DOTSNYK_FILENAME)) {
            // .snyk is YAML with optional `exclude.code` / `exclude.global` lists.
            // Guard each level explicitly: a policy without an `exclude` section
            // previously threw a TypeError that the EACCES-only catch below
            // swallowed silently, discarding every rule in the file.
            // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
            const parsed = (0, yaml_1.parse)(f);
            const exclude = (parsed && parsed.exclude) || {};
            const codeIgnoredPaths = exclude.code || [];
            const globalIgnoredPaths = exclude.global || [];
            rules = [...codeIgnoredPaths, ...globalIgnoredPaths];
        }
        else {
            // Plain ignore file: one rule per line; skip blanks and comments.
            rules = f
                .split('\n')
                .map(l => l.trim())
                .filter(l => !!l && !l.startsWith('#'));
        }
    }
    catch (err) {
        // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
        if (err.code === 'EACCES' || err.code === 'EPERM') {
            console.log(`${path} is not accessible. Please check permissions and adjust .dcignore file to not even test this file`);
        }
    }
    return parseIgnoreRulesToGlobs(rules, dirname);
}
exports.parseFileIgnores = parseFileIgnores;
/**
 * Build glob patterns for all supported extensions and config files.
 * Ignore-file names are excluded — those are handled separately.
 */
function getGlobPatterns(supportedFiles) {
    const extensionGlobs = supportedFiles.extensions.map(ext => `*${ext}`);
    const configGlobs = supportedFiles.configFiles.filter(name => !constants_1.IGNORE_FILES_NAMES.includes(name));
    return [...extensionGlobs, ...configGlobs];
}
exports.getGlobPatterns = getGlobPatterns;
/**
 * Scan the given directories for ignore files and merge their parsed
 * glob rules (deduplicated) with the provided default ignores.
 */
async function collectIgnoreRules(dirs, symlinksEnabled = false, fileIgnores = constants_1.IGNORES_DEFAULT) {
    const perDirRules = await Promise.all(dirs.map(async (dir) => {
        const stats = await lStat(dir);
        // Skip unreadable paths, plain files and (optionally) symlinks.
        if (!stats || (stats.isSymbolicLink() && !symlinksEnabled) || stats.isFile()) {
            return [];
        }
        // Locate ignore files directly inside this directory.
        const ignoreFiles = await (0, fast_glob_1.default)(constants_1.IGNORE_FILES_NAMES.map(name => `*${name}`), {
            ...fgOptions,
            cwd: dir,
            followSymbolicLinks: symlinksEnabled,
        });
        // Parse each ignore file and merge its glob rules.
        return (0, lodash_union_1.default)(...ignoreFiles.map(parseFileIgnores));
    }));
    return (0, lodash_union_1.default)(fileIgnores, ...perDirRules);
}
exports.collectIgnoreRules = collectIgnoreRules;
/**
 * Derive the base directory for a scan: for a single path it is the path
 * itself (or its parent when the path is a file); multiple or zero paths
 * have no common base and yield ''.
 */
function determineBaseDir(paths) {
    if (paths.length !== 1) {
        return '';
    }
    const [path] = paths;
    return fs.lstatSync(path).isFile() ? nodePath.dirname(path) : path;
}
exports.determineBaseDir = determineBaseDir;
/**
 * Async generator yielding paths (as produced by fast-glob) of files under
 * `cwd` that match `patterns`, honouring the given ignore globs.
 * Positive and negated ('!...') ignore rules are handled by two separate
 * search passes; see inline notes for rationale and known limitations.
 */
async function* searchFiles(patterns, cwd, symlinksEnabled, ignores) {
const positiveIgnores = ignores.filter(rule => !rule.startsWith('!'));
const negativeIgnores = ignores.filter(rule => rule.startsWith('!')).map(rule => rule.substring(1));
// We need to use the ignore rules directly in the stream. Otherwise we would expand all the branches of the file system
// that should be ignored, leading to performance issues (the parser would look stuck while analyzing each ignored file).
// However, fast-glob doesn't address the negative rules in the ignore option correctly.
// As a compromise between correctness and performance, we split the search in two streams, the first one using the
// extension patterns as a search term and the positive ignore rules in the options, while the second that manually
// expands those branches that should be excluded from the ignore rules throught the negative ignores as search term
// and then matches the extensions as a second step to exclude any file that should not be analyzed.
const positiveSearcher = fast_glob_1.default.stream(patterns, {
...fgOptions,
cwd,
followSymbolicLinks: symlinksEnabled,
ignore: positiveIgnores,
});
for await (const filePath of positiveSearcher) {
yield filePath;
}
// TODO: This is incorrect because the .gitignore format allows to specify exceptions to previous rules, therefore
// the separation between positive and negative ignores is incorrect in a scenario with 2+ exeptions like the one below:
// `node_module/` <= ignores everything in a `node_module` folder and it's relative subfolders
// `!node_module/my_module/` <= excludes the `my_module` subfolder from the ignore
// `node_module/my_module/build/` <= re-includes the `build` subfolder in the ignore
if (negativeIgnores.length) {
const negativeSearcher = fast_glob_1.default.stream(negativeIgnores, {
...fgOptions,
cwd,
followSymbolicLinks: symlinksEnabled,
baseNameMatch: false,
});
// Second pass: re-include files carved out by negated rules, but only
// those that still match the supported-extension patterns.
for await (const filePath of negativeSearcher) {
if (isMatch(filePath.toString(), patterns.map(p => `**/${p}`)))
yield filePath;
}
}
}
/**
* Returns bundle files from requested paths
* If a file exceeds the maximum file size, it returns a string with its path
* */
async function* collectBundleFiles({ symlinksEnabled = false, baseDir, fileIgnores, paths, supportedFiles, }) {
const cache = new cache_1.Cache(constants_1.CACHE_KEY, baseDir);
const files = [];
const dirs = [];
// Split into directories and files and exclude symlinks if needed
for (const path of paths) {
// eslint-disable-next-line no-await-in-loop
const fileStats = await lStat(path);
// Check if symlink and exclude if requested
if (!fileStats || (fileStats.isSymbolicLink() && !symlinksEnabled))
continue;
if (fileStats.isFile()) {
// Oversized files are yielded as bare path strings so callers can report them.
fileStats.size <= constants_1.MAX_FILE_SIZE ? files.push(path) : yield path;
}
else if (fileStats.isDirectory()) {
dirs.push(path);
}
}
// Scan folders
const globPatterns = getGlobPatterns(supportedFiles);
for (const folder of dirs) {
const searcher = searchFiles(globPatterns, folder, symlinksEnabled, fileIgnores);
// eslint-disable-next-line no-await-in-loop
for await (const filePath of searcher) {
const fileInfo = await getFileInfo(filePath.toString(), baseDir, false, cache);
// dc ignore AttrAccessOnNull: false positive, there is a precondition with &&
if (fileInfo) {
fileInfo.size <= constants_1.MAX_FILE_SIZE ? yield fileInfo : yield fileInfo.bundlePath;
}
}
}
// Sanitize files
if (files.length) {
// Individually-listed files still go through ignore rules and extension filters.
const searcher = searchFiles(filterSupportedFiles(files, supportedFiles), baseDir, symlinksEnabled, fileIgnores);
for await (const filePath of searcher) {
const fileInfo = await getFileInfo(filePath.toString(), baseDir, false, cache);
// dc ignore AttrAccessOnNull: false positive, there is a precondition with &&
if (fileInfo) {
fileInfo.size <= constants_1.MAX_FILE_SIZE ? yield fileInfo : yield fileInfo.bundlePath;
}
}
}
// Persist the file-hash cache for faster subsequent scans.
cache.save();
}
exports.collectBundleFiles = collectBundleFiles;
/**
 * Compute the delta for extending an existing bundle from a list of changed
 * `files`: which still exist on disk (with their file info) and which were
 * removed (as bundle-relative paths).
 */
async function prepareExtendingBundle(baseDir, supportedFiles, fileIgnores = constants_1.IGNORES_DEFAULT, files, symlinksEnabled = false) {
let removedFiles = [];
let bundleFiles = [];
const cache = new cache_1.Cache(constants_1.CACHE_KEY, baseDir);
// Filter for supported extensions/files only
let processingFiles = filterSupportedFiles(files, supportedFiles);
// Exclude files to be ignored based on ignore rules. We assume here, that ignore rules have not been changed.
processingFiles = processingFiles.map(f => resolveBundleFilePath(baseDir, f)).filter(f => !isMatch(f, fileIgnores));
if (processingFiles.length) {
// Determine existing files (minus removed)
if (isWindows) {
processingFiles = processingFiles.map(f => f.replace(/\\/g, '/')); // fg requires forward-slashes in Windows globs
}
const entries = await (0, fast_glob_1.default)(processingFiles, {
...fgOptions,
cwd: baseDir,
followSymbolicLinks: symlinksEnabled,
objectMode: true,
stats: true,
});
let foundFiles = new Set(); // This initialization is needed to help Typescript checker
foundFiles = entries.reduce((s, e) => {
// Files above the size limit are treated as not uploadable (i.e. removed).
if (e.stats && e.stats.size <= constants_1.MAX_FILE_SIZE) {
s.add(e.path);
}
return s;
}, foundFiles);
// Anything we expected but did not find on disk counts as removed.
removedFiles = processingFiles.reduce((s, p) => {
if (!foundFiles.has(p)) {
s.push(getBundleFilePath(p, baseDir));
}
return s;
}, []);
if (foundFiles.size) {
bundleFiles = (await Promise.all([...foundFiles].map((p) => getFileInfo(p, baseDir, false, cache)))).filter(notEmpty);
}
}
return {
files: bundleFiles,
removedFiles,
};
}
exports.prepareExtendingBundle = prepareExtendingBundle;
/**
 * Convert a local file path to its bundle form: relative to `baseDir`,
 * POSIX-separated and URI-encoded.
 */
function getBundleFilePath(filePath, baseDir) {
    // relPath without an explicit base makes no sense, so fall back to the raw path.
    let relPath = filePath;
    if (baseDir) {
        relPath = nodePath.relative(baseDir, filePath);
    }
    if (isWindows) {
        relPath = relPath.replace(/\\/g, '/');
    }
    return encodeURI(relPath);
}
exports.getBundleFilePath = getBundleFilePath;
/**
 * Hash file content with the project's configured algorithm and encoding.
 */
function calcHash(content) {
    const hasher = crypto_1.default.createHash(constants_1.HASH_ALGORITHM);
    hasher.update(content);
    return hasher.digest(constants_1.ENCODE_TYPE);
}
exports.calcHash = calcHash;
/**
 * Build the descriptor used in bundles for one file: local path, bundle
 * (relative, URI-encoded) path, size and content hash — optionally the
 * content itself. Returns null when the file cannot be stat'ed.
 * Hashes are cached keyed by (size, mtimeMs) to avoid re-reading files.
 */
async function getFileInfo(filePath, baseDir, withContent = false, cache = null) {
const fileStats = await lStat(filePath);
if (fileStats === null) {
return fileStats;
}
const bundlePath = getBundleFilePath(filePath, baseDir);
let fileContent = '';
let fileHash = '';
if (!withContent && !!cache) {
// Try to get hash from cache
const cachedData = cache.getKey(filePath);
if (cachedData) {
// Cache entry layout: [size, mtimeMs, hash]; any mismatch forces a re-hash.
if (cachedData[0] === fileStats.size && cachedData[1] === fileStats.mtimeMs) {
fileHash = cachedData[2];
}
else {
// console.log(`did not match cache for: ${filePath} | ${cachedData} !== ${[fileStats.size, fileStats.mtime]}`);
}
}
}
if (!fileHash) {
try {
fileContent = fs.readFileSync(filePath, { encoding: 'utf8' });
fileHash = calcHash(fileContent);
cache === null || cache === void 0 ? void 0 : cache.setKey(filePath, [fileStats.size, fileStats.mtimeMs, fileHash]);
}
catch (err) {
// Unreadable files fall through with an empty hash rather than failing the scan.
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
if (err.code === 'EACCES' || err.code === 'EPERM') {
console.log(`${filePath} is not accessible. Please check permissions and adjust .dcignore file to not even test this file`);
}
}
}
return {
filePath,
bundlePath,
size: fileStats.size,
hash: fileHash,
content: withContent ? fileContent : undefined,
};
}
exports.getFileInfo = getFileInfo;
/**
 * Resolve bundle-relative paths against `baseDir` and load full file info
 * (including content) for each, dropping unreadable entries.
 */
async function resolveBundleFiles(baseDir, bundleMissingFiles) {
    // NOTE(review): this uses the '.dccache' literal while the other call
    // sites use constants_1.CACHE_KEY — confirm the two values are identical.
    const cache = new cache_1.Cache('.dccache', baseDir);
    const infos = await Promise.all(bundleMissingFiles.map(missingFile => {
        const filePath = resolveBundleFilePath(baseDir, missingFile);
        return getFileInfo(filePath, baseDir, true, cache);
    }));
    const res = infos.filter(notEmpty);
    cache.save(true);
    return res;
}
exports.resolveBundleFiles = resolveBundleFiles;
/**
 * Convert a (URI-encoded, POSIX-separated) bundle path back to a local
 * absolute path under `baseDir` (or a decoded relative path without one).
 */
function resolveBundleFilePath(baseDir, bundleFilePath) {
    // On Windows, restore backslash separators before resolving.
    const localPath = isWindows ? bundleFilePath.replace(/\//g, '\\') : bundleFilePath;
    return baseDir ? nodePath.resolve(baseDir, decodeURI(localPath)) : decodeURI(localPath);
}
exports.resolveBundleFilePath = resolveBundleFilePath;
/**
 * Group files into upload buckets of at most `bucketSize` payload bytes
 * (content + bundlePath + hash lengths). A bucket's files are yielded as
 * soon as the bucket is ~99% full; all remaining non-empty buckets are
 * yielded at the end. Files that are empty or individually larger than
 * `bucketSize` are skipped.
 */
function* composeFilePayloads(files, bucketSize = constants_1.MAX_PAYLOAD) {
    const buckets = [{ size: bucketSize, files: [] }];
    let bucketIndex = -1;
    const getFileDataPayloadSize = (fileData) => { var _a; return (((_a = fileData.content) === null || _a === void 0 ? void 0 : _a.length) || 0) + fileData.bundlePath.length + fileData.hash.length; };
    const isLowerSize = (size, fileData) => size >= getFileDataPayloadSize(fileData);
    for (const fileData of files) {
        // This file is empty or too large to send, it should be skipped.
        if (!fileData.size || !isLowerSize(bucketSize, fileData))
            continue;
        // Find the first bucket with enough remaining capacity.
        bucketIndex = buckets.findIndex(b => isLowerSize(b.size, fileData));
        if (bucketIndex === -1) {
            // No bucket fits: open a new one.
            buckets.push({ size: bucketSize, files: [] });
            bucketIndex = buckets.length - 1;
        }
        buckets[bucketIndex].files.push(fileData);
        buckets[bucketIndex].size -= getFileDataPayloadSize(fileData);
        if (buckets[bucketIndex].size < bucketSize * 0.01) {
            yield buckets[bucketIndex].files; // Give bucket to requester
            // Bug fix: remove ONLY the fulfilled bucket. The previous
            // `splice(bucketIndex)` deleted every later bucket as well,
            // silently dropping any files already queued in them.
            buckets.splice(bucketIndex, 1);
        }
    }
    // Send all left-over buckets
    for (const bucket of buckets.filter(b => b.files.length)) {
        yield bucket.files;
    }
}
exports.composeFilePayloads = composeFilePayloads;
/**
 * True when `filePath` matches at least one of the glob `rules`
 * (full-path matching — no basename shortcut).
 */
function isMatch(filePath, rules) {
    const matches = (0, multimatch_1.default)([filePath], rules, { ...multiMatchOptions, matchBase: false });
    return matches.length > 0;
}
exports.isMatch = isMatch;
//# sourceMappingURL=files.js.map
/***/ }),
/***/ 60776:
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
"use strict";
// TypeScript interop helper: emulate `import x from ...` over a CommonJS module.
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getVerifyCallbackUrl = exports.getAnalysis = exports.AnalysisStatus = exports.extendBundle = exports.checkBundle = exports.createBundle = exports.getFilters = exports.checkSession = exports.getIpFamily = exports.startSession = exports.compressAndEncode = exports.setBase64Encoding = void 0;
const uuid_1 = __webpack_require__(42277);
const lodash_pick_1 = __importDefault(__webpack_require__(25030));
const zlib_1 = __webpack_require__(78761);
const util_1 = __webpack_require__(31669);
const constants_1 = __webpack_require__(65765);
const needle_1 = __webpack_require__(14577);
const url_1 = __webpack_require__(78835);
// The trick to typecast union type alias
/**
 * True when `code` has an entry in the given messages map.
 * (Compiled from a TypeScript type-guard used to narrow a union alias.)
 */
function isSubsetErrorCode(code, messages) {
    // `in` also sees inherited keys, mirroring the original check exactly.
    return code in messages;
}
/**
 * Build the typed error result for a failed API call.
 * Throws a diagnostic object when `errorCode` has no message mapping.
 */
function generateError(errorCode, messages, apiName) {
    if (!isSubsetErrorCode(errorCode, messages)) {
        // Unknown code: surface everything we know for debugging.
        throw { errorCode, messages, apiName };
    }
    return {
        type: 'error',
        error: {
            apiName,
            statusCode: errorCode,
            statusText: messages[errorCode],
        },
    };
}
// Error-code → message map shared by every endpoint (server/network failures).
const GENERIC_ERROR_MESSAGES = {
[constants_1.ErrorCodes.serverError]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.serverError],
[constants_1.ErrorCodes.badGateway]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.badGateway],
[constants_1.ErrorCodes.serviceUnavailable]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.serviceUnavailable],
[constants_1.ErrorCodes.timeout]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.timeout],
[constants_1.ErrorCodes.dnsNotFound]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.dnsNotFound],
[constants_1.ErrorCodes.connectionRefused]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.connectionRefused],
};
/**
 * Decide whether payloads should be gzip+base64 encoded.
 * Explicitly-enabled encoding is honoured as-is; otherwise Snyk-hosted
 * deployments (snyk.io / dev.snyk.io) keep the caller's (falsy) setting
 * and every other host defaults to encoding enabled.
 */
function setBase64Encoding(options) {
    if (options.base64Encoding) {
        return options.base64Encoding;
    }
    const { hostname } = new url_1.URL(options.baseURL);
    // Compare the domain after the first label. Dots are escaped and the end
    // is anchored so hosts like "x.snyk.io.evil.com" or "snykXio" no longer
    // match (the previous pattern '^(|dev.)snyk.io' treated '.' as a wildcard
    // and accepted any suffix).
    const snykDomain = /^(|dev\.)snyk\.io$/;
    if (snykDomain.test(hostname.slice(hostname.indexOf('.') + 1))) {
        return options.base64Encoding;
    }
    return true;
}
exports.setBase64Encoding = setBase64Encoding;
/**
 * Serialize `payload` to JSON, base64-encode it, then gzip the result.
 * Resolves with the compressed Buffer.
 */
async function compressAndEncode(payload) {
    const gzipAsync = (0, util_1.promisify)(zlib_1.gzip);
    const encoded = Buffer.from(JSON.stringify(payload)).toString('base64');
    return gzipAsync(encoded);
}
exports.compressAndEncode = compressAndEncode;
/**
 * Start a browser-based login session: returns a fresh draft token plus
 * the login URL the user must visit to activate it.
 */
function startSession(options) {
    const { source, authHost } = options;
    const draftToken = (0, uuid_1.v4)();
    const query = `token=${draftToken}&utm_medium=${source}&utm_source=${source}&utm_campaign=${source}&docker=false`;
    return {
        draftToken,
        loginURL: `${authHost}/login?${query}`,
    };
}
exports.startSession = startSession;
/**
* Dispatches a FORCED IPv6 request to test client's ISP and network capability.
*
* @return {number} IP family number used by the client.
*/
async function getIpFamily(authHost) {
    const family = 6;
    // The family param forces the transport to use IPv6 for this request.
    const res = await (0, needle_1.makeRequest)({
        url: getVerifyCallbackUrl(authHost),
        method: 'post',
        family,
    }, 0);
    // Any transport error means the client's network cannot do IPv6.
    return res.error ? undefined : family;
}
exports.getIpFamily = getIpFamily;
// checkSession-specific messages layered over the generic transport errors.
const CHECK_SESSION_ERROR_MESSAGES = {
...GENERIC_ERROR_MESSAGES,
[constants_1.ErrorCodes.unauthorizedUser]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.unauthorizedUser],
[constants_1.ErrorCodes.loginInProgress]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.loginInProgress],
};
/**
 * Poll the auth host to see whether the login session completed.
 * Resolves with the API token on success, or an empty string while the
 * login is still pending.
 */
async function checkSession(options) {
    const pending = {
        type: 'success',
        value: '',
    };
    const res = await (0, needle_1.makeRequest)({
        url: getVerifyCallbackUrl(options.authHost),
        body: { token: options.draftToken },
        family: options.ipFamily,
        method: 'post',
    });
    if (res.success) {
        return { ...pending, value: (res.body.ok && res.body.api) || '' };
    }
    // These error codes simply mean "not logged in yet" — keep polling.
    const stillPending = [
        constants_1.ErrorCodes.loginInProgress,
        constants_1.ErrorCodes.badRequest,
        constants_1.ErrorCodes.unauthorizedUser,
    ].includes(res.errorCode);
    if (stillPending) {
        return pending;
    }
    return generateError(res.errorCode, CHECK_SESSION_ERROR_MESSAGES, 'checkSession');
}
exports.checkSession = checkSession;
/**
 * Fetch the supported-file filters (extensions/config files) from the service.
 */
async function getFilters(baseURL, source, attempts = constants_1.MAX_RETRY_ATTEMPTS, requestId) {
    const apiName = 'filters';
    const res = await (0, needle_1.makeRequest)({
        headers: { source, ...(requestId && { 'snyk-request-id': requestId }) },
        url: `${baseURL}/${apiName}`,
        method: 'get',
    }, attempts);
    if (!res.success) {
        return generateError(res.errorCode, GENERIC_ERROR_MESSAGES, apiName);
    }
    return { type: 'success', value: res.body };
}
exports.getFilters = getFilters;
/**
 * Build auth headers: legacy Session-Token plus a standard Bearer token.
 */
function prepareTokenHeaders(sessionToken) {
    const headers = {};
    headers['Session-Token'] = sessionToken;
    // We need to be able to test code-client without deepcode locally
    headers.Authorization = `Bearer ${sessionToken}`;
    return headers;
}
// createBundle-specific messages layered over the generic transport errors.
const CREATE_BUNDLE_ERROR_MESSAGES = {
...GENERIC_ERROR_MESSAGES,
[constants_1.ErrorCodes.unauthorizedUser]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.unauthorizedUser],
[constants_1.ErrorCodes.unauthorizedBundleAccess]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.unauthorizedBundleAccess],
[constants_1.ErrorCodes.bigPayload]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.bigPayload],
[constants_1.ErrorCodes.badRequest]: `Request payload doesn't match the specifications`,
[constants_1.ErrorCodes.notFound]: 'Unable to resolve requested oid',
};
/**
 * Create a new analysis bundle from the given file map.
 * The body is gzip+base64 encoded when setBase64Encoding says so.
 */
async function createBundle(options) {
    const base64Encoding = setBase64Encoding(options);
    const payloadBody = base64Encoding ? await compressAndEncode(options.files) : options.files;
    const res = await (0, needle_1.makeRequest)({
        headers: {
            ...prepareTokenHeaders(options.sessionToken),
            source: options.source,
            ...(options.requestId && { 'snyk-request-id': options.requestId }),
            ...(base64Encoding ? { 'content-type': 'application/octet-stream', 'content-encoding': 'gzip' } : null),
        },
        url: `${options.baseURL}/bundle`,
        method: 'post',
        body: payloadBody,
        isJson: !base64Encoding,
    });
    if (!res.success) {
        return generateError(res.errorCode, CREATE_BUNDLE_ERROR_MESSAGES, 'createBundle');
    }
    return { type: 'success', value: res.body };
}
exports.createBundle = createBundle;
// checkBundle-specific messages layered over the generic transport errors.
const CHECK_BUNDLE_ERROR_MESSAGES = {
...GENERIC_ERROR_MESSAGES,
[constants_1.ErrorCodes.unauthorizedUser]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.unauthorizedUser],
[constants_1.ErrorCodes.unauthorizedBundleAccess]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.unauthorizedBundleAccess],
[constants_1.ErrorCodes.notFound]: 'Uploaded bundle has expired',
};
/**
 * Check whether a previously-created bundle still exists on the server.
 */
async function checkBundle(options) {
    const res = await (0, needle_1.makeRequest)({
        headers: {
            ...prepareTokenHeaders(options.sessionToken),
            source: options.source,
            ...(options.requestId && { 'snyk-request-id': options.requestId }),
        },
        url: `${options.baseURL}/bundle/${options.bundleHash}`,
        method: 'get',
    });
    if (!res.success) {
        return generateError(res.errorCode, CHECK_BUNDLE_ERROR_MESSAGES, 'checkBundle');
    }
    return { type: 'success', value: res.body };
}
exports.checkBundle = checkBundle;
// extendBundle-specific messages layered over the generic transport errors.
const EXTEND_BUNDLE_ERROR_MESSAGES = {
...GENERIC_ERROR_MESSAGES,
[constants_1.ErrorCodes.unauthorizedUser]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.unauthorizedUser],
[constants_1.ErrorCodes.bigPayload]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.bigPayload],
[constants_1.ErrorCodes.badRequest]: `Bad request`,
[constants_1.ErrorCodes.unauthorizedBundleAccess]: 'Unauthorized access to parent bundle',
[constants_1.ErrorCodes.notFound]: 'Parent bundle has expired',
};
/**
 * Extend an existing bundle with added/changed files and removals.
 * The body is gzip+base64 encoded when setBase64Encoding says so.
 */
async function extendBundle(options) {
    const base64Encoding = setBase64Encoding(options);
    const changes = (0, lodash_pick_1.default)(options, ['files', 'removedFiles']);
    const payloadBody = base64Encoding ? await compressAndEncode(changes) : changes;
    const res = await (0, needle_1.makeRequest)({
        headers: {
            ...prepareTokenHeaders(options.sessionToken),
            source: options.source,
            ...(options.requestId && { 'snyk-request-id': options.requestId }),
            ...(base64Encoding ? { 'content-type': 'application/octet-stream', 'content-encoding': 'gzip' } : null),
        },
        url: `${options.baseURL}/bundle/${options.bundleHash}`,
        method: 'put',
        body: payloadBody,
        isJson: !base64Encoding,
    });
    if (!res.success) {
        return generateError(res.errorCode, EXTEND_BUNDLE_ERROR_MESSAGES, 'extendBundle');
    }
    return { type: 'success', value: res.body };
}
exports.extendBundle = extendBundle;
// eslint-disable-next-line no-shadow
var AnalysisStatus;
// Server-side analysis lifecycle states (string enum).
(function (AnalysisStatus) {
    const members = {
        waiting: 'WAITING',
        fetching: 'FETCHING',
        analyzing: 'ANALYZING',
        done: 'DONE',
        failed: 'FAILED',
        complete: 'COMPLETE',
    };
    Object.assign(AnalysisStatus, members);
})(AnalysisStatus = exports.AnalysisStatus || (exports.AnalysisStatus = {}));
// getAnalysis-specific messages; serverError is overridden with a custom text.
const GET_ANALYSIS_ERROR_MESSAGES = {
...GENERIC_ERROR_MESSAGES,
[constants_1.ErrorCodes.unauthorizedUser]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.unauthorizedUser],
[constants_1.ErrorCodes.unauthorizedBundleAccess]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.unauthorizedBundleAccess],
[constants_1.ErrorCodes.notFound]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.notFound],
[constants_1.ErrorCodes.badRequest]: constants_1.DEFAULT_ERROR_MESSAGES[constants_1.ErrorCodes.badRequest],
[constants_1.ErrorCodes.serverError]: 'Getting analysis failed',
};
/**
 * Request analysis results for an uploaded bundle.
 */
async function getAnalysis(options) {
    // Analysis key: identifies the bundle and optional file/shard scoping.
    const key = {
        type: 'file',
        hash: options.bundleHash,
        limitToFiles: options.limitToFiles || [],
        ...(options.shard ? { shard: options.shard } : null),
    };
    const res = await (0, needle_1.makeRequest)({
        headers: {
            ...prepareTokenHeaders(options.sessionToken),
            source: options.source,
            ...(options.requestId && { 'snyk-request-id': options.requestId }),
        },
        url: `${options.baseURL}/analysis`,
        method: 'post',
        body: {
            key,
            ...(0, lodash_pick_1.default)(options, ['severity', 'prioritized', 'legacy', 'analysisContext']),
        },
    });
    if (!res.success) {
        return generateError(res.errorCode, GET_ANALYSIS_ERROR_MESSAGES, 'getAnalysis');
    }
    return { type: 'success', value: res.body };
}
exports.getAnalysis = getAnalysis;
/**
 * URL of the auth host's login-verification callback endpoint.
 */
function getVerifyCallbackUrl(authHost) {
    return [authHost, 'api', 'verify', 'callback'].join('/');
}
exports.getVerifyCallbackUrl = getVerifyCallbackUrl;
//# sourceMappingURL=http.js.map
/***/ }),
/***/ 95951:
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
"use strict";
// TypeScript compiler interop helpers (emitted in place of tslib imports).
// __createBinding: re-export property `k` of module `m` on `o` (as `k2`).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
// __setModuleDefault: attach a CommonJS module object as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
// __importStar: emulate `import * as ns from ...` over a CommonJS module.
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getIpFamily = exports.checkSession = exports.startSession = exports.getAnalysis = exports.AnalysisSeverity = exports.constants = exports.MAX_FILE_SIZE = exports.emitter = exports.extendAnalysis = exports.createBundleFromFolders = exports.analyzeFolders = exports.getGlobPatterns = void 0;
// Public API barrel: re-export the package surface from the internal modules.
const analysis_1 = __webpack_require__(54151);
Object.defineProperty(exports, "analyzeFolders", ({ enumerable: true, get: function () { return analysis_1.analyzeFolders; } }));
Object.defineProperty(exports, "extendAnalysis", ({ enumerable: true, get: function () { return analysis_1.extendAnalysis; } }));
const bundles_1 = __webpack_require__(86639);
Object.defineProperty(exports, "createBundleFromFolders", ({ enumerable: true, get: function () { return bundles_1.createBundleFromFolders; } }));
const emitter_1 = __webpack_require__(37544);
Object.defineProperty(exports, "emitter", ({ enumerable: true, get: function () { return emitter_1.emitter; } }));
const http_1 = __webpack_require__(60776);
Object.defineProperty(exports, "startSession", ({ enumerable: true, get: function () { return http_1.startSession; } }));
Object.defineProperty(exports, "checkSession", ({ enumerable: true, get: function () { return http_1.checkSession; } }));
Object.defineProperty(exports, "getAnalysis", ({ enumerable: true, get: function () { return http_1.getAnalysis; } }));
Object.defineProperty(exports, "getIpFamily", ({ enumerable: true, get: function () { return http_1.getIpFamily; } }));
const constants_1 = __webpack_require__(65765);
Object.defineProperty(exports, "MAX_FILE_SIZE", ({ enumerable: true, get: function () { return constants_1.MAX_FILE_SIZE; } }));
// The constants module is exposed both whole and via MAX_FILE_SIZE above.
const constants = __importStar(__webpack_require__(65765));
exports.constants = constants;
const files_1 = __webpack_require__(32083);
Object.defineProperty(exports, "getGlobPatterns", ({ enumerable: true, get: function () { return files_1.getGlobPatterns; } }));
const analysis_options_interface_1 = __webpack_require__(4775);
Object.defineProperty(exports, "AnalysisSeverity", ({ enumerable: true, get: function () { return analysis_options_interface_1.AnalysisSeverity; } }));
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 4775:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.AnalysisSeverity = void 0;
// eslint-disable-next-line import/prefer-default-export, no-shadow
var AnalysisSeverity;
// Numeric enum: build both name→value and value→name mappings,
// exactly as the TypeScript compiler emits them.
(function (AnalysisSeverity) {
    const levels = [['info', 1], ['warning', 2], ['critical', 3]];
    for (const [name, value] of levels) {
        AnalysisSeverity[AnalysisSeverity[name] = value] = name;
    }
})(AnalysisSeverity = exports.AnalysisSeverity || (exports.AnalysisSeverity = {}));
//# sourceMappingURL=analysis-options.interface.js.map
/***/ }),
/***/ 14577:
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
"use strict";
// TypeScript compiler interop helpers (emitted in place of tslib imports).
// __createBinding: re-export property `k` of module `m` on `o` (as `k2`).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
// __setModuleDefault: attach a CommonJS module object as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
// __importStar: emulate `import * as ns from ...` over a CommonJS module.
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
// __importDefault: emulate `import x from ...` over a CommonJS module.
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.makeRequest = void 0;
/* eslint-disable camelcase */
const http_1 = __importDefault(__webpack_require__(98605));
const needle_1 = __importDefault(__webpack_require__(11642));
const querystring = __importStar(__webpack_require__(71191));
const https_1 = __importDefault(__webpack_require__(57211));
const url_1 = __webpack_require__(78835);
const emitter_1 = __webpack_require__(37544);
const constants_1 = __webpack_require__(65765);
// Resolve after `duration` milliseconds; used to back off between retries.
const sleep = function (duration) {
    return new Promise(function (resolve) {
        setTimeout(resolve, duration);
    });
};
// Default timeout (ms) applied to the open/response/read phases of a request.
const TIMEOUT_DEFAULT = 600000;
// Shared keep-alive tuning for the per-request HTTP(S) agents created in
// makeRequest below.
// NOTE(review): freeSocketTimeout/socketActiveTTL are `agentkeepalive`
// options, not recognised by the core http.Agent constructed here — confirm
// whether they are intentional leftovers.
const agentOptions = {
keepAlive: true,
keepAliveMsecs: 1000,
maxSockets: 256,
maxFreeSockets: 256,
freeSocketTimeout: 60000,
socketActiveTTL: 1000 * 60 * 10,
// Honors the global flag that disables TLS certificate verification.
rejectUnauthorized: !global.ignoreUnknownCA,
};
/**
 * Perform an HTTP request described by `payload`, retrying transient network
 * failures up to `attempts` times.
 *
 * @param payload  request description: url, method, headers, body, qs,
 *                 timeout, family, isJson.
 * @param attempts remaining retry budget (defaults to MAX_RETRY_ATTEMPTS).
 * @returns {success: true, body} on a 2xx response, otherwise
 *          {success: false, errorCode, error}.
 */
async function makeRequest(payload, attempts = constants_1.MAX_RETRY_ATTEMPTS) {
    // Non-JSON payloads are sent verbatim; everything else is serialised.
    let data;
    if (!payload.isJson && payload.body) {
        data = payload.body;
    }
    else {
        data = JSON.stringify(payload.body);
    }
    const parsedUrl = new url_1.URL(payload.url);
    // Keep-alive agent matching the target protocol.
    const agent = parsedUrl.protocol === 'http:' ? new http_1.default.Agent(agentOptions) : new https_1.default.Agent(agentOptions);
    const method = (payload.method || 'get').toLowerCase();
    let { url } = payload;
    if (payload.qs) {
        // Parse the URL and append the search part - this will take care of adding the '/?' part if it's missing
        const urlObject = new url_1.URL(url);
        urlObject.search = querystring.stringify(payload.qs);
        url = urlObject.toString();
        delete payload.qs;
    }
    const options = {
        headers: payload.headers,
        open_timeout: TIMEOUT_DEFAULT,
        response_timeout: payload.timeout || TIMEOUT_DEFAULT,
        read_timeout: payload.timeout || TIMEOUT_DEFAULT,
        family: payload.family,
        // Fix: `payload.isJson || true` always evaluated to `true`, so an
        // explicit `isJson: false` was silently ignored. Default to JSON
        // handling only when the flag is absent.
        json: payload.isJson !== undefined ? payload.isJson : true,
        compressed: true,
        follow_max: 5,
        rejectUnauthorized: !global.ignoreUnknownCA,
        agent,
    };
    emitter_1.emitter.apiRequestLog(`=> HTTP ${method === null || method === void 0 ? void 0 : method.toUpperCase()} ${url} ${data !== null && data !== void 0 ? data : ''}`.slice(0, 399));
    // Retry loop: transient network errors are retried while budget remains;
    // every other outcome returns from inside the loop.
    for (;;) {
        let errorCode;
        let error;
        let response;
        try {
            response = await (0, needle_1.default)(method, url, data, options);
            emitter_1.emitter.apiRequestLog(`<= Response: ${response.statusCode} ${JSON.stringify(response.body)}`);
            const success = !!(response.statusCode && response.statusCode >= 200 && response.statusCode < 300);
            if (success)
                return { success, body: response.body };
            errorCode = response.statusCode;
        }
        catch (err) {
            error = err; // do not swallow the error, pass further to the caller instead
            errorCode = constants_1.NETWORK_ERRORS[err.code || err.errno];
            emitter_1.emitter.apiRequestLog(`Requested url --> ${url} | error --> ${err}`);
        }
        errorCode = errorCode !== null && errorCode !== void 0 ? errorCode : constants_1.ErrorCodes.serviceUnavailable;
        // Try to avoid breaking requests due to temporary network errors
        const retryable = [
            constants_1.ErrorCodes.serviceUnavailable,
            constants_1.ErrorCodes.badGateway,
            constants_1.ErrorCodes.connectionRefused,
            constants_1.ErrorCodes.timeout,
            constants_1.ErrorCodes.dnsNotFound,
            constants_1.ErrorCodes.serverError,
        ].includes(errorCode);
        if (retryable && attempts > 1) {
            attempts--;
            await sleep(constants_1.REQUEST_RETRY_DELAY);
        }
        else {
            // Dead code removed: the old `attempts = 0` assignment before this
            // return and the post-loop fallback return were unreachable.
            return { success: false, errorCode, error };
        }
    }
}
exports.makeRequest = makeRequest;
//# sourceMappingURL=needle.js.map
/***/ }),
/***/ 59642:
/***/ ((module, exports, __webpack_require__) => {
"use strict";
// Babel helper: a `typeof` that also reports polyfilled Symbols as "symbol"
// on pre-ES2015 runtimes; redefines (memoizes) itself on the first call.
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
/* eslint-env browser */
/**
* This is the web browser implementation of `debug()`.
*/
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
exports.storage = localstorage();
/**
* Colors.
*/
exports.colors = ['#0000CC', '#0000FF', '#0033CC', '#0033FF', '#0066CC', '#0066FF', '#0099CC', '#0099FF', '#00CC00', '#00CC33', '#00CC66', '#00CC99', '#00CCCC', '#00CCFF', '#3300CC', '#3300FF', '#3333CC', '#3333FF', '#3366CC', '#3366FF', '#3399CC', '#3399FF', '#33CC00', '#33CC33', '#33CC66', '#33CC99', '#33CCCC', '#33CCFF', '#6600CC', '#6600FF', '#6633CC', '#6633FF', '#66CC00', '#66CC33', '#9900CC', '#9900FF', '#9933CC', '#9933FF', '#99CC00', '#99CC33', '#CC0000', '#CC0033', '#CC0066', '#CC0099', '#CC00CC', '#CC00FF', '#CC3300', '#CC3333', '#CC3366', '#CC3399', '#CC33CC', '#CC33FF', '#CC6600', '#CC6633', '#CC9900', '#CC9933', '#CCCC00', '#CCCC33', '#FF0000', '#FF0033', '#FF0066', '#FF0099', '#FF00CC', '#FF00FF', '#FF3300', '#FF3333', '#FF3366', '#FF3399', '#FF33CC', '#FF33FF', '#FF6600', '#FF6633', '#FF9900', '#FF9933', '#FFCC00', '#FFCC33'];
/**
* Currently only WebKit-based Web Inspectors, Firefox >= v31,
* and the Firebug extension (any Firefox version) are known
* to support "%c" CSS customizations.
*
* TODO: add a `localStorage` variable to explicitly enable/disable colors
*/
// eslint-disable-next-line complexity
function useColors() {
    // NB: In an Electron preload script, document will be defined but not fully
    // initialized. Since we know we're in Chrome, we'll just detect this case
    // explicitly
    if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {
        return true;
    }
    // Internet Explorer and Edge do not support colors.
    if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
        return false;
    }
    // Is webkit? http://stackoverflow.com/a/16459606/376773
    // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
    var webkitStyle = typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance;
    if (webkitStyle) {
        return webkitStyle;
    }
    // Is firebug? http://stackoverflow.com/a/398120/376773
    var firebug = typeof window !== 'undefined' && window.console && (window.console.firebug || window.console.exception && window.console.table);
    if (firebug) {
        return firebug;
    }
    // Is firefox >= v31?
    // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
    var firefox31 = typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31;
    if (firefox31) {
        return firefox31;
    }
    // Double check webkit in userAgent just in case we are in a worker
    return typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/);
}
/**
* Colorize log arguments if enabled.
*
* @api public
*/
function formatArgs(args) {
// Wrap the namespace and the trailing "+Nms" diff in %c color directives
// when colors are enabled; plain text otherwise.
args[0] = (this.useColors ? '%c' : '') + this.namespace + (this.useColors ? ' %c' : ' ') + args[0] + (this.useColors ? '%c ' : ' ') + '+' + module.exports.humanize(this.diff);
if (!this.useColors) {
return;
}
var c = 'color: ' + this.color;
args.splice(1, 0, c, 'color: inherit'); // The final "%c" is somewhat tricky, because there could be other
// arguments passed either before or after the %c, so we need to
// figure out the correct index to insert the CSS into
var index = 0;
var lastC = 0;
args[0].replace(/%[a-zA-Z%]/g, function (match) {
if (match === '%%') {
return;
}
index++;
if (match === '%c') {
// We only are interested in the *last* %c
// (the user may have provided their own)
lastC = index;
}
});
// Insert the namespace color CSS at the argument slot corresponding to the
// last %c directive found above.
args.splice(lastC, 0, c);
}
/**
* Invokes `console.log()` when available.
* No-op when `console.log` is not a "function".
*
* @api public
*/
function log() {
    // IE8/9 expose `console.log` without an own `apply`, hence the explicit
    // Function#apply routing instead of a direct console.log(...) call.
    var hasConsole = (typeof console === "undefined" ? "undefined" : _typeof(console)) === 'object' && console.log;
    return hasConsole && console.log.apply(console, arguments);
}
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
    // Persist (or clear, when falsy) the namespace list; storage access can
    // throw (e.g. Safari with cookies disabled), so failures are swallowed.
    try {
        if (namespaces) {
            exports.storage.setItem('debug', namespaces);
            return;
        }
        exports.storage.removeItem('debug');
    } catch (error) {
        // Swallow
        // XXX (@Qix-) should we be logging these?
    }
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
    var persisted;
    try {
        persisted = exports.storage.getItem('debug');
    } catch (error) {
        // Swallow
        // XXX (@Qix-) should we be logging these?
    }
    // If debug isn't set in LS, and we're in Electron, try to load $DEBUG
    if (!persisted && typeof process !== 'undefined' && 'env' in process) {
        persisted = process.env.DEBUG;
    }
    return persisted;
}
/**
* Localstorage attempts to return the localstorage.
*
* This is necessary because safari throws
* when a user disables cookies/localstorage
* and you attempt to access it.
*
* @return {LocalStorage}
* @api private
*/
function localstorage() {
try {
// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context
// The Browser also has localStorage in the global context.
// A bare `localStorage` reference throws ReferenceError when the global is
// missing, and Safari throws on access when storage is disabled; the catch
// below turns both cases into an `undefined` return.
return localStorage;
} catch (error) {// Swallow
// XXX (@Qix-) should we be logging these?
}
}
module.exports = __webpack_require__(26711)(exports);
var formatters = module.exports.formatters;
/**
* Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
*/
formatters.j = function (v) {
    // %j => JSON; serialisation failures (e.g. circular references) are
    // surfaced inline instead of throwing out of the logger.
    var serialised;
    try {
        serialised = JSON.stringify(v);
    } catch (error) {
        serialised = '[UnexpectedJSONParseError]: ' + error.message;
    }
    return serialised;
};
/***/ }),
/***/ 26711:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
/**
* This is the common logic for both the Node.js and web browser
* implementations of `debug()`.
*/
// Shared core of `debug`: given an environment adapter (`env` supplies log,
// save, load, useColors, formatArgs, colors, ...), builds and returns the
// createDebug factory with its namespace-matching machinery.
function setup(env) {
createDebug.debug = createDebug;
createDebug.default = createDebug;
createDebug.coerce = coerce;
createDebug.disable = disable;
createDebug.enable = enable;
createDebug.enabled = enabled;
createDebug.humanize = __webpack_require__(57824);
// Copy the env-specific hooks onto the shared factory.
Object.keys(env).forEach(function (key) {
createDebug[key] = env[key];
});
/**
* Active `debug` instances.
*/
createDebug.instances = [];
/**
* The currently active debug mode names, and names to skip.
*/
createDebug.names = [];
createDebug.skips = [];
/**
* Map of special "%n" handling functions, for the debug "format" argument.
*
* Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
*/
createDebug.formatters = {};
/**
* Selects a color for a debug namespace
* @param {String} namespace The namespace string for the for the debug instance to be colored
* @return {Number|String} An ANSI color code for the given namespace
* @api private
*/
function selectColor(namespace) {
// Deterministic 32-bit string hash mapped onto the available palette.
var hash = 0;
for (var i = 0; i < namespace.length; i++) {
hash = (hash << 5) - hash + namespace.charCodeAt(i);
hash |= 0; // Convert to 32bit integer
}
return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
}
createDebug.selectColor = selectColor;
/**
* Create a debugger with the given `namespace`.
*
* @param {String} namespace
* @return {Function}
* @api public
*/
function createDebug(namespace) {
var prevTime;
function debug() {
// Disabled?
if (!debug.enabled) {
return;
}
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
var self = debug; // Set `diff` timestamp
var curr = Number(new Date());
var ms = curr - (prevTime || curr);
self.diff = ms;
self.prev = prevTime;
self.curr = curr;
prevTime = curr;
args[0] = createDebug.coerce(args[0]);
if (typeof args[0] !== 'string') {
// Anything else let's inspect with %O
args.unshift('%O');
} // Apply any `formatters` transformations
var index = 0;
args[0] = args[0].replace(/%([a-zA-Z%])/g, function (match, format) {
// If we encounter an escaped % then don't increase the array index
if (match === '%%') {
return match;
}
index++;
var formatter = createDebug.formatters[format];
if (typeof formatter === 'function') {
var val = args[index];
match = formatter.call(self, val); // Now we need to remove `args[index]` since it's inlined in the `format`
args.splice(index, 1);
index--;
}
return match;
}); // Apply env-specific formatting (colors, etc.)
createDebug.formatArgs.call(self, args);
var logFn = self.log || createDebug.log;
logFn.apply(self, args);
}
debug.namespace = namespace;
debug.enabled = createDebug.enabled(namespace);
debug.useColors = createDebug.useColors();
debug.color = selectColor(namespace);
debug.destroy = destroy;
debug.extend = extend; // Debug.formatArgs = formatArgs;
// debug.rawLog = rawLog;
// env-specific initialization logic for debug instances
if (typeof createDebug.init === 'function') {
createDebug.init(debug);
}
createDebug.instances.push(debug);
return debug;
}
// Remove this instance from the registry so enable()/disable() no longer
// update it; returns whether it was still registered.
function destroy() {
var index = createDebug.instances.indexOf(this);
if (index !== -1) {
createDebug.instances.splice(index, 1);
return true;
}
return false;
}
// Create a sub-namespace debugger, joined with ':' unless a delimiter is given.
function extend(namespace, delimiter) {
return createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);
}
/**
* Enables a debug mode by namespaces. This can include modes
* separated by a colon and wildcards.
*
* @param {String} namespaces
* @api public
*/
function enable(namespaces) {
createDebug.save(namespaces);
createDebug.names = [];
createDebug.skips = [];
var i;
var split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
var len = split.length;
for (i = 0; i < len; i++) {
if (!split[i]) {
// ignore empty strings
continue;
}
// Translate glob-style '*' into a non-greedy regex wildcard; a leading '-'
// marks the pattern as an exclusion (skip) rather than an inclusion.
namespaces = split[i].replace(/\*/g, '.*?');
if (namespaces[0] === '-') {
createDebug.skips.push(new RegExp('^' + namespaces.substr(1) + '$'));
} else {
createDebug.names.push(new RegExp('^' + namespaces + '$'));
}
}
// Re-evaluate the enabled flag of every live instance.
for (i = 0; i < createDebug.instances.length; i++) {
var instance = createDebug.instances[i];
instance.enabled = createDebug.enabled(instance.namespace);
}
}
/**
* Disable debug output.
*
* @api public
*/
function disable() {
createDebug.enable('');
}
/**
* Returns true if the given mode name is enabled, false otherwise.
*
* @param {String} name
* @return {Boolean}
* @api public
*/
function enabled(name) {
// Names ending in '*' are manually-created namespaces, always enabled.
if (name[name.length - 1] === '*') {
return true;
}
var i;
var len;
for (i = 0, len = createDebug.skips.length; i < len; i++) {
if (createDebug.skips[i].test(name)) {
return false;
}
}
for (i = 0, len = createDebug.names.length; i < len; i++) {
if (createDebug.names[i].test(name)) {
return true;
}
}
return false;
}
/**
* Coerce `val`.
*
* @param {Mixed} val
* @return {Mixed}
* @api private
*/
function coerce(val) {
if (val instanceof Error) {
return val.stack || val.message;
}
return val;
}
// Initialise the enabled namespaces from the persisted setting
// (environment variable on Node, storage in the browser).
createDebug.enable(createDebug.load());
return createDebug;
}
module.exports = setup;
/***/ }),
/***/ 42781:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
/**
* Detect Electron renderer / nwjs process, which is node, but we should
* treat as a browser.
*/
// Pick the environment-specific implementation: the browser build (59642) for
// renderer-like processes, the Node build (52937) otherwise.
if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {
module.exports = __webpack_require__(59642);
} else {
module.exports = __webpack_require__(52937);
}
/***/ }),
/***/ 52937:
/***/ ((module, exports, __webpack_require__) => {
"use strict";
/**
* Module dependencies.
*/
var tty = __webpack_require__(33867);
var util = __webpack_require__(31669);
/**
* This is the Node.js implementation of `debug()`.
*/
exports.init = init;
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
/**
* Colors.
*/
exports.colors = [6, 2, 3, 4, 5, 1];
try {
// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)
// eslint-disable-next-line import/no-extraneous-dependencies
var supportsColor = __webpack_require__(92130);
if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
exports.colors = [20, 21, 26, 27, 32, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68, 69, 74, 75, 76, 77, 78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134, 135, 148, 149, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 214, 215, 220, 221];
}
} catch (error) {} // Swallow - we only care if `supports-color` is available; it doesn't have to be.
/**
* Build up the default `inspectOpts` object from the environment variables.
*
* $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
*/
// Collect all DEBUG_* environment variables into camel-cased util.inspect
// options (e.g. DEBUG_SHOW_HIDDEN=enabled -> { showHidden: true }).
exports.inspectOpts = Object.keys(process.env).filter(function (key) {
return /^debug_/i.test(key);
}).reduce(function (obj, key) {
// Camel-case
var prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, function (_, k) {
return k.toUpperCase();
}); // Coerce string value into JS value
var val = process.env[key];
if (/^(yes|on|true|enabled)$/i.test(val)) {
val = true;
} else if (/^(no|off|false|disabled)$/i.test(val)) {
val = false;
} else if (val === 'null') {
val = null;
} else {
// Anything else is treated as numeric (NaN when not parseable).
val = Number(val);
}
obj[prop] = val;
return obj;
}, {});
/**
 * Is stderr a TTY? Colored output is enabled when `true`.
 */
function useColors() {
// A DEBUG_COLORS override wins; otherwise colorize only when stderr is a TTY.
return 'colors' in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty.isatty(process.stderr.fd);
}
/**
* Adds ANSI color escape codes if enabled.
*
* @api public
*/
function formatArgs(args) {
var name = this.namespace,
useColors = this.useColors;
if (useColors) {
var c = this.color;
// Basic colors use "\x1b[3Cm"; 256-color terminals use "\x1b[38;5;Cm".
var colorCode = "\x1B[3" + (c < 8 ? c : '8;5;' + c);
var prefix = " ".concat(colorCode, ";1m").concat(name, " \x1B[0m");
// Prefix every line of the message with the colored namespace.
args[0] = prefix + args[0].split('\n').join('\n' + prefix);
// Append the "+Nms" diff since this instance's previous call.
args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + "\x1B[0m");
} else {
args[0] = getDate() + name + ' ' + args[0];
}
}
function getDate() {
    // ISO timestamp prefix for non-color output; suppressed via DEBUG_HIDE_DATE.
    return exports.inspectOpts.hideDate ? '' : new Date().toISOString() + ' ';
}
/**
* Invokes `util.format()` with the specified arguments and writes to stderr.
*/
function log() {
// Debug output goes to stderr, leaving stdout free for program output.
return process.stderr.write(util.format.apply(util, arguments) + '\n');
}
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
    if (!namespaces) {
        // If you set a process.env field to null or undefined, it gets cast to the
        // string 'null' or 'undefined'. Just delete instead.
        delete process.env.DEBUG;
        return;
    }
    process.env.DEBUG = namespaces;
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
    // Namespaces persist in the DEBUG environment variable on Node.
    var { DEBUG } = process.env;
    return DEBUG;
}
/**
* Init logic for `debug` instances.
*
* Create a new `inspectOpts` object in case `useColors` is set
* differently for a particular `debug` instance.
*/
function init(debug) {
    // Give each instance its own copy of the global inspect options so they
    // can be tweaked per-instance without affecting the shared defaults.
    debug.inspectOpts = {};
    Object.keys(exports.inspectOpts).forEach(function (key) {
        debug.inspectOpts[key] = exports.inspectOpts[key];
    });
}
module.exports = __webpack_require__(26711)(exports);
var formatters = module.exports.formatters;
/**
* Map %o to `util.inspect()`, all on a single line.
*/
formatters.o = function (v) {
    // Honor the per-instance color preference before inspecting.
    this.inspectOpts.colors = this.useColors;
    // Collapse util.inspect's multi-line output onto a single line.
    var lines = util.inspect(v, this.inspectOpts).split('\n');
    return lines.map(function (line) { return line.trim(); }).join(' ');
};
/**
* Map %O to `util.inspect()`, allowing multiple lines if needed.
*/
formatters.O = function (v) {
// Honor the per-instance color preference before delegating to util.inspect.
this.inspectOpts.colors = this.useColors;
return util.inspect(v, this.inspectOpts);
};
/***/ }),
/***/ 2662:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var createHash = __webpack_require__(76417).createHash;
function get_header(header, credentials, opts) {
    // The server's WWW-Authenticate challenge starts with the scheme name.
    var scheme = header.split(' ')[0];
    var user = credentials[0];
    var pass = credentials[1];
    if (scheme == 'Digest') {
        return digest.generate(header, user, pass, opts.method, opts.path);
    }
    if (scheme == 'Basic') {
        return basic(user, pass);
    }
    // Unknown schemes yield undefined (no Authorization header is produced).
}
////////////////////
// basic
// MD5 hex digest. Weak as a general hash, but it is the algorithm mandated by
// the HTTP Digest authentication scheme implemented below.
function md5(string) {
return createHash('md5').update(string).digest('hex');
}
function basic(user, pass) {
    // RFC 7617: base64("user:pass"); a lone user (e.g. a token) is encoded as-is.
    var credentials = typeof pass == 'undefined' ? user : user + ':' + pass;
    return 'Basic ' + Buffer.from(credentials).toString('base64');
}
////////////////////
// digest
// logic inspired from https://github.com/simme/node-http-digest-client
var digest = {};
// Extract key="value" (or bare key=value) pairs from a WWW-Authenticate
// Digest challenge, e.g. realm, nonce, qop, opaque.
digest.parse_header = function(header) {
var challenge = {},
matches = header.match(/([a-z0-9_-]+)="?([a-z0-9_=\/\.@\s-\+)()]+)"?/gi);
for (var i = 0, l = matches.length; i < l; i++) {
var parts = matches[i].split('='),
key = parts.shift(),
val = parts.join('=').replace(/^"/, '').replace(/"$/, '');
// The re-join keeps values that themselves contain '='; quotes stripped.
challenge[key] = val;
}
return challenge;
}
digest.update_nc = function(nc) {
    // Increment the nonce counter, wrapping after 8 digits, and left-pad the
    // result to exactly 8 characters as the Digest scheme requires.
    var max = 99999999;
    nc = nc + 1;
    if (nc > max) nc = 1;
    var digits = String(nc);
    while (digits.length < 8) digits = '0' + digits;
    return digits;
}
// Compute a Digest Authorization header value for the given challenge
// (RFC 2617/7616, MD5 algorithm only).
digest.generate = function(header, user, pass, method, path) {
var nc = 1,
cnonce = null,
challenge = digest.parse_header(header);
// HA1 = MD5(user:realm:pass), HA2 = MD5(METHOD:path)
var ha1 = md5(user + ':' + challenge.realm + ':' + pass),
ha2 = md5(method.toUpperCase() + ':' + path),
resp = [ha1, challenge.nonce];
if (typeof challenge.qop === 'string') {
// qop present: response = MD5(HA1:nonce:nc:cnonce:qop:HA2)
cnonce = md5(Math.random().toString(36)).substr(0, 8);
nc = digest.update_nc(nc);
resp = resp.concat(nc, cnonce);
resp = resp.concat(challenge.qop, ha2);
} else {
// Legacy form: response = MD5(HA1:nonce:HA2)
resp = resp.concat(ha2);
}
var params = {
uri : path,
realm : challenge.realm,
nonce : challenge.nonce,
username : user,
response : md5(resp.join(':'))
}
if (challenge.qop) {
params.qop = challenge.qop;
}
if (challenge.opaque) {
params.opaque = challenge.opaque;
}
if (cnonce) {
params.nc = nc;
params.cnonce = cnonce;
}
// Serialise as: Digest key="value", key="value", ...
header = []
for (var k in params)
header.push(k + '="' + params[k] + '"')
return 'Digest ' + header.join(', ');
}
module.exports = {
header : get_header,
basic : basic,
digest : digest.generate
}
/***/ }),
/***/ 80284:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
// Simple cookie handling implementation based on the standard RFC 6265.
//
// This module just has two functionalities:
// - Parse a set-cookie-header as a key value object
// - Write a cookie-string from a key value object
//
// All cookie attributes are ignored.
var unescape = __webpack_require__(71191).unescape;
var COOKIE_PAIR = /^([^=\s]+)\s*=\s*("?)\s*(.*)\s*\2\s*$/;
var EXCLUDED_CHARS = /[\x00-\x1F\x7F\x3B\x3B\s\"\,\\"%]/g;
var TRAILING_SEMICOLON = /\x3B+$/;
var SEP_SEMICOLON = /\s*\x3B\s*/;
// i know these should be 'const', but I'd like to keep
// supporting earlier node.js versions as long as I can. :)
var KEY_INDEX = 1; // index of key from COOKIE_PAIR match
var VALUE_INDEX = 3; // index of value from COOKIE_PAIR match
// Returns a copy of str, trimmed and without trailing semicolons.
// Trim whitespace and strip trailing semicolons from a cookie string.
function cleanCookieString(str) {
    // Consistency fix: use the shared TRAILING_SEMICOLON pattern declared
    // above (it was defined but unused) instead of re-creating an identical
    // regex literal on every call.
    return str.trim().replace(TRAILING_SEMICOLON, '');
}
function getFirstPair(str) {
    // Cookie attributes (Path, Expires, ...) follow the first ';' and are
    // ignored by this module, so keep only the leading name=value pair.
    var separatorAt = str.indexOf('\x3B');
    if (separatorAt === -1) return str;
    return str.substr(0, separatorAt);
}
// Returns an encoded copy of str based on RFC 6265 S4.1.1.
function encodeCookieComponent(str) {
// Percent-encode the characters RFC 6265 forbids in cookie names/values
// (controls, whitespace, ';', '"', ',', backslash, '%').
return str.toString().replace(EXCLUDED_CHARS, encodeURIComponent);
}
// Parses a set-cookie-string based on the standard defined in RFC6265 S4.1.1.
function parseSetCookieString(str) {
// Drop attributes and trailing semicolons, keeping only the name=value pair.
str = cleanCookieString(str);
str = getFirstPair(str);
var res = COOKIE_PAIR.exec(str);
// No match, or an empty value: not a valid cookie pair.
if (!res || !res[VALUE_INDEX]) return null;
return {
name : unescape(res[KEY_INDEX]),
value : unescape(res[VALUE_INDEX])
};
}
// Parses a set-cookie-header and returns a key/value object.
// Each key represents the name of a cookie.
function parseSetCookieHeader(header) {
    if (!header) return {};
    // A Set-Cookie header may arrive as a single string or an array of them.
    var list = Array.isArray(header) ? header : [header];
    var cookies = {};
    list.forEach(function (str) {
        var cookie = parseSetCookieString(str);
        if (cookie) cookies[cookie.name] = cookie.value;
    });
    return cookies;
}
// Writes a set-cookie-string based on the standard defined in RFC 6265 S4.1.1.
function writeCookieString(obj) {
    // Serialise as "name1=value1; name2=value2", RFC 6265-encoding both sides.
    return Object.keys(obj).map(function (name) {
        return encodeCookieComponent(name) + '=' + encodeCookieComponent(obj[name]);
    }).join('; ');
}
// returns a key/val object from an array of cookie strings
exports.read = parseSetCookieHeader;
// writes a cookie string header
exports.write = writeCookieString;
/***/ }),
/***/ 64509:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var iconv,
inherits = __webpack_require__(31669).inherits,
stream = __webpack_require__(92413);
var regex = /(?:charset|encoding)\s*=\s*['"]? *([\w\-]+)/i;
inherits(StreamDecoder, stream.Transform);
// Transform stream that decodes incoming bytes from `charset` to a string
// via iconv (loaded lazily by the factory below).
function StreamDecoder(charset) {
// Allow calling without `new`.
if (!(this instanceof StreamDecoder))
return new StreamDecoder(charset);
stream.Transform.call(this, charset);
this.charset = charset;
// Charset sniffing from the body is attempted only on the first chunk.
this.parsed_chunk = false;
}
StreamDecoder.prototype._transform = function(chunk, encoding, done) {
var res, found;
// try get charset from chunk, just once
if (this.charset == 'utf8' && !this.parsed_chunk) {
this.parsed_chunk = true;
// Look for a charset/encoding declaration (e.g. an HTML meta tag or XML
// prolog) inside the body itself.
var matches = regex.exec(chunk.toString());
if (matches) {
found = matches[1].toLowerCase();
this.charset = found == 'utf-8' ? 'utf8' : found;
}
}
try {
res = iconv.decode(chunk, this.charset);
} catch(e) { // something went wrong, just return original chunk
res = chunk;
}
this.push(res);
done();
}
module.exports = function(charset) {
try {
// iconv-lite is an optional dependency; load it lazily on first use.
if (!iconv) iconv = __webpack_require__(4914);
} catch(e) {
/* iconv not found */
}
// Without iconv we cannot transcode, so pass the bytes through untouched.
if (iconv)
return new StreamDecoder(charset);
else
return new stream.PassThrough;
}
/***/ }),
/***/ 35679:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
var readFile = __webpack_require__(35747).readFile,
basename = __webpack_require__(85622).basename;
// Build a multipart/form-data body from a key/value object; calls back with
// (err, bodyString) once every part has been generated.
exports.build = function(data, boundary, callback) {
// Multipart bodies are built from plain objects, never streams.
if (typeof data != 'object' || typeof data.pipe == 'function')
return callback(new Error('Multipart builder expects data as key/val object.'));
var body = '',
object = flatten(data),
count = Object.keys(object).length;
if (count === 0)
return callback(new Error('Empty multipart body. Invalid data.'))
// Invoked once per part; after the last one, fires the final callback with
// the closing boundary appended.
function done(err, section) {
if (err) return callback(err);
if (section) body += section;
--count || callback(null, body + '--' + boundary + '--');
};
for (var key in object) {
var value = object[key];
if (value === null || typeof value == 'undefined') {
// Skip empty values, but still decrement the pending-part counter.
done();
} else if (Buffer.isBuffer(value)) {
var part = { buffer: value, content_type: 'application/octet-stream' };
generate_part(key, part, boundary, done);
} else {
// Part descriptors ({ file | buffer | content_type }) pass through;
// plain values are wrapped as { value }.
var part = (value.buffer || value.file || value.content_type) ? value : { value: value };
generate_part(key, part, boundary, done);
}
}
}
// Generate a single multipart section (boundary + headers + body) for one
// field; calls back with (err, sectionString).
function generate_part(name, part, boundary, callback) {
var return_part = '--' + boundary + '\r\n';
return_part += 'Content-Disposition: form-data; name="' + name + '"';
// Appends file/buffer `data` as the part body; with no arguments, finalises
// a plain-value part whose body was already appended below.
function append(data, filename) {
if (data) {
var binary = part.content_type.indexOf('text') == -1;
return_part += '; filename="' + encodeURIComponent(filename) + '"\r\n';
if (binary) return_part += 'Content-Transfer-Encoding: binary\r\n';
return_part += 'Content-Type: ' + part.content_type + '\r\n\r\n';
return_part += binary ? data.toString('binary') : data.toString('utf8');
}
callback(null, return_part + '\r\n');
};
if ((part.file || part.buffer) && part.content_type) {
// File/buffer part: prefer an explicit filename, then the file's basename,
// then the field name.
var filename = part.filename ? part.filename : part.file ? basename(part.file) : name;
if (part.buffer) return append(part.buffer, filename);
readFile(part.file, function(err, data) {
if (err) return callback(err);
append(data, filename);
});
} else {
// Plain value part.
if (typeof part.value == 'object')
return callback(new Error('Object received for ' + name + ', expected string.'))
if (part.content_type) {
return_part += '\r\n';
return_part += 'Content-Type: ' + part.content_type;
}
return_part += '\r\n\r\n';
return_part += Buffer.from(String(part.value), 'utf8').toString('binary');
append();
}
}
// flattens nested objects for multipart body
// flattens nested objects for multipart body
// e.g. { a: { b: 1 } } -> { 'a[b]': 1 }; file/buffer/part descriptors are
// treated as leaves and kept intact.
function flatten(object, into, prefix) {
    into = into || {};
    for (var key in object) {
        var flatKey = prefix ? prefix + '[' + key + ']' : key;
        var value = object[key];
        if (value && typeof value === 'object' && !(value.buffer || value.file || value.content_type)) {
            flatten(value, into, flatKey);
        } else {
            into[flatKey] = value;
        }
    }
    return into;
}
/***/ }),
/***/ 11642:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
//////////////////////////////////////////
// Needle -- HTTP Client for Node.js
// Written by Tomás Pollak
// (c) 2012-2020 - Fork Ltd.
// MIT Licensed
//////////////////////////////////////////
var fs = __webpack_require__(35747),
http = __webpack_require__(98605),
https = __webpack_require__(57211),
url = __webpack_require__(78835),
stream = __webpack_require__(92413),
debug = __webpack_require__(42781)('needle'),
stringify = __webpack_require__(28280)/* .build */ .J,
multipart = __webpack_require__(35679),
auth = __webpack_require__(2662),
cookies = __webpack_require__(80284),
parsers = __webpack_require__(50722),
decoder = __webpack_require__(64509);
//////////////////////////////////////////
// variabilia
var version = __webpack_require__(6072)/* .version */ .i8;
var user_agent = 'Needle/' + version;
user_agent += ' (Node.js ' + process.version + '; ' + process.platform + ' ' + process.arch + ')';
var tls_options = 'agent pfx key passphrase cert ca ciphers rejectUnauthorized secureProtocol checkServerIdentity family';
// older versions of node (< 0.11.4) prevent the runtime from exiting
// because of connections in keep-alive state. so if this is the case
// we'll default new requests to set a Connection: close header.
var close_by_default = !http.Agent || http.Agent.defaultMaxSockets != Infinity;
// see if we have Object.assign. otherwise fall back to util._extend
var extend = Object.assign ? Object.assign : __webpack_require__(31669)._extend;
// these are the status codes that Needle interprets as redirects.
var redirect_codes = [301, 302, 303, 307, 308];
//////////////////////////////////////////
// decompressors for gzip/deflate/br bodies
// Pre-bind `options` as the first argument of `fn`. Uses Function#bind (not a
// wrapper closure) so the result stays constructable with `new`, which the
// decompressor table below relies on.
function bind_opts(fn, options) {
return fn.bind(null, options);
}
var decompressors = {};
try {
var zlib = __webpack_require__(78761);
// Enable Z_SYNC_FLUSH to avoid Z_BUF_ERROR errors (Node PR #2595)
var zlib_options = {
flush: zlib.Z_SYNC_FLUSH,
finishFlush: zlib.Z_SYNC_FLUSH
};
var br_options = {
flush: zlib.BROTLI_OPERATION_FLUSH,
finishFlush: zlib.BROTLI_OPERATION_FLUSH
};
decompressors['x-deflate'] = bind_opts(zlib.Inflate, zlib_options);
decompressors['deflate'] = bind_opts(zlib.Inflate, zlib_options);
decompressors['x-gzip'] = bind_opts(zlib.Gunzip, zlib_options);
decompressors['gzip'] = bind_opts(zlib.Gunzip, zlib_options);
if (typeof zlib.BrotliDecompress === 'function') {
decompressors['br'] = bind_opts(zlib.BrotliDecompress, br_options);
}
} catch(e) { /* zlib not available */ }
//////////////////////////////////////////
// options and aliases
var defaults = {
// data
boundary : '--------------------NODENEEDLEHTTPCLIENT',
encoding : 'utf8',
parse_response : 'all', // same as true. valid options: 'json', 'xml' or false/null
proxy : null,
// headers
headers : {},
accept : '*/*',
user_agent : user_agent,
// numbers
open_timeout : 10000,
response_timeout : 0,
read_timeout : 0,
follow_max : 0,
stream_length : -1,
// booleans
compressed : false,
decode_response : true,
parse_cookies : true,
follow_set_cookies : false,
follow_set_referer : false,
follow_keep_method : false,
follow_if_same_host : false,
follow_if_same_protocol : false,
follow_if_same_location : false
}
// Maps user-facing option aliases to their canonical option names.
var aliased = {
  options: {
    decode : 'decode_response',
    parse : 'parse_response',
    timeout : 'open_timeout',
    follow : 'follow_max'
  },
  inverted: {}
}

// only once, invert aliased keys so we can get passed options.
// (forEach, not map: we only want the side effect, not a result array)
Object.keys(aliased.options).forEach(function(k) {
  var value = aliased.options[k];
  aliased.inverted[value] = k;
});
//////////////////////////////////////////
// helpers
// Returns the names of all default options whose value has the given
// constructor (e.g. Boolean, Number). A direct filter replaces the original
// map-to-undefined-then-filter-truthy two-pass idiom.
function keys_by_type(type) {
  return Object.keys(defaults).filter(function(el) {
    return defaults[el] !== null && defaults[el].constructor == type;
  });
}
// Splits a Content-Type header into its mime type and charset.
// Returns {} when no header was given. The charset defaults to 'utf8' and
// is read from the first `charset=` parameter found -- in ANY position, not
// just the first one (e.g. "text/html; boundary=x; charset=iso-8859-1").
function parse_content_type(header) {
  if (!header || header === '') return {};

  var charset = 'utf8', arr = header.split(';');

  for (var i = 1; i < arr.length; i++) {
    var found = arr[i].match(/charset=(.+)/);
    if (found) {
      charset = found[1];
      break;
    }
  }

  return { type: arr[0], charset: charset };
}
// Duck-types a readable/writable stream by the presence of .pipe().
// Guards against null/undefined, which would previously throw a TypeError.
function is_stream(obj) {
  return obj != null && typeof obj.pipe === 'function';
}
// Figures out how many bytes `stream` will emit so a content-length header
// can be set. Uses, in order: an explicit `given_length`, the stream's own
// start/end byte range, or an fs.stat() on its path (cb(null) on failure).
function get_stream_length(stream, given_length, cb) {
  if (given_length > 0)
    return cb(given_length);

  var has_range = stream.end !== void 0 && stream.end !== Infinity && stream.start !== void 0;
  if (has_range)
    return cb((stream.end + 1) - (stream.start || 0));

  fs.stat(stream.path, function(err, stat) {
    if (!stat) return cb(null);
    cb(stat.size - (stream.start || 0));
  });
}
// Resolves `href` against `base`, preferring the WHATWG URL class and
// falling back to the legacy url.resolve() on very old Node (< v6.13).
function resolve_url(href, base) {
  if (!url.URL)
    return url.resolve(base, href); // older Node version (< v6.13)

  return new url.URL(href, base);
}
// Wires `streams` into a chain, using stream.pipeline() when available
// (it handles teardown and error propagation), or manual pipe() calls on
// older Node. In the fallback, only the first error is reported to `cb`.
function pump_streams(streams, cb) {
  if (stream.pipeline)
    return stream.pipeline.apply(null, streams.concat(cb));

  var current = streams.shift();
  while (streams.length) {
    current = current.pipe(streams.shift());
    current.once('error', function(err) {
      if (cb) cb(err);
      cb = null; // ensure cb fires at most once
    });
  }
}
//////////////////////////////////////////
// the main act
// Request state holder. Normalizes the (options, callback) overload:
// passing a function as `options` means "no options, that's my callback".
function Needle(method, uri, data, options, callback) {
  if (typeof uri !== 'string')
    throw new TypeError('URL must be a string, not ' + uri);

  this.method = method.toLowerCase();
  this.uri    = uri;
  this.data   = data;

  var options_is_callback = (typeof options == 'function');
  this.callback = options_is_callback ? options : callback;
  this.options  = options_is_callback ? {} : options;
}
// Builds the per-request config (headers, timeouts, proxy, TLS options,
// parser/encoding choices) from the passed options, aliases and defaults.
Needle.prototype.setup = function(uri, options) {

  // Returns options[key], falling back to its alias, then to `fallback`.
  function get_option(key, fallback) {
    // if original is in options, return that value
    if (typeof options[key] != 'undefined') return options[key];

    // otherwise, return value from alias or fallback/undefined
    return typeof options[aliased.inverted[key]] != 'undefined'
      ? options[aliased.inverted[key]] : fallback;
  }

  // Type-checks an option value: throws on a type mismatch, falls back to
  // the library default when the option wasn't passed at all.
  function check_value(expected, key) {
    var value = get_option(key),
        type = typeof value;

    if (type != 'undefined' && type != expected)
      throw new TypeError(type + ' received for ' + key + ', but expected a ' + expected);

    return (type == expected) ? value : defaults[key];
  }

  //////////////////////////////////////////////////
  // the basics

  var config = {
    http_opts : {
      localAddress: get_option('localAddress', undefined),
      lookup: get_option('lookup', undefined)
    }, // passed later to http.request() directly
    headers : {},
    output : options.output,
    proxy : get_option('proxy', defaults.proxy),
    parser : get_option('parse_response', defaults.parse_response),
    encoding : options.encoding || (options.multipart ? 'binary' : defaults.encoding)
  }

  // validate/copy every boolean and numeric option against the defaults
  keys_by_type(Boolean).forEach(function(key) {
    config[key] = check_value('boolean', key);
  })

  keys_by_type(Number).forEach(function(key) {
    config[key] = check_value('number', key);
  })

  // populate http_opts with given TLS options
  tls_options.split(' ').forEach(function(key) {
    if (typeof options[key] != 'undefined') {
      config.http_opts[key] = options[key];
      if (typeof options.agent == 'undefined')
        config.http_opts.agent = false; // otherwise tls options are skipped
    }
  });

  //////////////////////////////////////////////////
  // headers, cookies

  for (var key in defaults.headers)
    config.headers[key] = defaults.headers[key];

  config.headers['accept'] = options.accept || defaults.accept;
  config.headers['user-agent'] = options.user_agent || defaults.user_agent;

  if (options.content_type)
    config.headers['content-type'] = options.content_type;

  // set connection header if opts.connection was passed, or if node < 0.11.4 (close)
  if (options.connection || close_by_default)
    config.headers['connection'] = options.connection || 'close';

  // advertise compression support only when zlib actually loaded above
  if ((options.compressed || defaults.compressed) && typeof zlib != 'undefined')
    config.headers['accept-encoding'] = decompressors['br'] ? 'gzip, deflate, br' : 'gzip, deflate';

  if (options.cookies)
    config.headers['cookie'] = cookies.write(options.cookies);

  //////////////////////////////////////////////////
  // basic/digest auth

  if (uri.match(/[^\/]@/)) { // url contains user:pass@host, so parse it.
    var parts = (url.parse(uri).auth || '').split(':');
    options.username = parts[0];
    options.password = parts[1];
  }

  if (options.username) {
    // 'auto'/'digest' auth defers credentials until the server challenges;
    // otherwise a basic Authorization header is sent right away.
    if (options.auth && (options.auth == 'auto' || options.auth == 'digest')) {
      config.credentials = [options.username, options.password];
    } else {
      config.headers['authorization'] = auth.basic(options.username, options.password);
    }
  }

  // if proxy is present, set auth header from either url or proxy_user option.
  if (config.proxy) {
    if (config.proxy.indexOf('http') === -1)
      config.proxy = 'http://' + config.proxy;

    if (config.proxy.indexOf('@') !== -1) {
      var proxy = (url.parse(config.proxy).auth || '').split(':');
      options.proxy_user = proxy[0];
      options.proxy_pass = proxy[1];
    }

    if (options.proxy_user)
      config.headers['proxy-authorization'] = auth.basic(options.proxy_user, options.proxy_pass);
  }

  // now that all our headers are set, overwrite them if instructed.
  for (var h in options.headers)
    config.headers[h.toLowerCase()] = options.headers[h];

  config.uri_modifier = get_option('uri_modifier', null);

  return config;
}
// Kicks off the request: builds the body (multipart, stream, raw buffer,
// querystring or JSON), then hands off to send_request(). Returns the
// PassThrough stream the response will be piped into.
Needle.prototype.start = function() {

  var out = new stream.PassThrough({ objectMode: false }),
      uri = this.uri,
      data = this.data,
      method = this.method,
      callback = (typeof this.options == 'function') ? this.options : this.callback,
      options = this.options || {};

  // if no 'http' is found on URL, prepend it.
  if (uri.indexOf('http') === -1)
    uri = uri.replace(/^(\/\/)?/, 'http://');

  var self = this, body, waiting = false, config = this.setup(uri, options);

  // unless options.json was set to false, assume boss also wants JSON if content-type matches.
  var json = options.json || (options.json !== false && config.headers['content-type'] == 'application/json');

  if (data) {

    if (options.multipart) { // boss says we do multipart. so we do it.
      var boundary = options.boundary || defaults.boundary;

      waiting = true; // body is built asynchronously; next() fires later
      multipart.build(data, boundary, function(err, parts) {
        if (err) throw(err);

        config.headers['content-type'] = 'multipart/form-data; boundary=' + boundary;
        next(parts);
      });

    } else if (is_stream(data)) {

      if (method == 'get')
        throw new Error('Refusing to pipe() a stream via GET. Did you mean .post?');

      if (config.stream_length > 0 || (config.stream_length === 0 && data.path)) {
        // ok, let's get the stream's length and set it as the content-length header.
        // this prevents some servers from cutting us off before all the data is sent.
        waiting = true;
        get_stream_length(data, config.stream_length, function(length) {
          data.length = length;
          next(data);
        })

      } else {
        // if the boss doesn't want us to get the stream's length, or if it doesn't
        // have a file descriptor for that purpose, then just head on.
        body = data;
      }

    } else if (Buffer.isBuffer(data)) {

      body = data; // use the raw buffer as request body.

    } else if (method == 'get' && !json) {

      // append the data to the URI as a querystring.
      uri = uri.replace(/\?.*|$/, '?' + stringify(data));

    } else { // string or object data, no multipart.

      // if string, leave it as it is, otherwise, stringify.
      body = (typeof(data) === 'string') ? data
           : json ? JSON.stringify(data) : stringify(data);

      // ensure we have a buffer so bytecount is correct.
      body = Buffer.from(body, config.encoding);
    }

  }

  // Finalizes content-length/content-type/accept headers and fires the request.
  function next(body) {
    if (body) {
      if (body.length) config.headers['content-length'] = body.length;

      // if no content-type was passed, determine if json or not.
      if (!config.headers['content-type']) {
        config.headers['content-type'] = json
          ? 'application/json; charset=utf-8'
          : 'application/x-www-form-urlencoded'; // no charset says W3 spec.
      }
    }

    // unless a specific accept header was set, assume json: true wants JSON back.
    if (options.json && (!options.accept && !(options.headers || {}).accept))
      config.headers['accept'] = 'application/json';

    self.send_request(1, method, uri, config, body, out, callback);
  }

  // async body builders (multipart / stream length) invoke next() themselves
  if (!waiting) next(body);
  return out;
}
// Builds the options object passed to http(s).request() from the request
// config. Note: mutates and returns config.http_opts.
Needle.prototype.get_request_opts = function(method, uri, config) {

  var opts   = config.http_opts,
      proxy  = config.proxy,
      remote = proxy ? url.parse(proxy) : url.parse(uri);

  opts.protocol = remote.protocol;
  opts.host     = remote.hostname;
  opts.port     = remote.port || (remote.protocol == 'https:' ? 443 : 80);
  opts.path     = proxy ? uri : remote.pathname + (remote.search || '');
  opts.method   = method;
  opts.headers  = config.headers;

  if (!opts.headers['host']) {
    // if using proxy, make sure the host header shows the final destination
    var target = proxy ? url.parse(uri) : remote;
    opts.headers['host'] = target.hostname;

    // and if a non standard port was passed, append it to the host header.
    // url.parse() returns the port as a *string*, so compare numerically --
    // otherwise '80'/'443' never match and standard ports would be appended.
    if (target.port && [80, 443].indexOf(Number(target.port)) === -1) {
      opts.headers['host'] += ':' + target.port;
    }
  }

  return opts;
}
// Decides whether a redirect to `location` should be followed, honoring
// the follow_if_same_* options. `original` is the current request's URI.
Needle.prototype.should_follow = function(location, config, original) {
  if (!location) return false;

  // returns true if location contains matching property (host or protocol)
  // NOTE(review): send_request passes `original` as the URI *string*; string
  // indexing like original['host'] would yield undefined there, making
  // location.indexOf(undefined) search for the literal text "undefined".
  // Confirm whether `original` was ever meant to be a parsed URL object.
  function matches(property) {
    // `var property` deliberately(?) shadows the parameter of the same name
    var property = original[property];
    return location.indexOf(property) !== -1;
  }

  // first, check whether the requested location is actually different from the original
  if (!config.follow_if_same_location && location === original)
    return false;

  if (config.follow_if_same_host && !matches('host'))
    return false; // host does not match, so not following

  if (config.follow_if_same_protocol && !matches('protocol'))
    return false; // protocol does not match, so not following

  return true;
}
// Performs request attempt #`count` (first call passes 1). Handles the
// open/response/read timeouts, redirects, digest-auth retries, response
// decompression/parsing, optional file output and body aggregation,
// emitting progress and results on the `out` stream.
Needle.prototype.send_request = function(count, method, uri, config, post_data, out, callback) {

  // allow the caller to rewrite the URI right before the request fires
  if (typeof config.uri_modifier === 'function') {
    var modified_uri = config.uri_modifier(uri);
    debug('Modifying request URI', uri + ' => ' + modified_uri);
    uri = modified_uri;
  }

  var request,
      timer,
      returned = 0,
      self = this,
      request_opts = this.get_request_opts(method, uri, config),
      protocol = request_opts.protocol == 'https:' ? https : http;

  // Finishes the job exactly once, via the callback or the 'done' event.
  function done(err, resp) {
    if (returned++ > 0)
      return debug('Already finished, stopping here.');

    if (timer) clearTimeout(timer);
    request.removeListener('error', had_error);
    out.done = true;

    if (callback)
      return callback(err, resp, resp ? resp.body : undefined);

    // NOTE: this event used to be called 'end', but the behaviour was confusing
    // when errors ocurred, because the stream would still emit an 'end' event.
    out.emit('done', err);

    // trigger the 'done' event on streams we're being piped to, if any
    var pipes = out._readableState.pipes || [];
    if (!pipes.forEach) pipes = [pipes];
    pipes.forEach(function(st) { st.emit('done', err); })
  }

  function had_error(err) {
    debug('Request error', err);
    out.emit('err', err);
    done(err || new Error('Unknown error when making request.'));
  }

  // (Re)arms the single shared timer used for the open/response/read phases.
  function set_timeout(type, milisecs) {
    if (timer) clearTimeout(timer);
    if (milisecs <= 0) return;

    timer = setTimeout(function() {
      out.emit('timeout', type);
      request.abort();
      // also invoke done() to terminate job on read_timeout
      if (type == 'read') done(new Error(type + ' timeout'));
    }, milisecs);
  }

  // handle errors on the underlying socket, that may be closed while writing
  // for an example case, see test/long_string_spec.js. we make sure this
  // scenario ocurred by verifying the socket's writable & destroyed states.
  function on_socket_end() {
    if (returned && !this.writable && this.destroyed === false) {
      this.destroy();
      had_error(new Error('Remote end closed socket abruptly.'))
    }
  }

  debug('Making request #' + count, request_opts);
  request = protocol.request(request_opts, function(resp) {

    var headers = resp.headers;
    debug('Got response', resp.statusCode, headers);
    out.emit('response', resp);

    set_timeout('read', config.read_timeout);

    // if we got cookies, parse them unless we were instructed not to. make sure to include any
    // cookies that might have been set on previous redirects.
    if (config.parse_cookies && (headers['set-cookie'] || config.previous_resp_cookies)) {
      resp.cookies = extend(config.previous_resp_cookies || {}, cookies.read(headers['set-cookie']));
      debug('Got cookies', resp.cookies);
    }

    // if redirect code is found, determine if we should follow it according to the given options.
    if (redirect_codes.indexOf(resp.statusCode) !== -1 && self.should_follow(headers.location, config, uri)) {
      // clear timer before following redirects to prevent unexpected setTimeout consequence
      clearTimeout(timer);

      if (count <= config.follow_max) {
        out.emit('redirect', headers.location);

        // unless 'follow_keep_method' is true, rewrite the request to GET before continuing.
        if (!config.follow_keep_method) {
          method = 'GET';
          post_data = null;
          delete config.headers['content-length']; // in case the original was a multipart POST request.
        }

        // if follow_set_cookies is true, insert cookies in the next request's headers.
        // we set both the original request cookies plus any response cookies we might have received.
        if (config.follow_set_cookies) {
          var request_cookies = cookies.read(config.headers['cookie']);
          config.previous_resp_cookies = resp.cookies;
          if (Object.keys(request_cookies).length || Object.keys(resp.cookies || {}).length) {
            config.headers['cookie'] = cookies.write(extend(request_cookies, resp.cookies));
          }
        } else if (config.headers['cookie']) {
          debug('Clearing original request cookie', config.headers['cookie']);
          delete config.headers['cookie'];
        }

        if (config.follow_set_referer)
          config.headers['referer'] = encodeURI(uri); // the original, not the destination URL.

        config.headers['host'] = null; // clear previous Host header to avoid conflicts.

        var redirect_url = resolve_url(headers.location, uri);
        debug('Redirecting to ' + redirect_url.toString());
        return self.send_request(++count, method, redirect_url.toString(), config, post_data, out, callback);
      } else if (config.follow_max > 0) {
        return done(new Error('Max redirects reached. Possible loop in: ' + headers.location));
      }
    }

    // if auth is requested and credentials were not passed, resend request, provided we have user/pass.
    if (resp.statusCode == 401 && headers['www-authenticate'] && config.credentials) {
      if (!config.headers['authorization']) { // only if authentication hasn't been sent
        var auth_header = auth.header(headers['www-authenticate'], config.credentials, request_opts);

        if (auth_header) {
          config.headers['authorization'] = auth_header;
          return self.send_request(count, method, uri, config, post_data, out, callback);
        }
      }
    }

    // ok, so we got a valid (non-redirect & authorized) response. let's notify the stream guys.
    out.emit('header', resp.statusCode, headers);
    out.emit('headers', headers);

    var pipeline = [],
        mime = parse_content_type(headers['content-type']),
        text_response = mime.type && (mime.type.indexOf('text/') != -1 || !!mime.type.match(/(\/|\+)(xml|json)$/));

    // To start, if our body is compressed and we're able to inflate it, do it.
    if (headers['content-encoding'] && decompressors[headers['content-encoding']]) {
      var decompressor = decompressors[headers['content-encoding']]();

      // make sure we catch errors triggered by the decompressor.
      decompressor.on('error', had_error);
      pipeline.push(decompressor);
    }

    // If parse is enabled and we have a parser for it, then go for it.
    if (config.parser && parsers[mime.type]) {
      // If a specific parser was requested, make sure we don't parse other types.
      var parser_name = config.parser.toString().toLowerCase();
      if (['xml', 'json'].indexOf(parser_name) == -1 || parsers[mime.type].name == parser_name) {

        // OK, so either we're parsing all content types or the one requested matches.
        out.parser = parsers[mime.type].name;
        pipeline.push(parsers[mime.type].fn());

        // Set objectMode on out stream to improve performance.
        out._writableState.objectMode = true;
        out._readableState.objectMode = true;
      }

    // If we're not parsing, and unless decoding was disabled, we'll try
    // decoding non UTF-8 bodies to UTF-8, using the iconv-lite library.
    } else if (text_response && config.decode_response && mime.charset) {
      pipeline.push(decoder(mime.charset));
    }

    // And `out` is the stream we finally push the decoded/parsed output to.
    pipeline.push(out);

    // Now, release the kraken!
    pump_streams([resp].concat(pipeline), function(err) {
      if (err) debug(err)

      // on node v8.x, if an error ocurrs on the receiving end,
      // then we want to abort the request to avoid having dangling sockets
      if (err && err.message == 'write after end') request.destroy();
    });

    // If the user has requested and output file, pipe the output stream to it.
    // In stream mode, we will still get the response stream to play with.
    if (config.output && resp.statusCode == 200) {

      // for some reason, simply piping resp to the writable stream doesn't
      // work all the time (stream gets cut in the middle with no warning).
      // so we'll manually need to do the readable/write(chunk) trick.
      var file = fs.createWriteStream(config.output);
      file.on('error', had_error);

      out.on('end', function() {
        if (file.writable) file.end();
      });

      file.on('close', function() {
        delete out.file;
      })

      out.on('readable', function() {
        var chunk;
        while ((chunk = this.read()) !== null) {
          if (file.writable) file.write(chunk);

          // if callback was requested, also push it to resp.body
          if (resp.body) resp.body.push(chunk);
        }
      })

      out.file = file;
    }

    // Only aggregate the full body if a callback was requested.
    if (callback) {
      resp.raw = [];
      resp.body = [];
      resp.bytes = 0;

      // Gather and count the amount of (raw) bytes using a PassThrough stream.
      var clean_pipe = new stream.PassThrough();

      clean_pipe.on('readable', function() {
        var chunk;
        while ((chunk = this.read()) != null) {
          resp.bytes += chunk.length;
          resp.raw.push(chunk);
        }
      })

      pump_streams([resp, clean_pipe], function(err) {
        if (err) debug(err);
      });

      // Listen on the 'readable' event to aggregate the chunks, but only if
      // file output wasn't requested. Otherwise we'd have two stream readers.
      if (!config.output || resp.statusCode != 200) {
        out.on('readable', function() {
          var chunk;
          while ((chunk = this.read()) !== null) {
            // We're either pushing buffers or objects, never strings.
            if (typeof chunk == 'string') chunk = Buffer.from(chunk);

            // Push all chunks to resp.body. We'll bind them in resp.end().
            resp.body.push(chunk);
          }
        })
      }
    }

    // And set the .body property once all data is in.
    out.on('end', function() {
      if (resp.body) { // callback mode

        // we want to be able to access to the raw data later, so keep a reference.
        resp.raw = Buffer.concat(resp.raw);

        // if parse was successful, we should have an array with one object
        if (resp.body[0] !== undefined && !Buffer.isBuffer(resp.body[0])) {

          // that's our body right there.
          resp.body = resp.body[0];

          // set the parser property on our response. we may want to check.
          if (out.parser) resp.parser = out.parser;

        } else { // we got one or several buffers. string or binary.
          resp.body = Buffer.concat(resp.body);

          // if we're here and parsed is true, it means we tried to but it didn't work.
          // so given that we got a text response, let's stringify it.
          if (text_response || out.parser) {
            resp.body = resp.body.toString();
          }
        }
      }

      // if an output file is being written to, make sure the callback
      // is triggered after all data has been written to it.
      if (out.file) {
        out.file.on('close', function() {
          done(null, resp);
        })
      } else { // elvis has left the building.
        done(null, resp);
      }
    });

    // out.on('error', function(err) {
    //   had_error(err);
    //   if (err.code == 'ERR_STREAM_DESTROYED' || err.code == 'ERR_STREAM_PREMATURE_CLOSE') {
    //     request.abort();
    //   }
    // })

  }); // end request call

  // unless open_timeout was disabled, set a timeout to abort the request.
  set_timeout('open', config.open_timeout);

  // handle errors on the request object. things might get bumpy.
  request.on('error', had_error);

  // make sure timer is cleared if request is aborted (issue #257)
  request.once('abort', function() {
    if (timer) clearTimeout(timer);
  })

  // handle socket 'end' event to ensure we don't get delayed EPIPE errors.
  request.once('socket', function(socket) {
    if (socket.connecting) {
      socket.once('connect', function() {
        set_timeout('response', config.response_timeout);
      })
    } else {
      set_timeout('response', config.response_timeout);
    }

    // socket.once('close', function(e) {
    //   console.log('socket closed!', e);
    // })

    if (!socket.on_socket_end) {
      socket.on_socket_end = on_socket_end;
      socket.once('end', function() { process.nextTick(on_socket_end.bind(socket)) });
    }
  })

  // finally, write the request body (piping if it's a stream) and fire away
  if (post_data) {
    if (is_stream(post_data)) {
      pump_streams([post_data, request], function(err) {
        if (err) debug(err);
      });
    } else {
      request.write(post_data, config.encoding);
      request.end();
    }
  } else {
    request.end();
  }

  out.abort = function() { request.abort() }; // easier access
  out.request = request;
  return out;
}
//////////////////////////////////////////
// exports
if (typeof Promise !== 'undefined') {
module.exports = function() {
var verb, args = [].slice.call(arguments);
if (args[0].match(/\.|\//)) // first argument looks like a URL
verb = (args.length > 2) ? 'post' : 'get';
else
verb = args.shift();
if (verb.match(/get|head/i) && args.length == 2)
args.splice(1, 0, null); // assume no data if head/get with two args (url, options)
return new Promise(function(resolve, reject) {
module.exports.request(verb, args[0], args[1], args[2], function(err, resp) {
return err ? reject(err) : resolve(resp);
});
})
}
}
module.exports.version = version;

// Overrides the library-wide default options. Validates that each key is a
// known option (aliases allowed) and -- except for parse_response/proxy,
// which may switch between null/boolean and string -- that the new value's
// type matches the original default's type.
module.exports.defaults = function(obj) {
  for (var key in obj) {
    var target_key = aliased.options[key] || key;

    if (!defaults.hasOwnProperty(target_key) || typeof obj[key] == 'undefined')
      throw new Error('Invalid property for defaults:' + target_key);

    if (target_key != 'parse_response' && target_key != 'proxy') {
      // ensure type matches the original, except for proxy/parse_response that can be null/bool or string
      var valid_type = defaults[target_key].constructor.name;

      if (obj[key].constructor.name != valid_type)
        throw new TypeError('Invalid type for ' + key + ', should be ' + valid_type);
    }

    defaults[target_key] = obj[key];
  }

  return defaults;
}
// head/get take no request body; post/put/patch/delete do.
['head', 'get'].forEach(function(method) {
  module.exports[method] = function(uri, options, callback) {
    return new Needle(method, uri, null, options, callback).start();
  }
});

['post', 'put', 'patch', 'delete'].forEach(function(method) {
  module.exports[method] = function(uri, data, options, callback) {
    return new Needle(method, uri, data, options, callback).start();
  }
});

// generic entry point with the verb passed explicitly.
module.exports.request = function(method, uri, data, opts, callback) {
  return new Needle(method, uri, data, opts, callback).start();
};
/***/ }),
/***/ 50722:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
//////////////////////////////////////////
// Defines mappings between content-type
// and the appropriate parsers.
//////////////////////////////////////////
var Transform = __webpack_require__(92413).Transform;
var sax = __webpack_require__(36099);
// Parses an XML string into a plain-object tree using sax in strict mode
// (tags lowercased, text trimmed). Each node has the shape:
//   { name, value, attributes, children }
// cb(err, root) is invoked exactly once, on error or on end.
function parseXML(str, cb) {
  var root, cursor;
  var parser = sax.parser(true, { trim: true, lowercase: true })

  parser.onerror = parser.onend = finish;

  function finish(err) {
    // neutralize the handlers so cb cannot fire a second time
    parser.onerror = parser.onend = function() { }
    cb(err, root)
  }

  function blankNode(name, attributes) {
    return {
      name: name || '',
      value: '',
      attributes: attributes || {},
      children: []
    }
  }

  // text and CDATA both accumulate into the current node's value
  parser.oncdata = parser.ontext = function(text) {
    if (cursor) cursor.value += text
  }

  parser.onopentag = function(tag) {
    var element = blankNode(tag.name, tag.attributes)
    if (!cursor) {
      // first element seen becomes the root
      root = element
    } else {
      // temporary back-pointer, removed again in onclosetag
      element.parent = cursor
      cursor.children.push(element)
    }
    cursor = element
  };

  parser.onclosetag = function() {
    if (typeof cursor.parent !== 'undefined') {
      var finished = cursor
      cursor = cursor.parent
      delete finished.parent
    }
  }

  parser.write(str).close()
}
// Wraps a buffer-parsing function `fn(data, cb)` into a named factory of
// object-mode Transform streams. The stream buffers the entire body, runs
// the parser on flush, and falls back to the raw data if parsing fails.
function parserFactory(name, fn) {

  function parser() {
    var chunks = [];
    var stream = new Transform({ objectMode: true });

    // buffer every incoming chunk until the body is complete
    stream._transform = function(chunk, encoding, done) {
      chunks.push(chunk);
      done();
    }

    // then parse the whole thing in one go
    stream._flush = function(done) {
      var self = this;
      var data = Buffer.concat(chunks);

      try {
        fn(data, function(err, result) {
          if (err) throw err;
          self.push(result);
        });
      } catch (err) {
        // parsing failed: pass the original data through untouched
        self.push(data);
      } finally {
        done();
      }
    }

    return stream;
  }

  return { fn: parser, name: name };
}
// Registry of parser factories, keyed by content-type.
var parsers = {}

// Registers the buffer-parsing function `fn` under every content-type in `types`.
function buildParser(name, types, fn) {
  var parser = parserFactory(name, fn);
  types.forEach(function(type) {
    parsers[type] = parser;
  })
}

// JSON (and JSON-ish) content types
buildParser('json', [
  'application/json',
  'text/javascript',
  'application/vnd.api+json'
], function(buffer, cb) {
  var err, data;
  try { data = JSON.parse(buffer); } catch (e) { err = e; }
  cb(err, data);
});

// XML content types
buildParser('xml', [
  'text/xml',
  'application/xml',
  'application/rdf+xml',
  'application/rss+xml',
  'application/atom+xml'
], function(buffer, cb) {
  parseXML(buffer.toString(), function(err, obj) {
    cb(err, obj)
  })
});

module.exports = parsers;
// consumers can register custom parsers via .use(name, types, fn)
module.exports.use = buildParser;
/***/ }),
/***/ 28280:
/***/ ((__unused_webpack_module, exports) => {
// based on the qs module, but handles null objects as expected
// fixes by Tomas Pollak.
// Keep a direct reference for reliable [[Class]] tag checks.
var toString = Object.prototype.toString;

// Serializes a value into an application/x-www-form-urlencoded string.
// Arrays become repeated `prefix[]=` pairs, nested objects become
// `prefix[key]=` pairs, Dates serialize as ISO strings, and null/undefined
// under a prefix serialize as a bare `prefix=`.
function stringify(obj, prefix) {
  var tag = toString.call(obj);

  if (prefix && (obj === null || typeof obj == 'undefined')) {
    return prefix + '=';
  } else if (tag == '[object Array]') {
    return stringifyArray(obj, prefix);
  } else if (tag == '[object Object]') {
    return stringifyObject(obj, prefix);
  } else if (tag == '[object Date]') {
    return obj.toISOString();
  } else if (prefix) { // string inside array or hash
    return prefix + '=' + encodeURIComponent(String(obj));
  } else if (String(obj).indexOf('=') !== -1) { // bare "key=value" string
    return String(obj);
  } else {
    throw new TypeError('Cannot build a querystring out of: ' + obj);
  }
};

function stringifyArray(arr, prefix) {
  var pairs = [];
  for (var i = 0, len = arr.length; i < len; i++) {
    pairs.push(prefix ? stringify(arr[i], prefix + '[]') : stringify(arr[i]));
  }
  return pairs.join('&');
}

function stringifyObject(obj, prefix) {
  var pairs = [];
  Object.keys(obj).forEach(function(key) {
    var child_prefix = prefix
      ? prefix + '[' + encodeURIComponent(key) + ']'
      : encodeURIComponent(key);
    pairs.push(stringify(obj[key], child_prefix));
  })
  return pairs.join('&');
}
exports.J = stringify;
/***/ }),
/***/ 49503:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
const AggregateError = __webpack_require__(56455);
// p-map: maps `iterable` through an async `mapper` with a concurrency
// limit. Resolves with results in input order. With stopOnError=false,
// failures are collected and the promise rejects with an AggregateError
// once the whole iterable has been processed.
module.exports = async (
  iterable,
  mapper,
  {
    concurrency = Infinity,
    stopOnError = true
  } = {}
) => {
  return new Promise((resolve, reject) => {
    if (typeof mapper !== 'function') {
      throw new TypeError('Mapper function is required');
    }

    if (!(typeof concurrency === 'number' && concurrency >= 1)) {
      throw new TypeError(`Expected \`concurrency\` to be a number from 1 and up, got \`${concurrency}\` (${typeof concurrency})`);
    }

    const ret = [];         // results, indexed by input position
    const errors = [];      // collected failures (stopOnError=false mode)
    const iterator = iterable[Symbol.iterator]();
    let isRejected = false;
    let isIterableDone = false;
    let resolvingCount = 0; // mapper calls currently in flight
    let currentIndex = 0;

    const next = () => {
      if (isRejected) {
        return;
      }

      const nextItem = iterator.next();
      const i = currentIndex;
      currentIndex++;

      if (nextItem.done) {
        isIterableDone = true;

        // settle only once the last in-flight mapper has finished
        if (resolvingCount === 0) {
          if (!stopOnError && errors.length !== 0) {
            reject(new AggregateError(errors));
          } else {
            resolve(ret);
          }
        }

        return;
      }

      resolvingCount++;

      (async () => {
        try {
          const element = await nextItem.value;
          ret[i] = await mapper(element, i);
          resolvingCount--;
          next();
        } catch (error) {
          if (stopOnError) {
            isRejected = true;
            reject(error);
          } else {
            errors.push(error);
            resolvingCount--;
            next();
          }
        }
      })();
    };

    // prime the pump with up to `concurrency` parallel workers
    for (let i = 0; i < concurrency; i++) {
      next();

      if (isIterableDone) {
        break;
      }
    }
  });
};
/***/ }),
/***/ 18920:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
// Re-export the parser error types from their individual modules.
var parse_error_1 = __webpack_require__(78558);
exports.ParseError = parse_error_1.ParseError;
var invalid_user_input_error_1 = __webpack_require__(95362);
exports.InvalidUserInputError = invalid_user_input_error_1.InvalidUserInputError;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 95362:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
// Error thrown when the caller supplies invalid arguments or file content.
class InvalidUserInputError extends Error {
    constructor(message) {
        super(message);
        this.name = 'InvalidUserInputError';
        // captureStackTrace is V8-specific; guard so the class also works
        // on non-V8 engines instead of throwing at construction time.
        if (Error.captureStackTrace) {
            Error.captureStackTrace(this, InvalidUserInputError);
        }
    }
}
exports.InvalidUserInputError = InvalidUserInputError;
//# sourceMappingURL=invalid-user-input-error.js.map
/***/ }),
/***/ 78558:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
// Error thrown when a lock/manifest file cannot be parsed.
class ParseError extends Error {
    constructor(message) {
        super(message);
        this.name = 'ParseError';
        // captureStackTrace is V8-specific; guard so the class also works
        // on non-V8 engines instead of throwing at construction time.
        if (Error.captureStackTrace) {
            Error.captureStackTrace(this, ParseError);
        }
    }
}
exports.ParseError = ParseError;
//# sourceMappingURL=parse-error.js.map
/***/ }),
/***/ 59794:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const fs = __webpack_require__(35747);
const _isEmpty = __webpack_require__(99245);
const path = __webpack_require__(85622);
const errors_1 = __webpack_require__(18920);
const file_parser_1 = __webpack_require__(64702);
const composer_parser_1 = __webpack_require__(70054);
// Builds a composer dependency tree from composer.lock and composer.json
// file contents. Throws InvalidUserInputError when the lock file lacks a
// `packages` list.
function buildDepTree(lockFileContent, manifestFileContent, defaultProjectName, systemVersions, includeDev = false) {
    const lockFileJson = file_parser_1.FileParser.parseLockFile(lockFileContent);
    const manifestJson = file_parser_1.FileParser.parseManifestFile(manifestFileContent);

    if (!lockFileJson.packages) {
        throw new errors_1.InvalidUserInputError('Invalid lock file. Must contain `packages` property');
    }

    // project name/version fall back to sensible defaults
    const name = manifestJson.name || defaultProjectName;
    const version = composer_parser_1.ComposerParser.getVersion(manifestJson) || '0.0.0';

    return {
        name,
        version,
        dependencies: composer_parser_1.ComposerParser.buildDependencies(manifestJson, lockFileJson, manifestJson, systemVersions, includeDev),
        hasDevDependencies: !_isEmpty(manifestJson['require-dev']),
        packageFormatVersion: 'composer:0.0.1',
    };
}
exports.buildDepTree = buildDepTree;
// Resolves and reads the lock and manifest files from disk, then delegates
// to buildDepTree(). Validates the required parameters and file existence.
function buildDepTreeFromFiles(basePath, lockFileName, systemVersions, includeDev = false) {
    if (!basePath) {
        throw new errors_1.InvalidUserInputError('Missing `basePath` parameter for buildDepTreeFromFiles()');
    }
    if (!lockFileName) {
        throw new errors_1.InvalidUserInputError('Missing `lockfile` parameter for buildDepTreeFromFiles()');
    }
    if (!systemVersions) {
        throw new errors_1.InvalidUserInputError('Missing `systemVersions` parameter for buildDepTreeFromFiles()');
    }

    const lockFilePath = path.resolve(basePath, lockFileName);
    // composer.json is expected to live next to the lock file
    const manifestFilePath = path.resolve(basePath, path.dirname(lockFilePath), 'composer.json');

    if (!fs.existsSync(lockFilePath)) {
        throw new errors_1.InvalidUserInputError(`Lockfile not found at location: ${lockFilePath}`);
    }
    if (!fs.existsSync(manifestFilePath)) {
        throw new errors_1.InvalidUserInputError(`Target file composer.json not found at location: ${manifestFilePath}`);
    }

    const lockFileContent = fs.readFileSync(lockFilePath, 'utf-8');
    const manifestFileContent = fs.readFileSync(manifestFilePath, 'utf-8');

    return buildDepTree(lockFileContent, manifestFileContent, getDefaultProjectName(basePath, lockFileName), systemVersions, includeDev);
}
exports.buildDepTreeFromFiles = buildDepTreeFromFiles;
// The default project name is the name of the directory holding the lockfile.
function getDefaultProjectName(basePath, lockFileName) {
    const lockFileDir = path.dirname(path.resolve(path.join(basePath, lockFileName)));
    return lockFileDir.split(path.sep).pop();
}
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 70054:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const _findKey = __webpack_require__(7495);
const _get = __webpack_require__(29208);
const _invert = __webpack_require__(6668);
const _isEmpty = __webpack_require__(99245);
const types_1 = __webpack_require__(69932);
const _ = {
get: _get,
isEmpty: _isEmpty,
invert: _invert,
findKey: _findKey,
};
/**
 * Parses composer.json / composer.lock structures into a dependency tree.
 * Relies on the module-level lodash facade `_` and the `types_1` Scope enum.
 */
class ComposerParser {
    /**
     * Resolve a version string for a composer package object.
     * Checks the optional `version` property against the optional
     * `extra['branch-alias']` map: when the found version is an alias (as is,
     * or with the 'dev-' prefix stripped) the alias mapping wins; otherwise
     * the raw version is used, falling back to the first alias key.
     */
    static getVersion(depObj) {
        // check for `version` property. may not exist
        const versionFound = _.get(depObj, 'version', '');
        // even if found, may be an alias, so check
        const availableAliases = _.get(depObj, "extra['branch-alias']", []);
        // if the version matches the alias (either as is, or without 'dev-'), use the aliases version.
        // otherwise, use the version as is, and if not, the first found alias.
        // (operator precedence: the `&&` binds tighter than the `||` chain.)
        return _.get(availableAliases, versionFound) ||
            _.get(_.invert(availableAliases), versionFound.replace('dev-', '')) &&
                versionFound.replace('dev-', '') ||
            versionFound ||
            _.findKey(_.invert(availableAliases), '0'); // first available alias
    }
    /**
     * Recursively build the dependency map for `depObj`.
     *
     * @param composerJsonObj - parsed composer.json (passed through recursion)
     * @param composerLockObj - parsed composer.lock; supplies locked versions
     * @param depObj - the package whose `require`/`require-dev` are expanded
     * @param systemPackages - platform package versions (php, extensions, ...)
     * @param includeDev - expand `require-dev` sections when true
     * @param isDevTree - marks this subtree's scope label as dev
     * @param depRecursiveArray - names on the current recursion path (cycle guard)
     * @param packageRefCount - shared per-name expansion counter (explosion guard)
     * @returns map of dependency name -> { name, version, dependencies, labels }
     */
    static buildDependencies(composerJsonObj, composerLockObj, depObj, systemPackages, includeDev = false, isDevTree = false, depRecursiveArray = [], packageRefCount = {}) {
        const result = {};
        // find depObj properties
        const depName = _.get(depObj, 'name');
        const require = _.get(depObj, 'require', {});
        const requireDev = includeDev ? _.get(depObj, 'require-dev', {}) : {};
        // recursion tests
        const inRecursiveArray = depRecursiveArray.indexOf(depName) > -1;
        const exceedsMaxRepeats = packageRefCount[depName] >= this.MAX_PACKAGE_REPEATS;
        const hasNoDependencies = _.isEmpty(require) && _.isEmpty(requireDev);
        // break recursion when a cycle is detected, the package was already
        // expanded too many times, or there is nothing left to expand
        if (inRecursiveArray || exceedsMaxRepeats || hasNoDependencies) {
            return result;
        }
        // prevent circular dependencies
        depRecursiveArray.push(depName);
        // get locked packages (dev packages only when requested)
        const packages = _.get(composerLockObj, 'packages', []);
        const packagesDev = includeDev ? _.get(composerLockObj, 'packages-dev', []) : [];
        const allPackages = [
            ...packages,
            ...packagesDev,
        ];
        // parse require dependencies
        for (const name of Object.keys(require)) {
            let version = '';
            // lets find if this dependency has an object in composer.lock
            const lockedPackage = allPackages.find((dep) => dep.name === name);
            if (lockedPackage) {
                version = this.getVersion(lockedPackage);
            }
            else {
                // here we use the system version or composer json - not a locked version
                version = _.get(systemPackages, name) || _.get(require, name);
            }
            // remove any starting 'v' from version numbers
            version = version.replace(/^v(\d)/, '$1');
            // bump package reference count (or assign to 1 if we haven't seen this before)
            packageRefCount[name] = (packageRefCount[name] || 0) + 1;
            result[name] = {
                name,
                version,
                dependencies: this.buildDependencies(composerJsonObj, composerLockObj, lockedPackage, // undefined if transitive dependency
                systemPackages, includeDev, false, depRecursiveArray, packageRefCount),
                labels: {
                    scope: isDevTree ? types_1.Scope.dev : types_1.Scope.prod,
                },
            };
        }
        // parse require-dev dependencies (their whole subtree is dev-scoped)
        for (const name of Object.keys(requireDev)) {
            let version = '';
            // lets find if this dependency has an object in composer.lock
            const lockedPackage = allPackages.find((dep) => dep.name === name);
            if (lockedPackage) {
                version = this.getVersion(lockedPackage);
            }
            else {
                // here we use the system version or composer json - not a locked version
                version = _.get(systemPackages, name) || _.get(requireDev, name);
            }
            // remove any starting 'v' from version numbers
            version = version.replace(/^v(\d)/, '$1');
            // bump package reference count (or assign to 1 if we haven't seen this before)
            packageRefCount[name] = (packageRefCount[name] || 0) + 1;
            result[name] = {
                name,
                version,
                dependencies: this.buildDependencies(composerJsonObj, composerLockObj, lockedPackage, // undefined if transitive dependency
                systemPackages, includeDev, true, depRecursiveArray, packageRefCount),
                labels: {
                    scope: types_1.Scope.dev,
                },
            };
        }
        // remove from recursive check
        depRecursiveArray.pop();
        // return dep tree
        return result;
    }
}
exports.ComposerParser = ComposerParser;
// After this threshold, a package node in the dep tree won't have expanded dependencies.
// This is a cheap protection against combinatorial explosion when there's N packages
// that depend on each other (producing N! branches of the dep tree).
// The value of 150 was chosen as a lowest one that doesn't break existing tests.
// Switching to dependency graph would render this trick obsolete.
ComposerParser.MAX_PACKAGE_REPEATS = 150;
//# sourceMappingURL=composer-parser.js.map
/***/ }),
/***/ 64702:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const errors_1 = __webpack_require__(18920);
/**
 * Thin JSON wrappers that convert parse failures into ParseError with a
 * message identifying which composer file was malformed.
 */
class FileParser {
    static parseLockFile(lockFileContent) {
        return FileParser.parseJsonFile(lockFileContent, 'lock');
    }
    static parseManifestFile(manifestFileContent) {
        return FileParser.parseJsonFile(manifestFileContent, 'manifest');
    }
    // Shared worker: parse `content` as JSON, labelling errors with `fileKind`.
    static parseJsonFile(content, fileKind) {
        try {
            return JSON.parse(content);
        }
        catch (e) {
            throw new errors_1.ParseError(`Failed to parse ${fileKind} file. Error: ${e.message}`);
        }
    }
}
exports.FileParser = FileParser;
//# sourceMappingURL=file-parser.js.map
/***/ }),
/***/ 69932:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
// Dependency scope labels attached to dep-tree nodes (compiled TS enum).
var Scope;
(function (Scope) {
    Scope.prod = "prod"; // regular "require" dependencies
    Scope.dev = "dev"; // "require-dev" dependencies
})(Scope = exports.Scope || (exports.Scope = {}));
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 81608:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DepGraphBuilder = void 0;
const graphlib = __webpack_require__(77947);
const dep_graph_1 = __webpack_require__(80918);
/**
 * Mutable builder for a dependency graph: register package instance nodes
 * with addPkgNode(), wire them with connectDep(), then call build() to get
 * an immutable DepGraphImpl.
 */
class DepGraphBuilder {
    constructor(pkgManager, rootPkg) {
        this._pkgs = {};
        this._pkgNodes = {};
        const backingGraph = new graphlib.Graph({
            directed: true,
            multigraph: false,
            compound: false,
        });
        // When no root package is supplied, use a synthetic placeholder.
        if (!rootPkg) {
            rootPkg = {
                name: '_root',
                version: '0.0.0',
            };
        }
        this._rootNodeId = 'root-node';
        this._rootPkgId = DepGraphBuilder._getPkgId(rootPkg);
        this._pkgs[this._rootPkgId] = rootPkg;
        backingGraph.setNode(this._rootNodeId, { pkgId: this._rootPkgId });
        this._pkgNodes[this._rootPkgId] = new Set([this._rootNodeId]);
        this._graph = backingGraph;
        this._pkgManager = pkgManager;
    }
    get rootNodeId() {
        return this._rootNodeId;
    }
    // Canonical "name@version" package id (empty version when absent).
    static _getPkgId(pkg) {
        return `${pkg.name}@${pkg.version || ''}`;
    }
    getPkgs() {
        return Object.keys(this._pkgs).map((pkgId) => this._pkgs[pkgId]);
    }
    // TODO: this can create disconnected nodes
    addPkgNode(pkgInfo, nodeId, nodeInfo) {
        if (nodeId === this._rootNodeId) {
            throw new Error('DepGraphBuilder.addPkgNode() cant override root node');
        }
        const pkgId = DepGraphBuilder._getPkgId(pkgInfo);
        this._pkgs[pkgId] = pkgInfo;
        if (!this._pkgNodes[pkgId]) {
            this._pkgNodes[pkgId] = new Set();
        }
        this._pkgNodes[pkgId].add(nodeId);
        this._graph.setNode(nodeId, { pkgId, info: nodeInfo });
        return this;
    }
    // TODO: this can create cycles
    connectDep(parentNodeId, depNodeId) {
        for (const [nodeId, label] of [[parentNodeId, 'parentNodeId'], [depNodeId, 'depNodeId']]) {
            if (!this._graph.hasNode(nodeId)) {
                throw new Error(`${label} does not exist`);
            }
        }
        this._graph.setEdge(parentNodeId, depNodeId);
        return this;
    }
    build() {
        return new dep_graph_1.DepGraphImpl(this._graph, this._rootNodeId, this._pkgs, this._pkgNodes, this._pkgManager);
    }
}
exports.DepGraphBuilder = DepGraphBuilder;
//# sourceMappingURL=builder.js.map
/***/ }),
/***/ 6139:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.createFromJSON = exports.SUPPORTED_SCHEMA_RANGE = void 0;
const semver = __webpack_require__(21663);
const graphlib = __webpack_require__(77947);
const errors_1 = __webpack_require__(79651);
const validate_graph_1 = __webpack_require__(80753);
const dep_graph_1 = __webpack_require__(80918);
exports.SUPPORTED_SCHEMA_RANGE = '^1.0.0';
/**
* Create a DepGraph instance from a JSON representation of a dep graph. This
* is typically used after passing the graph over the wire as `DepGraphData`.
*/
function createFromJSON(depGraphData) {
    validateDepGraphData(depGraphData);
    const graph = new graphlib.Graph({
        directed: true,
        multigraph: false,
        compound: false,
    });
    const pkgs = {};
    const pkgNodes = {};
    // Index package infos by id, normalizing an absent version to an explicit
    // `version: undefined` property.
    for (const pkg of depGraphData.pkgs) {
        const info = pkg.info;
        pkgs[pkg.id] = info.version ? info : Object.assign(Object.assign({}, info), { version: undefined });
    }
    // First pass: materialize every node and record which node ids instantiate
    // which package id.
    for (const node of depGraphData.graph.nodes) {
        const { nodeId, pkgId } = node;
        let instances = pkgNodes[pkgId];
        if (!instances) {
            instances = new Set();
            pkgNodes[pkgId] = instances;
        }
        instances.add(nodeId);
        graph.setNode(nodeId, { pkgId, info: node.info });
    }
    // Second pass: add the edges once all nodes exist.
    for (const node of depGraphData.graph.nodes) {
        for (const dep of node.deps) {
            graph.setEdge(node.nodeId, dep.nodeId);
        }
    }
    validate_graph_1.validateGraph(graph, depGraphData.graph.rootNodeId, pkgs, pkgNodes);
    return new dep_graph_1.DepGraphImpl(graph, depGraphData.graph.rootNodeId, pkgs, pkgNodes, depGraphData.pkgManager);
}
exports.createFromJSON = createFromJSON;
// Guard helper: raise ValidationError with `msg` when `condition` is falsy.
function assert(condition, msg) {
    if (condition) {
        return;
    }
    throw new errors_1.ValidationError(msg);
}
/**
 * Structural validation of a DepGraphData payload before it is turned into a
 * graph: schema version range, pkgManager presence, unique pkg/node ids, a
 * root node whose pkg exists, "name@version" pkg ids, and no dangling pkgIds.
 *
 * @throws ValidationError (via assert) on the first violated invariant
 */
function validateDepGraphData(depGraphData) {
    assert(!!semver.valid(depGraphData.schemaVersion) &&
        semver.satisfies(depGraphData.schemaVersion, exports.SUPPORTED_SCHEMA_RANGE), `dep-graph schemaVersion not in "${exports.SUPPORTED_SCHEMA_RANGE}"`);
    assert(depGraphData.pkgManager && !!depGraphData.pkgManager.name, '.pkgManager.name is missing');
    const pkgsMap = depGraphData.pkgs.reduce((acc, cur) => {
        assert(!(cur.id in acc), 'more than one pkg with same id');
        assert(!!cur.info, '.pkgs item missing .info');
        acc[cur.id] = cur.info;
        return acc;
    }, {});
    const nodesMap = depGraphData.graph.nodes.reduce((acc, cur) => {
        // FIX: message previously read "more than on node with same id".
        assert(!(cur.nodeId in acc), 'more than one node with same id');
        acc[cur.nodeId] = cur;
        return acc;
    }, {});
    const rootNodeId = depGraphData.graph.rootNodeId;
    // Assert presence before dereferencing the root node.
    assert(rootNodeId in nodesMap, `.${rootNodeId} root graph node is missing`);
    const rootNode = nodesMap[rootNodeId];
    const rootPkgId = rootNode.pkgId;
    assert(rootPkgId in pkgsMap, `.${rootPkgId} root pkg missing`);
    assert(nodesMap[rootNodeId].pkgId === rootPkgId, `the root node .pkgId should be "${rootPkgId}"`);
    const pkgIds = Object.keys(pkgsMap);
    // NOTE: this name@version check is very strict,
    // we can relax it later, it just makes things easier now
    assert(pkgIds.filter((pkgId) => pkgId !== dep_graph_1.DepGraphImpl.getPkgId(pkgsMap[pkgId]))
        .length === 0, 'pkgs ids should be name@version');
    assert(Object.values(nodesMap).filter((node) => !(node.pkgId in pkgsMap))
        .length === 0, 'some instance nodes belong to non-existing pkgIds');
    assert(Object.values(pkgsMap).filter((pkg) => !pkg.name)
        .length === 0, 'some .pkgs elements have no .name field');
}
//# sourceMappingURL=create-from-json.js.map
/***/ }),
/***/ 80918:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DepGraphImpl = void 0;
const _isEqual = __webpack_require__(72307);
const graphlib = __webpack_require__(77947);
const create_from_json_1 = __webpack_require__(6139);
/**
 * Immutable dependency-graph implementation backed by a graphlib.Graph.
 * Instances are produced by DepGraphBuilder.build() or createFromJSON().
 */
class DepGraphImpl {
    constructor(_graph, _rootNodeId, _pkgs, _pkgNodes, _pkgManager) {
        this._graph = _graph;
        this._rootNodeId = _rootNodeId;
        this._pkgs = _pkgs;
        this._pkgNodes = _pkgNodes;
        this._pkgManager = _pkgManager;
        // Memoizes countNodePathsToRoot() results per node id.
        this._countNodePathsToRootCache = new Map();
        this._rootPkgId = _graph.node(_rootNodeId).pkgId;
        this._pkgList = Object.values(_pkgs);
        this._depPkgsList = this._pkgList.filter((pkg) => pkg !== this.rootPkg);
    }
    // Canonical "name@version" package id (empty version when absent).
    static getPkgId(pkg) {
        return `${pkg.name}@${pkg.version || ''}`;
    }
    get pkgManager() {
        return this._pkgManager;
    }
    get rootPkg() {
        return this._pkgs[this._rootPkgId];
    }
    get rootNodeId() {
        return this._rootNodeId;
    }
    /**
     * Get all unique packages in the graph (including the root package)
     */
    getPkgs() {
        return this._pkgList;
    }
    /**
     * Get all unique packages in the graph (excluding the root package)
     */
    getDepPkgs() {
        return this._depPkgsList;
    }
    // Node-info objects of every instance node of `pkg`.
    getPkgNodes(pkg) {
        const pkgId = DepGraphImpl.getPkgId(pkg);
        const nodes = [];
        for (const nodeId of Array.from(this._pkgNodes[pkgId])) {
            const graphNode = this.getGraphNode(nodeId);
            nodes.push({
                info: graphNode.info || {},
            });
        }
        return nodes;
    }
    getNode(nodeId) {
        return this.getGraphNode(nodeId).info || {};
    }
    getNodePkg(nodeId) {
        return this._pkgs[this.getGraphNode(nodeId).pkgId];
    }
    getPkgNodeIds(pkg) {
        const pkgId = DepGraphImpl.getPkgId(pkg);
        if (!this._pkgs[pkgId]) {
            throw new Error(`no such pkg: ${pkgId}`);
        }
        return Array.from(this._pkgNodes[pkgId]);
    }
    getNodeDepsNodeIds(nodeId) {
        const deps = this._graph.successors(nodeId);
        if (!deps) {
            throw new Error(`no such node: ${nodeId}`);
        }
        return deps;
    }
    getNodeParentsNodeIds(nodeId) {
        const parents = this._graph.predecessors(nodeId);
        if (!parents) {
            throw new Error(`no such node: ${nodeId}`);
        }
        return parents;
    }
    hasCycles() {
        // `isAcyclic` is expensive, so memoize
        if (this._hasCycles === undefined) {
            this._hasCycles = !graphlib.alg.isAcyclic(this._graph);
        }
        return this._hasCycles;
    }
    // All package-info paths from any instance node of `pkg` up to the root.
    pkgPathsToRoot(pkg) {
        const pathsToRoot = [];
        for (const nodeId of this.getPkgNodeIds(pkg)) {
            const pathsFromNodeToRoot = this.pathsFromNodeToRoot(nodeId);
            for (const path of pathsFromNodeToRoot) {
                pathsToRoot.push(path);
            }
        }
        // note: sorting to get shorter paths first -
        // it's nicer - and better resembles older behaviour
        return pathsToRoot.sort((a, b) => a.length - b.length);
    }
    countPathsToRoot(pkg) {
        let count = 0;
        for (const nodeId of this.getPkgNodeIds(pkg)) {
            count += this.countNodePathsToRoot(nodeId);
        }
        return count;
    }
    equals(other, { compareRoot = true } = {}) {
        let otherDepGraph;
        if (other instanceof DepGraphImpl) {
            otherDepGraph = other;
        }
        else {
            // At runtime theoretically we can have multiple versions of
            // @snyk/dep-graph. If "other" is not an instance of the same class it is
            // safer to rebuild it from JSON.
            otherDepGraph = create_from_json_1.createFromJSON(other.toJSON());
        }
        // In theory, for the graphs created by standard means, `_.isEqual(this._data, otherDepGraph._data)`
        // should suffice, since node IDs will be generated in a predictable way.
        // However, there might be different versions of graph and inconsistencies
        // in the ordering of the arrays, so we perform a deep comparison.
        return this.nodeEquals(this, this.rootNodeId, otherDepGraph, otherDepGraph.rootNodeId, compareRoot);
    }
    // Direct dependencies of the root whose subtree contains `pkg`.
    directDepsLeadingTo(pkg) {
        const pkgNodes = this.getPkgNodeIds(pkg);
        const directDeps = this.getNodeDepsNodeIds(this.rootNodeId);
        const nodes = directDeps.filter((directDep) => {
            const reachableNodes = graphlib.alg.postorder(this._graph, [directDep]);
            return reachableNodes.filter((node) => pkgNodes.includes(node)).length;
        });
        return nodes.map((node) => this.getNodePkg(node));
    }
    /**
     * Create a JSON representation of a dep graph. This is typically used to
     * send the dep graph over the wire
     */
    toJSON() {
        const nodeIds = this._graph.nodes();
        const nodes = nodeIds.reduce((acc, nodeId) => {
            const deps = (this._graph.successors(nodeId) || []).map((depNodeId) => ({
                nodeId: depNodeId,
            }));
            const node = this._graph.node(nodeId);
            const elem = {
                nodeId,
                pkgId: node.pkgId,
                deps,
            };
            // `info` is omitted from the payload when empty.
            if (node.info && Object.keys(node.info).length > 0) {
                elem.info = node.info;
            }
            acc.push(elem);
            return acc;
        }, []);
        const pkgs = Object.keys(this._pkgs).map((pkgId) => ({
            id: pkgId,
            info: this._pkgs[pkgId],
        }));
        return {
            schemaVersion: DepGraphImpl.SCHEMA_VERSION,
            pkgManager: this._pkgManager,
            pkgs,
            graph: {
                rootNodeId: this._rootNodeId,
                nodes,
            },
        };
    }
    // Recursive structural equality of two (sub)graphs rooted at the given
    // nodes; `traversedPairs` remembers visited (A,B) node pairs so cyclic
    // graphs terminate.
    nodeEquals(graphA, nodeIdA, graphB, nodeIdB, compareRoot, traversedPairs = new Set()) {
        // Skip root nodes comparison if needed.
        if (compareRoot ||
            (nodeIdA !== graphA.rootNodeId && nodeIdB !== graphB.rootNodeId)) {
            const pkgA = graphA.getNodePkg(nodeIdA);
            const pkgB = graphB.getNodePkg(nodeIdB);
            // Compare PkgInfo (name and version).
            if (!_isEqual(pkgA, pkgB)) {
                return false;
            }
            const infoA = graphA.getNode(nodeIdA);
            const infoB = graphB.getNode(nodeIdB);
            // Compare NodeInfo (VersionProvenance and labels).
            if (!_isEqual(infoA, infoB)) {
                return false;
            }
        }
        let depsA = graphA.getNodeDepsNodeIds(nodeIdA);
        let depsB = graphB.getNodeDepsNodeIds(nodeIdB);
        // Number of dependencies should be the same.
        if (depsA.length !== depsB.length) {
            return false;
        }
        // Sort dependencies by name@version string.
        const sortFn = (graph) => (idA, idB) => {
            const pkgA = graph.getNodePkg(idA);
            const pkgB = graph.getNodePkg(idB);
            return DepGraphImpl.getPkgId(pkgA).localeCompare(DepGraphImpl.getPkgId(pkgB));
        };
        depsA = depsA.sort(sortFn(graphA));
        depsB = depsB.sort(sortFn(graphB));
        // Compare Each dependency recursively.
        for (let i = 0; i < depsA.length; i++) {
            const pairKey = `${depsA[i]}_${depsB[i]}`;
            // Prevent cycles.
            if (traversedPairs.has(pairKey)) {
                continue;
            }
            traversedPairs.add(pairKey);
            if (!this.nodeEquals(graphA, depsA[i], graphB, depsB[i], compareRoot, traversedPairs)) {
                return false;
            }
        }
        return true;
    }
    getGraphNode(nodeId) {
        const node = this._graph.node(nodeId);
        if (!node) {
            throw new Error(`no such node: ${nodeId}`);
        }
        return node;
    }
    // DFS upwards from `nodeId`, collecting every acyclic path to the root.
    pathsFromNodeToRoot(nodeId, ancestors = []) {
        const parentNodesIds = this.getNodeParentsNodeIds(nodeId);
        const pkgInfo = this.getNodePkg(nodeId);
        if (parentNodesIds.length === 0) {
            return [[pkgInfo]];
        }
        const allPaths = [];
        ancestors = ancestors.concat(nodeId);
        for (const id of parentNodesIds) {
            // Skip parents already on the current path to avoid looping forever.
            if (ancestors.includes(id))
                continue;
            const pathToRoot = this.pathsFromNodeToRoot(id, ancestors).map((path) => [pkgInfo].concat(path));
            for (const path of pathToRoot) {
                allPaths.push(path);
            }
        }
        return allPaths;
    }
    // Count acyclic paths from `nodeId` up to the root, memoized per node.
    countNodePathsToRoot(nodeId, ancestors = []) {
        if (ancestors.includes(nodeId)) {
            return 0;
        }
        if (this._countNodePathsToRootCache.has(nodeId)) {
            return this._countNodePathsToRootCache.get(nodeId) || 0;
        }
        const parentNodesIds = this.getNodeParentsNodeIds(nodeId);
        if (parentNodesIds.length === 0) {
            this._countNodePathsToRootCache.set(nodeId, 1);
            return 1;
        }
        ancestors = ancestors.concat(nodeId);
        const count = parentNodesIds.reduce((acc, parentNodeId) => {
            return acc + this.countNodePathsToRoot(parentNodeId, ancestors);
        }, 0);
        // NOTE(review): the cached value depends on the `ancestors` chain of the
        // first caller that reaches this node — on cyclic graphs later callers
        // with different chains get that memoized value; confirm acceptable.
        this._countNodePathsToRootCache.set(nodeId, count);
        return count;
    }
}
exports.DepGraphImpl = DepGraphImpl;
DepGraphImpl.SCHEMA_VERSION = '1.2.0';
//# sourceMappingURL=dep-graph.js.map
/***/ }),
/***/ 54519:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.CustomError = void 0;
/**
 * Base class for this bundle's domain errors; fixes up the prototype chain so
 * `instanceof` works on down-level (ES5) compilation targets.
 */
class CustomError extends Error {
    constructor(message) {
        super(message);
        // Re-point the prototype so `instanceof CustomError` holds even when
        // Error subclassing was transpiled.
        Object.setPrototypeOf(this, CustomError.prototype);
        // V8/Node-specific API: trims constructor frames from the stack.
        // NOTE(review): stack is captured before `name` is assigned below —
        // preserved as-is since reordering could change the stack header.
        Error.captureStackTrace(this, this.constructor);
        // Report the concrete subclass name (e.g. "ValidationError").
        this.name = this.constructor.name;
    }
}
exports.CustomError = CustomError;
//# sourceMappingURL=custom-error.js.map
/***/ }),
/***/ 79651:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.ValidationError = void 0;
const validation_error_1 = __webpack_require__(85151);
Object.defineProperty(exports, "ValidationError", ({ enumerable: true, get: function () { return validation_error_1.ValidationError; } }));
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 85151:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.ValidationError = void 0;
const custom_error_1 = __webpack_require__(54519);
/**
 * Error raised when dep-graph data fails structural validation.
 */
class ValidationError extends custom_error_1.CustomError {
    constructor(message) {
        super(message);
        // Restore the prototype chain for down-level `instanceof` checks.
        Object.setPrototypeOf(this, ValidationError.prototype);
    }
}
exports.ValidationError = ValidationError;
//# sourceMappingURL=validation-error.js.map
/***/ }),
/***/ 80753:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.validateGraph = void 0;
const graphlib = __webpack_require__(77947);
const errors_1 = __webpack_require__(79651);
// Guard helper: raise ValidationError with `msg` when `condition` is falsy.
function assert(condition, msg) {
    if (condition) {
        return;
    }
    throw new errors_1.ValidationError(msg);
}
/**
 * Sanity-check graph shape: the root has no parents, every node is reachable
 * from the root, and every pkg id has at least one instance node.
 * Throws ValidationError (via assert) on the first violated invariant.
 */
function validateGraph(graph, rootNodeId, pkgs, pkgNodes) {
    const rootParents = graph.predecessors(rootNodeId) || [];
    assert(rootParents.length === 0, `"${rootNodeId}" is not really the root`);
    const reachableFromRoot = graphlib.alg.postorder(graph, [rootNodeId]);
    const allNodeIds = graph.nodes();
    // Same sorted id lists <=> same node set.
    const sameNodeSet = JSON.stringify(allNodeIds.sort()) === JSON.stringify(reachableFromRoot.sort());
    assert(sameNodeSet, 'not all graph nodes are reachable from root');
    const orphanPkgIds = Object.keys(pkgs).filter((pkgId) => !pkgNodes[pkgId] || pkgNodes[pkgId].size === 0);
    assert(orphanPkgIds.length === 0, 'not all pkgs have instance nodes');
}
exports.validateGraph = validateGraph;
//# sourceMappingURL=validate-graph.js.map
/***/ }),
/***/ 25261:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.dfs = void 0;
const each = __webpack_require__(43590);
/*
* A helper that performs a pre- or post-order traversal on the input graph
* and returns the nodes in the order they were visited. If the graph is
* undirected then this algorithm will navigate using neighbors. If the graph
* is directed then this algorithm will navigate using successors.
*
* Order must be one of "pre" or "post".
*/
function dfs(g, vs, order) {
    // Accept either a single start node or a list of start nodes.
    const roots = Array.isArray(vs) ? vs : [vs];
    // Directed graphs walk successors; undirected graphs walk neighbors.
    const navigation = (g.isDirected() ? g.successors : g.neighbors).bind(g);
    const acc = [];
    const visited = {};
    for (const v of roots) {
        if (!g.hasNode(v)) {
            throw new Error('Graph does not have node: ' + v);
        }
        doDfs(g, v, order === 'post', visited, navigation, acc);
    }
    return acc;
}
exports.dfs = dfs;
// Recursive DFS worker shared by pre- and post-order traversal: records each
// node exactly once in `acc` — before its descendants for preorder, after
// them for postorder.
function doDfs(g, v, postorder, visited, navigation, acc) {
    if (v in visited) {
        return;
    }
    visited[v] = true;
    if (!postorder) {
        acc.push(v);
    }
    // `|| []` mirrors lodash each's tolerance of an undefined collection.
    for (const w of navigation(v) || []) {
        doDfs(g, w, postorder, visited, navigation, acc);
    }
    if (postorder) {
        acc.push(v);
    }
}
//# sourceMappingURL=dfs.js.map
/***/ }),
/***/ 40976:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.isAcyclic = void 0;
const topsort_1 = __webpack_require__(29865);
// A graph is acyclic iff a topological sort of it succeeds.
function isAcyclic(g) {
    try {
        topsort_1.topsort(g);
        return true;
    }
    catch (e) {
        // Only CycleException means "has a cycle"; anything else is a real error.
        if (e instanceof topsort_1.CycleException) {
            return false;
        }
        throw e;
    }
}
exports.isAcyclic = isAcyclic;
//# sourceMappingURL=is-acyclic.js.map
/***/ }),
/***/ 45300:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.postorder = void 0;
const dfs_1 = __webpack_require__(25261);
// Return the nodes of `g` reachable from `vs` in depth-first postorder
// (each node is emitted after all of its descendants).
function postorder(g, vs) {
    return dfs_1.dfs(g, vs, 'post');
}
exports.postorder = postorder;
//# sourceMappingURL=postorder.js.map
/***/ }),
/***/ 29865:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.CycleException = exports.topsort = void 0;
const each = __webpack_require__(43590);
const size = __webpack_require__(27946);
/**
 * Topologically sort `g` by walking predecessors up from its sinks.
 * Throws CycleException when the graph contains a cycle.
 */
function topsort(g) {
    const visited = {};
    const stack = {};
    const results = [];
    function visit(node) {
        // A node already on the recursion stack means we closed a cycle.
        if (node in stack) {
            throw new CycleException();
        }
        if (node in visited) {
            return;
        }
        stack[node] = true;
        visited[node] = true;
        // `|| []` mirrors lodash each's tolerance of an undefined collection.
        for (const pred of g.predecessors(node) || []) {
            visit(pred);
        }
        delete stack[node];
        results.push(node);
    }
    for (const sink of g.sinks()) {
        visit(sink);
    }
    // Nodes unreachable from any sink can only sit on a cycle.
    if (Object.keys(visited).length !== g.nodeCount()) {
        throw new CycleException();
    }
    return results;
}
exports.topsort = topsort;
// Thrown by topsort() when the graph contains at least one cycle.
class CycleException extends Error {
}
exports.CycleException = CycleException;
//# sourceMappingURL=topsort.js.map
/***/ }),
/***/ 69946:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Graph = void 0;
/* eslint-disable prefer-rest-params */
/* eslint-disable @typescript-eslint/no-this-alias */
const constant = __webpack_require__(4254);
const each = __webpack_require__(43590);
const _filter = __webpack_require__(43590);
const isEmpty = __webpack_require__(99245);
const isFunction = __webpack_require__(98423);
const isUndefined = __webpack_require__(28801);
const keys = __webpack_require__(44799);
const reduce = __webpack_require__(34766);
const union = __webpack_require__(96744);
const values = __webpack_require__(17720);
const DEFAULT_EDGE_NAME = '\x00';
const GRAPH_NODE = '\x00';
const EDGE_KEY_DELIM = '\x01';
// Implementation notes:
//
// * Node id query functions should return string ids for the nodes
// * Edge id query functions should return an "edgeObj", edge object, that is
// composed of enough information to uniquely identify an edge: {v, w, name}.
// * Internally we use an "edgeId", a stringified form of the edgeObj, to
// reference edges. This is because we need a performant way to look these
// edges up and, object properties, which have string keys, are the closest
// we're going to get to a performant hashtable in JavaScript.
/**
 * graphlib-style graph supporting directed/undirected, multigraph and
 * compound variants. Nodes are keyed by string ids; edges are tracked by an
 * internal "edgeId" string (see implementation notes above the class).
 *
 * FIX: sources(), sinks(), inEdges() and outEdges() previously used the
 * bundled `_filter` helper, which is required from the same module id as
 * `each` (lodash forEach — see the requires above) and therefore returns the
 * input collection UNFILTERED. They now use native Array#filter; every call
 * site operates on a plain array, so native filter is the intended behavior.
 */
class Graph {
    constructor(opts) {
        var _a, _b, _c;
        /* Number of nodes in the graph. Should only be changed by the implementation. */
        this._nodeCount = 0;
        /* Number of edges in the graph. Should only be changed by the implementation. */
        this._edgeCount = 0;
        this._isDirected = (_a = opts === null || opts === void 0 ? void 0 : opts.directed) !== null && _a !== void 0 ? _a : true;
        this._isMultigraph = (_b = opts === null || opts === void 0 ? void 0 : opts.multigraph) !== null && _b !== void 0 ? _b : false;
        this._isCompound = (_c = opts === null || opts === void 0 ? void 0 : opts.compound) !== null && _c !== void 0 ? _c : false;
        // Label for the graph itself
        this._label = undefined;
        // Defaults to be set when creating a new node
        this._defaultNodeLabelFn = constant(undefined);
        // Defaults to be set when creating a new edge
        this._defaultEdgeLabelFn = constant(undefined);
        // v -> label
        this._nodes = {};
        if (this._isCompound) {
            // v -> parent
            this._parent = {};
            // v -> children
            this._children = {};
            this._children[GRAPH_NODE] = {};
        }
        // v -> edgeObj
        this._in = {};
        // u -> v -> Number
        this._preds = {};
        // v -> edgeObj
        this._out = {};
        // v -> w -> Number
        this._sucs = {};
        // e -> edgeObj
        this._edgeObjs = {};
        // e -> label
        this._edgeLabels = {};
    }
    /* === Graph functions ========= */
    isDirected() {
        return this._isDirected;
    }
    isMultigraph() {
        return this._isMultigraph;
    }
    isCompound() {
        return this._isCompound;
    }
    setGraph(label) {
        this._label = label;
        return this;
    }
    graph() {
        return this._label;
    }
    /* === Node functions ========== */
    setDefaultNodeLabel(newDefault) {
        if (!isFunction(newDefault)) {
            newDefault = constant(newDefault);
        }
        this._defaultNodeLabelFn = newDefault;
        return this;
    }
    nodeCount() {
        return this._nodeCount;
    }
    nodes() {
        return keys(this._nodes);
    }
    // Nodes with no incoming edges. FIX: native filter (see class doc).
    sources() {
        return this.nodes().filter((v) => isEmpty(this._in[v]));
    }
    // Nodes with no outgoing edges. FIX: native filter (see class doc).
    sinks() {
        return this.nodes().filter((v) => isEmpty(this._out[v]));
    }
    setNodes(vs, value) {
        const args = arguments;
        const self = this;
        each(vs, function (v) {
            if (args.length > 1) {
                self.setNode(v, value);
            }
            else {
                self.setNode(v);
            }
        });
        return this;
    }
    setNode(v, value) {
        if (v in this._nodes) {
            if (arguments.length > 1) {
                this._nodes[v] = value;
            }
            return this;
        }
        this._nodes[v] = arguments.length > 1 ? value : this._defaultNodeLabelFn(v);
        if (this._isCompound) {
            this._parent[v] = GRAPH_NODE;
            this._children[v] = {};
            this._children[GRAPH_NODE][v] = true;
        }
        this._in[v] = {};
        this._preds[v] = {};
        this._out[v] = {};
        this._sucs[v] = {};
        ++this._nodeCount;
        return this;
    }
    node(v) {
        return this._nodes[v];
    }
    hasNode(v) {
        return v in this._nodes;
    }
    removeNode(v) {
        const self = this;
        if (v in this._nodes) {
            const removeEdge = function (e) {
                self.removeEdge(self._edgeObjs[e]);
            };
            delete this._nodes[v];
            if (this._isCompound) {
                this._removeFromParentsChildList(v);
                delete this._parent[v];
                each(this.children(v), function (child) {
                    self.setParent(child);
                });
                delete this._children[v];
            }
            each(keys(this._in[v]), removeEdge);
            delete this._in[v];
            delete this._preds[v];
            each(keys(this._out[v]), removeEdge);
            delete this._out[v];
            delete this._sucs[v];
            --this._nodeCount;
        }
        return this;
    }
    setParent(v, parent) {
        if (!this._isCompound) {
            throw new Error('Cannot set parent in a non-compound graph');
        }
        if (isUndefined(parent)) {
            parent = GRAPH_NODE;
        }
        else {
            // Coerce parent to string
            parent += '';
            // Walk up from the proposed parent to detect a containment cycle.
            for (let ancestor = parent; !isUndefined(ancestor); ancestor = this.parent(ancestor)) {
                if (ancestor === v) {
                    throw new Error('Setting ' +
                        parent +
                        ' as parent of ' +
                        v +
                        ' would create a cycle');
                }
            }
            this.setNode(parent);
        }
        this.setNode(v);
        this._removeFromParentsChildList(v);
        this._parent[v] = parent;
        this._children[parent][v] = true;
        return this;
    }
    _removeFromParentsChildList(v) {
        delete this._children[this._parent[v]][v];
    }
    parent(v) {
        if (this._isCompound) {
            const parent = this._parent[v];
            if (parent !== GRAPH_NODE) {
                return parent;
            }
        }
    }
    children(v) {
        if (isUndefined(v)) {
            v = GRAPH_NODE;
        }
        if (this._isCompound) {
            const children = this._children[v];
            if (children) {
                return keys(children);
            }
        }
        else if (v === GRAPH_NODE) {
            return this.nodes();
        }
        else if (this.hasNode(v)) {
            return [];
        }
    }
    predecessors(v) {
        const predsV = this._preds[v];
        if (predsV) {
            return keys(predsV);
        }
    }
    successors(v) {
        const sucsV = this._sucs[v];
        if (sucsV) {
            return keys(sucsV);
        }
    }
    neighbors(v) {
        const preds = this.predecessors(v);
        if (preds) {
            return union(preds, this.successors(v));
        }
    }
    isLeaf(v) {
        let neighbors;
        if (this.isDirected()) {
            neighbors = this.successors(v);
        }
        else {
            neighbors = this.neighbors(v);
        }
        return neighbors.length === 0;
    }
    filterNodes(filter) {
        const copy = new Graph({
            directed: this._isDirected,
            multigraph: this._isMultigraph,
            compound: this._isCompound,
        });
        copy.setGraph(this.graph());
        const self = this;
        // Keep the nodes accepted by the predicate...
        each(this._nodes, function (value, v) {
            if (filter(v)) {
                copy.setNode(v, value);
            }
        });
        // ...and the edges whose both endpoints survived.
        each(this._edgeObjs, function (e) {
            if (copy.hasNode(e.v) && copy.hasNode(e.w)) {
                copy.setEdge(e, self.edge(e));
            }
        });
        const parents = {};
        // Find the nearest surviving ancestor to re-parent compound children.
        function findParent(v) {
            const parent = self.parent(v);
            if (parent === undefined || copy.hasNode(parent)) {
                parents[v] = parent;
                return parent;
            }
            else if (parent in parents) {
                return parents[parent];
            }
            else {
                return findParent(parent);
            }
        }
        if (this._isCompound) {
            each(copy.nodes(), function (v) {
                copy.setParent(v, findParent(v));
            });
        }
        return copy;
    }
    /* === Edge functions ========== */
    setDefaultEdgeLabel(newDefault) {
        if (!isFunction(newDefault)) {
            newDefault = constant(newDefault);
        }
        this._defaultEdgeLabelFn = newDefault;
        return this;
    }
    edgeCount() {
        return this._edgeCount;
    }
    edges() {
        return values(this._edgeObjs);
    }
    setPath(vs, value) {
        const self = this;
        const args = arguments;
        reduce(vs, function (v, w) {
            if (args.length > 1) {
                self.setEdge(v, w, value);
            }
            else {
                self.setEdge(v, w);
            }
            return w;
        });
        return this;
    }
    // Accepts (v, w, [value], [name]) or ({v, w, name}, [value]).
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    setEdge(...args) {
        let v, w, name, value;
        let valueSpecified = false;
        const arg0 = arguments[0];
        if (typeof arg0 === 'object' && arg0 !== null && 'v' in arg0) {
            v = arg0.v;
            w = arg0.w;
            name = arg0.name;
            if (arguments.length === 2) {
                value = arguments[1];
                valueSpecified = true;
            }
        }
        else {
            v = arg0;
            w = arguments[1];
            name = arguments[3];
            if (arguments.length > 2) {
                value = arguments[2];
                valueSpecified = true;
            }
        }
        v = '' + v;
        w = '' + w;
        if (!isUndefined(name)) {
            name = '' + name;
        }
        const e = edgeArgsToId(this._isDirected, v, w, name);
        if (e in this._edgeLabels) {
            if (valueSpecified) {
                this._edgeLabels[e] = value;
            }
            return this;
        }
        if (!isUndefined(name) && !this._isMultigraph) {
            throw new Error('Cannot set a named edge when isMultigraph = false');
        }
        // It didn't exist, so we need to create it.
        // First ensure the nodes exist.
        this.setNode(v);
        this.setNode(w);
        this._edgeLabels[e] = valueSpecified
            ? value
            : this._defaultEdgeLabelFn(v, w, name);
        const edgeObj = edgeArgsToObj(this._isDirected, v, w, name);
        // Ensure we add undirected edges in a consistent way.
        v = edgeObj.v;
        w = edgeObj.w;
        Object.freeze(edgeObj);
        this._edgeObjs[e] = edgeObj;
        incrementOrInitEntry(this._preds[w], v);
        incrementOrInitEntry(this._sucs[v], w);
        this._in[w][e] = edgeObj;
        this._out[v][e] = edgeObj;
        this._edgeCount++;
        return this;
    }
    edge(v, w, name) {
        const e = arguments.length === 1
            ? edgeObjToId(this._isDirected, arguments[0])
            : edgeArgsToId(this._isDirected, v, w, name);
        return this._edgeLabels[e];
    }
    hasEdge(v, w, name) {
        const e = arguments.length === 1
            ? edgeObjToId(this._isDirected, arguments[0])
            : edgeArgsToId(this._isDirected, v, w, name);
        return e in this._edgeLabels;
    }
    removeEdge(v, w, name) {
        const e = arguments.length === 1
            ? edgeObjToId(this._isDirected, arguments[0])
            : edgeArgsToId(this._isDirected, v, w, name);
        const edge = this._edgeObjs[e];
        if (edge) {
            v = edge.v;
            w = edge.w;
            delete this._edgeLabels[e];
            delete this._edgeObjs[e];
            decrementOrRemoveEntry(this._preds[w], v);
            decrementOrRemoveEntry(this._sucs[v], w);
            delete this._in[w][e];
            delete this._out[v][e];
            this._edgeCount--;
        }
        return this;
    }
    // Incoming edges of v, optionally restricted to source u.
    // FIX: native filter (see class doc).
    inEdges(v, u) {
        const inV = this._in[v];
        if (inV) {
            const edges = values(inV);
            if (!u) {
                return edges;
            }
            return edges.filter((edge) => edge.v === u);
        }
    }
    // Outgoing edges of v, optionally restricted to target w.
    // FIX: native filter (see class doc).
    outEdges(v, w) {
        const outV = this._out[v];
        if (outV) {
            const edges = values(outV);
            if (!w) {
                return edges;
            }
            return edges.filter((edge) => edge.w === w);
        }
    }
    nodeEdges(v, w) {
        const inEdges = this.inEdges(v, w);
        if (inEdges) {
            return inEdges.concat(this.outEdges(v, w));
        }
    }
}
exports.Graph = Graph;
// Bumps the counter stored under `k`, creating it at 1 on first use.
function incrementOrInitEntry(map, k) {
  map[k] = (map[k] || 0) + 1;
}
// Decrements the counter stored under `k` and deletes the key once it
// reaches zero (or becomes NaN for a key that was never present).
function decrementOrRemoveEntry(map, k) {
  map[k] -= 1;
  if (!map[k]) {
    delete map[k];
  }
}
// Builds the internal string id for an edge. Undirected edges are
// normalized by sorting the endpoints so (a,b) and (b,a) collide.
function edgeArgsToId(isDirected, v_, w_, name) {
  let v = '' + v_;
  let w = '' + w_;
  if (!isDirected && v > w) {
    [v, w] = [w, v];
  }
  const edgeName = isUndefined(name) ? DEFAULT_EDGE_NAME : name;
  return v + EDGE_KEY_DELIM + w + EDGE_KEY_DELIM + edgeName;
}
// Builds the canonical edge object { v, w[, name] }. Undirected edges are
// normalized by sorting the endpoints; a falsy name is omitted entirely.
function edgeArgsToObj(isDirected, v_, w_, name) {
  let v = '' + v_;
  let w = '' + w_;
  if (!isDirected && v > w) {
    [v, w] = [w, v];
  }
  const edgeObj = { v, w };
  if (name) {
    edgeObj.name = name;
  }
  return edgeObj;
}
// Convenience wrapper: derives the internal edge id from an edge object.
function edgeObjToId(isDirected, edgeObj) {
return edgeArgsToId(isDirected, edgeObj.v, edgeObj.w, edgeObj.name);
}
//# sourceMappingURL=graph.js.map
/***/ }),
/***/ 77947:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
// Public surface of the bundled graphlib subset: the Graph class plus the
// two algorithms this bundle actually uses (isAcyclic, postorder).
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.alg = exports.Graph = void 0;
var graph_1 = __webpack_require__(69946);
// Re-export Graph via a live getter so the binding stays current.
Object.defineProperty(exports, "Graph", ({ enumerable: true, get: function () { return graph_1.Graph; } }));
const is_acyclic_1 = __webpack_require__(40976);
const postorder_1 = __webpack_require__(45300);
exports.alg = {
isAcyclic: is_acyclic_1.isAcyclic,
postorder: postorder_1.postorder,
};
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 71479:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
// Public surface of the dep-graph package: JSON deserialization, the
// builder, the error types, and the legacy dep-tree converters.
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.legacy = exports.Errors = exports.DepGraphBuilder = exports.createFromJSON = void 0;
var create_from_json_1 = __webpack_require__(6139);
Object.defineProperty(exports, "createFromJSON", ({ enumerable: true, get: function () { return create_from_json_1.createFromJSON; } }));
var builder_1 = __webpack_require__(81608);
Object.defineProperty(exports, "DepGraphBuilder", ({ enumerable: true, get: function () { return builder_1.DepGraphBuilder; } }));
const Errors = __webpack_require__(79651);
exports.Errors = Errors;
const legacy = __webpack_require__(67790);
exports.legacy = legacy;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 83422:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.partitionCycles = exports.getCycle = void 0;
/**
 * Returns the cycle formed when `nodeId` already appears among its
 * ancestors, i.e. the slice of `ancestors` from that first occurrence
 * to the end; returns null when there is no cycle.
 *
 * Improvement: the original called both `includes` and `indexOf`,
 * scanning the array twice; a single `indexOf` answers both questions.
 */
function getCycle(ancestors, nodeId) {
  const start = ancestors.indexOf(nodeId);
  if (start === -1) {
    return null;
  }
  // first item is where the cycle starts and ends.
  return ancestors.slice(start);
}
exports.getCycle = getCycle;
// Splits the cycles a node participates in into those that BEGIN at the
// node (cycle[0] === nodeId) and those that merely pass through it.
function partitionCycles(nodeId, allCyclesTheNodeIsPartOf) {
  const cyclesStartWithThisNode = [];
  const cyclesWithThisNode = [];
  for (const cycle of allCyclesTheNodeIsPartOf) {
    const bucket = cycle[0] === nodeId ? cyclesStartWithThisNode : cyclesWithThisNode;
    bucket.push(cycle);
  }
  return { cyclesStartWithThisNode, cyclesWithThisNode };
}
exports.partitionCycles = partitionCycles;
//# sourceMappingURL=cycles.js.map
/***/ }),
/***/ 67790:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.graphToDepTree = exports.depTreeToGraph = void 0;
const crypto = __webpack_require__(76417);
const event_loop_spinner_1 = __webpack_require__(77158);
const builder_1 = __webpack_require__(81608);
const objectHash = __webpack_require__(49807);
const cycles_1 = __webpack_require__(83422);
const memiozation_1 = __webpack_require__(98155);
// Attaches a label to a dep-tree node, lazily creating the labels bag.
function addLabel(dep, key, value) {
  dep.labels = dep.labels || {};
  dep.labels[key] = value;
}
/**
 * Converts a legacy dep tree into a DepGraph.
 *
 * @deprecated Don't use dep trees as an intermediate step, because they are
 * large structures, resulting in high memory usage and high CPU costs from
 * serializing / deserializing. Instead, create a graph directly with
 * {@link DepGraphBuilder}
 */
async function depTreeToGraph(depTree, pkgManagerName) {
  const rootPkg = {
    name: depTree.name,
    // Normalize falsy versions to undefined so the root pkg is consistent.
    version: depTree.version || undefined,
  };
  const pkgManagerInfo = { name: pkgManagerName };
  const { targetOS } = depTree;
  if (targetOS) {
    // Encode the OS distribution as a repository alias, e.g. "ubuntu:20.04".
    pkgManagerInfo.repositories = [
      { alias: `${targetOS.name}:${targetOS.version}` },
    ];
  }
  const builder = new builder_1.DepGraphBuilder(pkgManagerInfo, rootPkg);
  await buildGraph(builder, depTree, depTree.name, true);
  const depGraph = await builder.build();
  // Node ids carry a full subtree hash; shorten them for readability.
  return shortenNodeIds(depGraph);
}
exports.depTreeToGraph = depTreeToGraph;
// Recursively adds `depTree` and its dependencies to `builder`.
// Returns a sha1 hash identifying the subtree's content; identical subtrees
// are deduplicated via `memoizationMap` (keyed by dep-tree object identity).
async function buildGraph(builder, depTree, pkgName, isRoot = false, memoizationMap = new Map()) {
if (memoizationMap.has(depTree)) {
return memoizationMap.get(depTree);
}
// Node ids embed the subtree hash so equal pkg@version nodes with
// different subtrees stay distinct.
const getNodeId = (name, version, hashId) => `${name}@${version || ''}|${hashId}`;
const depNodesIds = [];
const hash = crypto.createHash('sha1');
// The hash covers this node's own metadata...
if (depTree.versionProvenance) {
hash.update(objectHash(depTree.versionProvenance));
}
if (depTree.labels) {
hash.update(objectHash(depTree.labels));
}
const deps = depTree.dependencies || {};
// filter-out invalid null deps (shouldn't happen - but did...)
const depNames = Object.keys(deps).filter((d) => !!deps[d]);
// ...plus each child's node id, visited in sorted order for determinism.
for (const depName of depNames.sort()) {
const dep = deps[depName];
const subtreeHash = await buildGraph(builder, dep, depName, false, memoizationMap);
const depPkg = {
name: depName,
version: dep.version,
};
const depNodeId = getNodeId(depPkg.name, depPkg.version, subtreeHash);
depNodesIds.push(depNodeId);
const nodeInfo = {};
if (dep.versionProvenance) {
nodeInfo.versionProvenance = dep.versionProvenance;
}
if (dep.labels) {
nodeInfo.labels = dep.labels;
}
builder.addPkgNode(depPkg, depNodeId, nodeInfo);
hash.update(depNodeId);
}
const treeHash = hash.digest('hex');
let pkgNodeId;
if (isRoot) {
pkgNodeId = builder.rootNodeId;
}
else {
// we don't assume depTree has a .name to support output of `npm list --json`
const pkg = {
name: pkgName,
version: depTree.version,
};
pkgNodeId = getNodeId(pkg.name, pkg.version, treeHash);
const nodeInfo = {};
if (depTree.versionProvenance) {
nodeInfo.versionProvenance = depTree.versionProvenance;
}
if (depTree.labels) {
nodeInfo.labels = depTree.labels;
}
builder.addPkgNode(pkg, pkgNodeId, nodeInfo);
}
for (const depNodeId of depNodesIds) {
builder.connectDep(pkgNodeId, depNodeId);
}
// Yield to the event loop periodically so huge trees don't block it.
if (depNodesIds.length > 0 && event_loop_spinner_1.eventLoopSpinner.isStarving()) {
await event_loop_spinner_1.eventLoopSpinner.spin();
}
memoizationMap.set(depTree, treeHash);
return treeHash;
}
// Rebuilds `depGraph` with the trailing "|<hash>" stripped from node ids.
// When a package maps to several nodes, a "|<ordinal>" suffix keeps the
// shortened ids unique. The root node keeps its original id.
async function shortenNodeIds(depGraph) {
const builder = new builder_1.DepGraphBuilder(depGraph.pkgManager, depGraph.rootPkg);
// old node id -> new (shortened) node id
const nodesMap = {};
// create nodes with shorter ids
for (const pkg of depGraph.getPkgs()) {
const nodeIds = depGraph.getPkgNodeIds(pkg);
for (let i = 0; i < nodeIds.length; i++) {
const nodeId = nodeIds[i];
if (nodeId === depGraph.rootNodeId) {
continue;
}
const nodeInfo = depGraph.getNode(nodeId);
let newNodeId;
if (nodeIds.length === 1) {
newNodeId = `${trimAfterLastSep(nodeId, '|')}`;
}
else {
// Multiple nodes share this pkg: disambiguate with a 1-based ordinal.
newNodeId = `${trimAfterLastSep(nodeId, '|')}|${i + 1}`;
}
nodesMap[nodeId] = newNodeId;
builder.addPkgNode(pkg, newNodeId, nodeInfo);
}
// Yield to the event loop periodically on large graphs.
if (event_loop_spinner_1.eventLoopSpinner.isStarving()) {
await event_loop_spinner_1.eventLoopSpinner.spin();
}
}
// connect nodes
for (const pkg of depGraph.getPkgs()) {
for (const nodeId of depGraph.getPkgNodeIds(pkg)) {
for (const depNodeId of depGraph.getNodeDepsNodeIds(nodeId)) {
// Fall back to the original id for nodes that were not renamed (root).
const parentNode = nodesMap[nodeId] || nodeId;
const childNode = nodesMap[depNodeId] || depNodeId;
builder.connectDep(parentNode, childNode);
}
}
if (event_loop_spinner_1.eventLoopSpinner.isStarving()) {
await event_loop_spinner_1.eventLoopSpinner.spin();
}
}
return builder.build();
}
/**
 * Converts a DepGraph back into a legacy dep tree.
 *
 * @deprecated Don't use dep trees. You should adapt your code to use graphs,
 * and enhance the dep-graph library if there is missing functionality from
 * the graph structure
 */
async function graphToDepTree(depGraphInterface, pkgType, opts = { deduplicateWithinTopLevelDeps: false }) {
  const depGraph = depGraphInterface;
  // null = dedup enabled but not yet inside a top-level dep's scope;
  // false = dedup disabled entirely.
  const dedupState = opts.deduplicateWithinTopLevelDeps ? null : false;
  const [depTree] = await buildSubtree(depGraph, depGraph.rootNodeId, dedupState);
  depTree.type = depGraph.pkgManager.name;
  depTree.packageFormatVersion = constructPackageFormatVersion(pkgType);
  const targetOS = constructTargetOS(depGraph);
  if (targetOS) {
    depTree.targetOS = targetOS;
  }
  return depTree;
}
exports.graphToDepTree = graphToDepTree;
// Maps a package type to the legacy "type:0.0.1" format marker;
// 'maven' is historically spelled 'mvn' in dep trees.
function constructPackageFormatVersion(pkgType) {
  const normalized = pkgType === 'maven' ? 'mvn' : pkgType;
  return `${normalized}:0.0.1`;
}
// Derives a { name, version } targetOS from the first repository alias
// (e.g. "ubuntu:20.04") for linux package managers; undefined otherwise.
// Throws when a linux pkgManager lacks the repository alias it needs.
function constructTargetOS(depGraph) {
  // Idiom: `includes` instead of `indexOf(...) === -1`.
  if (!['apk', 'apt', 'deb', 'rpm', 'linux'].includes(depGraph.pkgManager.name)) {
    // .targetOS is undefined unless its a linux pkgManager
    return;
  }
  const repositories = depGraph.pkgManager.repositories;
  if (!repositories || !repositories.length || !repositories[0].alias) {
    throw new Error('Incomplete .pkgManager, could not create .targetOS');
  }
  const [name, version] = repositories[0].alias.split(':');
  return { name, version };
}
// Recursively materializes the dep tree rooted at `nodeId`.
// Returns [depTree, cycles]: `cycles` lists cycles passing through (but not
// starting at) this node, so ancestors can decide what is safe to memoize.
async function buildSubtree(depGraph, nodeId, maybeDeduplicationSet = false, // false = disabled; null = not in deduplication scope yet
ancestors = [], memoizationMap = new Map()) {
// Memoization is only sound when deduplication is not rewriting subtrees.
if (!maybeDeduplicationSet) {
const memoizedDepTree = memiozation_1.getMemoizedDepTree(nodeId, ancestors, memoizationMap);
if (memoizedDepTree) {
return [memoizedDepTree, undefined];
}
}
const isRoot = nodeId === depGraph.rootNodeId;
const nodePkg = depGraph.getNodePkg(nodeId);
const nodeInfo = depGraph.getNode(nodeId);
const depTree = {};
depTree.name = nodePkg.name;
depTree.version = nodePkg.version;
if (nodeInfo.versionProvenance) {
depTree.versionProvenance = nodeInfo.versionProvenance;
}
if (nodeInfo.labels) {
// Copy so later addLabel calls don't mutate the graph's label object.
depTree.labels = Object.assign({}, nodeInfo.labels);
}
const depInstanceIds = depGraph.getNodeDepsNodeIds(nodeId);
// Leaf node: safe to memoize unconditionally.
if (!depInstanceIds || depInstanceIds.length === 0) {
memoizationMap.set(nodeId, { depTree });
return [depTree, undefined];
}
const cycle = cycles_1.getCycle(ancestors, nodeId);
if (cycle) {
// This node starts a cycle and now it's the second visit.
addLabel(depTree, 'pruned', 'cyclic');
return [depTree, [cycle]];
}
if (maybeDeduplicationSet) {
// Already emitted within this top-level dep's scope: prune the subtree.
if (maybeDeduplicationSet.has(nodeId)) {
if (depInstanceIds.length > 0) {
addLabel(depTree, 'pruned', 'true');
}
return [depTree, undefined];
}
maybeDeduplicationSet.add(nodeId);
}
const cycles = [];
for (const depInstId of depInstanceIds) {
// Deduplication of nodes occurs only within a scope of a top-level dependency.
// Therefore, every top-level dep gets an independent set to track duplicates.
if (isRoot && maybeDeduplicationSet !== false) {
maybeDeduplicationSet = new Set();
}
const [subtree, subtreeCycles] = await buildSubtree(depGraph, depInstId, maybeDeduplicationSet, ancestors.concat(nodeId), memoizationMap);
if (subtreeCycles) {
for (const cycle of subtreeCycles) {
cycles.push(cycle);
}
}
if (!subtree) {
continue;
}
if (!depTree.dependencies) {
depTree.dependencies = {};
}
depTree.dependencies[subtree.name] = subtree;
}
// Yield to the event loop periodically on large graphs.
if (event_loop_spinner_1.eventLoopSpinner.isStarving()) {
await event_loop_spinner_1.eventLoopSpinner.spin();
}
const partitionedCycles = cycles_1.partitionCycles(nodeId, cycles);
memiozation_1.memoize(nodeId, memoizationMap, depTree, partitionedCycles);
return [depTree, partitionedCycles.cyclesWithThisNode];
}
// Drops everything from the last occurrence of `sep` onward.
// NOTE(review): when `sep` is absent, lastIndexOf yields -1 and slice(0, -1)
// silently drops the final character — callers here always pass node ids
// containing '|', so this path should not occur in practice.
function trimAfterLastSep(str, sep) {
  const cut = str.lastIndexOf(sep);
  return str.slice(0, cut);
}
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 98155:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getMemoizedDepTree = exports.memoize = void 0;
// Records `depTree` in the memoization map when it is safe to reuse.
// Nodes that START cycles are stored with the full set of node ids from
// ALL of those cycles, so lookups can refuse reuse when an ancestor is
// part of a cycle; nodes merely inside a cycle are not memoized.
function memoize(nodeId, memoizationMap, depTree, partitionedCycles) {
  const { cyclesStartWithThisNode, cyclesWithThisNode } = partitionedCycles;
  if (cyclesStartWithThisNode.length > 0) {
    // BUG FIX: `new Set(...cyclesStartWithThisNode)` passed the FIRST cycle
    // as the iterable and silently ignored the rest (the Set constructor
    // takes a single iterable argument). Flatten all cycles instead so
    // every participating node id is recorded.
    const cycleNodeIds = new Set([].concat(...cyclesStartWithThisNode));
    memoizationMap.set(nodeId, { depTree, cycleNodeIds });
  }
  else if (cyclesWithThisNode.length === 0) {
    memoizationMap.set(nodeId, { depTree });
  }
  // Don't memoize nodes in cycles (cyclesWithThisNode.length > 0)
}
exports.memoize = memoize;
// Retrieves a memoized dep tree for `nodeId`, or null when none exists or
// when reuse is unsafe because an ancestor belongs to one of the cycles
// recorded alongside the memoized tree.
function getMemoizedDepTree(nodeId, ancestors, memoizationMap) {
  if (!memoizationMap.has(nodeId)) {
    return null;
  }
  const { depTree, cycleNodeIds } = memoizationMap.get(nodeId);
  if (!cycleNodeIds) {
    return depTree;
  }
  const ancestorInCycle = ancestors.some((ancestorId) => cycleNodeIds.has(ancestorId));
  return ancestorInCycle ? null : depTree;
}
exports.getMemoizedDepTree = getMemoizedDepTree;
//# sourceMappingURL=memiozation.js.map
/***/ }),
/***/ 75616:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
// A linked list to keep track of recently-used-ness
const Yallist = __webpack_require__(74918)
const MAX = Symbol('max')
const LENGTH = Symbol('length')
const LENGTH_CALCULATOR = Symbol('lengthCalculator')
const ALLOW_STALE = Symbol('allowStale')
const MAX_AGE = Symbol('maxAge')
const DISPOSE = Symbol('dispose')
const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')
const LRU_LIST = Symbol('lruList')
const CACHE = Symbol('cache')
const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')
const naiveLength = () => 1
// lruList is a yallist where the head is the youngest
// item, and the tail is the oldest. the list contains the Hit
// objects as the entries.
// Each Hit object has a reference to its Yallist.Node. This
// never changes.
//
// cache is a Map (or PseudoMap) that matches the keys to
// the Yallist.Node object.
// An LRU cache keyed by a Map, with recency tracked by a doubly-linked
// Yallist. All internal state lives behind module-local Symbols so it
// cannot collide with user keys or be touched from outside.
class LRUCache {
constructor (options) {
// A bare number is shorthand for { max: number }.
if (typeof options === 'number')
options = { max: options }
if (!options)
options = {}
if (options.max && (typeof options.max !== 'number' || options.max < 0))
throw new TypeError('max must be a non-negative number')
// Kind of weird to have a default max of Infinity, but oh well.
const max = this[MAX] = options.max || Infinity
const lc = options.length || naiveLength
this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc
this[ALLOW_STALE] = options.stale || false
if (options.maxAge && typeof options.maxAge !== 'number')
throw new TypeError('maxAge must be a number')
this[MAX_AGE] = options.maxAge || 0
this[DISPOSE] = options.dispose
this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false
this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false
this.reset()
}
// resize the cache when the max changes.
set max (mL) {
if (typeof mL !== 'number' || mL < 0)
throw new TypeError('max must be a non-negative number')
this[MAX] = mL || Infinity
trim(this)
}
get max () {
return this[MAX]
}
set allowStale (allowStale) {
this[ALLOW_STALE] = !!allowStale
}
get allowStale () {
return this[ALLOW_STALE]
}
set maxAge (mA) {
if (typeof mA !== 'number')
throw new TypeError('maxAge must be a non-negative number')
this[MAX_AGE] = mA
trim(this)
}
get maxAge () {
return this[MAX_AGE]
}
// resize the cache when the lengthCalculator changes.
set lengthCalculator (lC) {
if (typeof lC !== 'function')
lC = naiveLength
if (lC !== this[LENGTH_CALCULATOR]) {
// Recompute every entry's length under the new calculator.
this[LENGTH_CALCULATOR] = lC
this[LENGTH] = 0
this[LRU_LIST].forEach(hit => {
hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key)
this[LENGTH] += hit.length
})
}
trim(this)
}
get lengthCalculator () { return this[LENGTH_CALCULATOR] }
get length () { return this[LENGTH] }
get itemCount () { return this[LRU_LIST].length }
// Iterate entries oldest-first (reverse recency order).
rforEach (fn, thisp) {
thisp = thisp || this
for (let walker = this[LRU_LIST].tail; walker !== null;) {
// Capture the neighbor first: forEachStep may delete `walker`.
const prev = walker.prev
forEachStep(this, fn, walker, thisp)
walker = prev
}
}
// Iterate entries newest-first.
forEach (fn, thisp) {
thisp = thisp || this
for (let walker = this[LRU_LIST].head; walker !== null;) {
const next = walker.next
forEachStep(this, fn, walker, thisp)
walker = next
}
}
keys () {
return this[LRU_LIST].toArray().map(k => k.key)
}
values () {
return this[LRU_LIST].toArray().map(k => k.value)
}
// Empties the cache, invoking dispose on every remaining entry.
reset () {
if (this[DISPOSE] &&
this[LRU_LIST] &&
this[LRU_LIST].length) {
this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value))
}
this[CACHE] = new Map() // hash of items by key
this[LRU_LIST] = new Yallist() // list of items in order of use recency
this[LENGTH] = 0 // length of items in the list
}
// Serializes live (non-stale) entries, newest-first, for load().
dump () {
return this[LRU_LIST].map(hit =>
isStale(this, hit) ? false : {
k: hit.key,
v: hit.value,
e: hit.now + (hit.maxAge || 0)
}).toArray().filter(h => h)
}
dumpLru () {
return this[LRU_LIST]
}
// Inserts or replaces an entry; returns false when the entry is larger
// than the cache's max and therefore cannot be stored.
set (key, value, maxAge) {
maxAge = maxAge || this[MAX_AGE]
if (maxAge && typeof maxAge !== 'number')
throw new TypeError('maxAge must be a number')
// Timestamps are only needed when aging is in effect.
const now = maxAge ? Date.now() : 0
const len = this[LENGTH_CALCULATOR](value, key)
if (this[CACHE].has(key)) {
if (len > this[MAX]) {
del(this, this[CACHE].get(key))
return false
}
const node = this[CACHE].get(key)
const item = node.value
// dispose of the old one before overwriting
// split out into 2 ifs for better coverage tracking
if (this[DISPOSE]) {
if (!this[NO_DISPOSE_ON_SET])
this[DISPOSE](key, item.value)
}
item.now = now
item.maxAge = maxAge
item.value = value
this[LENGTH] += len - item.length
item.length = len
// get() marks the entry most-recently-used.
this.get(key)
trim(this)
return true
}
const hit = new Entry(key, value, len, now, maxAge)
// oversized objects fall out of cache automatically.
if (hit.length > this[MAX]) {
if (this[DISPOSE])
this[DISPOSE](key, value)
return false
}
this[LENGTH] += hit.length
this[LRU_LIST].unshift(hit)
this[CACHE].set(key, this[LRU_LIST].head)
trim(this)
return true
}
has (key) {
if (!this[CACHE].has(key)) return false
const hit = this[CACHE].get(key).value
return !isStale(this, hit)
}
get (key) {
return get(this, key, true)
}
// Like get(), but does not update recency.
peek (key) {
return get(this, key, false)
}
// Removes and returns the least-recently-used entry.
pop () {
const node = this[LRU_LIST].tail
if (!node)
return null
del(this, node)
return node.value
}
del (key) {
del(this, this[CACHE].get(key))
}
// Restores entries produced by dump(); expired items are skipped.
load (arr) {
// reset the cache
this.reset()
const now = Date.now()
// A previous serialized cache has the most recent items first
for (let l = arr.length - 1; l >= 0; l--) {
const hit = arr[l]
const expiresAt = hit.e || 0
if (expiresAt === 0)
// the item was created without expiration in a non aged cache
this.set(hit.k, hit.v)
else {
const maxAge = expiresAt - now
// dont add already expired items
if (maxAge > 0) {
this.set(hit.k, hit.v, maxAge)
}
}
}
}
// Evicts every stale entry (get with doUse=false triggers stale checks).
prune () {
this[CACHE].forEach((value, key) => get(this, key, false))
}
}
// Shared lookup for get()/peek(). Stale entries are evicted and (unless
// allowStale) hidden; live entries are promoted to most-recent when
// `doUse` is set. Returns undefined on a miss.
const get = (self, key, doUse) => {
  const node = self[CACHE].get(key);
  if (!node) {
    return undefined;
  }
  const hit = node.value;
  if (isStale(self, hit)) {
    del(self, node);
    if (!self[ALLOW_STALE]) {
      return undefined;
    }
  } else if (doUse) {
    if (self[UPDATE_AGE_ON_GET]) {
      node.value.now = Date.now();
    }
    self[LRU_LIST].unshiftNode(node);
  }
  return hit.value;
};
// True when the entry's age exceeds its own maxAge, or — when the entry
// has none — the cache-wide maxAge. Entries without any age limit never
// go stale.
const isStale = (self, hit) => {
  if (!hit || (!hit.maxAge && !self[MAX_AGE])) {
    return false;
  }
  const age = Date.now() - hit.now;
  if (hit.maxAge) {
    return age > hit.maxAge;
  }
  return self[MAX_AGE] && (age > self[MAX_AGE]);
};
// Evicts least-recently-used entries until the cache fits within max.
const trim = self => {
  let walker = self[LRU_LIST].tail;
  while (self[LENGTH] > self[MAX] && walker !== null) {
    // We know that we're about to delete this one, and also
    // what the next least recently used key will be, so just
    // go ahead and set it now.
    const prev = walker.prev;
    del(self, walker);
    walker = prev;
  }
};
// Removes a list node from all three structures (length total, key map,
// recency list), invoking the dispose hook first. No-op on a null node.
const del = (self, node) => {
  if (!node) {
    return;
  }
  const hit = node.value;
  if (self[DISPOSE]) {
    self[DISPOSE](hit.key, hit.value);
  }
  self[LENGTH] -= hit.length;
  self[CACHE].delete(hit.key);
  self[LRU_LIST].removeNode(node);
};
// A single cache record; `now` is the insertion timestamp and `maxAge`
// the per-entry TTL (0 = use the cache-wide default).
class Entry {
  constructor (key, value, length, now, maxAge) {
    Object.assign(this, { key, value, length, now });
    this.maxAge = maxAge || 0;
  }
}
// Visits one list node for forEach/rforEach: evicts it if stale, then
// invokes the callback unless staleness hid the value.
const forEachStep = (self, fn, node, thisp) => {
  let hit = node.value;
  if (isStale(self, hit)) {
    del(self, node);
    if (!self[ALLOW_STALE]) {
      hit = undefined;
    }
  }
  if (hit) {
    fn.call(thisp, hit.value, hit.key, self);
  }
};
module.exports = LRUCache
/***/ }),
/***/ 95622:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const ANY = Symbol('SemVer ANY')
// hoisted class for cyclic dependency
// A single semver comparator such ">=1.2.3". The class is declared before
// its dependencies (re/t, cmp, debug, SemVer, Range) because of a cyclic
// module dependency; those bindings are assigned below via require.
class Comparator {
static get ANY () {
return ANY
}
constructor (comp, options) {
options = parseOptions(options)
// Reuse an existing Comparator when its looseness matches; otherwise
// re-parse its source text under the new options.
if (comp instanceof Comparator) {
if (comp.loose === !!options.loose) {
return comp
} else {
comp = comp.value
}
}
debug('comparator', comp, options)
this.options = options
this.loose = !!options.loose
this.parse(comp)
// ANY stringifies to '' so it matches every version.
if (this.semver === ANY) {
this.value = ''
} else {
this.value = this.operator + this.semver.version
}
debug('comp', this)
}
// Splits `comp` into an operator (m[1]) and a version (m[2]); '=' is
// normalized away and a missing version means "match anything".
parse (comp) {
const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
const m = comp.match(r)
if (!m) {
throw new TypeError(`Invalid comparator: ${comp}`)
}
this.operator = m[1] !== undefined ? m[1] : ''
if (this.operator === '=') {
this.operator = ''
}
// if it literally is just '>' or '' then allow anything.
if (!m[2]) {
this.semver = ANY
} else {
this.semver = new SemVer(m[2], this.options.loose)
}
}
toString () {
return this.value
}
// True when `version` satisfies this comparator. Unparseable version
// strings simply fail the test rather than throwing.
test (version) {
debug('Comparator.test', version, this.options.loose)
if (this.semver === ANY || version === ANY) {
return true
}
if (typeof version === 'string') {
try {
version = new SemVer(version, this.options)
} catch (er) {
return false
}
}
return cmp(version, this.operator, this.semver, this.options)
}
// True when some version satisfies both this comparator and `comp`.
intersects (comp, options) {
if (!(comp instanceof Comparator)) {
throw new TypeError('a Comparator is required')
}
if (!options || typeof options !== 'object') {
options = {
loose: !!options,
includePrerelease: false
}
}
// An operator-less comparator is either ANY ('') or an exact version;
// for an exact version, intersect by testing it against the other side.
if (this.operator === '') {
if (this.value === '') {
return true
}
return new Range(comp.value, options).test(this.value)
} else if (comp.operator === '') {
if (comp.value === '') {
return true
}
return new Range(this.value, options).test(comp.semver)
}
// Otherwise enumerate the geometric cases in which two half-open /
// closed ranges on the version line can overlap.
const sameDirectionIncreasing =
(this.operator === '>=' || this.operator === '>') &&
(comp.operator === '>=' || comp.operator === '>')
const sameDirectionDecreasing =
(this.operator === '<=' || this.operator === '<') &&
(comp.operator === '<=' || comp.operator === '<')
const sameSemVer = this.semver.version === comp.semver.version
const differentDirectionsInclusive =
(this.operator === '>=' || this.operator === '<=') &&
(comp.operator === '>=' || comp.operator === '<=')
const oppositeDirectionsLessThan =
cmp(this.semver, '<', comp.semver, options) &&
(this.operator === '>=' || this.operator === '>') &&
(comp.operator === '<=' || comp.operator === '<')
const oppositeDirectionsGreaterThan =
cmp(this.semver, '>', comp.semver, options) &&
(this.operator === '<=' || this.operator === '<') &&
(comp.operator === '>=' || comp.operator === '>')
return (
sameDirectionIncreasing ||
sameDirectionDecreasing ||
(sameSemVer && differentDirectionsInclusive) ||
oppositeDirectionsLessThan ||
oppositeDirectionsGreaterThan
)
}
}
module.exports = Comparator
const parseOptions = __webpack_require__(23856)
const {re, t} = __webpack_require__(90996)
const cmp = __webpack_require__(35637)
const debug = __webpack_require__(75236)
const SemVer = __webpack_require__(81538)
const Range = __webpack_require__(3577)
/***/ }),
/***/ 3577:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
// hoisted class for cyclic dependency
// A semver range: a disjunction (`||`) of comparator lists, where each
// list's comparators must all match. Declared ahead of its dependencies
// (Comparator, parseOptions, cache, re/t) due to a cyclic module import.
class Range {
constructor (range, options) {
options = parseOptions(options)
// Reuse an existing Range only when its option flags match exactly.
if (range instanceof Range) {
if (
range.loose === !!options.loose &&
range.includePrerelease === !!options.includePrerelease
) {
return range
} else {
return new Range(range.raw, options)
}
}
if (range instanceof Comparator) {
// just put it in the set and return
this.raw = range.value
this.set = [[range]]
this.format()
return this
}
this.options = options
this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease
// First, split based on boolean or ||
this.raw = range
this.set = range
.split(/\s*\|\|\s*/)
// map the range to a 2d array of comparators
.map(range => this.parseRange(range.trim()))
// throw out any comparator lists that are empty
// this generally means that it was not a valid range, which is allowed
// in loose mode, but will still throw if the WHOLE range is invalid.
.filter(c => c.length)
if (!this.set.length) {
throw new TypeError(`Invalid SemVer Range: ${range}`)
}
// if we have any that are not the null set, throw out null sets.
if (this.set.length > 1) {
// keep the first one, in case they're all null sets
const first = this.set[0]
this.set = this.set.filter(c => !isNullSet(c[0]))
if (this.set.length === 0)
this.set = [first]
else if (this.set.length > 1) {
// if we have any that are *, then the range is just *
for (const c of this.set) {
if (c.length === 1 && isAny(c[0])) {
this.set = [c]
break
}
}
}
}
this.format()
}
// Rebuilds the normalized string form ("c1 c2||c3") from the set.
format () {
this.range = this.set
.map((comps) => {
return comps.join(' ').trim()
})
.join('||')
.trim()
return this.range
}
toString () {
return this.range
}
// Parses one ||-branch into a comparator list. Results are memoized
// module-wide in an LRU cache keyed by options + source text.
parseRange (range) {
range = range.trim()
// memoize range parsing for performance.
// this is a very hot path, and fully deterministic.
const memoOpts = Object.keys(this.options).join(',')
const memoKey = `parseRange:${memoOpts}:${range}`
const cached = cache.get(memoKey)
if (cached)
return cached
const loose = this.options.loose
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace(this.options.includePrerelease))
debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM])
// `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
// `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace)
// normalize spaces
range = range.split(/\s+/).join(' ')
// At this point, the range is completely trimmed and
// ready to be split into comparators.
const compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
const rangeList = range
.split(' ')
.map(comp => parseComparator(comp, this.options))
.join(' ')
.split(/\s+/)
// >=0.0.0 is equivalent to *
.map(comp => replaceGTE0(comp, this.options))
// in loose mode, throw out any that are not valid comparators
.filter(this.options.loose ? comp => !!comp.match(compRe) : () => true)
.map(comp => new Comparator(comp, this.options))
// if any comparators are the null set, then replace with JUST null set
// if more than one comparator, remove any * comparators
// also, don't include the same comparator more than once
const l = rangeList.length
const rangeMap = new Map()
for (const comp of rangeList) {
if (isNullSet(comp))
return [comp]
rangeMap.set(comp.value, comp)
}
if (rangeMap.size > 1 && rangeMap.has(''))
rangeMap.delete('')
const result = [...rangeMap.values()]
cache.set(memoKey, result)
return result
}
// True when some version can satisfy both this range and `range`.
intersects (range, options) {
if (!(range instanceof Range)) {
throw new TypeError('a Range is required')
}
return this.set.some((thisComparators) => {
return (
isSatisfiable(thisComparators, options) &&
range.set.some((rangeComparators) => {
return (
isSatisfiable(rangeComparators, options) &&
thisComparators.every((thisComparator) => {
return rangeComparators.every((rangeComparator) => {
return thisComparator.intersects(rangeComparator, options)
})
})
)
})
)
})
}
// if ANY of the sets match ALL of its comparators, then pass
test (version) {
if (!version) {
return false
}
if (typeof version === 'string') {
try {
version = new SemVer(version, this.options)
} catch (er) {
return false
}
}
for (let i = 0; i < this.set.length; i++) {
if (testSet(this.set[i], version, this.options)) {
return true
}
}
return false
}
}
module.exports = Range
const LRU = __webpack_require__(75616)
const cache = new LRU({ max: 1000 })
const parseOptions = __webpack_require__(23856)
const Comparator = __webpack_require__(95622)
const debug = __webpack_require__(75236)
const SemVer = __webpack_require__(81538)
const {
re,
t,
comparatorTrimReplace,
tildeTrimReplace,
caretTrimReplace
} = __webpack_require__(90996)
// The canonical "matches nothing" comparator.
const isNullSet = (c) => c.value === '<0.0.0-0'
// An empty comparator value matches every version.
const isAny = (c) => c.value === ''
// take a set of comparators and determine whether there
// exists a version which can satisfy it
// take a set of comparators and determine whether there
// exists a version which can satisfy it: every comparator must
// pairwise-intersect every other one.
const isSatisfiable = (comparators, options) => {
  const remaining = comparators.slice();
  let current = remaining.pop();
  let satisfiable = true;
  while (satisfiable && remaining.length) {
    satisfiable = remaining.every((other) => current.intersects(other, options));
    current = remaining.pop();
  }
  return satisfiable;
};
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
// Order matters: carets and tildes must be expanded before x-ranges
// and stars are resolved.
const parseComparator = (comp, options) => {
debug('comp', comp, options)
comp = replaceCarets(comp, options)
debug('caret', comp)
comp = replaceTildes(comp, options)
debug('tildes', comp)
comp = replaceXRanges(comp, options)
debug('xrange', comp)
comp = replaceStars(comp, options)
debug('stars', comp)
return comp
}
// True when a version component is a wildcard: absent, 'x'/'X', or '*'.
const isX = (id) => {
  if (!id) {
    return true;
  }
  return id.toLowerCase() === 'x' || id === '*';
}
// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0
// Expands every whitespace-separated tilde comparator in `comp`.
const replaceTildes = (comp, options) =>
  comp
    .trim()
    .split(/\s+/)
    .map((single) => replaceTilde(single, options))
    .join(' ')
// Expands a single tilde comparator into plain >=/< comparators (see the
// mapping table in the comment block above). The template literals are
// deliberately split across lines; the embedded newline is part of the
// original output-compatible formatting of the source, not the result
// string semantics (it sits inside the `${...}` expression, not the text).
const replaceTilde = (comp, options) => {
const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
return comp.replace(r, (_, M, m, p, pr) => {
debug('tilde', comp, _, M, m, p, pr)
let ret
if (isX(M)) {
// Bare '~' / '~x': matches anything.
ret = ''
} else if (isX(m)) {
ret = `>=${M}.0.0 <${+M + 1}.0.0-0`
} else if (isX(p)) {
// ~1.2 == >=1.2.0 <1.3.0-0
ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0`
} else if (pr) {
debug('replaceTilde pr', pr)
ret = `>=${M}.${m}.${p}-${pr
} <${M}.${+m + 1}.0-0`
} else {
// ~1.2.3 == >=1.2.3 <1.3.0-0
ret = `>=${M}.${m}.${p
} <${M}.${+m + 1}.0-0`
}
debug('tilde return', ret)
return ret
})
}
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0
// ^1.2.3 --> >=1.2.3 <2.0.0-0
// ^1.2.0 --> >=1.2.0 <2.0.0-0
// Desugar every whitespace-separated caret comparator in the string.
const replaceCarets = (comp, options) => {
  const parts = comp.trim().split(/\s+/)
  return parts.map((part) => replaceCaret(part, options)).join(' ')
}
// Desugar a single caret comparator (^1.2.3 -> >=1.2.3 <2.0.0-0).
// The upper bound is one past the left-most non-zero component, so
// ^0.x behaves much more restrictively than ^1.x.
const replaceCaret = (comp, options) => {
  debug('caret', comp, options)
  const pattern = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
  // Suffix forcing the lower bound down to the first prerelease.
  const z = options.includePrerelease ? '-0' : ''
  return comp.replace(pattern, (match, M, m, p, pr) => {
    debug('caret', comp, match, M, m, p, pr)
    let result
    if (isX(M)) {
      result = ''
    } else if (isX(m)) {
      result = `>=${M}.0.0${z} <${+M + 1}.0.0-0`
    } else if (isX(p)) {
      result = M === '0'
        ? `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0`
        : `>=${M}.${m}.0${z} <${+M + 1}.0.0-0`
    } else {
      // Full M.m.p: pick the upper bound from the left-most non-zero part.
      let upper
      if (M !== '0') {
        upper = `<${+M + 1}.0.0-0`
      } else if (m !== '0') {
        upper = `<${M}.${+m + 1}.0-0`
      } else {
        upper = `<${M}.${m}.${+p + 1}-0`
      }
      if (pr) {
        debug('replaceCaret pr', pr)
        result = `>=${M}.${m}.${p}-${pr} ${upper}`
      } else {
        debug('no pr')
        result = `>=${M}.${m}.${p}${z} ${upper}`
      }
    }
    debug('caret return', result)
    return result
  })
}
// Desugar every whitespace-separated x-range comparator in the string.
const replaceXRanges = (comp, options) => {
  debug('replaceXRanges', comp, options)
  const parts = comp.split(/\s+/)
  return parts.map((part) => replaceXRange(part, options)).join(' ')
}
const replaceXRange = (comp, options) => {
comp = comp.trim()
const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
return comp.replace(r, (ret, gtlt, M, m, p, pr) => {
debug('xRange', comp, ret, gtlt, M, m, p, pr)
const xM = isX(M)
const xm = xM || isX(m)
const xp = xm || isX(p)
const anyX = xp
if (gtlt === '=' && anyX) {
gtlt = ''
}
// if we're including prereleases in the match, then we need
// to fix this to -0, the lowest possible prerelease value
pr = options.includePrerelease ? '-0' : ''
if (xM) {
if (gtlt === '>' || gtlt === '<') {
// nothing is allowed
ret = '<0.0.0-0'
} else {
// nothing is forbidden
ret = '*'
}
} else if (gtlt && anyX) {
// we know patch is an x, because we have any x at all.
// replace X with 0
if (xm) {
m = 0
}
p = 0
if (gtlt === '>') {
// >1 => >=2.0.0
// >1.2 => >=1.3.0
gtlt = '>='
if (xm) {
M = +M + 1
m = 0
p = 0
} else {
m = +m + 1
p = 0
}
} else if (gtlt === '<=') {
// <=0.7.x is actually <0.8.0, since any 0.7.x should
// pass. Similarly, <=7.x is actually <8.0.0, etc.
gtlt = '<'
if (xm) {
M = +M + 1
} else {
m = +m + 1
}
}
if (gtlt === '<')
pr = '-0'
ret = `${gtlt + M}.${m}.${p}${pr}`
} else if (xm) {
ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0`
} else if (xp) {
ret = `>=${M}.${m}.0${pr
} <${M}.${+m + 1}.0-0`
}
debug('xRange return', ret)
return ret
})
}
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
// Strip star comparators entirely: '*' AND-ed with anything is a no-op,
// and '' already means "any version".
const replaceStars = (comp, options) => {
  debug('replaceStars', comp, options)
  // Looseness is irrelevant for stars — they are maximally loose already.
  const trimmed = comp.trim()
  return trimmed.replace(re[t.STAR], '')
}
// Strip a '>=0.0.0' (or '>=0.0.0-0' in prerelease mode) comparator,
// which is equivalent to '*'.
const replaceGTE0 = (comp, options) => {
  debug('replaceGTE0', comp, options)
  const token = options.includePrerelease ? t.GTE0PRE : t.GTE0
  return comp.trim().replace(re[token], '')
}
// This function is passed to string.replace(re[t.HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0-0
// Build the String.replace callback for hyphen ranges ('A - B').
// Curried on incPr (include-prerelease mode); the inner function receives
// the HYPHENRANGE capture groups: full from/to strings plus their
// major/minor/patch/prerelease/build parts.
const hyphenReplace = incPr => ($0,
  from, fM, fm, fp, fpr, fb,
  to, tM, tm, tp, tpr, tb) => {
  // Lower-bound suffix pulling the bound down to the first prerelease.
  const pre = incPr ? '-0' : ''
  // Lower bound: fill wildcard components with zeros.
  if (isX(fM)) {
    from = ''
  } else if (isX(fm)) {
    from = `>=${fM}.0.0${pre}`
  } else if (isX(fp)) {
    from = `>=${fM}.${fm}.0${pre}`
  } else if (fpr) {
    from = `>=${from}`
  } else {
    from = `>=${from}${pre}`
  }
  // Upper bound: a wildcard component widens it to the next release above.
  if (isX(tM)) {
    to = ''
  } else if (isX(tm)) {
    to = `<${+tM + 1}.0.0-0`
  } else if (isX(tp)) {
    to = `<${tM}.${+tm + 1}.0-0`
  } else if (tpr) {
    to = `<=${tM}.${tm}.${tp}-${tpr}`
  } else if (incPr) {
    to = `<${tM}.${tm}.${+tp + 1}-0`
  } else {
    to = `<=${to}`
  }
  return `${from} ${to}`.trim()
}
// Test a version against one AND-ed comparator set.
const testSet = (set, version, options) => {
  // Every comparator in the set must match.
  for (const comparator of set) {
    if (!comparator.test(version)) {
      return false
    }
  }
  if (!version.prerelease.length || options.includePrerelease) {
    return true
  }
  // A prerelease version only satisfies the set if some comparator has a
  // prerelease on the *same* [major, minor, patch] tuple.
  // E.g. ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0-0, which should let
  // 1.2.3-pr.2 pass but reject 1.2.4-alpha.notready even though it is
  // numerically inside the range.
  for (const comparator of set) {
    debug(comparator.semver)
    if (comparator.semver === Comparator.ANY) {
      continue
    }
    if (comparator.semver.prerelease.length > 0) {
      const allowed = comparator.semver
      if (allowed.major === version.major &&
          allowed.minor === version.minor &&
          allowed.patch === version.patch) {
        return true
      }
    }
  }
  // Version has a prerelease, but no comparator sanctions it.
  return false
}
/***/ }),
/***/ 81538:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const debug = __webpack_require__(75236)
const { MAX_LENGTH, MAX_SAFE_INTEGER } = __webpack_require__(14031)
const { re, t } = __webpack_require__(90996)
const parseOptions = __webpack_require__(23856)
const { compareIdentifiers } = __webpack_require__(6857)
// A parsed (and mutable) semantic version. Exposes numeric major/minor/
// patch, prerelease and build identifier arrays, precedence comparisons,
// and in-place increment via inc().
class SemVer {
  // version: string or SemVer; options: falsy, legacy boolean, or
  // { loose, includePrerelease }. Throws TypeError for non-string input,
  // overlong strings, strings that don't match the (loose or strict)
  // version regexp, or numeric components outside [0, MAX_SAFE_INTEGER].
  constructor (version, options) {
    options = parseOptions(options)
    if (version instanceof SemVer) {
      // Reuse the instance when it was parsed with identical options;
      // otherwise fall through and re-parse its string form.
      if (version.loose === !!options.loose &&
        version.includePrerelease === !!options.includePrerelease) {
        return version
      } else {
        version = version.version
      }
    } else if (typeof version !== 'string') {
      throw new TypeError(`Invalid Version: ${version}`)
    }
    if (version.length > MAX_LENGTH) {
      throw new TypeError(
        `version is longer than ${MAX_LENGTH} characters`
      )
    }
    debug('SemVer', version, options)
    this.options = options
    this.loose = !!options.loose
    // this isn't actually relevant for versions, but keep it so that we
    // don't run into trouble passing this.options around.
    this.includePrerelease = !!options.includePrerelease
    const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
    if (!m) {
      throw new TypeError(`Invalid Version: ${version}`)
    }
    // raw keeps the original (untrimmed) input string.
    this.raw = version
    // these are actually numbers
    this.major = +m[1]
    this.minor = +m[2]
    this.patch = +m[3]
    if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
      throw new TypeError('Invalid major version')
    }
    if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
      throw new TypeError('Invalid minor version')
    }
    if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
      throw new TypeError('Invalid patch version')
    }
    // numberify any prerelease numeric ids; ids that would lose
    // precision as numbers stay strings.
    if (!m[4]) {
      this.prerelease = []
    } else {
      this.prerelease = m[4].split('.').map((id) => {
        if (/^[0-9]+$/.test(id)) {
          const num = +id
          if (num >= 0 && num < MAX_SAFE_INTEGER) {
            return num
          }
        }
        return id
      })
    }
    this.build = m[5] ? m[5].split('.') : []
    this.format()
  }
  // Recompute and return this.version as "M.m.p[-prerelease]".
  // Build metadata is deliberately excluded (it never affects precedence).
  format () {
    this.version = `${this.major}.${this.minor}.${this.patch}`
    if (this.prerelease.length) {
      this.version += `-${this.prerelease.join('.')}`
    }
    return this.version
  }
  toString () {
    return this.version
  }
  // SemVer precedence comparison: -1, 0, or 1. Strings are coerced with
  // this instance's options; identical version strings short-circuit to 0.
  compare (other) {
    debug('SemVer.compare', this.version, this.options, other)
    if (!(other instanceof SemVer)) {
      if (typeof other === 'string' && other === this.version) {
        return 0
      }
      other = new SemVer(other, this.options)
    }
    if (other.version === this.version) {
      return 0
    }
    return this.compareMain(other) || this.comparePre(other)
  }
  // Compare only the major.minor.patch triple.
  compareMain (other) {
    if (!(other instanceof SemVer)) {
      other = new SemVer(other, this.options)
    }
    return (
      compareIdentifiers(this.major, other.major) ||
      compareIdentifiers(this.minor, other.minor) ||
      compareIdentifiers(this.patch, other.patch)
    )
  }
  // Compare only the prerelease identifier lists.
  comparePre (other) {
    if (!(other instanceof SemVer)) {
      other = new SemVer(other, this.options)
    }
    // NOT having a prerelease is > having one
    if (this.prerelease.length && !other.prerelease.length) {
      return -1
    } else if (!this.prerelease.length && other.prerelease.length) {
      return 1
    } else if (!this.prerelease.length && !other.prerelease.length) {
      return 0
    }
    // Walk identifiers pairwise; a shorter list that is a prefix of the
    // other sorts lower. The while(++i) condition is always truthy —
    // every exit is via a return inside the loop.
    let i = 0
    do {
      const a = this.prerelease[i]
      const b = other.prerelease[i]
      debug('prerelease compare', i, a, b)
      if (a === undefined && b === undefined) {
        return 0
      } else if (b === undefined) {
        return 1
      } else if (a === undefined) {
        return -1
      } else if (a === b) {
        continue
      } else {
        return compareIdentifiers(a, b)
      }
    } while (++i)
  }
  // Compare only the build identifier lists (same pairwise walk as
  // comparePre). Used as a tiebreaker for stable sorting, not precedence.
  compareBuild (other) {
    if (!(other instanceof SemVer)) {
      other = new SemVer(other, this.options)
    }
    let i = 0
    do {
      const a = this.build[i]
      const b = other.build[i]
      debug('prerelease compare', i, a, b)
      if (a === undefined && b === undefined) {
        return 0
      } else if (b === undefined) {
        return 1
      } else if (a === undefined) {
        return -1
      } else if (a === b) {
        continue
      } else {
        return compareIdentifiers(a, b)
      }
    } while (++i)
  }
  // Bump this version in place and return this.
  // preminor will bump the version up to the next minor release, and immediately
  // down to pre-release. premajor and prepatch work the same way.
  inc (release, identifier) {
    switch (release) {
      case 'premajor':
        this.prerelease.length = 0
        this.patch = 0
        this.minor = 0
        this.major++
        this.inc('pre', identifier)
        break
      case 'preminor':
        this.prerelease.length = 0
        this.patch = 0
        this.minor++
        this.inc('pre', identifier)
        break
      case 'prepatch':
        // If this is already a prerelease, it will bump to the next version
        // drop any prereleases that might already exist, since they are not
        // relevant at this point.
        this.prerelease.length = 0
        this.inc('patch', identifier)
        this.inc('pre', identifier)
        break
      // If the input is a non-prerelease version, this acts the same as
      // prepatch.
      case 'prerelease':
        if (this.prerelease.length === 0) {
          this.inc('patch', identifier)
        }
        this.inc('pre', identifier)
        break
      case 'major':
        // If this is a pre-major version, bump up to the same major version.
        // Otherwise increment major.
        // 1.0.0-5 bumps to 1.0.0
        // 1.1.0 bumps to 2.0.0
        if (
          this.minor !== 0 ||
          this.patch !== 0 ||
          this.prerelease.length === 0
        ) {
          this.major++
        }
        this.minor = 0
        this.patch = 0
        this.prerelease = []
        break
      case 'minor':
        // If this is a pre-minor version, bump up to the same minor version.
        // Otherwise increment minor.
        // 1.2.0-5 bumps to 1.2.0
        // 1.2.1 bumps to 1.3.0
        if (this.patch !== 0 || this.prerelease.length === 0) {
          this.minor++
        }
        this.patch = 0
        this.prerelease = []
        break
      case 'patch':
        // If this is not a pre-release version, it will increment the patch.
        // If it is a pre-release it will bump up to the same patch version.
        // 1.2.0-5 patches to 1.2.0
        // 1.2.0 patches to 1.2.1
        if (this.prerelease.length === 0) {
          this.patch++
        }
        this.prerelease = []
        break
      // This probably shouldn't be used publicly.
      // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction.
      case 'pre':
        if (this.prerelease.length === 0) {
          this.prerelease = [0]
        } else {
          // Increment the right-most numeric identifier; i becomes -2 as a
          // sentinel meaning "incremented something".
          let i = this.prerelease.length
          while (--i >= 0) {
            if (typeof this.prerelease[i] === 'number') {
              this.prerelease[i]++
              i = -2
            }
          }
          if (i === -1) {
            // didn't increment anything
            this.prerelease.push(0)
          }
        }
        if (identifier) {
          // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
          // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
          if (this.prerelease[0] === identifier) {
            if (isNaN(this.prerelease[1])) {
              this.prerelease = [identifier, 0]
            }
          } else {
            this.prerelease = [identifier, 0]
          }
        }
        break
      default:
        throw new Error(`invalid increment argument: ${release}`)
    }
    this.format()
    // raw tracks the formatted result after an increment.
    this.raw = this.version
    return this
  }
}
module.exports = SemVer
/***/ }),
/***/ 65105:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const parse = __webpack_require__(83896)
// Parse after stripping leading '=' / 'v' markers; returns the clean
// version string, or null when unparseable.
const clean = (version, options) => {
  const stripped = version.trim().replace(/^[=v]+/, '')
  const parsed = parse(stripped, options)
  return parsed ? parsed.version : null
}
module.exports = clean
/***/ }),
/***/ 35637:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const eq = __webpack_require__(28179)
const neq = __webpack_require__(83568)
const gt = __webpack_require__(78203)
const gte = __webpack_require__(66658)
const lt = __webpack_require__(1926)
const lte = __webpack_require__(46266)
// Apply a comparison operator string to two versions.
// '===' / '!==' compare version strings for literal identity (no
// coercion, build metadata included); the rest delegate to the
// semver-precedence helpers. Throws TypeError on an unknown operator.
const cmp = (a, op, b, loose) => {
  switch (op) {
    case '===': {
      const left = typeof a === 'object' ? a.version : a
      const right = typeof b === 'object' ? b.version : b
      return left === right
    }
    case '!==': {
      const left = typeof a === 'object' ? a.version : a
      const right = typeof b === 'object' ? b.version : b
      return left !== right
    }
    case '':
    case '=':
    case '==':
      return eq(a, b, loose)
    case '!=':
      return neq(a, b, loose)
    case '>':
      return gt(a, b, loose)
    case '>=':
      return gte(a, b, loose)
    case '<':
      return lt(a, b, loose)
    case '<=':
      return lte(a, b, loose)
    default:
      throw new TypeError(`Invalid operator: ${op}`)
  }
}
module.exports = cmp
/***/ }),
/***/ 12901:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const SemVer = __webpack_require__(81538)
const parse = __webpack_require__(83896)
const {re, t} = __webpack_require__(90996)
// Coerce an arbitrary string (or number) into a SemVer by extracting the
// first — or, with options.rtl, the right-most — thing that looks like a
// version. Returns null when nothing coercible is found.
const coerce = (version, options) => {
  if (version instanceof SemVer) {
    return version
  }
  // Numbers are accepted: coerce(1) behaves like coerce('1').
  if (typeof version === 'number') {
    version = String(version)
  }
  if (typeof version !== 'string') {
    return null
  }
  options = options || {}
  let match = null
  if (!options.rtl) {
    match = version.match(re[t.COERCE])
  } else {
    // Find the right-most coercible string that does not share
    // a terminus with a more left-ward coercible string.
    // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
    //
    // Walk through the string checking with a /g regexp
    // Manually set the index so as to pick up overlapping matches.
    // Stop when we get a match that ends at the string end, since no
    // coercible string can be more right-ward without the same terminus.
    let next
    while ((next = re[t.COERCERTL].exec(version)) &&
      (!match || match.index + match[0].length !== version.length)
    ) {
      if (!match ||
        next.index + next[0].length !== match.index + match[0].length) {
        match = next
      }
      // Rewind lastIndex to just past the major component so overlapping
      // candidates (which a plain /g exec would skip) are still visited.
      re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
    }
    // leave it in a clean state
    re[t.COERCERTL].lastIndex = -1
  }
  if (match === null)
    return null
  // Capture groups 2..4 are major/minor/patch; minor and patch default to 0.
  return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options)
}
module.exports = coerce
/***/ }),
/***/ 48372:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const SemVer = __webpack_require__(81538)
// Compare by SemVer precedence first, then by build metadata as a
// tiebreaker (useful for stable sorting).
const compareBuild = (a, b, loose) => {
  const left = new SemVer(a, loose)
  const right = new SemVer(b, loose)
  return left.compare(right) || left.compareBuild(right)
}
module.exports = compareBuild
/***/ }),
/***/ 39309:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const compare = __webpack_require__(62742)
const compareLoose = (a, b) => compare(a, b, true)
module.exports = compareLoose
/***/ }),
/***/ 62742:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const SemVer = __webpack_require__(81538)
// SemVer precedence comparison of two versions: -1, 0, or 1.
const compare = (a, b, loose) => {
  return new SemVer(a, loose).compare(new SemVer(b, loose))
}
module.exports = compare
/***/ }),
/***/ 3976:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const parse = __webpack_require__(83896)
const eq = __webpack_require__(28179)
// Name the most significant release field that differs between two
// versions ('major'/'minor'/'patch', prefixed with 'pre' when either
// side has a prerelease). Returns null for equal versions, and
// 'prerelease' (or '') when only the prerelease part differs.
const diff = (version1, version2) => {
  if (eq(version1, version2)) {
    return null
  }
  const v1 = parse(version1)
  const v2 = parse(version2)
  const hasPre = v1.prerelease.length || v2.prerelease.length
  const prefix = hasPre ? 'pre' : ''
  for (const key of ['major', 'minor', 'patch']) {
    if (v1[key] !== v2[key]) {
      return prefix + key
    }
  }
  // Same release triple; the difference is in the prerelease (or only in
  // build metadata, yielding '').
  return hasPre ? 'prerelease' : ''
}
module.exports = diff
/***/ }),
/***/ 28179:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const compare = __webpack_require__(62742)
const eq = (a, b, loose) => compare(a, b, loose) === 0
module.exports = eq
/***/ }),
/***/ 78203:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const compare = __webpack_require__(62742)
const gt = (a, b, loose) => compare(a, b, loose) > 0
module.exports = gt
/***/ }),
/***/ 66658:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const compare = __webpack_require__(62742)
const gte = (a, b, loose) => compare(a, b, loose) >= 0
module.exports = gte
/***/ }),
/***/ 10948:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const SemVer = __webpack_require__(81538)
// Return the version string produced by bumping `version` by `release`
// ('major', 'preminor', 'pre', ...), or null when the input is invalid.
// Also callable as inc(version, release, identifier).
const inc = (version, release, options, identifier) => {
  if (typeof options === 'string') {
    // options slot actually holds the prerelease identifier
    identifier = options
    options = undefined
  }
  try {
    return new SemVer(version, options).inc(release, identifier).version
  } catch (er) {
    return null
  }
}
module.exports = inc
/***/ }),
/***/ 1926:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const compare = __webpack_require__(62742)
const lt = (a, b, loose) => compare(a, b, loose) < 0
module.exports = lt
/***/ }),
/***/ 46266:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const compare = __webpack_require__(62742)
const lte = (a, b, loose) => compare(a, b, loose) <= 0
module.exports = lte
/***/ }),
/***/ 71188:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const SemVer = __webpack_require__(81538)
const major = (a, loose) => new SemVer(a, loose).major
module.exports = major
/***/ }),
/***/ 90961:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const SemVer = __webpack_require__(81538)
const minor = (a, loose) => new SemVer(a, loose).minor
module.exports = minor
/***/ }),
/***/ 83568:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const compare = __webpack_require__(62742)
const neq = (a, b, loose) => compare(a, b, loose) !== 0
module.exports = neq
/***/ }),
/***/ 83896:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const {MAX_LENGTH} = __webpack_require__(14031)
const { re, t } = __webpack_require__(90996)
const SemVer = __webpack_require__(81538)
const parseOptions = __webpack_require__(23856)
// Parse a version string into a SemVer instance, or return null when the
// input is not a string, is too long, or does not match the (loose or
// strict) version grammar. SemVer instances pass through unchanged.
const parse = (version, options) => {
  options = parseOptions(options)
  if (version instanceof SemVer) {
    return version
  }
  if (typeof version !== 'string' || version.length > MAX_LENGTH) {
    return null
  }
  const pattern = options.loose ? re[t.LOOSE] : re[t.FULL]
  if (!pattern.test(version)) {
    return null
  }
  try {
    return new SemVer(version, options)
  } catch (er) {
    // The regexp pre-check is not exhaustive (e.g. component range
    // limits), so constructor failures still map to null.
    return null
  }
}
module.exports = parse
/***/ }),
/***/ 22793:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const SemVer = __webpack_require__(81538)
const patch = (a, loose) => new SemVer(a, loose).patch
module.exports = patch
/***/ }),
/***/ 58258:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const parse = __webpack_require__(83896)
// Return the prerelease identifier array of a version, or null when the
// version is invalid or has no prerelease part.
const prerelease = (version, options) => {
  const parsed = parse(version, options)
  if (parsed && parsed.prerelease.length) {
    return parsed.prerelease
  }
  return null
}
module.exports = prerelease
/***/ }),
/***/ 78822:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const compare = __webpack_require__(62742)
const rcompare = (a, b, loose) => compare(b, a, loose)
module.exports = rcompare
/***/ }),
/***/ 13668:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const compareBuild = __webpack_require__(48372)
const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose))
module.exports = rsort
/***/ }),
/***/ 71344:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const Range = __webpack_require__(3577)
// True when `version` falls inside `range`; an unparseable range
// satisfies nothing (returns false instead of throwing).
const satisfies = (version, range, options) => {
  let parsedRange
  try {
    parsedRange = new Range(range, options)
  } catch (er) {
    return false
  }
  return parsedRange.test(version)
}
module.exports = satisfies
/***/ }),
/***/ 71927:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const compareBuild = __webpack_require__(48372)
const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose))
module.exports = sort
/***/ }),
/***/ 65202:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const parse = __webpack_require__(83896)
// Return the normalized version string when the input parses, else null.
const valid = (version, options) => {
  const parsed = parse(version, options)
  return parsed ? parsed.version : null
}
module.exports = valid
/***/ }),
/***/ 21663:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
// just pre-load all the stuff that index.js lazily exports
const internalRe = __webpack_require__(90996)
// Aggregated public API of the semver package.
module.exports = {
  // regexp internals
  re: internalRe.re,
  src: internalRe.src,
  tokens: internalRe.t,
  SEMVER_SPEC_VERSION: __webpack_require__(14031).SEMVER_SPEC_VERSION,
  SemVer: __webpack_require__(81538),
  compareIdentifiers: __webpack_require__(6857).compareIdentifiers,
  rcompareIdentifiers: __webpack_require__(6857).rcompareIdentifiers,
  // parsing and version stepping
  parse: __webpack_require__(83896),
  valid: __webpack_require__(65202),
  clean: __webpack_require__(65105),
  inc: __webpack_require__(10948),
  diff: __webpack_require__(3976),
  major: __webpack_require__(71188),
  minor: __webpack_require__(90961),
  patch: __webpack_require__(22793),
  prerelease: __webpack_require__(58258),
  // comparison helpers
  compare: __webpack_require__(62742),
  rcompare: __webpack_require__(78822),
  compareLoose: __webpack_require__(39309),
  compareBuild: __webpack_require__(48372),
  sort: __webpack_require__(71927),
  rsort: __webpack_require__(13668),
  gt: __webpack_require__(78203),
  lt: __webpack_require__(1926),
  eq: __webpack_require__(28179),
  neq: __webpack_require__(83568),
  gte: __webpack_require__(66658),
  lte: __webpack_require__(46266),
  cmp: __webpack_require__(35637),
  coerce: __webpack_require__(12901),
  // range handling
  Comparator: __webpack_require__(95622),
  Range: __webpack_require__(3577),
  satisfies: __webpack_require__(71344),
  toComparators: __webpack_require__(56780),
  maxSatisfying: __webpack_require__(44808),
  minSatisfying: __webpack_require__(40776),
  minVersion: __webpack_require__(78544),
  validRange: __webpack_require__(37709),
  outside: __webpack_require__(92054),
  gtr: __webpack_require__(9891),
  ltr: __webpack_require__(8100),
  intersects: __webpack_require__(55128),
  simplifyRange: __webpack_require__(2545),
  subset: __webpack_require__(32675),
}
/***/ }),
/***/ 14031:
/***/ ((module) => {
// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
const SEMVER_SPEC_VERSION = '2.0.0'
// Upper bound on the length of a parseable version string.
const MAX_LENGTH = 256
// Fallback literal for engines predating Number.MAX_SAFE_INTEGER.
const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
/* istanbul ignore next */ 9007199254740991
// Max safe segment length for coercion.
const MAX_SAFE_COMPONENT_LENGTH = 16
module.exports = {
  SEMVER_SPEC_VERSION,
  MAX_LENGTH,
  MAX_SAFE_INTEGER,
  MAX_SAFE_COMPONENT_LENGTH
}
/***/ }),
/***/ 75236:
/***/ ((module) => {
// Debug logger: writes to stderr with a 'SEMVER' prefix, but only when
// NODE_DEBUG mentions "semver"; otherwise it is a no-op function.
const debugEnabled =
  typeof process === 'object' &&
  process.env &&
  process.env.NODE_DEBUG &&
  /\bsemver\b/i.test(process.env.NODE_DEBUG)
const debug = debugEnabled
  ? (...args) => console.error('SEMVER', ...args)
  : () => {}
module.exports = debug
/***/ }),
/***/ 6857:
/***/ ((module) => {
const numeric = /^[0-9]+$/
// Compare two dot-separated identifier parts per SemVer precedence rules:
// all-digit identifiers compare numerically and always sort lower than
// alphanumeric ones; alphanumeric identifiers compare lexically.
const compareIdentifiers = (a, b) => {
  const aIsNumeric = numeric.test(a)
  const bIsNumeric = numeric.test(b)
  if (aIsNumeric && bIsNumeric) {
    a = +a
    b = +b
  }
  if (a === b) {
    return 0
  }
  if (aIsNumeric && !bIsNumeric) {
    return -1
  }
  if (bIsNumeric && !aIsNumeric) {
    return 1
  }
  return a < b ? -1 : 1
}
// Reversed comparison, for descending orderings.
const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a)
module.exports = {
compareIdentifiers,
rcompareIdentifiers
}
/***/ }),
/***/ 23856:
/***/ ((module) => {
// parse out just the options we care about so we always get a consistent
// obj with keys in a consistent order.
// Recognized option keys, in the canonical order they appear in the
// normalized object.
const opts = ['includePrerelease', 'loose', 'rtl']
// Normalize the user-supplied options into a fresh object with a
// consistent shape and key order.
const parseOptions = options => {
  if (!options) {
    return {}
  }
  if (typeof options !== 'object') {
    // Any truthy non-object (legacy boolean/string form) means loose mode.
    return { loose: true }
  }
  const normalized = {}
  for (const key of opts) {
    if (options[key]) {
      normalized[key] = true
    }
  }
  return normalized
}
module.exports = parseOptions
/***/ }),
/***/ 90996:
/***/ ((module, exports, __webpack_require__) => {
const { MAX_SAFE_COMPONENT_LENGTH } = __webpack_require__(14031)
const debug = __webpack_require__(75236)
exports = module.exports = {}
// The actual regexps go on exports.re
// re/src are parallel arrays indexed by token id; t maps token names to
// those ids, and R is the next id to hand out.
const re = exports.re = []
const src = exports.src = []
const t = exports.t = {}
let R = 0
// Register a named regular expression token: records its source text in
// src, its compiled form in re, and its index in t — all under the next
// sequential token id.
const createToken = (name, value, isGlobal) => {
  const index = R++
  debug(index, value)
  t[name] = index
  src[index] = value
  const flags = isGlobal ? 'g' : undefined
  re[index] = new RegExp(value, flags)
}
// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.
// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.
createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*')
createToken('NUMERICIDENTIFIERLOOSE', '[0-9]+')
// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.
createToken('NONNUMERICIDENTIFIER', '\\d*[a-zA-Z-][a-zA-Z0-9-]*')
// ## Main Version
// Three dot-separated numeric identifiers.
createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` +
`(${src[t.NUMERICIDENTIFIER]})\\.` +
`(${src[t.NUMERICIDENTIFIER]})`)
createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` +
`(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` +
`(${src[t.NUMERICIDENTIFIERLOOSE]})`)
// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.
createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER]
}|${src[t.NONNUMERICIDENTIFIER]})`)
createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE]
}|${src[t.NONNUMERICIDENTIFIER]})`)
// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.
createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER]
}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`)
createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE]
}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`)
// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.
createToken('BUILDIDENTIFIER', '[0-9A-Za-z-]+')
// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.
createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER]
}(?:\\.${src[t.BUILDIDENTIFIER]})*))`)
// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.
// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.
createToken('FULLPLAIN', `v?${src[t.MAINVERSION]
}${src[t.PRERELEASE]}?${
src[t.BUILD]}?`)
createToken('FULL', `^${src[t.FULLPLAIN]}$`)
// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE]
}${src[t.PRERELEASELOOSE]}?${
src[t.BUILD]}?`)
createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`)
createToken('GTLT', '((?:<|>)?=?)')
// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`)
createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`)
// Capture groups: (major)(minor)(patch)(prerelease); build is uncaptured.
createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` +
`(?:\\.(${src[t.XRANGEIDENTIFIER]})` +
`(?:\\.(${src[t.XRANGEIDENTIFIER]})` +
`(?:${src[t.PRERELEASE]})?${
src[t.BUILD]}?` +
`)?)?`)
createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` +
`(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` +
`(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` +
`(?:${src[t.PRERELEASELOOSE]})?${
src[t.BUILD]}?` +
`)?)?`)
createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`)
createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`)
// Coercion.
// Extract anything that could conceivably be a part of a valid semver
createToken('COERCE', `${'(^|[^\\d])' +
'(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` +
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
`(?:$|[^\\d])`)
// Global ('g') variant: stateful lastIndex is manipulated by coerce()'s
// right-to-left scan.
createToken('COERCERTL', src[t.COERCE], true)
// Tilde ranges.
// Meaning is "reasonably at or greater than"
createToken('LONETILDE', '(?:~>?)')
// Collapses '~ 1.2.3' to '~1.2.3' via tildeTrimReplace.
createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true)
exports.tildeTrimReplace = '$1~'
createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`)
createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`)
// Caret ranges.
// Meaning is "at least and backwards compatible with"
createToken('LONECARET', '(?:\\^)')
createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true)
exports.caretTrimReplace = '$1^'
createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`)
createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`)
// A simple gt/lt/eq thing, or just "" to indicate "any version"
createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`)
createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`)
// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT]
}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true)
exports.comparatorTrimReplace = '$1$2$3'
// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` +
`\\s+-\\s+` +
`(${src[t.XRANGEPLAIN]})` +
`\\s*$`)
createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` +
`\\s+-\\s+` +
`(${src[t.XRANGEPLAINLOOSE]})` +
`\\s*$`)
// Star ranges basically just allow anything at all.
createToken('STAR', '(<|>)?=?\\s*\\*')
// >=0.0.0 is like a star
// Fix: the dots must be escaped as '\\.' so the regex matches literal
// dots. The previous '\.' in a single-quoted string is just '.', which
// matches ANY character, so replaceGTE0 would also strip strings like
// '>=0a0b0'. Upstream node-semver escapes these dots.
createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$')
createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$')
/***/ }),
/***/ 9891:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
// Determine if version is greater than all the versions possible in the range.
const outside = __webpack_require__(92054)
const gtr = (version, range, options) => outside(version, range, '>', options)
module.exports = gtr
/***/ }),
/***/ 55128:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const Range = __webpack_require__(3577)
// True when the two ranges share at least one satisfiable version.
// `options` only affects parsing here; Range#intersects is invoked
// without them, matching the original behavior.
const intersects = (r1, r2, options) => {
  const rangeA = new Range(r1, options)
  const rangeB = new Range(r2, options)
  return rangeA.intersects(rangeB)
}
module.exports = intersects
/***/ }),
/***/ 8100:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const outside = __webpack_require__(92054)
// Determine if version is less than all the versions possible in the range
const ltr = (version, range, options) => outside(version, range, '<', options)
module.exports = ltr
/***/ }),
/***/ 44808:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const SemVer = __webpack_require__(81538)
const Range = __webpack_require__(3577)
// Pick the highest version in `versions` that satisfies `range`, or null
// when the range is invalid or nothing matches.
const maxSatisfying = (versions, range, options) => {
  let rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }
  let max = null
  let maxSV = null
  for (const v of versions) {
    if (!rangeObj.test(v)) {
      continue
    }
    if (!max || maxSV.compare(v) === -1) {
      // v beats the current best candidate
      max = v
      maxSV = new SemVer(max, options)
    }
  }
  return max
}
module.exports = maxSatisfying
/***/ }),
/***/ 40776:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const SemVer = __webpack_require__(81538)
const Range = __webpack_require__(3577)
// Pick the lowest version in `versions` that satisfies `range`, or null
// when the range is invalid or nothing matches.
const minSatisfying = (versions, range, options) => {
  let rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }
  let min = null
  let minSV = null
  for (const v of versions) {
    if (!rangeObj.test(v)) {
      continue
    }
    if (!min || minSV.compare(v) === 1) {
      // v undercuts the current best candidate
      min = v
      minSV = new SemVer(min, options)
    }
  }
  return min
}
module.exports = minSatisfying
/***/ }),
/***/ 78544:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const SemVer = __webpack_require__(81538)
const Range = __webpack_require__(3577)
const gt = __webpack_require__(78203)
// Compute the lowest version that can satisfy the range, or null if no
// satisfying version exists.
const minVersion = (range, loose) => {
  range = new Range(range, loose)
  // Fast paths: try the two absolute minimum versions first.
  let minver = new SemVer('0.0.0')
  if (range.test(minver)) {
    return minver
  }
  minver = new SemVer('0.0.0-0')
  if (range.test(minver)) {
    return minver
  }
  minver = null
  // A range is an OR of comparator sets; the answer is the smallest of
  // the per-set minimums.
  for (let i = 0; i < range.set.length; ++i) {
    const comparators = range.set[i]
    let setMin = null
    comparators.forEach((comparator) => {
      // Clone to avoid manipulating the comparator's semver object.
      const compver = new SemVer(comparator.semver.version)
      switch (comparator.operator) {
        case '>':
          // '>x.y.z' excludes x.y.z itself: bump to the next candidate
          // version, then fall through to the '>=' handling.
          if (compver.prerelease.length === 0) {
            compver.patch++
          } else {
            compver.prerelease.push(0)
          }
          compver.raw = compver.format()
          /* fallthrough */
        case '':
        case '>=':
          if (!setMin || gt(compver, setMin)) {
            setMin = compver
          }
          break
        case '<':
        case '<=':
          /* Ignore maximum versions */
          break
        /* istanbul ignore next */
        default:
          throw new Error(`Unexpected operation: ${comparator.operator}`)
      }
    })
    // Keep the smaller of this set's minimum and the best so far.
    if (setMin && (!minver || gt(minver, setMin)))
      minver = setMin
  }
  // The candidate may still be excluded by '<'/'<=' comparators.
  if (minver && range.test(minver)) {
    return minver
  }
  return null
}
module.exports = minVersion
/***/ }),
/***/ 92054:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const SemVer = __webpack_require__(81538)
const Comparator = __webpack_require__(95622)
const {ANY} = Comparator
const Range = __webpack_require__(3577)
const satisfies = __webpack_require__(71344)
const gt = __webpack_require__(78203)
const lt = __webpack_require__(1926)
const lte = __webpack_require__(46266)
const gte = __webpack_require__(66658)
// Determine if `version` lies outside the bounds of `range` on the `hilo`
// side: '>' asks "is it higher than everything the range allows" (gtr),
// '<' asks "is it lower than everything the range allows" (ltr).
const outside = (version, range, hilo, options) => {
  version = new SemVer(version, options)
  range = new Range(range, options)
  // Comparison helpers are flipped so one walk serves both directions.
  let gtfn, ltefn, ltfn, comp, ecomp
  switch (hilo) {
    case '>':
      gtfn = gt
      ltefn = lte
      ltfn = lt
      comp = '>'
      ecomp = '>='
      break
    case '<':
      gtfn = lt
      ltefn = gte
      ltfn = gt
      comp = '<'
      ecomp = '<='
      break
    default:
      throw new TypeError('Must provide a hilo val of "<" or ">"')
  }
  // If it satisfies the range it is not outside
  if (satisfies(version, range, options)) {
    return false
  }
  // From now on, variable terms are as if we're in "gtr" mode.
  // but note that everything is flipped for the "ltr" function.
  for (let i = 0; i < range.set.length; ++i) {
    const comparators = range.set[i]
    let high = null
    let low = null
    // Find the highest and lowest comparator of this AND'd set.
    comparators.forEach((comparator) => {
      if (comparator.semver === ANY) {
        comparator = new Comparator('>=0.0.0')
      }
      high = high || comparator
      low = low || comparator
      if (gtfn(comparator.semver, high.semver, options)) {
        high = comparator
      } else if (ltfn(comparator.semver, low.semver, options)) {
        low = comparator
      }
    })
    // If the edge version comparator has a operator then our version
    // isn't outside it
    if (high.operator === comp || high.operator === ecomp) {
      return false
    }
    // If the lowest version comparator has an operator and our version
    // is less than it then it isn't higher than the range
    if ((!low.operator || low.operator === comp) &&
        ltefn(version, low.semver)) {
      return false
    } else if (low.operator === ecomp && ltfn(version, low.semver)) {
      return false
    }
  }
  // The version is outside every OR'd set in the range.
  return true
}
module.exports = outside
/***/ }),
/***/ 2545:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
// given a set of versions and a range, create a "simplified" range
// that includes the same versions that the original range does
// If the original range is shorter than the simplified one, return that.
const satisfies = __webpack_require__(71344)
const compare = __webpack_require__(62742)
// Build a simplified range covering exactly the members of `versions`
// that satisfy `range`; return the original range when it is shorter.
// NOTE(review): sort() mutates the caller's `versions` array — confirm
// this is intended by callers.
module.exports = (versions, range, options) => {
  // Contiguous [min, max] runs of sorted versions that satisfy the range.
  const set = []
  let min = null
  let prev = null
  const v = versions.sort((a, b) => compare(a, b, options))
  for (const version of v) {
    const included = satisfies(version, range, options)
    if (included) {
      // Extend the current run (starting one if needed).
      prev = version
      if (!min)
        min = version
    } else {
      // Close off the run that just ended.
      if (prev) {
        set.push([min, prev])
      }
      prev = null
      min = null
    }
  }
  // A run still open at the end is unbounded above.
  if (min)
    set.push([min, null])
  // Render each run with the shortest equivalent range syntax.
  const ranges = []
  for (const [min, max] of set) {
    if (min === max)
      ranges.push(min)
    else if (!max && min === v[0])
      ranges.push('*')
    else if (!max)
      ranges.push(`>=${min}`)
    else if (min === v[0])
      ranges.push(`<=${max}`)
    else
      ranges.push(`${min} - ${max}`)
  }
  const simplified = ranges.join(' || ')
  const original = typeof range.raw === 'string' ? range.raw : String(range)
  // Only use the simplification when it is actually shorter.
  return simplified.length < original.length ? simplified : range
}
/***/ }),
/***/ 32675:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const Range = __webpack_require__(3577)
const Comparator = __webpack_require__(95622)
const { ANY } = Comparator
const satisfies = __webpack_require__(71344)
const compare = __webpack_require__(62742)
// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff:
// - Every simple range `r1, r2, ...` is a null set, OR
// - Every simple range `r1, r2, ...` which is not a null set is a subset of
// some `R1, R2, ...`
//
// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff:
// - If c is only the ANY comparator
// - If C is only the ANY comparator, return true
// - Else if in prerelease mode, return false
// - else replace c with `[>=0.0.0]`
// - If C is only the ANY comparator
// - if in prerelease mode, return true
// - else replace C with `[>=0.0.0]`
// - Let EQ be the set of = comparators in c
// - If EQ is more than one, return true (null set)
// - Let GT be the highest > or >= comparator in c
// - Let LT be the lowest < or <= comparator in c
// - If GT and LT, and GT.semver > LT.semver, return true (null set)
// - If any C is a = range, and GT or LT are set, return false
// - If EQ
// - If GT, and EQ does not satisfy GT, return true (null set)
// - If LT, and EQ does not satisfy LT, return true (null set)
// - If EQ satisfies every C, return true
// - Else return false
// - If GT
// - If GT.semver is lower than any > or >= comp in C, return false
// - If GT is >=, and GT.semver does not satisfy every C, return false
// - If GT.semver has a prerelease, and not in prerelease mode
// - If no C has a prerelease and the GT.semver tuple, return false
// - If LT
// - If LT.semver is greater than any < or <= comp in C, return false
// - If LT is <=, and LT.semver does not satisfy every C, return false
// - If GT.semver has a prerelease, and not in prerelease mode
// - If no C has a prerelease and the LT.semver tuple, return false
// - Else return true
// Test whether every version matched by `sub` is also matched by `dom`.
// See the algorithm description in the comment block above.
const subset = (sub, dom, options = {}) => {
  // Identical inputs are trivially subsets of each other.
  if (sub === dom)
    return true
  sub = new Range(sub, options)
  dom = new Range(dom, options)
  let sawNonNull = false
  // Each OR'd simple range of sub must fit inside SOME simple range of dom.
  OUTER: for (const simpleSub of sub.set) {
    for (const simpleDom of dom.set) {
      const isSub = simpleSubset(simpleSub, simpleDom, options)
      sawNonNull = sawNonNull || isSub !== null
      if (isSub)
        continue OUTER
    }
    // the null set is a subset of everything, but null simple ranges in
    // a complex range should be ignored. so if we saw a non-null range,
    // then we know this isn't a subset, but if EVERY simple range was null,
    // then it is a subset.
    if (sawNonNull)
      return false
  }
  return true
}
// Test whether the simple (AND'd) comparator set `sub` is a subset of the
// simple set `dom`. Returns true/false, or null when `sub` is the null set.
const simpleSubset = (sub, dom, options) => {
  if (sub === dom)
    return true
  // Normalize the '*' (ANY) comparator on either side.
  if (sub.length === 1 && sub[0].semver === ANY) {
    if (dom.length === 1 && dom[0].semver === ANY)
      return true
    else if (options.includePrerelease)
      sub = [ new Comparator('>=0.0.0-0') ]
    else
      sub = [ new Comparator('>=0.0.0') ]
  }
  // NOTE(review): the algorithm comment above this module says an ANY sub
  // should return false outside prerelease mode, but the code substitutes
  // >=0.0.0 instead — confirm against upstream node-semver.
  if (dom.length === 1 && dom[0].semver === ANY) {
    if (options.includePrerelease)
      return true
    else
      dom = [ new Comparator('>=0.0.0') ]
  }
  const eqSet = new Set()
  // Split sub into its lower bound (gt), upper bound (lt) and equalities.
  let gt, lt
  for (const c of sub) {
    if (c.operator === '>' || c.operator === '>=')
      gt = higherGT(gt, c, options)
    else if (c.operator === '<' || c.operator === '<=')
      lt = lowerLT(lt, c, options)
    else
      eqSet.add(c.semver)
  }
  // Two different equalities can never both hold: null set.
  if (eqSet.size > 1)
    return null
  let gtltComp
  if (gt && lt) {
    gtltComp = compare(gt.semver, lt.semver, options)
    // Lower bound above upper bound, or touching with a strict operator:
    // null set.
    if (gtltComp > 0)
      return null
    else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<='))
      return null
  }
  // will iterate one or zero times
  for (const eq of eqSet) {
    if (gt && !satisfies(eq, String(gt), options))
      return null
    if (lt && !satisfies(eq, String(lt), options))
      return null
    // The single pinned version must satisfy every dom comparator.
    for (const c of dom) {
      if (!satisfies(eq, String(c), options))
        return false
    }
    return true
  }
  let higher, lower
  let hasDomLT, hasDomGT
  // if the subset has a prerelease, we need a comparator in the superset
  // with the same tuple and a prerelease, or it's not a subset
  let needDomLTPre = lt &&
    !options.includePrerelease &&
    lt.semver.prerelease.length ? lt.semver : false
  let needDomGTPre = gt &&
    !options.includePrerelease &&
    gt.semver.prerelease.length ? gt.semver : false
  // exception: <1.2.3-0 is the same as <1.2.3
  if (needDomLTPre && needDomLTPre.prerelease.length === 1 &&
      lt.operator === '<' && needDomLTPre.prerelease[0] === 0) {
    needDomLTPre = false
  }
  for (const c of dom) {
    hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>='
    hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<='
    if (gt) {
      // A dom comparator carrying a prerelease on the same tuple satisfies
      // the prerelease requirement for the lower bound.
      if (needDomGTPre) {
        if (c.semver.prerelease && c.semver.prerelease.length &&
            c.semver.major === needDomGTPre.major &&
            c.semver.minor === needDomGTPre.minor &&
            c.semver.patch === needDomGTPre.patch) {
          needDomGTPre = false
        }
      }
      if (c.operator === '>' || c.operator === '>=') {
        higher = higherGT(gt, c, options)
        // dom's lower bound is above sub's: sub spills below dom.
        if (higher === c && higher !== gt)
          return false
      } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options))
        return false
    }
    if (lt) {
      if (needDomLTPre) {
        if (c.semver.prerelease && c.semver.prerelease.length &&
            c.semver.major === needDomLTPre.major &&
            c.semver.minor === needDomLTPre.minor &&
            c.semver.patch === needDomLTPre.patch) {
          needDomLTPre = false
        }
      }
      if (c.operator === '<' || c.operator === '<=') {
        lower = lowerLT(lt, c, options)
        // dom's upper bound is below sub's: sub spills above dom.
        if (lower === c && lower !== lt)
          return false
      } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options))
        return false
    }
    // A bare (operator-less) version in dom can only contain a pinned sub.
    if (!c.operator && (lt || gt) && gtltComp !== 0)
      return false
  }
  // if there was a < or >, and nothing in the dom, then must be false
  // UNLESS it was limited by another range in the other direction.
  // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0
  if (gt && hasDomLT && !lt && gtltComp !== 0)
    return false
  if (lt && hasDomGT && !gt && gtltComp !== 0)
    return false
  // we needed a prerelease range in a specific tuple, but didn't get one
  // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0,
  // because it includes prereleases in the 1.2.3 tuple
  if (needDomGTPre || needDomLTPre)
    return false
  return true
}
// Return whichever of two > / >= comparators is the higher lower bound.
// Note: >=1.2.3 is lower than >1.2.3.
const higherGT = (a, b, options) => {
  if (!a) {
    return b
  }
  const comp = compare(a.semver, b.semver, options)
  if (comp > 0) {
    return a
  }
  if (comp < 0) {
    return b
  }
  // Equal versions: the strict '>' excludes the endpoint, so it is higher.
  if (b.operator === '>' && a.operator === '>=') {
    return b
  }
  return a
}
// Return whichever of two < / <= comparators is the lower upper bound.
// Note: <=1.2.3 is higher than <1.2.3.
const lowerLT = (a, b, options) => {
  if (!a) {
    return b
  }
  const comp = compare(a.semver, b.semver, options)
  if (comp < 0) {
    return a
  }
  if (comp > 0) {
    return b
  }
  // Equal versions: the strict '<' excludes the endpoint, so it is lower.
  if (b.operator === '<' && a.operator === '<=') {
    return b
  }
  return a
}
module.exports = subset
/***/ }),
/***/ 56780:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const Range = __webpack_require__(3577)
// Mostly just for testing and legacy API reasons: expand a range into
// arrays of comparator strings, one array per OR'd alternative.
const toComparators = (range, options) => {
  const parsed = new Range(range, options)
  return parsed.set.map((comparatorSet) =>
    comparatorSet.map((c) => c.value).join(' ').trim().split(' '))
}
module.exports = toComparators
/***/ }),
/***/ 37709:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const Range = __webpack_require__(3577)
// Return the normalized range string, '*' for an empty (match-all) range,
// or null when the range cannot be parsed.
const validRange = (range, options) => {
  let parsed
  try {
    // Parsing throws when the range is invalid.
    parsed = new Range(range, options)
  } catch (er) {
    return null
  }
  // Return '*' instead of '' so that truthiness works.
  return parsed.range || '*'
}
module.exports = validRange
/***/ }),
/***/ 44137:
/***/ ((module) => {
"use strict";
module.exports = function (Yallist) {
Yallist.prototype[Symbol.iterator] = function* () {
for (let walker = this.head; walker; walker = walker.next) {
yield walker.value
}
}
}
/***/ }),
/***/ 74918:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
module.exports = Yallist
Yallist.Node = Node
Yallist.create = Yallist
// Doubly linked list constructor. Callable with or without `new`; accepts
// either an object with a forEach method (items are copied in) or the
// items themselves as individual arguments.
function Yallist (list) {
  var self = this
  if (!(self instanceof Yallist)) {
    self = new Yallist()
  }
  self.tail = null
  self.head = null
  self.length = 0
  if (list && typeof list.forEach === 'function') {
    list.forEach(function (item) {
      self.push(item)
    })
  } else if (arguments.length > 0) {
    // Yallist(1, 2, 3) form: every argument becomes an item.
    for (var i = 0, l = arguments.length; i < l; i++) {
      self.push(arguments[i])
    }
  }
  return self
}
// Detach `node` from this list, stitching its neighbours together.
// Returns the node that followed it (handy while iterating).
Yallist.prototype.removeNode = function (node) {
  if (node.list !== this) {
    throw new Error('removing node which does not belong to this list')
  }
  var next = node.next
  var prev = node.prev
  if (next) {
    next.prev = prev
  }
  if (prev) {
    prev.next = next
  }
  if (node === this.head) {
    this.head = next
  }
  if (node === this.tail) {
    this.tail = prev
  }
  node.list.length--
  node.next = null
  node.prev = null
  node.list = null
  return next
}
// Move an existing node (possibly from another list) to the front.
Yallist.prototype.unshiftNode = function (node) {
  if (node === this.head) {
    return
  }
  if (node.list) {
    node.list.removeNode(node)
  }
  var head = this.head
  node.list = this
  node.next = head
  if (head) {
    head.prev = node
  }
  this.head = node
  if (!this.tail) {
    this.tail = node
  }
  this.length++
}
// Move an existing node (possibly from another list) to the back.
Yallist.prototype.pushNode = function (node) {
  if (node === this.tail) {
    return
  }
  if (node.list) {
    node.list.removeNode(node)
  }
  var tail = this.tail
  node.list = this
  node.prev = tail
  if (tail) {
    tail.next = node
  }
  this.tail = node
  if (!this.head) {
    this.head = node
  }
  this.length++
}
// Append each argument at the tail; returns the new length (as Array#push).
Yallist.prototype.push = function () {
  for (var i = 0, l = arguments.length; i < l; i++) {
    push(this, arguments[i])
  }
  return this.length
}
// Prepend each argument at the head; returns the new length.
Yallist.prototype.unshift = function () {
  for (var i = 0, l = arguments.length; i < l; i++) {
    unshift(this, arguments[i])
  }
  return this.length
}
// Remove and return the tail value, or undefined when the list is empty.
Yallist.prototype.pop = function () {
  if (!this.tail) {
    return undefined
  }
  var res = this.tail.value
  this.tail = this.tail.prev
  if (this.tail) {
    this.tail.next = null
  } else {
    // The list just became empty.
    this.head = null
  }
  this.length--
  return res
}
// Remove and return the head value, or undefined when the list is empty.
Yallist.prototype.shift = function () {
  if (!this.head) {
    return undefined
  }
  var res = this.head.value
  this.head = this.head.next
  if (this.head) {
    this.head.prev = null
  } else {
    // The list just became empty.
    this.tail = null
  }
  this.length--
  return res
}
// Invoke fn(value, index, list) for each value, head to tail.
Yallist.prototype.forEach = function (fn, thisp) {
  thisp = thisp || this
  for (var walker = this.head, i = 0; walker !== null; i++) {
    fn.call(thisp, walker.value, i, this)
    walker = walker.next
  }
}
// Invoke fn(value, index, list) for each value, tail to head.
Yallist.prototype.forEachReverse = function (fn, thisp) {
  thisp = thisp || this
  for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
    fn.call(thisp, walker.value, i, this)
    walker = walker.prev
  }
}
// Return the value at index n from the head, or undefined if out of range.
Yallist.prototype.get = function (n) {
  for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
    // abort out of the list early if we hit a cycle
    walker = walker.next
  }
  if (i === n && walker !== null) {
    return walker.value
  }
}
// Return the value at index n counting back from the tail.
Yallist.prototype.getReverse = function (n) {
  for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
    // abort out of the list early if we hit a cycle
    walker = walker.prev
  }
  if (i === n && walker !== null) {
    return walker.value
  }
}
// Build a new Yallist of fn(value, list), walking head to tail.
Yallist.prototype.map = function (fn, thisp) {
  thisp = thisp || this
  var res = new Yallist()
  for (var walker = this.head; walker !== null;) {
    res.push(fn.call(thisp, walker.value, this))
    walker = walker.next
  }
  return res
}
// Build a new Yallist of fn(value, list), walking tail to head.
Yallist.prototype.mapReverse = function (fn, thisp) {
  thisp = thisp || this
  var res = new Yallist()
  for (var walker = this.tail; walker !== null;) {
    res.push(fn.call(thisp, walker.value, this))
    walker = walker.prev
  }
  return res
}
// Left fold over the values. Without an initial value the head seeds the
// accumulator (throws on an empty list, like Array#reduce).
// NOTE(review): when seeded from the head, the index passed to fn starts
// at 0 for the SECOND element (Array#reduce would pass 1) — confirm
// against upstream yallist before relying on the index.
Yallist.prototype.reduce = function (fn, initial) {
  var acc
  var walker = this.head
  if (arguments.length > 1) {
    acc = initial
  } else if (this.head) {
    walker = this.head.next
    acc = this.head.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }
  for (var i = 0; walker !== null; i++) {
    acc = fn(acc, walker.value, i)
    walker = walker.next
  }
  return acc
}
// Right fold over the values; seeds from the tail when no initial value
// is given (same index caveat as reduce above).
Yallist.prototype.reduceReverse = function (fn, initial) {
  var acc
  var walker = this.tail
  if (arguments.length > 1) {
    acc = initial
  } else if (this.tail) {
    walker = this.tail.prev
    acc = this.tail.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }
  for (var i = this.length - 1; walker !== null; i--) {
    acc = fn(acc, walker.value, i)
    walker = walker.prev
  }
  return acc
}
// Copy the list values into a fresh array, head to tail.
Yallist.prototype.toArray = function () {
  var arr = new Array(this.length)
  var i = 0
  for (var walker = this.head; walker !== null; walker = walker.next) {
    arr[i++] = walker.value
  }
  return arr
}
// Copy the list values into a fresh array, tail to head.
Yallist.prototype.toArrayReverse = function () {
  var arr = new Array(this.length)
  var i = 0
  for (var walker = this.tail; walker !== null; walker = walker.prev) {
    arr[i++] = walker.value
  }
  return arr
}
// Return a new Yallist containing values in [from, to), as in Array#slice
// (negative indices count from the end).
Yallist.prototype.slice = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }
  // Walk to the starting node, then collect values until index `to`.
  for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
    walker = walker.next
  }
  for (; walker !== null && i < to; i++, walker = walker.next) {
    ret.push(walker.value)
  }
  return ret
}
// Like slice(), but the resulting list holds the values in reverse order.
Yallist.prototype.sliceReverse = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }
  // Walk back from the tail to index `to`, then collect down to `from`.
  for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
    walker = walker.prev
  }
  for (; walker !== null && i > from; i--, walker = walker.prev) {
    ret.push(walker.value)
  }
  return ret
}
// Remove `deleteCount` values starting at `start`, inserting `nodes` in
// their place; returns the removed values (modeled on Array#splice).
Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
  if (start > this.length) {
    start = this.length - 1
  }
  if (start < 0) {
    start = this.length + start;
  }
  // Walk to the node at index `start`.
  for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
    walker = walker.next
  }
  var ret = []
  for (var i = 0; walker && i < deleteCount; i++) {
    ret.push(walker.value)
    walker = this.removeNode(walker)
  }
  // Re-anchor the insertion point after the deletions.
  // NOTE(review): the head/tail adjustments below look position-sensitive;
  // confirm edge-case behavior (empty list, splice at ends) against
  // upstream yallist before changing anything here.
  if (walker === null) {
    walker = this.tail
  }
  if (walker !== this.head && walker !== this.tail) {
    walker = walker.prev
  }
  for (var i = 0; i < nodes.length; i++) {
    walker = insert(this, walker, nodes[i])
  }
  return ret;
}
// Reverse the list in place by swapping every node's prev/next links.
Yallist.prototype.reverse = function () {
  var head = this.head
  var tail = this.tail
  for (var walker = head; walker !== null; walker = walker.prev) {
    var p = walker.prev
    walker.prev = walker.next
    walker.next = p
  }
  this.head = tail
  this.tail = head
  return this
}
// Insert `value` next to `node` (before it when `node` is the current
// head, otherwise after it); returns the freshly created Node.
function insert (self, node, value) {
  var inserted = node === self.head ?
    new Node(value, null, node, self) :
    new Node(value, node, node.next, self)
  if (inserted.next === null) {
    self.tail = inserted
  }
  if (inserted.prev === null) {
    self.head = inserted
  }
  self.length++
  return inserted
}
// Append a single item at the tail.
function push (self, item) {
  self.tail = new Node(item, self.tail, null, self)
  if (!self.head) {
    self.head = self.tail
  }
  self.length++
}
// Prepend a single item at the head.
function unshift (self, item) {
  self.head = new Node(item, null, self.head, self)
  if (!self.tail) {
    self.tail = self.head
  }
  self.length++
}
// Linked-list node; wires itself into its neighbours on construction.
function Node (value, prev, next, list) {
  if (!(this instanceof Node)) {
    return new Node(value, prev, next, list)
  }
  this.list = list
  this.value = value
  if (prev) {
    prev.next = this
    this.prev = prev
  } else {
    this.prev = null
  }
  if (next) {
    next.prev = this
    this.next = next
  } else {
    this.next = null
  }
}
try {
  // add if support for Symbol.iterator is present
  __webpack_require__(44137)(Yallist)
} catch (er) {}
/***/ }),
/***/ 87315:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.checkSupport = void 0;
const contentTypes = __webpack_require__(42625);
const registry_call_1 = __webpack_require__(15271);
/**
* Root V2 endpoint, useful to check V2 support and validating credentials.
*/
async function checkSupport(registryBase, username, password, options = {}) {
    // Query the registry root — the full url path should be "/v2/" as per
    // the Docker Registry HTTP API v2 spec.
    const result = await registry_call_1.registryV2Call(registryBase, "/", contentTypes.JSON, username, password, options);
    // Always return a truthy object, even for ECR.
    return result || {};
}
exports.checkSupport = checkSupport;
//# sourceMappingURL=check-support.js.map
/***/ }),
/***/ 42625:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.JSON = exports.LAYER = exports.IMAGE_CONFIG = exports.MANIFEST_LIST_V2 = exports.MANIFEST_V2 = void 0;
// Docker Registry HTTP API v2 media types, used as Accept/Content-Type values.
exports.MANIFEST_V2 = "application/vnd.docker.distribution.manifest.v2+json";
exports.MANIFEST_LIST_V2 = "application/vnd.docker.distribution.manifest.list.v2+json";
exports.IMAGE_CONFIG = "application/vnd.docker.container.image.v1+json";
exports.LAYER = "application/vnd.docker.image.rootfs.diff.tar.gzip";
exports.JSON = "application/json";
//# sourceMappingURL=content-types.js.map
/***/ }),
/***/ 80290:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getAuthTokenForEndpoint = void 0;
const contentTypes = __webpack_require__(42625);
const registry_call_1 = __webpack_require__(15271);
const needle_1 = __webpack_require__(48360);
/**
* getAuthTokenForEndpoint attempts to retrieve a token for an unauthorized endpoint.
* It will make a request to the endpoint and in case it returns an Unauthorized (401)
* response code with the 'www-Authenticate' header, it will make a consecutive call according
* to the details within this header.
* More info: https://docs.docker.com/registry/spec/auth/token/#how-to-authenticate
* @param registryBase - the container registry base URL
* @param endpoint - the endpoint to which we would like to make the request
* @param username - the username for authentication
* @param password - the password for authentication
* @param reqOptions - http request options
* @returns an auth token string if authentication succeeded or null if
* authorization is not required/not bases on bearer token
* @throws an error if http error status code is not 401
*/
async function getAuthTokenForEndpoint(registryBase, endpoint, username, password, reqOptions = {}) {
    // Probe the endpoint without credentials to trigger an auth challenge.
    const reqConfig = registry_call_1.buildUnauthenticatedV2RequestConfig(registryBase, endpoint, contentTypes.JSON, reqOptions);
    try {
        await needle_1.needleWrapper(reqConfig);
        // No error: the endpoint does not require authentication.
        return null;
    }
    catch (err) {
        if (err.statusCode !== 401) {
            // Anything other than Unauthorized is a genuine failure.
            throw err;
        }
        const challengeHeaders = err.headers["www-authenticate"];
        if (!challengeHeaders) {
            return null;
        }
        const [authBase, service, scope] = registry_call_1.parseChallengeHeaders(challengeHeaders);
        if (!authBase) {
            // The challenge is not bearer-token based (e.g. basic auth only).
            return null;
        }
        else {
            // Bearer token
            const token = await registry_call_1.getToken(registryBase, authBase, service, scope, username, password, reqOptions);
            return token;
        }
    }
}
exports.getAuthTokenForEndpoint = getAuthTokenForEndpoint;
//# sourceMappingURL=get-auth-token-for-endpoint.js.map
/***/ }),
/***/ 50702:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getImageConfig = void 0;
const registry_call_1 = __webpack_require__(15271);
const contentTypes = __webpack_require__(42625);
const needle_1 = __webpack_require__(48360);
// Fetch and parse an image config blob, addressed by its digest.
async function getImageConfig(registryBase, repo, digest, username, password, options = {}) {
    const blobEndpoint = `/${repo}/blobs/${digest}`;
    const response = await registry_call_1.registryV2Call(registryBase, blobEndpoint, contentTypes.IMAGE_CONFIG, username, password, options);
    return needle_1.parseResponseBody(response);
}
exports.getImageConfig = getImageConfig;
//# sourceMappingURL=get-image-config.js.map
/***/ }),
/***/ 70395:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getImageSize = void 0;
const get_manifest_1 = __webpack_require__(32603);
// Total image size: the sum of all layer sizes listed in the manifest.
async function getImageSize(registryBase, repo, tag, username, password, options = {}) {
    const manifest = await get_manifest_1.getManifest(registryBase, repo, tag, username, password, options);
    let total = 0;
    for (const layerConfig of manifest.layers || []) {
        total += layerConfig.size;
    }
    return total;
}
exports.getImageSize = getImageSize;
//# sourceMappingURL=get-image-size.js.map
/***/ }),
/***/ 78924:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getLayer = void 0;
const registry_call_1 = __webpack_require__(15271);
const contentTypes = __webpack_require__(42625);
// Download a layer blob by digest; always resolves to a Buffer.
async function getLayer(registryBase, repo, digest, username, password, options = {}) {
    var _a;
    // Callers may override the accepted layer media type.
    const accept = `${(_a = options.acceptLayer) !== null && _a !== void 0 ? _a : contentTypes.LAYER}`;
    const endpoint = `/${repo}/blobs/${digest}`;
    // Layers are binary: disable JSON parsing and string decoding.
    options = Object.assign({ json: false, encoding: null }, options);
    const layerResponse = await registry_call_1.registryV2Call(registryBase, endpoint, accept, username, password, options);
    // When the content type is wrongly set to text instead of binary, response body is returned as string instead of a buffer
    // This forces a buffer response.
    if (!(layerResponse.body instanceof Buffer)) {
        return Buffer.from(layerResponse.raw);
    }
    return layerResponse.body;
}
exports.getLayer = getLayer;
//# sourceMappingURL=get-layer.js.map
/***/ }),
/***/ 32603:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.computeDigest = exports.getManifest = void 0;
const registry_call_1 = __webpack_require__(15271);
const contentTypes = __webpack_require__(42625);
const needle_1 = __webpack_require__(48360);
const crypto_1 = __webpack_require__(76417);
const digestRegex = /^sha256:[a-f0-9]{64}$/i;
/**
* A request to get image manifest by tag or digest
* @param registryBase the hostname (and optionally base path) from which to get the manifest.
* @param repo the requested image repository.
* @param tag the manifest tag or digest value
* @param username optional, username to be used for basic auth.
* @param password optional, password to be used for basic auth.
* @param options optional, request options.
* @param platform optional, the manifest platform (os and architecture), applicable only for multi-architecture manifests.
* Defaults to linux/amd64. In case the requested manifest platform is not found an error is thrown.
* @returns
*/
async function getManifest(registryBase, repo, imageReference, username, password, options = {}, platform) {
    var _a;
    const accept = (_a = options.acceptManifest) !== null && _a !== void 0 ? _a : `${contentTypes.MANIFEST_V2}, ${contentTypes.MANIFEST_LIST_V2}`;
    const endpoint = `/${repo}/manifests/${imageReference}`;
    let manifestResponse = await registry_call_1.registryV2Call(registryBase, endpoint, accept, username, password, options);
    const contentType = manifestResponse.headers["content-type"];
    let indexDigest;
    let manifestDigest;
    if (contentType === contentTypes.MANIFEST_LIST_V2) {
        // Multi-arch image: record the index (manifest-list) digest, then
        // fetch the platform-specific manifest it points to.
        indexDigest = computeDigest(manifestResponse.body);
        manifestDigest = getManifestDigestByPlatform(manifestResponse, platform);
        // need to call again with actual manifest (and not a list of manifest-lists)
        const manifestEndpoint = `/${repo}/manifests/${manifestDigest}`;
        manifestResponse = await registry_call_1.registryV2Call(registryBase, manifestEndpoint, accept, username, password, options);
    }
    // Node exposes incoming HTTP header names in lowercase, so the previous
    // "Docker-Content-Digest" lookup always returned undefined; read the
    // lowercase name first and keep the original spelling as a fallback.
    const dockerContentDigest = manifestResponse.headers["docker-content-digest"] ||
        manifestResponse.headers["Docker-Content-Digest"];
    if (manifestDigest == undefined) {
        if (isManifestDigest(imageReference)) {
            // The caller already addressed the manifest by digest.
            manifestDigest = imageReference;
        }
        else if (isManifestDigest(dockerContentDigest)) {
            // Trust the digest reported by the registry.
            manifestDigest = dockerContentDigest;
        }
        else {
            // Fall back to hashing the manifest body ourselves.
            manifestDigest = computeDigest(manifestResponse.body);
        }
    }
    const parsedBody = needle_1.parseResponseBody(manifestResponse);
    return Object.assign(Object.assign({}, parsedBody), { indexDigest, manifestDigest });
}
exports.getManifest = getManifest;
// True when `imageReference` is already a sha256 digest rather than a tag.
function isManifestDigest(imageReference) {
    return digestRegex.test(imageReference);
}
// Compute the canonical `sha256:<hex>` digest of a manifest body.
// Accepts a string or an already-parsed object (re-serialized via JSON);
// returns undefined for an empty body.
function computeDigest(body) {
    if (!body) {
        return undefined;
    }
    const payload = typeof body === "string" ? body : JSON.stringify(body);
    const hexDigest = crypto_1.createHash("sha256").update(payload).digest("hex").toLowerCase();
    return `sha256:${hexDigest}`;
}
exports.computeDigest = computeDigest;
// Pick the digest of the manifest matching the requested platform from a
// manifest-list response; throws when no entry matches.
function getManifestDigestByPlatform(manifestResponse, platform) {
    // Default to linux/amd64 when the caller did not request a platform.
    const platformFilter = platform || { os: "linux", architecture: "amd64" };
    const manifestList = needle_1.parseResponseBody(manifestResponse);
    const match = getManifestByOsAndArch(manifestList.manifests, platformFilter.os, platformFilter.architecture);
    if (!match) {
        throw new Error(`no supported manifest found for platform: ${JSON.stringify(platformFilter)}`);
    }
    return match.digest;
}
// Find the manifest entry whose platform matches both OS and architecture.
function getManifestByOsAndArch(platformManifest, os, architecture) {
    return platformManifest.find((manifest) => {
        const p = manifest.platform;
        return p.os === os && p.architecture === architecture;
    });
}
//# sourceMappingURL=get-manifest.js.map
/***/ }),
/***/ 73905:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getRepos = void 0;
const contentTypes = __webpack_require__(42625);
const registry_call_1 = __webpack_require__(15271);
// List repositories via the registry /_catalog endpoint, following pagination.
async function getRepos(registryBase, username, password, pageSize = 100, maxPages = Number.MAX_SAFE_INTEGER, options = {}) {
    return registry_call_1.paginatedV2Call(registryBase, contentTypes.JSON, username, password, "/_catalog", "repositories", pageSize, maxPages, options);
}
exports.getRepos = getRepos;
//# sourceMappingURL=get-repos.js.map
/***/ }),
/***/ 80353:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getTags = void 0;
const registry_call_1 = __webpack_require__(15271);
const contentTypes = __webpack_require__(42625);
// List the tags of a repository, following pagination.
async function getTags(registryBase, repo, username, password, pageSize = 1000, maxPages = Number.MAX_SAFE_INTEGER, options = {}) {
    const tagsEndpoint = `/${repo}/tags/list`;
    return registry_call_1.paginatedV2Call(registryBase, contentTypes.JSON, username, password, tagsEndpoint, "tags", pageSize, maxPages, options);
}
exports.getTags = getTags;
//# sourceMappingURL=get-tags.js.map
/***/ }),
/***/ 28310:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
// Barrel module: re-exports the public API of the docker registry v2 client.
exports.types = exports.registryCall = exports.getTags = exports.getRepos = exports.getManifest = exports.getLayer = exports.getImageSize = exports.getImageConfig = exports.getAuthTokenForEndpoint = exports.checkSupport = void 0;
const check_support_1 = __webpack_require__(87315);
Object.defineProperty(exports, "checkSupport", ({ enumerable: true, get: function () { return check_support_1.checkSupport; } }));
const get_auth_token_for_endpoint_1 = __webpack_require__(80290);
Object.defineProperty(exports, "getAuthTokenForEndpoint", ({ enumerable: true, get: function () { return get_auth_token_for_endpoint_1.getAuthTokenForEndpoint; } }));
const get_image_config_1 = __webpack_require__(50702);
Object.defineProperty(exports, "getImageConfig", ({ enumerable: true, get: function () { return get_image_config_1.getImageConfig; } }));
const get_image_size_1 = __webpack_require__(70395);
Object.defineProperty(exports, "getImageSize", ({ enumerable: true, get: function () { return get_image_size_1.getImageSize; } }));
const get_layer_1 = __webpack_require__(78924);
Object.defineProperty(exports, "getLayer", ({ enumerable: true, get: function () { return get_layer_1.getLayer; } }));
const get_manifest_1 = __webpack_require__(32603);
Object.defineProperty(exports, "getManifest", ({ enumerable: true, get: function () { return get_manifest_1.getManifest; } }));
const get_repos_1 = __webpack_require__(73905);
Object.defineProperty(exports, "getRepos", ({ enumerable: true, get: function () { return get_repos_1.getRepos; } }));
const get_tags_1 = __webpack_require__(80353);
Object.defineProperty(exports, "getTags", ({ enumerable: true, get: function () { return get_tags_1.getTags; } }));
const registry_call_1 = __webpack_require__(15271);
Object.defineProperty(exports, "registryCall", ({ enumerable: true, get: function () { return registry_call_1.registryCall; } }));
const types = __webpack_require__(47235);
exports.types = types;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 48360:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.parseResponseBody = exports.needleWrapper = void 0;
const needle = __webpack_require__(57441);
// TODO: this is a temporary code that allows setting needle default timeout (alias for
// open_timeout) to check how it affects the stability of our system, and specifically
// if it helps reducing 'socket hang up' errors.
// More information: https://www.npmjs.com/package/needle
if (process.env.DOCKER_REGISTRY_V2_CLIENT_NEEDLE_OPEN_TIMEOUT) {
    // Always pass an explicit radix: without it, parseInt's base depends on
    // the string's prefix, which is a classic footgun for env-var input.
    const openTimeout = parseInt(process.env.DOCKER_REGISTRY_V2_CLIENT_NEEDLE_OPEN_TIMEOUT, 10);
    if (Number.isInteger(openTimeout)) {
        needle.defaults({
            timeout: openTimeout,
        });
    }
}
/**
 * A wrapper that uses `needle` for making HTTP GET requests and returns a
 * response shaped like the one the `request` library used to produce.
 * Thrown needle errors (e.g. network failures) are retried up to `maxRetries`
 * extra times; non-2xx responses are NOT retried — they raise a
 * NeedleWrapperException carrying the status code and headers.
 * @param options request options (uri, optional qs map, headers, ...)
 * @param maxRetries additional attempts after the first failure (default 0)
 */
async function needleWrapper(options, maxRetries) {
var _a, _b;
let uri = options.uri;
// append query parameters
// NOTE(review): values are concatenated without encodeURIComponent — this
// assumes callers only pass URL-safe values; confirm before reusing elsewhere.
if (options.qs) {
for (const key in options.qs) {
if (options.qs[key] !== undefined) {
uri += `&${key}=${options.qs[key]}`;
}
}
// If the base uri had no query string yet, promote the first appended
// "&" to "?" (String.replace only touches the first occurrence).
if (!uri.includes("?")) {
uri = uri.replace("&", "?");
}
}
let response;
let lastError;
// `retries` counts the attempts still allowed; the loop always runs at least once.
let retries = maxRetries !== null && maxRetries !== void 0 ? maxRetries : 0;
while (!response && retries >= 0) {
retries--;
try {
response = await needle("get", uri, options);
}
catch (err) {
lastError = err;
}
}
if (!response) {
throw lastError;
}
// throw an error in case status code is not 2xx (redirects included — the
// caller handles 3xx by catching the exception)
if (response && response.statusCode >= 300) {
let message;
// Prefer the first structured registry error message when the body has one.
if (((_b = (_a = response.body) === null || _a === void 0 ? void 0 : _a.errors) === null || _b === void 0 ? void 0 : _b.length) > 0) {
message = response.body.errors[0].message;
}
else {
message = response.body;
}
if (typeof message !== "string") {
try {
message = JSON.stringify(message);
// eslint-disable-next-line no-empty
}
catch (_) { }
}
throw new NeedleWrapperException(message, response.statusCode, response.headers);
}
return response;
}
exports.needleWrapper = needleWrapper;
/**
 * Best-effort JSON parse of a response body: returns the parsed value when
 * the body is valid JSON, otherwise returns the raw body untouched.
 */
function parseResponseBody(response) {
    try {
        return JSON.parse(response.body);
    }
    catch (err) {
        return response.body;
    }
}
exports.parseResponseBody = parseResponseBody;
/**
 * Error thrown by needleWrapper for non-2xx responses. Exposes the HTTP
 * `statusCode` and the response `headers` so callers can react (401 auth
 * challenges, 3xx redirects).
 */
class NeedleWrapperException extends Error {
    constructor(message, statusCode, headers) {
        super(message);
        Object.assign(this, { statusCode, headers });
    }
}
//# sourceMappingURL=needle.js.map
/***/ }),
/***/ 15271:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.buildUnauthenticatedV2RequestConfig = exports.parseChallengeHeaders = exports.getToken = exports.paginatedV2Call = exports.registryCall = exports.registryV2Call = void 0;
const parseLink = __webpack_require__(54336);
const url = __webpack_require__(78835);
const needle_1 = __webpack_require__(48360);
const BEARER_REALM = "Bearer realm";
const MAX_RETRIES = 1;
/**
 * GETs a registry v2 endpoint. On a 401 challenge it builds an authenticated
 * config from the www-authenticate header and retries once; 3xx failures are
 * followed via handleRedirect.
 */
async function registryV2Call(registryBase, endpoint, accept, username, password, reqOptions = {}) {
const reqConfig = buildUnauthenticatedV2RequestConfig(registryBase, endpoint, accept, reqOptions);
try {
return await needle_1.needleWrapper(reqConfig, MAX_RETRIES);
}
catch (err) {
if (err.statusCode === 401) {
if (!username || !password) {
// Supply an empty username and password if no credentials
// are provided. These might be added later by a broker client.
username = username ? username : "";
password = password ? password : "";
}
const authConfig = await setAuthConfig(registryBase, err, reqConfig, username, password, reqOptions);
try {
return await needle_1.needleWrapper(authConfig, MAX_RETRIES);
}
catch (err) {
// NOTE(review): the redirect is re-issued with the ORIGINAL
// (unauthenticated) reqConfig rather than authConfig; handleRedirect
// strips auth for ACR anyway — confirm this is intended for others.
if (isRedirectStatus(err.statusCode)) {
return await handleRedirect(err, reqConfig);
}
throw err;
}
}
if (isRedirectStatus(err.statusCode)) {
return await handleRedirect(err, reqConfig);
}
throw err;
}
}
exports.registryV2Call = registryV2Call;
/**
 * WARNING!!!
 *
 * This function was created for a very specific usecase (https://snyksec.atlassian.net/browse/MAGMA-1262)
 * It uses the existing mechanism of obtaining a token for authenticating, but can be used to hit any API endpoint,
 * and not necessarily a Docker V2 endpoint.
 * This is clearly an abuse of a library that's named after the v2 API, and this function should be considered a tech debt.
 * Once it's no longer necessary, it is advised that this function is removed.
 *
 */
async function registryCall(uri, username, password, reqOptions = {}) {
const reqConfig = applyRequestOptions({ uri: `https://${uri}` }, reqOptions);
try {
return await needle_1.needleWrapper(reqConfig, MAX_RETRIES);
}
catch (err) {
if (err.statusCode === 401) {
if (!username || !password) {
// Supply an empty username and password if no credentials
// are provided. These might be added later by a broker client.
username = username ? username : "";
password = password ? password : "";
}
// registryBase is unused by the token flow (see getToken), so "" suffices.
const authConfig = await setAuthConfig("", err, reqConfig, username, password, reqOptions);
try {
return await needle_1.needleWrapper(authConfig, MAX_RETRIES);
}
catch (err) {
if (isRedirectStatus(err.statusCode)) {
return await handleRedirect(err, reqConfig);
}
throw err;
}
}
if (isRedirectStatus(err.statusCode)) {
return await handleRedirect(err, reqConfig);
}
throw err;
}
}
exports.registryCall = registryCall;
/**
 * Fetches `endpoint` page by page (`?n=<pageSize>`), following the registry's
 * `Link` header, and concatenates the `key` array (e.g. "tags") from each
 * page. Stops when no `Link` header is returned, fetching at most `maxPages`
 * pages in total.
 */
async function paginatedV2Call(registryBase, accept, username, password, endpoint, key, pageSize = 1000, maxPages = Number.MAX_SAFE_INTEGER, reqOptions = {}) {
const result = [];
let pageEndpoint = `${endpoint}?n=${pageSize}`;
let pageCount = 0;
while (pageCount < maxPages) {
const response = await registryV2Call(registryBase, pageEndpoint, accept, username, password, reqOptions);
const body = needle_1.parseResponseBody(response);
if (body && body[key]) {
result.push(...body[key]);
}
// No Link header means this was the last page.
if (!response.headers.link) {
break;
}
pageCount += 1;
pageEndpoint = pageEndpointForLink(endpoint, response.headers.link);
}
return result;
}
exports.paginatedV2Call = paginatedV2Call;
/**
 * Requests a bearer token from the auth endpoint advertised by a 401
 * challenge, passing the challenge's `service` and `scope` as query
 * parameters and basic credentials when both are provided.
 * NOTE(review): `registryBase` is unused here; kept for call-site compatibility.
 * @returns the response's `token`, falling back to OAuth2 `access_token`.
 */
async function getToken(registryBase, authBase, service, scope, username, password, reqOptions = {}) {
const reqConfig = applyRequestOptions({
uri: authBase,
qs: {
service,
scope,
},
}, Object.assign({}, reqOptions));
// Test truthiness, should be false when username and password are undefined
if (username && password) {
reqConfig.username = username;
reqConfig.password = password;
}
const response = await needle_1.needleWrapper(reqConfig);
const body = needle_1.parseResponseBody(response);
return body.token || body.access_token;
}
exports.getToken = getToken;
/**
 * Parses a `www-authenticate` challenge header into its parts.
 *
 * Example input:
 *   Bearer realm="https://auth.host/token",service="reg.host",scope="repository:img:pull,push"
 *
 * Values are JSON-quoted strings. A value may itself contain commas (e.g. the
 * scope above); the first pass re-joins fragments produced by splitting on
 * commas. Key and value are then split on the FIRST "=" only — previously
 * `entry.split("=")` kept just the text before the second "=", so any value
 * containing "=" (e.g. a realm URL with a query string) made JSON.parse throw.
 *
 * @param challengeHeaders raw header value
 * @returns [realm, service, scope]; realm is undefined for Basic challenges
 */
function parseChallengeHeaders(challengeHeaders) {
    const headersMap = {};
    const headerSplit = challengeHeaders.split(",");
    for (let i = 0; i < headerSplit.length; i++) {
        const entry = headerSplit[i];
        if (!entry.includes("=") && i > 0) {
            // A value included a comma: re-attach this fragment to the previous entry.
            headerSplit[i - 1] += `,${entry}`;
            headerSplit.splice(i, 1);
            i--;
        }
    }
    headerSplit.forEach(entry => {
        // Split on the first "=" only; the quoted value may legitimately contain "=".
        const sep = entry.indexOf("=");
        const key = entry.slice(0, sep);
        const value = entry.slice(sep + 1);
        headersMap[key] = JSON.parse(value);
    });
    return [headersMap[BEARER_REALM], headersMap.service, headersMap.scope];
}
exports.parseChallengeHeaders = parseChallengeHeaders;
/**
 * Builds the request config for an unauthenticated v2 API call:
 * https://<registryBase>/v2<endpoint> with the given Accept header, then
 * layers the caller-supplied request options on top.
 */
function buildUnauthenticatedV2RequestConfig(registryBase, endpoint, accept, reqOptions = {}) {
    const baseConfig = {
        headers: { Accept: accept },
        uri: `https://${registryBase}/v2${endpoint}`,
    };
    return applyRequestOptions(baseConfig, reqOptions);
}
exports.buildUnauthenticatedV2RequestConfig = buildUnauthenticatedV2RequestConfig;
// Builds the next page's endpoint by reusing the query string of the
// `Link: <...>; rel="next"` URL returned by the registry, attached to the
// original endpoint path.
function pageEndpointForLink(endpoint, link) {
const linkPath = parseLink(link).next.url;
const linkQuery = linkPath.split("?")[1];
return `${endpoint}?${linkQuery}`;
}
/**
 * Turns a 401 error into an authenticated request config: basic auth when
 * the challenge advertises no Bearer realm, otherwise fetches a bearer token
 * and attaches it as an Authorization header. Rethrows the original error
 * when no www-authenticate challenge is present.
 */
async function setAuthConfig(registryBase, err, reqConfig, username, password, reqOptions) {
// See: https://docs.docker.com/registry/spec/auth/token/#how-to-authenticate
const challengeHeaders = err.headers["www-authenticate"];
if (!challengeHeaders) {
throw err;
}
const [authBase, service, scope] = parseChallengeHeaders(challengeHeaders);
if (!authBase) {
// basic auth
return Object.assign(Object.assign({}, reqConfig), { username, password });
}
else {
// bearer token
const token = await getToken(registryBase, authBase, service, scope, username, password, reqOptions);
return Object.assign(Object.assign({}, reqConfig), { headers: Object.assign(Object.assign({}, reqConfig.headers), { Authorization: `Bearer ${token}` }) });
}
}
/**
 * Follows a 3xx response manually.
 * @param err NeedleWrapperException carrying the redirect headers
 * @param config the request config to re-issue against the new location
 */
async function handleRedirect(err, config) {
// ACR does not handle redirects well, where automatic redirects
// fail due to an unexpected authorization header.
// the solution is to follow the redirect, however discarding
// the token.
const location = err.headers.location;
if (!location) {
throw err;
}
// Only clear the Authorization headers if the redirect is for
// azure container registries.
if (location.includes("azurecr.io")) {
delete config.headers.Authorization;
}
// The Location header may be relative; resolve it against the original uri.
const redirectUrl = assertFullUrl(location, config.uri);
config.uri = redirectUrl;
return await needle_1.needleWrapper(config, MAX_RETRIES);
}
/*
 * Takes request config and applies allowed options to it.
 * The `protocol`, `hostMappings` and `headers` options are consumed here;
 * any remaining option keys are merged over the request config (later keys
 * win), with the computed uri/headers reapplied last.
 * @param reqConfig - request config that is passed to the request library.
 * @param reqOptions - options passed in from outside of v2 client library.
 */
function applyRequestOptions(reqConfig, reqOptions) {
const options = Object.assign({}, reqOptions);
let uri = applyUriProtocol(reqConfig.uri, options.protocol);
delete options.protocol;
uri = applyUriHostMappings(uri, options.hostMappings);
delete options.hostMappings;
const headers = applyHeaders(reqConfig.headers, options.headers);
delete options.headers;
return Object.assign(Object.assign(Object.assign({}, reqConfig), options), { uri,
headers });
}
/**
 * Returns `uri` with its scheme replaced by `protocol` (e.g. "http:").
 * A falsy protocol leaves the uri untouched.
 */
function applyUriProtocol(uri, protocol) {
    if (!protocol) {
        return uri;
    }
    const parsed = url.parse(uri);
    parsed.protocol = protocol;
    return url.format(parsed);
}
/**
 * Applies host mappings to given uri.
 *
 * @param uri
 * @param mappings - Array of mappings. Each mapping is represented as an
 * array tuple: [host_regex_matcher, new_host]. The first tuple whose matcher
 * matches the uri's host wins; with no match (or no mappings) the uri is
 * returned unchanged.
 */
function applyUriHostMappings(uri, mappings) {
    if (!mappings) {
        return uri;
    }
    const parsed = url.parse(uri);
    const mapping = mappings.find(([matcher]) => parsed.host.match(matcher));
    if (mapping) {
        parsed.host = mapping[1];
        return url.format(parsed);
    }
    return uri;
}
/**
 * Shallow-merges two header maps; keys in `addHeaders` win. Either argument
 * may be null/undefined (spreading a nullish value is a no-op).
 */
function applyHeaders(currentHeaders, addHeaders) {
    return { ...currentHeaders, ...addHeaders };
}
/**
 * Normalizes a redirect Location to an absolute URL. Absolute locations are
 * returned as-is (normalized); path-only locations are resolved against the
 * origin of the request that triggered the redirect.
 */
function assertFullUrl(redirectLocation, originalLocation) {
    try {
        return new URL(redirectLocation).href;
    }
    catch (_err) {
        // Not a full URL — attach the original URL's origin.
        const { origin } = new URL(originalLocation);
        return new URL(redirectLocation, origin).href;
    }
}
/*
Check whether the response status requires redirect.
Note: since we only make GET requests, we do not need to handle
different redirect statuses having different requirements for
handling the request method.
*/
const REDIRECT_STATUS_CODES = [301, 302, 303, 307, 308];
function isRedirectStatus(statusCode) {
    return REDIRECT_STATUS_CODES.includes(statusCode);
}
//# sourceMappingURL=registry-call.js.map
/***/ }),
/***/ 47235:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
//# sourceMappingURL=types.js.map
/***/ }),
/***/ 89578:
/***/ ((module, exports, __webpack_require__) => {
"use strict";
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
/* eslint-env browser */
/**
* This is the web browser implementation of `debug()`.
*/
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
exports.storage = localstorage();
/**
* Colors.
*/
exports.colors = ['#0000CC', '#0000FF', '#0033CC', '#0033FF', '#0066CC', '#0066FF', '#0099CC', '#0099FF', '#00CC00', '#00CC33', '#00CC66', '#00CC99', '#00CCCC', '#00CCFF', '#3300CC', '#3300FF', '#3333CC', '#3333FF', '#3366CC', '#3366FF', '#3399CC', '#3399FF', '#33CC00', '#33CC33', '#33CC66', '#33CC99', '#33CCCC', '#33CCFF', '#6600CC', '#6600FF', '#6633CC', '#6633FF', '#66CC00', '#66CC33', '#9900CC', '#9900FF', '#9933CC', '#9933FF', '#99CC00', '#99CC33', '#CC0000', '#CC0033', '#CC0066', '#CC0099', '#CC00CC', '#CC00FF', '#CC3300', '#CC3333', '#CC3366', '#CC3399', '#CC33CC', '#CC33FF', '#CC6600', '#CC6633', '#CC9900', '#CC9933', '#CCCC00', '#CCCC33', '#FF0000', '#FF0033', '#FF0066', '#FF0099', '#FF00CC', '#FF00FF', '#FF3300', '#FF3333', '#FF3366', '#FF3399', '#FF33CC', '#FF33FF', '#FF6600', '#FF6633', '#FF9900', '#FF9933', '#FFCC00', '#FFCC33'];
/**
* Currently only WebKit-based Web Inspectors, Firefox >= v31,
* and the Firebug extension (any Firefox version) are known
* to support "%c" CSS customizations.
*
* TODO: add a `localStorage` variable to explicitly enable/disable colors
*/
// eslint-disable-next-line complexity
function useColors() {
// NB: In an Electron preload script, document will be defined but not fully
// initialized. Since we know we're in Chrome, we'll just detect this case
// explicitly
if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {
return true;
} // Internet Explorer and Edge do not support colors.
if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
return false;
} // Is webkit? http://stackoverflow.com/a/16459606/376773
// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
// NOTE: `RegExp.$1` below is a legacy static property populated by the
// preceding `.match(/firefox\/(\d+)/)` call in the same expression.
return typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773
typeof window !== 'undefined' && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31?
// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker
typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/);
}
/**
 * Colorize log arguments if enabled.
 *
 * Wraps the namespace and diff in "%c" CSS directives, then splices the
 * matching color strings into the argument list at the positions the console
 * will consume them.
 *
 * @api public
 */
function formatArgs(args) {
args[0] = (this.useColors ? '%c' : '') + this.namespace + (this.useColors ? ' %c' : ' ') + args[0] + (this.useColors ? '%c ' : ' ') + '+' + module.exports.humanize(this.diff);
if (!this.useColors) {
return;
}
var c = 'color: ' + this.color;
args.splice(1, 0, c, 'color: inherit'); // The final "%c" is somewhat tricky, because there could be other
// arguments passed either before or after the %c, so we need to
// figure out the correct index to insert the CSS into
var index = 0;
var lastC = 0;
// replace() is used purely to iterate format directives; its result is discarded.
args[0].replace(/%[a-zA-Z%]/g, function (match) {
if (match === '%%') {
return;
}
index++;
if (match === '%c') {
// We only are interested in the *last* %c
// (the user may have provided their own)
lastC = index;
}
});
args.splice(lastC, 0, c);
}
/**
 * Invokes `console.log()` when available.
 * No-op when `console.log` is not a "function".
 *
 * @api public
 */
function log() {
var _console;
// This hackery is required for IE8/9, where
// the `console.log` function doesn't have 'apply'
// (`_typeof` is the transpiled typeof helper defined above).
return (typeof console === "undefined" ? "undefined" : _typeof(console)) === 'object' && console.log && (_console = console).log.apply(_console, arguments);
}
/**
 * Save `namespaces`.
 *
 * Persists to the configured storage (removes the key when `namespaces` is
 * falsy). Storage errors (e.g. Safari with cookies disabled) are swallowed.
 *
 * @param {String} namespaces
 * @api private
 */
function save(namespaces) {
try {
if (namespaces) {
exports.storage.setItem('debug', namespaces);
} else {
exports.storage.removeItem('debug');
}
} catch (error) {// Swallow
// XXX (@Qix-) should we be logging these?
}
}
/**
 * Load `namespaces`.
 *
 * Reads from the configured storage, falling back to `process.env.DEBUG`
 * (useful in Electron) when storage has nothing or is unavailable.
 *
 * @return {String} returns the previously persisted debug modes
 * @api private
 */
function load() {
var r;
try {
r = exports.storage.getItem('debug');
} catch (error) {} // Swallow
// XXX (@Qix-) should we be logging these?
// If debug isn't set in LS, and we're in Electron, try to load $DEBUG
if (!r && typeof process !== 'undefined' && 'env' in process) {
r = process.env.DEBUG;
}
return r;
}
/**
 * Localstorage attempts to return the localstorage.
 *
 * This is necessary because safari throws
 * when a user disables cookies/localstorage
 * and you attempt to access it.
 *
 * Returns undefined (implicitly) when localStorage is unavailable.
 *
 * @return {LocalStorage}
 * @api private
 */
function localstorage() {
try {
// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context
// The Browser also has localStorage in the global context.
return localStorage;
} catch (error) {// Swallow
// XXX (@Qix-) should we be logging these?
}
}
module.exports = __webpack_require__(55335)(exports);
var formatters = module.exports.formatters;
/**
* Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
*/
formatters.j = function (v) {
try {
return JSON.stringify(v);
} catch (error) {
return '[UnexpectedJSONParseError]: ' + error.message;
}
};
/***/ }),
/***/ 55335:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
/**
* This is the common logic for both the Node.js and web browser
* implementations of `debug()`.
*/
function setup(env) {
createDebug.debug = createDebug;
createDebug.default = createDebug;
createDebug.coerce = coerce;
createDebug.disable = disable;
createDebug.enable = enable;
createDebug.enabled = enabled;
createDebug.humanize = __webpack_require__(57824);
Object.keys(env).forEach(function (key) {
createDebug[key] = env[key];
});
/**
* Active `debug` instances.
*/
createDebug.instances = [];
/**
* The currently active debug mode names, and names to skip.
*/
createDebug.names = [];
createDebug.skips = [];
/**
* Map of special "%n" handling functions, for the debug "format" argument.
*
* Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
*/
createDebug.formatters = {};
/**
 * Selects a color for a debug namespace
 * @param {String} namespace The namespace string for the debug instance to be colored
 * @return {Number|String} An ANSI color code for the given namespace
 * @api private
 */
function selectColor(namespace) {
    let hash = 0;
    for (let i = 0; i < namespace.length; i++) {
        // djb2-style rolling hash, truncated to a 32-bit integer each step.
        hash = ((hash << 5) - hash + namespace.charCodeAt(i)) | 0;
    }
    return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
}
createDebug.selectColor = selectColor;
/**
 * Create a debugger with the given `namespace`.
 *
 * The returned function formats its arguments (applying registered
 * formatters), tracks the time delta between calls, and writes via the
 * instance's `log` or the shared `createDebug.log`.
 *
 * @param {String} namespace
 * @return {Function}
 * @api public
 */
function createDebug(namespace) {
var prevTime;
function debug() {
// Disabled?
if (!debug.enabled) {
return;
}
// Copy `arguments` into a real array (transpiled rest-args pattern).
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
var self = debug; // Set `diff` timestamp
var curr = Number(new Date());
var ms = curr - (prevTime || curr);
self.diff = ms;
self.prev = prevTime;
self.curr = curr;
prevTime = curr;
args[0] = createDebug.coerce(args[0]);
if (typeof args[0] !== 'string') {
// Anything else let's inspect with %O
args.unshift('%O');
} // Apply any `formatters` transformations
var index = 0;
args[0] = args[0].replace(/%([a-zA-Z%])/g, function (match, format) {
// If we encounter an escaped % then don't increase the array index
if (match === '%%') {
return match;
}
index++;
var formatter = createDebug.formatters[format];
if (typeof formatter === 'function') {
var val = args[index];
match = formatter.call(self, val); // Now we need to remove `args[index]` since it's inlined in the `format`
args.splice(index, 1);
index--;
}
return match;
}); // Apply env-specific formatting (colors, etc.)
createDebug.formatArgs.call(self, args);
var logFn = self.log || createDebug.log;
logFn.apply(self, args);
}
debug.namespace = namespace;
debug.enabled = createDebug.enabled(namespace);
debug.useColors = createDebug.useColors();
debug.color = selectColor(namespace);
debug.destroy = destroy;
debug.extend = extend; // Debug.formatArgs = formatArgs;
// debug.rawLog = rawLog;
// env-specific initialization logic for debug instances
if (typeof createDebug.init === 'function') {
createDebug.init(debug);
}
// Register so enable()/disable() can re-evaluate this instance later.
createDebug.instances.push(debug);
return debug;
}
/**
 * Removes this debug instance from the shared registry.
 * @return {Boolean} true if the instance was registered, false otherwise.
 */
function destroy() {
    const index = createDebug.instances.indexOf(this);
    if (index === -1) {
        return false;
    }
    createDebug.instances.splice(index, 1);
    return true;
}
/**
 * Creates a child debugger whose namespace is this instance's namespace plus
 * `namespace`, joined by `delimiter` (':' by default; an explicit empty
 * string is honoured).
 */
function extend(namespace, delimiter) {
    const sep = delimiter === undefined ? ':' : delimiter;
    return createDebug(this.namespace + sep + namespace);
}
/**
 * Enables a debug mode by namespaces. This can include modes
 * separated by a colon and wildcards.
 *
 * Persists the pattern, rebuilds the `names`/`skips` regexp lists ("-"
 * prefix means skip, "*" becomes ".*?"), then re-evaluates every existing
 * instance against the new lists.
 *
 * @param {String} namespaces
 * @api public
 */
function enable(namespaces) {
createDebug.save(namespaces);
createDebug.names = [];
createDebug.skips = [];
var i;
var split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
var len = split.length;
for (i = 0; i < len; i++) {
if (!split[i]) {
// ignore empty strings
continue;
}
namespaces = split[i].replace(/\*/g, '.*?');
if (namespaces[0] === '-') {
createDebug.skips.push(new RegExp('^' + namespaces.substr(1) + '$'));
} else {
createDebug.names.push(new RegExp('^' + namespaces + '$'));
}
}
for (i = 0; i < createDebug.instances.length; i++) {
var instance = createDebug.instances[i];
instance.enabled = createDebug.enabled(instance.namespace);
}
}
/**
* Disable debug output.
*
* @api public
*/
function disable() {
createDebug.enable('');
}
/**
 * Returns true if the given mode name is enabled, false otherwise.
 *
 * @param {String} name
 * @return {Boolean}
 * @api public
 */
function enabled(name) {
    // A name ending in a wildcard is always considered enabled.
    if (name[name.length - 1] === '*') {
        return true;
    }
    // Skips take precedence over enabling names.
    for (const skip of createDebug.skips) {
        if (skip.test(name)) {
            return false;
        }
    }
    for (const matcher of createDebug.names) {
        if (matcher.test(name)) {
            return true;
        }
    }
    return false;
}
/**
 * Coerce `val`.
 *
 * Errors are flattened to their stack trace (or message when no stack is
 * available); everything else passes through unchanged.
 *
 * @param {Mixed} val
 * @return {Mixed}
 * @api private
 */
function coerce(val) {
    return val instanceof Error ? (val.stack || val.message) : val;
}
createDebug.enable(createDebug.load());
return createDebug;
}
module.exports = setup;
/***/ }),
/***/ 4993:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
/**
 * Detect Electron renderer / nwjs process, which is node, but we should
 * treat as a browser.
 */
if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {
// Browser build of debug.
module.exports = __webpack_require__(89578);
} else {
// Node.js build of debug.
module.exports = __webpack_require__(61238);
}
/***/ }),
/***/ 61238:
/***/ ((module, exports, __webpack_require__) => {
"use strict";
/**
* Module dependencies.
*/
var tty = __webpack_require__(33867);
var util = __webpack_require__(31669);
/**
* This is the Node.js implementation of `debug()`.
*/
exports.init = init;
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
/**
* Colors.
*/
exports.colors = [6, 2, 3, 4, 5, 1];
try {
// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)
// eslint-disable-next-line import/no-extraneous-dependencies
var supportsColor = __webpack_require__(92130);
if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
exports.colors = [20, 21, 26, 27, 32, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68, 69, 74, 75, 76, 77, 78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134, 135, 148, 149, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 214, 215, 220, 221];
}
} catch (error) {} // Swallow - we only care if `supports-color` is available; it doesn't have to be.
/**
 * Build up the default `inspectOpts` object from the environment variables.
 * Each DEBUG_* variable becomes a camel-cased key (DEBUG_SHOW_HIDDEN ->
 * showHidden) with yes/no-style strings coerced to booleans, 'null' to null,
 * and anything else to a Number.
 *
 * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
 */
exports.inspectOpts = Object.keys(process.env).filter(function (key) {
return /^debug_/i.test(key);
}).reduce(function (obj, key) {
// Camel-case
var prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, function (_, k) {
return k.toUpperCase();
}); // Coerce string value into JS value
var val = process.env[key];
if (/^(yes|on|true|enabled)$/i.test(val)) {
val = true;
} else if (/^(no|off|false|disabled)$/i.test(val)) {
val = false;
} else if (val === 'null') {
val = null;
} else {
val = Number(val);
}
obj[prop] = val;
return obj;
}, {});
/**
 * Is stdout a TTY? Colored output is enabled when `true`.
 * (Despite the wording above, the fallback actually checks STDERR's TTY
 * status, since debug writes to stderr; DEBUG_COLORS overrides it.)
 */
function useColors() {
return 'colors' in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty.isatty(process.stderr.fd);
}
/**
 * Adds ANSI color escape codes if enabled.
 * With colors: prefixes every line with the colored namespace and appends the
 * humanized diff; without colors: prefixes a timestamp and the namespace.
 *
 * @api public
 */
function formatArgs(args) {
var name = this.namespace,
useColors = this.useColors;
if (useColors) {
var c = this.color;
// Basic colors use \x1b[3Nm; 256-color codes use \x1b[38;5;Nm.
var colorCode = "\x1B[3" + (c < 8 ? c : '8;5;' + c);
var prefix = " ".concat(colorCode, ";1m").concat(name, " \x1B[0m");
args[0] = prefix + args[0].split('\n').join('\n' + prefix);
args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + "\x1B[0m");
} else {
args[0] = getDate() + name + ' ' + args[0];
}
}
/**
 * Returns the ISO timestamp prefix for uncolored output, or an empty string
 * when DEBUG_HIDE_DATE suppresses it.
 */
function getDate() {
    return exports.inspectOpts.hideDate ? '' : new Date().toISOString() + ' ';
}
/**
 * Invokes `util.format()` with the specified arguments and writes to stderr.
 * @returns the boolean result of `stream.write` (backpressure indicator).
 */
function log() {
return process.stderr.write(util.format.apply(util, arguments) + '\n');
}
/**
 * Save `namespaces`.
 *
 * @param {String} namespaces
 * @api private
 */
function save(namespaces) {
    if (!namespaces) {
        // Assigning null/undefined to process.env coerces to the string
        // 'null'/'undefined', so remove the key instead.
        delete process.env.DEBUG;
        return;
    }
    process.env.DEBUG = namespaces;
}
/**
 * Load `namespaces`.
 *
 * @return {String} returns the previously persisted debug modes
 * @api private
 */
function load() {
return process.env.DEBUG;
}
/**
 * Init logic for `debug` instances.
 *
 * Create a new `inspectOpts` object in case `useColors` is set
 * differently for a particular `debug` instance.
 */
function init(debug) {
    // Shallow copy so per-instance overrides don't mutate the shared defaults.
    debug.inspectOpts = Object.assign({}, exports.inspectOpts);
}
module.exports = __webpack_require__(55335)(exports);
var formatters = module.exports.formatters;
/**
 * Map %o to `util.inspect()`, all on a single line.
 */
formatters.o = function (v) {
this.inspectOpts.colors = this.useColors;
// Collapse multi-line inspect output into a single space-joined line.
return util.inspect(v, this.inspectOpts)
.split('\n')
.map(function (str) { return str.trim(); })
.join(' ');
};
/**
 * Map %O to `util.inspect()`, allowing multiple lines if needed.
 */
formatters.O = function (v) {
this.inspectOpts.colors = this.useColors;
return util.inspect(v, this.inspectOpts);
};
/***/ }),
/***/ 54226:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
var Buffer = __webpack_require__(2399).Buffer;
// Multibyte codec. In this scheme, a character is represented by 1 or more bytes.
// Our codec supports UTF-16 surrogates, extensions for GB18030 and unicode sequences.
// To save memory and loading time, we read table files only when requested.
exports._dbcs = DBCSCodec;
// Sentinel values used inside the decode trie and encode tables (see the
// value-range comments in the DBCSCodec constructor below).
var UNASSIGNED = -1,
GB18030_CODE = -2,
SEQ_START = -10,
NODE_START = -1000,
UNASSIGNED_NODE = new Array(0x100),
DEF_CHAR = -1;
// Shared 256-entry "all unassigned" node, cloned to seed new trie nodes.
for (var i = 0; i < 0x100; i++)
UNASSIGNED_NODE[i] = UNASSIGNED;
// Class DBCSCodec reads and initializes mapping tables.
function DBCSCodec(codecOptions, iconv) {
    // Validate before dereferencing: reading `codecOptions.encodingName`
    // first (as previously written) crashed with a TypeError when options
    // were missing, making the dedicated error message below unreachable.
    if (!codecOptions)
        throw new Error("DBCS codec is called without the data.");
    this.encodingName = codecOptions.encodingName;
    if (!codecOptions.table)
        throw new Error("Encoding '" + this.encodingName + "' has no data.");
    // Load tables.
    var mappingTable = codecOptions.table();
    // Decode tables: MBCS -> Unicode.
    // decodeTables is a trie, encoded as an array of arrays of integers. Internal arrays are trie nodes and all have len = 256.
    // Trie root is decodeTables[0].
    // Values: >= 0 -> unicode character code. can be > 0xFFFF
    // == UNASSIGNED -> unknown/unassigned sequence.
    // == GB18030_CODE -> this is the end of a GB18030 4-byte sequence.
    // <= NODE_START -> index of the next node in our trie to process next byte.
    // <= SEQ_START -> index of the start of a character code sequence, in decodeTableSeq.
    this.decodeTables = [];
    this.decodeTables[0] = UNASSIGNED_NODE.slice(0); // Create root node.
    // Sometimes a MBCS char corresponds to a sequence of unicode chars. We store them as arrays of integers here.
    this.decodeTableSeq = [];
    // Actual mapping tables consist of chunks. Use them to fill up decode tables.
    for (var i = 0; i < mappingTable.length; i++)
        this._addDecodeChunk(mappingTable[i]);
    // Load & create GB18030 tables when needed.
    if (typeof codecOptions.gb18030 === 'function') {
        this.gb18030 = codecOptions.gb18030(); // Load GB18030 ranges.
        // Add GB18030 common decode nodes.
        var commonThirdByteNodeIdx = this.decodeTables.length;
        this.decodeTables.push(UNASSIGNED_NODE.slice(0));
        var commonFourthByteNodeIdx = this.decodeTables.length;
        this.decodeTables.push(UNASSIGNED_NODE.slice(0));
        // Fill out the tree
        var firstByteNode = this.decodeTables[0];
        for (var i = 0x81; i <= 0xFE; i++) {
            var secondByteNode = this.decodeTables[NODE_START - firstByteNode[i]];
            for (var j = 0x30; j <= 0x39; j++) {
                if (secondByteNode[j] === UNASSIGNED) {
                    secondByteNode[j] = NODE_START - commonThirdByteNodeIdx;
                } else if (secondByteNode[j] > NODE_START) {
                    throw new Error("gb18030 decode tables conflict at byte 2");
                }
                var thirdByteNode = this.decodeTables[NODE_START - secondByteNode[j]];
                for (var k = 0x81; k <= 0xFE; k++) {
                    if (thirdByteNode[k] === UNASSIGNED) {
                        thirdByteNode[k] = NODE_START - commonFourthByteNodeIdx;
                    } else if (thirdByteNode[k] === NODE_START - commonFourthByteNodeIdx) {
                        continue;
                    } else if (thirdByteNode[k] > NODE_START) {
                        throw new Error("gb18030 decode tables conflict at byte 3");
                    }
                    var fourthByteNode = this.decodeTables[NODE_START - thirdByteNode[k]];
                    for (var l = 0x30; l <= 0x39; l++) {
                        if (fourthByteNode[l] === UNASSIGNED)
                            fourthByteNode[l] = GB18030_CODE;
                    }
                }
            }
        }
    }
    this.defaultCharUnicode = iconv.defaultCharUnicode;
    // Encode tables: Unicode -> DBCS.
    // `encodeTable` is array mapping from unicode char to encoded char. All its values are integers for performance.
    // Because it can be sparse, it is represented as array of buckets by 256 chars each. Bucket can be null.
    // Values: >= 0 -> it is a normal char. Write the value (if <=256 then 1 byte, if <=65536 then 2 bytes, etc.).
    // == UNASSIGNED -> no conversion found. Output a default char.
    // <= SEQ_START -> it's an index in encodeTableSeq, see below. The character starts a sequence.
    this.encodeTable = [];
    // `encodeTableSeq` is used when a sequence of unicode characters is encoded as a single code. We use a tree of
    // objects where keys correspond to characters in sequence and leafs are the encoded dbcs values. A special DEF_CHAR key
    // means end of sequence (needed when one sequence is a strict subsequence of another).
    // Objects are kept separately from encodeTable to increase performance.
    this.encodeTableSeq = [];
    // Some chars can be decoded, but need not be encoded.
    var skipEncodeChars = {};
    if (codecOptions.encodeSkipVals)
        for (var i = 0; i < codecOptions.encodeSkipVals.length; i++) {
            var val = codecOptions.encodeSkipVals[i];
            if (typeof val === 'number')
                skipEncodeChars[val] = true;
            else
                for (var j = val.from; j <= val.to; j++)
                    skipEncodeChars[j] = true;
        }
    // Use decode trie to recursively fill out encode tables.
    this._fillEncodeTable(0, 0, skipEncodeChars);
    // Add more encoding pairs when needed.
    if (codecOptions.encodeAdd) {
        for (var uChar in codecOptions.encodeAdd)
            if (Object.prototype.hasOwnProperty.call(codecOptions.encodeAdd, uChar))
                this._setEncodeChar(uChar.charCodeAt(0), codecOptions.encodeAdd[uChar]);
    }
    // Pick the single-byte default char: configured default, then '?' lookup, then literal '?'.
    this.defCharSB = this.encodeTable[0][iconv.defaultCharSingleByte.charCodeAt(0)];
    if (this.defCharSB === UNASSIGNED) this.defCharSB = this.encodeTable[0]['?'];
    if (this.defCharSB === UNASSIGNED) this.defCharSB = "?".charCodeAt(0);
}
// Wire the encoder/decoder classes (defined below) onto the codec.
DBCSCodec.prototype.encoder = DBCSEncoder;
DBCSCodec.prototype.decoder = DBCSDecoder;
// Decoder helpers
// Returns the 256-entry trie node that covers the last byte of multi-byte
// address `addr`, creating intermediate nodes on demand. Child-node links are
// stored encoded relative to NODE_START (defined earlier in this module).
DBCSCodec.prototype._getDecodeTrieNode = function(addr) {
var bytes = [];
// Split the address into bytes, least significant first.
for (; addr > 0; addr >>>= 8)
bytes.push(addr & 0xFF);
if (bytes.length == 0) // addr == 0: treat as the single byte 0x00.
bytes.push(0);
var node = this.decodeTables[0];
for (var i = bytes.length-1; i > 0; i--) { // Traverse nodes deeper into the trie.
var val = node[bytes[i]];
if (val == UNASSIGNED) { // Create new node.
node[bytes[i]] = NODE_START - this.decodeTables.length;
this.decodeTables.push(node = UNASSIGNED_NODE.slice(0));
}
else if (val <= NODE_START) { // Existing node.
node = this.decodeTables[NODE_START - val];
}
else
// Slot already holds a plain character mapping - table data is inconsistent.
throw new Error("Overwrite byte in " + this.encodingName + ", addr: " + addr.toString(16));
}
return node;
}
// Loads one chunk of the mapping table into the decode trie.
// Chunk format: [hexStartAddr, item, item, ...] where each item is either
//  - a string of unicode chars written to consecutive addresses (may contain
//    surrogate pairs, and 0x0FF1..0x0FFF marker chars that introduce
//    multi-char sequences), or
//  - a number N: N chars continuing in sequence from the previous char code.
DBCSCodec.prototype._addDecodeChunk = function(chunk) {
// First element of chunk is the hex mbcs code where we start.
var curAddr = parseInt(chunk[0], 16);
// Choose the decoding node where we'll write our chars.
var writeTable = this._getDecodeTrieNode(curAddr);
curAddr = curAddr & 0xFF; // Only the last byte varies within a single node.
// Write all other elements of the chunk to the table.
for (var k = 1; k < chunk.length; k++) {
var part = chunk[k];
if (typeof part === "string") { // String, write as-is.
for (var l = 0; l < part.length;) {
var code = part.charCodeAt(l++);
if (0xD800 <= code && code < 0xDC00) { // Decode surrogate
var codeTrail = part.charCodeAt(l++);
if (0xDC00 <= codeTrail && codeTrail < 0xE000)
writeTable[curAddr++] = 0x10000 + (code - 0xD800) * 0x400 + (codeTrail - 0xDC00);
else
throw new Error("Incorrect surrogate pair in " + this.encodingName + " at chunk " + chunk[0]);
}
else if (0x0FF0 < code && code <= 0x0FFF) { // Character sequence (our own encoding used)
var len = 0xFFF - code + 2; // Marker 0xFFF means length 2, 0xFFE length 3, etc.
var seq = [];
for (var m = 0; m < len; m++)
seq.push(part.charCodeAt(l++)); // Simple variation: don't support surrogates or subsequences in seq.
writeTable[curAddr++] = SEQ_START - this.decodeTableSeq.length;
this.decodeTableSeq.push(seq);
}
else
writeTable[curAddr++] = code; // Basic char
}
}
else if (typeof part === "number") { // Integer, meaning increasing sequence starting with prev character.
var charCode = writeTable[curAddr - 1] + 1;
for (var l = 0; l < part; l++)
writeTable[curAddr++] = charCode++;
}
else
throw new Error("Incorrect type '" + typeof part + "' given in " + this.encodingName + " at chunk " + chunk[0]);
}
if (curAddr > 0xFF) // A chunk must stay within one 256-entry trie node.
throw new Error("Incorrect chunk in " + this.encodingName + " at addr " + chunk[0] + ": too long" + curAddr);
}
// Encoder helpers
// Returns the 256-entry encode bucket that holds unicode char `uCode`,
// allocating it lazily (encodeTable is kept sparse to save memory).
DBCSCodec.prototype._getEncodeBucket = function(uCode) {
    var bucketIdx = uCode >> 8; // May exceed 0xFF because of astral characters.
    if (this.encodeTable[bucketIdx] === undefined)
        this.encodeTable[bucketIdx] = UNASSIGNED_NODE.slice(0); // Create bucket on demand.
    return this.encodeTable[bucketIdx];
}
// Maps a single unicode char code to a dbcs code in the encode table.
// If the slot already holds a sequence tree, the char becomes that tree's
// single-char default instead; an already-assigned plain char is kept.
DBCSCodec.prototype._setEncodeChar = function(uCode, dbcsCode) {
var bucket = this._getEncodeBucket(uCode);
var low = uCode & 0xFF;
if (bucket[low] <= SEQ_START)
this.encodeTableSeq[SEQ_START-bucket[low]][DEF_CHAR] = dbcsCode; // There's already a sequence, set a single-char subsequence of it.
else if (bucket[low] == UNASSIGNED)
bucket[low] = dbcsCode;
}
// Registers a multi-char encoding: `seq` (array of unicode char codes)
// encodes to `dbcsCode`. Sequences are stored as a tree of objects rooted in
// encodeTableSeq; the DEF_CHAR key marks "sequence may also end here".
DBCSCodec.prototype._setEncodeSequence = function(seq, dbcsCode) {
    // Get the root of character tree according to first character of the sequence.
    var uCode = seq[0];
    var bucket = this._getEncodeBucket(uCode);
    var low = uCode & 0xFF;

    var node;
    if (bucket[low] <= SEQ_START) {
        // There's already a sequence tree rooted at this char - use it.
        node = this.encodeTableSeq[SEQ_START-bucket[low]];
    }
    else {
        // There was no sequence object - allocate a new one.
        node = {};
        if (bucket[low] !== UNASSIGNED) node[DEF_CHAR] = bucket[low]; // If a char was set before - make it a single-char subsequence.
        bucket[low] = SEQ_START - this.encodeTableSeq.length;
        this.encodeTableSeq.push(node);
    }

    // Traverse the character tree, allocating new nodes as needed.
    for (var j = 1; j < seq.length-1; j++) {
        // BUGFIX: advance to the j-th char of the sequence. Previously the loop
        // kept re-using seq[0], which would corrupt the tree for any sequence
        // longer than 2 chars (latent: current tables only use 2-char seqs).
        uCode = seq[j];
        var oldVal = node[uCode];
        if (typeof oldVal === 'object')
            node = oldVal;
        else {
            node = node[uCode] = {};
            if (oldVal !== undefined)
                node[DEF_CHAR] = oldVal;
        }
    }

    // Set the leaf to given dbcsCode.
    uCode = seq[seq.length-1];
    node[uCode] = dbcsCode;
}
// Recursively walks the decode trie starting at decodeTables[nodeIdx] and
// registers the reverse (unicode -> dbcs) mapping for every assigned char.
// `prefix` carries the dbcs bytes accumulated on the path to this node.
// Returns true if the subtree contained at least one assigned char.
DBCSCodec.prototype._fillEncodeTable = function(nodeIdx, prefix, skipEncodeChars) {
var node = this.decodeTables[nodeIdx];
var hasValues = false;
var subNodeEmpty = {}; // Memoizes child nodes already found empty (nodes are shared/revisited in gb18030).
for (var i = 0; i < 0x100; i++) {
var uCode = node[i];
var mbCode = prefix + i;
if (skipEncodeChars[mbCode]) // Caller asked not to encode to this dbcs code.
continue;
if (uCode >= 0) { // Plain char: record unicode -> multi-byte code.
this._setEncodeChar(uCode, mbCode);
hasValues = true;
} else if (uCode <= NODE_START) { // Child trie node: recurse with extended byte prefix.
var subNodeIdx = NODE_START - uCode;
if (!subNodeEmpty[subNodeIdx]) { // Skip empty subtrees (they are too large in gb18030).
var newPrefix = (mbCode << 8) >>> 0; // NOTE: '>>> 0' keeps 32-bit num positive.
if (this._fillEncodeTable(subNodeIdx, newPrefix, skipEncodeChars))
hasValues = true;
else
subNodeEmpty[subNodeIdx] = true;
}
} else if (uCode <= SEQ_START) { // Multi-char sequence: register it for encoding.
this._setEncodeSequence(this.decodeTableSeq[SEQ_START - uCode], mbCode);
hasValues = true;
}
}
return hasValues;
}
// == Encoder ==================================================================
// Streaming encoder for double-byte charsets. Holds per-stream state plus
// references to the parent codec's (shared, read-only) encode tables.
function DBCSEncoder(options, codec) {
    // Static data, shared with the parent codec.
    this.encodeTable = codec.encodeTable;
    this.encodeTableSeq = codec.encodeTableSeq;
    this.defaultCharSingleByte = codec.defCharSB;
    this.gb18030 = codec.gb18030;

    // Encoder state: pending lead surrogate (-1 = none) and the sequence-tree
    // node currently being matched (undefined = not inside a sequence).
    this.leadSurrogate = -1;
    this.seqObj = undefined;
}
// Encodes a JS string chunk to a Buffer. leadSurrogate/seqObj state is kept
// across calls so surrogate pairs and multi-char sequences can span chunks.
DBCSEncoder.prototype.write = function(str) {
var newBuf = Buffer.alloc(str.length * (this.gb18030 ? 4 : 3)), // Worst case bytes per UTF-16 unit.
leadSurrogate = this.leadSurrogate,
seqObj = this.seqObj, nextChar = -1,
i = 0, j = 0;
while (true) {
// 0. Get next character.
if (nextChar === -1) {
if (i == str.length) break;
var uCode = str.charCodeAt(i++);
}
else { // A char was pushed back by the sequence logic below - process it first.
var uCode = nextChar;
nextChar = -1;
}
// 1. Handle surrogates.
if (0xD800 <= uCode && uCode < 0xE000) { // Char is one of surrogates.
if (uCode < 0xDC00) { // We've got lead surrogate.
if (leadSurrogate === -1) {
leadSurrogate = uCode;
continue;
} else {
leadSurrogate = uCode;
// Double lead surrogate found.
uCode = UNASSIGNED;
}
} else { // We've got trail surrogate.
if (leadSurrogate !== -1) {
uCode = 0x10000 + (leadSurrogate - 0xD800) * 0x400 + (uCode - 0xDC00);
leadSurrogate = -1;
} else {
// Incomplete surrogate pair - only trail surrogate found.
uCode = UNASSIGNED;
}
}
}
else if (leadSurrogate !== -1) {
// Incomplete surrogate pair - only lead surrogate found.
nextChar = uCode; uCode = UNASSIGNED; // Write an error, then current char.
leadSurrogate = -1;
}
// 2. Convert uCode character.
var dbcsCode = UNASSIGNED;
if (seqObj !== undefined && uCode != UNASSIGNED) { // We are in the middle of the sequence
var resCode = seqObj[uCode];
if (typeof resCode === 'object') { // Sequence continues.
seqObj = resCode;
continue;
} else if (typeof resCode == 'number') { // Sequence finished. Write it.
dbcsCode = resCode;
} else if (resCode == undefined) { // Current character is not part of the sequence.
// Try default character for this sequence
resCode = seqObj[DEF_CHAR];
if (resCode !== undefined) {
dbcsCode = resCode; // Found. Write it.
nextChar = uCode; // Current character will be written too in the next iteration.
} else {
// TODO: What if we have no default? (resCode == undefined)
// Then, we should write first char of the sequence as-is and try the rest recursively.
// Didn't do it for now because no encoding has this situation yet.
// Currently, just skip the sequence and write current char.
}
}
seqObj = undefined;
}
else if (uCode >= 0) { // Regular character
var subtable = this.encodeTable[uCode >> 8];
if (subtable !== undefined)
dbcsCode = subtable[uCode & 0xFF];
if (dbcsCode <= SEQ_START) { // Sequence start
seqObj = this.encodeTableSeq[SEQ_START-dbcsCode];
continue;
}
if (dbcsCode == UNASSIGNED && this.gb18030) {
// Use GB18030 algorithm to find character(s) to write.
var idx = findIdx(this.gb18030.uChars, uCode);
if (idx != -1) {
// Convert the linear gb18030 pointer to a 4-byte sequence (mixed-radix digits).
var dbcsCode = this.gb18030.gbChars[idx] + (uCode - this.gb18030.uChars[idx]);
newBuf[j++] = 0x81 + Math.floor(dbcsCode / 12600); dbcsCode = dbcsCode % 12600;
newBuf[j++] = 0x30 + Math.floor(dbcsCode / 1260); dbcsCode = dbcsCode % 1260;
newBuf[j++] = 0x81 + Math.floor(dbcsCode / 10); dbcsCode = dbcsCode % 10;
newBuf[j++] = 0x30 + dbcsCode;
continue;
}
}
}
// 3. Write dbcsCode character (big-endian, 1-4 bytes depending on magnitude).
if (dbcsCode === UNASSIGNED)
dbcsCode = this.defaultCharSingleByte;
if (dbcsCode < 0x100) {
newBuf[j++] = dbcsCode;
}
else if (dbcsCode < 0x10000) {
newBuf[j++] = dbcsCode >> 8; // high byte
newBuf[j++] = dbcsCode & 0xFF; // low byte
}
else if (dbcsCode < 0x1000000) {
newBuf[j++] = dbcsCode >> 16;
newBuf[j++] = (dbcsCode >> 8) & 0xFF;
newBuf[j++] = dbcsCode & 0xFF;
} else {
newBuf[j++] = dbcsCode >>> 24;
newBuf[j++] = (dbcsCode >>> 16) & 0xFF;
newBuf[j++] = (dbcsCode >>> 8) & 0xFF;
newBuf[j++] = dbcsCode & 0xFF;
}
}
this.seqObj = seqObj;
this.leadSurrogate = leadSurrogate;
return newBuf.slice(0, j);
}
// Flushes encoder state: emits the default value (DEF_CHAR) of an unfinished
// sequence, and the default char for a dangling lead surrogate.
// Returns undefined when there is nothing to flush.
DBCSEncoder.prototype.end = function() {
if (this.leadSurrogate === -1 && this.seqObj === undefined)
return; // All clean. Most often case.
var newBuf = Buffer.alloc(10), j = 0;
if (this.seqObj) { // We're in the sequence.
var dbcsCode = this.seqObj[DEF_CHAR];
if (dbcsCode !== undefined) { // Write beginning of the sequence.
if (dbcsCode < 0x100) {
newBuf[j++] = dbcsCode;
}
else {
newBuf[j++] = dbcsCode >> 8; // high byte
newBuf[j++] = dbcsCode & 0xFF; // low byte
}
} else {
// See todo above.
}
this.seqObj = undefined;
}
if (this.leadSurrogate !== -1) {
// Incomplete surrogate pair - only lead surrogate found.
newBuf[j++] = this.defaultCharSingleByte;
this.leadSurrogate = -1;
}
return newBuf.slice(0, j);
}
// Expose the binary-search helper for unit tests (not part of the public API).
DBCSEncoder.prototype.findIdx = findIdx;
// == Decoder ==================================================================
// Streaming decoder for double-byte charsets.
function DBCSDecoder(options, codec) {
    // Static data, shared with the parent codec.
    this.decodeTables = codec.decodeTables;
    this.decodeTableSeq = codec.decodeTableSeq;
    this.defaultCharUnicode = codec.defaultCharUnicode;
    this.gb18030 = codec.gb18030;

    // Decoder state: current trie node index, and the bytes of an incomplete
    // multi-byte sequence carried over from the previous write() call.
    this.nodeIdx = 0;
    this.prevBytes = [];
}
// Decodes a chunk of bytes into a JS string. `buf` may be a Buffer or a plain
// byte array (end() re-feeds leftover bytes as an array). State kept across
// calls: nodeIdx (current trie node) and prevBytes (unfinished sequence tail).
DBCSDecoder.prototype.write = function(buf) {
var newBuf = Buffer.alloc(buf.length*2), // UCS-2 output buffer.
nodeIdx = this.nodeIdx,
prevBytes = this.prevBytes, prevOffset = this.prevBytes.length,
seqStart = -this.prevBytes.length, // idx of the start of current parsed sequence.
uCode;
for (var i = 0, j = 0; i < buf.length; i++) {
// 'i' can be rewound below to a negative seqStart; negative indices address prevBytes.
var curByte = (i >= 0) ? buf[i] : prevBytes[i + prevOffset];
// Lookup in current trie node.
var uCode = this.decodeTables[nodeIdx][curByte];
if (uCode >= 0) {
// Normal character, just use it.
}
else if (uCode === UNASSIGNED) { // Unknown char.
// TODO: Callback with seq.
uCode = this.defaultCharUnicode.charCodeAt(0);
i = seqStart; // Skip one byte ('i' will be incremented by the for loop) and try to parse again.
}
else if (uCode === GB18030_CODE) {
// Algorithmic 4-byte gb18030 code: reconstruct the linear pointer from the
// last 4 bytes, some of which may still live in prevBytes from the previous chunk.
if (i >= 3) {
var ptr = (buf[i-3]-0x81)*12600 + (buf[i-2]-0x30)*1260 + (buf[i-1]-0x81)*10 + (curByte-0x30);
} else {
var ptr = (prevBytes[i-3+prevOffset]-0x81)*12600 +
(((i-2 >= 0) ? buf[i-2] : prevBytes[i-2+prevOffset])-0x30)*1260 +
(((i-1 >= 0) ? buf[i-1] : prevBytes[i-1+prevOffset])-0x81)*10 +
(curByte-0x30);
}
var idx = findIdx(this.gb18030.gbChars, ptr);
uCode = this.gb18030.uChars[idx] + ptr - this.gb18030.gbChars[idx];
}
else if (uCode <= NODE_START) { // Go to next trie node.
nodeIdx = NODE_START - uCode;
continue;
}
else if (uCode <= SEQ_START) { // Output a sequence of chars.
var seq = this.decodeTableSeq[SEQ_START - uCode];
// All but the last char are written directly as single UCS-2 units
// (sequences are built without surrogates - see _addDecodeChunk).
for (var k = 0; k < seq.length - 1; k++) {
uCode = seq[k];
newBuf[j++] = uCode & 0xFF;
newBuf[j++] = uCode >> 8;
}
uCode = seq[seq.length-1]; // Last char falls through to the surrogate handling below.
}
else
throw new Error("iconv-lite internal error: invalid decoding table value " + uCode + " at " + nodeIdx + "/" + curByte);
// Write the character to buffer, handling higher planes using surrogate pair.
if (uCode >= 0x10000) {
uCode -= 0x10000;
var uCodeLead = 0xD800 | (uCode >> 10);
newBuf[j++] = uCodeLead & 0xFF;
newBuf[j++] = uCodeLead >> 8;
uCode = 0xDC00 | (uCode & 0x3FF);
}
newBuf[j++] = uCode & 0xFF;
newBuf[j++] = uCode >> 8;
// Reset trie node.
nodeIdx = 0; seqStart = i+1;
}
this.nodeIdx = nodeIdx;
// Preserve the bytes of a still-unfinished sequence for the next write() call.
this.prevBytes = (seqStart >= 0)
? Array.prototype.slice.call(buf, seqStart)
: prevBytes.slice(seqStart + prevOffset).concat(Array.prototype.slice.call(buf));
return newBuf.slice(0, j).toString('ucs2');
}
// Flushes the decoder: emits the default char for the first leftover byte and
// re-parses the rest. Loops because write() may itself leave a new (shorter)
// unfinished tail in prevBytes.
DBCSDecoder.prototype.end = function() {
var ret = '';
// Try to parse all remaining chars.
while (this.prevBytes.length > 0) {
// Skip 1 character in the buffer.
ret += this.defaultCharUnicode;
var bytesArr = this.prevBytes.slice(1);
// Parse remaining as usual.
this.prevBytes = [];
this.nodeIdx = 0;
if (bytesArr.length > 0)
ret += this.write(bytesArr);
}
this.prevBytes = [];
this.nodeIdx = 0;
return ret;
}
// Binary search for GB18030. Returns largest i such that table[i] <= val.
function findIdx(table, val) {
    // Every element is larger than val -> no valid index.
    if (table[0] > val)
        return -1;

    // Invariant: table[lo] <= val, and (hi == table.length or table[hi] > val).
    var lo = 0, hi = table.length;
    while (hi - lo > 1) {
        var mid = (lo + hi) >> 1;
        if (table[mid] <= val)
            lo = mid;
        else
            hi = mid;
    }
    return lo;
}
/***/ }),
/***/ 77663:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
// Description of supported double byte encodings and aliases.
// Tables are not require()-d until they are needed to speed up library load.
// require()-s are direct to support Browserify.
module.exports = {
// == Japanese/ShiftJIS ====================================================
// All japanese encodings are based on JIS X set of standards:
// JIS X 0201 - Single-byte encoding of ASCII + ¥ + Kana chars at 0xA1-0xDF.
// JIS X 0208 - Main set of 6879 characters, placed in 94x94 plane, to be encoded by 2 bytes.
// Has several variations in 1978, 1983, 1990 and 1997.
// JIS X 0212 - Supplementary plane of 6067 chars in 94x94 plane. 1990. Effectively dead.
// JIS X 0213 - Extension and modern replacement of 0208 and 0212. Total chars: 11233.
// 2 planes, first is superset of 0208, second - revised 0212.
// Introduced in 2000, revised 2004. Some characters are in Unicode Plane 2 (0x2xxxx)
// Byte encodings are:
// * Shift_JIS: Compatible with 0201, uses not defined chars in top half as lead bytes for double-byte
// encoding of 0208. Lead byte ranges: 0x81-0x9F, 0xE0-0xEF; Trail byte ranges: 0x40-0x7E, 0x80-0x9E, 0x9F-0xFC.
// Windows CP932 is a superset of Shift_JIS. Some companies added more chars, notably KDDI.
// * EUC-JP: Up to 3 bytes per character. Used mostly on *nixes.
// 0x00-0x7F - lower part of 0201
// 0x8E, 0xA1-0xDF - upper part of 0201
// (0xA1-0xFE)x2 - 0208 plane (94x94).
// 0x8F, (0xA1-0xFE)x2 - 0212 plane (94x94).
// * JIS X 208: 7-bit, direct encoding of 0208. Byte ranges: 0x21-0x7E (94 values). Uncommon.
// Used as-is in ISO2022 family.
// * ISO2022-JP: Stateful encoding, with escape sequences to switch between ASCII,
// 0201-1976 Roman, 0208-1978, 0208-1983.
// * ISO2022-JP-1: Adds esc seq for 0212-1990.
// * ISO2022-JP-2: Adds esc seq for GB2312-1980, KSX1001-1992, ISO8859-1, ISO8859-7.
// * ISO2022-JP-3: Adds esc seq for 0201-1976 Kana set, 0213-2000 Planes 1, 2.
// * ISO2022-JP-2004: Adds 0213-2004 Plane 1.
//
// After JIS X 0213 appeared, Shift_JIS-2004, EUC-JISX0213 and ISO2022-JP-2004 followed, with just changing the planes.
//
// Overall, it seems that it's a mess :( http://www8.plala.or.jp/tkubota1/unicode-symbols-map2.html
'shiftjis': {
type: '_dbcs',
table: function() { return __webpack_require__(70540) },
encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E},
encodeSkipVals: [{from: 0xED40, to: 0xF940}],
},
'csshiftjis': 'shiftjis',
'mskanji': 'shiftjis',
'sjis': 'shiftjis',
'windows31j': 'shiftjis',
'ms31j': 'shiftjis',
'xsjis': 'shiftjis',
'windows932': 'shiftjis',
'ms932': 'shiftjis',
'932': 'shiftjis',
'cp932': 'shiftjis',
'eucjp': {
type: '_dbcs',
table: function() { return __webpack_require__(32195) },
encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E},
},
// TODO: KDDI extension to Shift_JIS
// TODO: IBM CCSID 942 = CP932, but F0-F9 custom chars and other char changes.
// TODO: IBM CCSID 943 = Shift_JIS = CP932 with original Shift_JIS lower 128 chars.
// == Chinese/GBK ==========================================================
// http://en.wikipedia.org/wiki/GBK
// We mostly implement W3C recommendation: https://www.w3.org/TR/encoding/#gbk-encoder
// Oldest GB2312 (1981, ~7600 chars) is a subset of CP936
'gb2312': 'cp936',
'gb231280': 'cp936',
'gb23121980': 'cp936',
'csgb2312': 'cp936',
'csiso58gb231280': 'cp936',
'euccn': 'cp936',
// Microsoft's CP936 is a subset and approximation of GBK.
'windows936': 'cp936',
'ms936': 'cp936',
'936': 'cp936',
'cp936': {
type: '_dbcs',
table: function() { return __webpack_require__(97680) },
},
// GBK (~22000 chars) is an extension of CP936 that added user-mapped chars and some other.
'gbk': {
type: '_dbcs',
table: function() { return __webpack_require__(97680).concat(__webpack_require__(36775)) },
},
'xgbk': 'gbk',
'isoir58': 'gbk',
// GB18030 is an algorithmic extension of GBK.
// Main source: https://www.w3.org/TR/encoding/#gbk-encoder
// http://icu-project.org/docs/papers/gb18030.html
// http://source.icu-project.org/repos/icu/data/trunk/charset/data/xml/gb-18030-2000.xml
// http://www.khngai.com/chinese/charmap/tblgbk.php?page=0
'gb18030': {
type: '_dbcs',
table: function() { return __webpack_require__(97680).concat(__webpack_require__(36775)) },
gb18030: function() { return __webpack_require__(99383) },
encodeSkipVals: [0x80],
encodeAdd: {'€': 0xA2E3},
},
'chinese': 'gb18030',
// == Korean ===============================================================
// EUC-KR, KS_C_5601 and KS X 1001 are exactly the same.
'windows949': 'cp949',
'ms949': 'cp949',
'949': 'cp949',
'cp949': {
type: '_dbcs',
table: function() { return __webpack_require__(79924) },
},
'cseuckr': 'cp949',
'csksc56011987': 'cp949',
'euckr': 'cp949',
'isoir149': 'cp949',
'korean': 'cp949',
'ksc56011987': 'cp949',
'ksc56011989': 'cp949',
'ksc5601': 'cp949',
// == Big5/Taiwan/Hong Kong ================================================
// There are lots of tables for Big5 and cp950. Please see the following links for history:
// http://moztw.org/docs/big5/ http://www.haible.de/bruno/charsets/conversion-tables/Big5.html
// Variations, in roughly number of defined chars:
// * Windows CP 950: Microsoft variant of Big5. Canonical: http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT
// * Windows CP 951: Microsoft variant of Big5-HKSCS-2001. Seems to be never public. http://me.abelcheung.org/articles/research/what-is-cp951/
// * Big5-2003 (Taiwan standard) almost superset of cp950.
// * Unicode-at-on (UAO) / Mozilla 1.8. Falling out of use on the Web. Not supported by other browsers.
// * Big5-HKSCS (-2001, -2004, -2008). Hong Kong standard.
// many unicode code points moved from PUA to Supplementary plane (U+2XXXX) over the years.
// Plus, it has 4 combining sequences.
// Seems that Mozilla refused to support it for 10 yrs. https://bugzilla.mozilla.org/show_bug.cgi?id=162431 https://bugzilla.mozilla.org/show_bug.cgi?id=310299
// because big5-hkscs is the only encoding to include astral characters in non-algorithmic way.
// Implementations are not consistent within browsers; sometimes labeled as just big5.
// MS Internet Explorer switches from big5 to big5-hkscs when a patch applied.
// Great discussion & recap of what's going on https://bugzilla.mozilla.org/show_bug.cgi?id=912470#c31
// In the encoder, it might make sense to support encoding old PUA mappings to Big5 bytes seq-s.
// Official spec: http://www.ogcio.gov.hk/en/business/tech_promotion/ccli/terms/doc/2003cmp_2008.txt
// http://www.ogcio.gov.hk/tc/business/tech_promotion/ccli/terms/doc/hkscs-2008-big5-iso.txt
//
// Current understanding of how to deal with Big5(-HKSCS) is in the Encoding Standard, http://encoding.spec.whatwg.org/#big5-encoder
// Unicode mapping (http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/OTHER/BIG5.TXT) is said to be wrong.
'windows950': 'cp950',
'ms950': 'cp950',
'950': 'cp950',
'cp950': {
type: '_dbcs',
table: function() { return __webpack_require__(75254) },
},
// Big5 has many variations and is an extension of cp950. We use Encoding Standard's as a consensus.
'big5': 'big5hkscs',
'big5hkscs': {
type: '_dbcs',
table: function() { return __webpack_require__(75254).concat(__webpack_require__(60556)) },
encodeSkipVals: [
// Although Encoding Standard says we should avoid encoding to HKSCS area (See Step 1 of
// https://encoding.spec.whatwg.org/#index-big5-pointer), we still do it to increase compatibility with ICU.
// But if a single unicode point can be encoded both as HKSCS and regular Big5, we prefer the latter.
0x8e69, 0x8e6f, 0x8e7e, 0x8eab, 0x8eb4, 0x8ecd, 0x8ed0, 0x8f57, 0x8f69, 0x8f6e, 0x8fcb, 0x8ffe,
0x906d, 0x907a, 0x90c4, 0x90dc, 0x90f1, 0x91bf, 0x92af, 0x92b0, 0x92b1, 0x92b2, 0x92d1, 0x9447, 0x94ca,
0x95d9, 0x96fc, 0x9975, 0x9b76, 0x9b78, 0x9b7b, 0x9bc6, 0x9bde, 0x9bec, 0x9bf6, 0x9c42, 0x9c53, 0x9c62,
0x9c68, 0x9c6b, 0x9c77, 0x9cbc, 0x9cbd, 0x9cd0, 0x9d57, 0x9d5a, 0x9dc4, 0x9def, 0x9dfb, 0x9ea9, 0x9eef,
0x9efd, 0x9f60, 0x9fcb, 0xa077, 0xa0dc, 0xa0df, 0x8fcc, 0x92c8, 0x9644, 0x96ed,
// Step 2 of https://encoding.spec.whatwg.org/#index-big5-pointer: Use last pointer for U+2550, U+255E, U+2561, U+256A, U+5341, or U+5345
0xa2a4, 0xa2a5, 0xa2a7, 0xa2a6, 0xa2cc, 0xa2ce,
],
},
'cnbig5': 'big5hkscs',
'csbig5': 'big5hkscs',
'xxbig5': 'big5hkscs',
};
/***/ }),
/***/ 54078:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
// Update this array if you add/rename/remove files in this directory.
// We support Browserify by skipping automatic module discovery and requiring modules directly.
// All encoding-definition modules bundled with the library. They are
// required directly (rather than discovered at runtime) so bundlers like
// Browserify/webpack can see them; keep this list in sync with the directory.
var modules = [
    __webpack_require__(56379),
    __webpack_require__(57437),
    __webpack_require__(64189),
    __webpack_require__(62636),
    __webpack_require__(33399),
    __webpack_require__(35768),
    __webpack_require__(40893),
    __webpack_require__(54226),
    __webpack_require__(77663),
];

// Merge every encoding/alias/codec entry into the single exported object.
modules.forEach(function(mod) {
    Object.keys(mod).forEach(function(enc) {
        exports[enc] = mod[enc];
    });
});
/***/ }),
/***/ 56379:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
var Buffer = __webpack_require__(2399).Buffer;
// Export Node.js internal encodings.
module.exports = {
// Encodings handled natively by Node.js (via Buffer/StringDecoder).
utf8: { type: "_internal", bomAware: true},
cesu8: { type: "_internal", bomAware: true},
unicode11utf8: "utf8", // Alias.
ucs2: { type: "_internal", bomAware: true},
utf16le: "ucs2", // Alias.
binary: { type: "_internal" },
base64: { type: "_internal" },
hex: { type: "_internal" },
// Codec.
_internal: InternalCodec,
};
//------------------------------------------------------------------------------
// Codec for encodings that Node.js supports natively. Picks specialized
// encoder/decoder classes for base64 and cesu8; everything else uses the
// generic Buffer/StringDecoder-based classes below.
function InternalCodec(codecOptions, iconv) {
this.enc = codecOptions.encodingName;
this.bomAware = codecOptions.bomAware;
if (this.enc === "base64")
this.encoder = InternalEncoderBase64; // base64 must buffer partial quads between writes.
else if (this.enc === "cesu8") {
this.enc = "utf8"; // Use utf8 for decoding.
this.encoder = InternalEncoderCesu8;
// Add decoder for versions of Node not supporting CESU-8
if (Buffer.from('eda0bdedb2a9', 'hex').toString() !== '💩') {
this.decoder = InternalDecoderCesu8;
this.defaultCharUnicode = iconv.defaultCharUnicode;
}
}
}
// Default encoder/decoder; the constructor overrides these per-instance for base64/cesu8.
InternalCodec.prototype.encoder = InternalEncoder;
InternalCodec.prototype.decoder = InternalDecoder;
//------------------------------------------------------------------------------
// We use node.js internal decoder. Its signature is the same as ours.
var StringDecoder = __webpack_require__(24304).StringDecoder;
// Polyfill a no-op end() so our decoder contract holds on old Node versions.
if (!StringDecoder.prototype.end) // Node v0.8 doesn't have this method.
StringDecoder.prototype.end = function() {};
// Thin wrapper around Node's StringDecoder, which shares our streaming interface.
function InternalDecoder(options, codec) {
    this.decoder = new StringDecoder(codec.enc);
}

InternalDecoder.prototype.write = function(buf) {
    // StringDecoder expects a Buffer; coerce array-likes and strings.
    var input = Buffer.isBuffer(buf) ? buf : Buffer.from(buf);
    return this.decoder.write(input);
}

InternalDecoder.prototype.end = function() {
    return this.decoder.end();
}
//------------------------------------------------------------------------------
// Encoder is mostly trivial
// Trivial encoder: Node's Buffer natively supports all internal encodings.
function InternalEncoder(options, codec) {
    this.enc = codec.enc;
}

InternalEncoder.prototype.write = function(str) {
    return Buffer.from(str, this.enc);
}

InternalEncoder.prototype.end = function() {
    // Stateless: nothing buffered between writes.
}
//------------------------------------------------------------------------------
// Except base64 encoder, which must keep its state.
// Base64 encoder must keep state: only complete 4-char quads decode cleanly,
// so a trailing partial quad is held back until more input (or end()) arrives.
function InternalEncoderBase64(options, codec) {
    this.prevStr = '';
}

InternalEncoderBase64.prototype.write = function(str) {
    var combined = this.prevStr + str;
    var cut = combined.length - (combined.length % 4);
    this.prevStr = combined.slice(cut);       // Partial quad carried to next call.
    return Buffer.from(combined.slice(0, cut), "base64");
}

InternalEncoderBase64.prototype.end = function() {
    // Flush whatever partial quad remains (Buffer tolerates short base64).
    return Buffer.from(this.prevStr, "base64");
}
//------------------------------------------------------------------------------
// CESU-8 encoder is also special.
// CESU-8 encoder. Like UTF-8, except each UTF-16 code unit (including each
// half of a surrogate pair) is encoded independently, so every unit yields
// 1-3 bytes and no 4-byte forms are ever produced.
function InternalEncoderCesu8(options, codec) {
}

InternalEncoderCesu8.prototype.write = function(str) {
    var out = Buffer.alloc(str.length * 3); // Worst case: 3 bytes per UTF-16 unit.
    var pos = 0;
    for (var i = 0; i < str.length; i++) {
        var unit = str.charCodeAt(i);
        if (unit < 0x80) {
            // 1 byte: ASCII.
            out[pos++] = unit;
        } else if (unit < 0x800) {
            // 2 bytes.
            out[pos++] = 0xC0 | (unit >>> 6);
            out[pos++] = 0x80 | (unit & 0x3F);
        } else {
            // 3 bytes; charCodeAt() never exceeds 0xFFFF.
            out[pos++] = 0xE0 | (unit >>> 12);
            out[pos++] = 0x80 | ((unit >>> 6) & 0x3F);
            out[pos++] = 0x80 | (unit & 0x3F);
        }
    }
    return out.slice(0, pos);
}

InternalEncoderCesu8.prototype.end = function() {
    // Stateless: every UTF-16 unit is fully emitted in write().
}
//------------------------------------------------------------------------------
// CESU-8 decoder is not implemented in Node v4.0+
// CESU-8 decoder, used on Node versions whose native utf8 decoder rejects
// CESU-8 (surrogate halves encoded as separate 3-byte groups).
function InternalDecoderCesu8(options, codec) {
    this.acc = 0;       // Accumulator for the code of the current multi-byte group.
    this.contBytes = 0; // Continuation bytes still expected for the current group.
    this.accBytes = 0;  // Bytes consumed so far in the current group.
    this.defaultCharUnicode = codec.defaultCharUnicode;
}

// Decodes a Buffer chunk; an incomplete trailing group is carried in
// acc/contBytes/accBytes to the next write() call.
InternalDecoderCesu8.prototype.write = function(buf) {
    var acc = this.acc, contBytes = this.contBytes, accBytes = this.accBytes,
        res = '';
    for (var i = 0; i < buf.length; i++) {
        var curByte = buf[i];
        if ((curByte & 0xC0) !== 0x80) { // Leading byte
            if (contBytes > 0) { // Previous code is invalid
                res += this.defaultCharUnicode;
                contBytes = 0;
            }

            if (curByte < 0x80) { // Single-byte code
                res += String.fromCharCode(curByte);
            } else if (curByte < 0xE0) { // Two-byte code
                acc = curByte & 0x1F;
                contBytes = 1; accBytes = 1;
            } else if (curByte < 0xF0) { // Three-byte code
                acc = curByte & 0x0F;
                contBytes = 2; accBytes = 1;
            } else { // Four or more are not supported for CESU-8.
                res += this.defaultCharUnicode;
            }
        } else { // Continuation byte
            if (contBytes > 0) { // We're waiting for it.
                acc = (acc << 6) | (curByte & 0x3f);
                contBytes--; accBytes++;
                if (contBytes === 0) {
                    // Check for overlong encoding, but support Modified UTF-8 (encoding NULL as C0 80)
                    if (accBytes === 2 && acc < 0x80 && acc > 0)
                        res += this.defaultCharUnicode;
                    else if (accBytes === 3 && acc < 0x800)
                        res += this.defaultCharUnicode;
                    else
                        // Actually add character.
                        res += String.fromCharCode(acc);
                }
            } else { // Unexpected continuation byte
                res += this.defaultCharUnicode;
            }
        }
    }
    this.acc = acc; this.contBytes = contBytes; this.accBytes = accBytes;
    return res;
}

InternalDecoderCesu8.prototype.end = function() {
    // BUGFIX: this used `var res = 0` (a number). With a pending incomplete
    // group, `0 + defaultCharUnicode` produced the string "0<char>"; with a
    // clean state it returned the number 0 instead of an empty string.
    var res = '';
    if (this.contBytes > 0)
        res += this.defaultCharUnicode;
    return res;
}
/***/ }),
/***/ 33399:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
var Buffer = __webpack_require__(2399).Buffer;
// Single-byte codec. Needs a 'chars' string parameter that contains 256 or 128 chars that
// correspond to encoded bytes (if 128 - then lower half is ASCII).
// Registered under the '_sbcs' type name referenced by the sbcs-data definition tables.
exports._sbcs = SBCSCodec;
// Builds both lookup tables for a single-byte charset from its 'chars' string.
function SBCSCodec(codecOptions, iconv) {
    if (!codecOptions)
        throw new Error("SBCS codec is called without the data.")

    // Validate the decode table: exactly 128 (upper half only) or 256 chars.
    if (!codecOptions.chars || (codecOptions.chars.length !== 128 && codecOptions.chars.length !== 256))
        throw new Error("Encoding '"+codecOptions.type+"' has incorrect 'chars' (must be of len 128 or 256)");

    // A 128-char table only covers bytes 0x80-0xFF; prepend identity-mapped ASCII.
    if (codecOptions.chars.length === 128) {
        var asciiPrefix = "";
        for (var code = 0; code < 128; code++)
            asciiPrefix += String.fromCharCode(code);
        codecOptions.chars = asciiPrefix + codecOptions.chars;
    }

    // byte -> char, stored as raw UCS-2 pairs for fast decoding.
    this.decodeBuf = Buffer.from(codecOptions.chars, 'ucs2');

    // char code -> byte; unmapped chars fall back to the default single-byte char.
    var encodeBuf = Buffer.alloc(65536, iconv.defaultCharSingleByte.charCodeAt(0));
    for (var i = 0; i < codecOptions.chars.length; i++)
        encodeBuf[codecOptions.chars.charCodeAt(i)] = i;
    this.encodeBuf = encodeBuf;
}
// Hook up the encoder/decoder classes (defined below in this module).
SBCSCodec.prototype.encoder = SBCSEncoder;
SBCSCodec.prototype.decoder = SBCSDecoder;
// Encoder: one output byte per input UTF-16 unit via direct table lookup.
function SBCSEncoder(options, codec) {
    this.encodeBuf = codec.encodeBuf;
}

SBCSEncoder.prototype.write = function(str) {
    var table = this.encodeBuf;
    var out = Buffer.alloc(str.length);
    for (var i = 0; i < str.length; i++)
        out[i] = table[str.charCodeAt(i)];
    return out;
}

SBCSEncoder.prototype.end = function() {
    // Stateless encoder: nothing to flush.
}
// Decoder: each input byte selects one UCS-2 pair from decodeBuf.
function SBCSDecoder(options, codec) {
    this.decodeBuf = codec.decodeBuf;
}

SBCSDecoder.prototype.write = function(buf) {
    // Strings are immutable in JS -> assemble a ucs2 buffer and convert once.
    var table = this.decodeBuf;
    var out = Buffer.alloc(buf.length * 2);
    for (var i = 0; i < buf.length; i++) {
        var src = buf[i] * 2;
        var dst = i * 2;
        out[dst] = table[src];
        out[dst + 1] = table[src + 1];
    }
    return out.toString('ucs2');
}

SBCSDecoder.prototype.end = function() {
    // Stateless decoder: nothing buffered.
}
/***/ }),
/***/ 40893:
/***/ ((module) => {
"use strict";
// Generated data for sbcs codec. Don't edit manually. Regenerate using generation/gen-sbcs.js script.
module.exports = {
"437": "cp437",
"737": "cp737",
"775": "cp775",
"850": "cp850",
"852": "cp852",
"855": "cp855",
"856": "cp856",
"857": "cp857",
"858": "cp858",
"860": "cp860",
"861": "cp861",
"862": "cp862",
"863": "cp863",
"864": "cp864",
"865": "cp865",
"866": "cp866",
"869": "cp869",
"874": "windows874",
"922": "cp922",
"1046": "cp1046",
"1124": "cp1124",
"1125": "cp1125",
"1129": "cp1129",
"1133": "cp1133",
"1161": "cp1161",
"1162": "cp1162",
"1163": "cp1163",
"1250": "windows1250",
"1251": "windows1251",
"1252": "windows1252",
"1253": "windows1253",
"1254": "windows1254",
"1255": "windows1255",
"1256": "windows1256",
"1257": "windows1257",
"1258": "windows1258",
"28591": "iso88591",
"28592": "iso88592",
"28593": "iso88593",
"28594": "iso88594",
"28595": "iso88595",
"28596": "iso88596",
"28597": "iso88597",
"28598": "iso88598",
"28599": "iso88599",
"28600": "iso885910",
"28601": "iso885911",
"28603": "iso885913",
"28604": "iso885914",
"28605": "iso885915",
"28606": "iso885916",
"windows874": {
"type": "_sbcs",
"chars": "€����…�����������‘’“”•–—�������� กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����"
},
"win874": "windows874",
"cp874": "windows874",
"windows1250": {
"type": "_sbcs",
"chars": "€�‚�„…†‡�‰Š‹ŚŤŽŹ�‘’“”•–—�™š›śťžź ˇ˘Ł¤Ą¦§¨©Ş«¬®Ż°±˛ł´µ¶·¸ąş»Ľ˝ľżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙"
},
"win1250": "windows1250",
"cp1250": "windows1250",
"windows1251": {
"type": "_sbcs",
"chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊЌЋЏђ‘’“”•–—�™љ›њќћџ ЎўЈ¤Ґ¦§Ё©Є«¬®Ї°±Ііґµ¶·ё№є»јЅѕїАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя"
},
"win1251": "windows1251",
"cp1251": "windows1251",
"windows1252": {
"type": "_sbcs",
"chars": "€�‚ƒ„…†‡ˆ‰Š‹Œ�Ž��‘’“”•–—˜™š›œ�žŸ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
},
"win1252": "windows1252",
"cp1252": "windows1252",
"windows1253": {
"type": "_sbcs",
"chars": "€�‚ƒ„…†‡�‰�‹�����‘’“”•–—�™�›���� ΅Ά£¤¥¦§¨©�«¬®―°±²³΄µ¶·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�"
},
"win1253": "windows1253",
"cp1253": "windows1253",
"windows1254": {
"type": "_sbcs",
"chars": "€�‚ƒ„…†‡ˆ‰Š‹Œ����‘’“”•–—˜™š›œ��Ÿ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖ×ØÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ"
},
"win1254": "windows1254",
"cp1254": "windows1254",
"windows1255": {
"type": "_sbcs",
"chars": "€�‚ƒ„…†‡ˆ‰�‹�����‘’“”•–—˜™�›���� ¡¢£₪¥¦§¨©×«¬®¯°±²³´µ¶·¸¹÷»¼½¾¿ְֱֲֳִֵֶַָֹֺֻּֽ־ֿ׀ׁׂ׃װױײ׳״�������אבגדהוזחטיךכלםמןנסעףפץצקרשת���"
},
"win1255": "windows1255",
"cp1255": "windows1255",
"windows1256": {
"type": "_sbcs",
"chars": "€پ‚ƒ„…†‡ˆ‰ٹ‹Œچژڈگ‘’“”•–—ک™ڑ›œں ،¢£¤¥¦§¨©ھ«¬®¯°±²³´µ¶·¸¹؛»¼½¾؟ہءآأؤإئابةتثجحخدذرزسشصض×طظعغـفقكàلâمنهوçèéêëىيîïًٌٍَôُِ÷ّùْûüے"
},
"win1256": "windows1256",
"cp1256": "windows1256",
"windows1257": {
"type": "_sbcs",
"chars": "€�‚�„…†‡�‰�‹�¨ˇ¸�‘’“”•–—�™�›�¯˛� �¢£¤�¦§Ø©Ŗ«¬®Æ°±²³´µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž˙"
},
"win1257": "windows1257",
"cp1257": "windows1257",
"windows1258": {
"type": "_sbcs",
"chars": "€�‚ƒ„…†‡ˆ‰�‹Œ����‘’“”•–—˜™�›œ��Ÿ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ×ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ"
},
"win1258": "windows1258",
"cp1258": "windows1258",
"iso88591": {
"type": "_sbcs",
"chars": "
¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
},
"cp28591": "iso88591",
"iso88592": {
"type": "_sbcs",
"chars": "
Ą˘Ł¤ĽŚ§¨ŠŞŤŹŽŻ°ą˛ł´ľśˇ¸šşťź˝žżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙"
},
"cp28592": "iso88592",
"iso88593": {
"type": "_sbcs",
"chars": "
Ħ˘£¤�Ĥ§¨İŞĞĴ�Ż°ħ²³´µĥ·¸ışğĵ½�żÀÁÂ�ÄĊĈÇÈÉÊËÌÍÎÏ�ÑÒÓÔĠÖ×ĜÙÚÛÜŬŜßàáâ�äċĉçèéêëìíîï�ñòóôġö÷ĝùúûüŭŝ˙"
},
"cp28593": "iso88593",
"iso88594": {
"type": "_sbcs",
"chars": "
ĄĸŖ¤ĨĻ§¨ŠĒĢŦŽ¯°ą˛ŗ´ĩļˇ¸šēģŧŊžŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎĪĐŅŌĶÔÕÖ×ØŲÚÛÜŨŪßāáâãäåæįčéęëėíîīđņōķôõö÷øųúûüũū˙"
},
"cp28594": "iso88594",
"iso88595": {
"type": "_sbcs",
"chars": "
ЁЂЃЄЅІЇЈЉЊЋЌЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђѓєѕіїјљњћќ§ўџ"
},
"cp28595": "iso88595",
"iso88596": {
"type": "_sbcs",
"chars": "
���¤�������،�������������؛���؟�ءآأؤإئابةتثجحخدذرزسشصضطظعغ�����ـفقكلمنهوىيًٌٍَُِّْ�������������"
},
"cp28596": "iso88596",
"iso88597": {
"type": "_sbcs",
"chars": "
‘’£€₯¦§¨©ͺ«¬�―°±²³΄΅Ά·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�"
},
"cp28597": "iso88597",
"iso88598": {
"type": "_sbcs",
"chars": "
�¢£¤¥¦§¨©×«¬®¯°±²³´µ¶·¸¹÷»¼½¾��������������������������������‗אבגדהוזחטיךכלםמןנסעףפץצקרשת���"
},
"cp28598": "iso88598",
"iso88599": {
"type": "_sbcs",
"chars": "
¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖ×ØÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ"
},
"cp28599": "iso88599",
"iso885910": {
"type": "_sbcs",
"chars": "
ĄĒĢĪĨĶ§ĻĐŠŦŽŪŊ°ąēģīĩķ·ļđšŧž―ūŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎÏÐŅŌÓÔÕÖŨØŲÚÛÜÝÞßāáâãäåæįčéęëėíîïðņōóôõöũøųúûüýþĸ"
},
"cp28600": "iso885910",
"iso885911": {
"type": "_sbcs",
"chars": "
กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����"
},
"cp28601": "iso885911",
"iso885913": {
"type": "_sbcs",
"chars": "
”¢£¤„¦§Ø©Ŗ«¬®Æ°±²³“µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž’"
},
"cp28603": "iso885913",
"iso885914": {
"type": "_sbcs",
"chars": "
Ḃḃ£ĊċḊ§Ẁ©ẂḋỲ®ŸḞḟĠġṀṁ¶ṖẁṗẃṠỳẄẅṡÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŴÑÒÓÔÕÖṪØÙÚÛÜÝŶßàáâãäåæçèéêëìíîïŵñòóôõöṫøùúûüýŷÿ"
},
"cp28604": "iso885914",
"iso885915": {
"type": "_sbcs",
"chars": "
¡¢£€¥Š§š©ª«¬®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
},
"cp28605": "iso885915",
"iso885916": {
"type": "_sbcs",
"chars": "
ĄąŁ€„Š§š©Ș«ŹźŻ°±ČłŽ”¶·žčș»ŒœŸżÀÁÂĂÄĆÆÇÈÉÊËÌÍÎÏĐŃÒÓÔŐÖŚŰÙÚÛÜĘȚßàáâăäćæçèéêëìíîïđńòóôőöśűùúûüęțÿ"
},
"cp28606": "iso885916",
"cp437": {
"type": "_sbcs",
"chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"ibm437": "cp437",
"csibm437": "cp437",
"cp737": {
"type": "_sbcs",
"chars": "ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩαβγδεζηθικλμνξοπρσςτυφχψ░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀ωάέήϊίόύϋώΆΈΉΊΌΎΏ±≥≤ΪΫ÷≈°∙·√ⁿ²■ "
},
"ibm737": "cp737",
"csibm737": "cp737",
"cp775": {
"type": "_sbcs",
"chars": "ĆüéāäģåćłēŖŗīŹÄÅÉæÆōöĢ¢ŚśÖÜø£ØפĀĪóŻżź”¦©®¬½¼Ł«»░▒▓│┤ĄČĘĖ╣║╗╝ĮŠ┐└┴┬├─┼ŲŪ╚╔╩╦╠═╬Žąčęėįšųūž┘┌█▄▌▐▀ÓßŌŃõÕµńĶķĻļņĒŅ’±“¾¶§÷„°∙·¹³²■ "
},
"ibm775": "cp775",
"csibm775": "cp775",
"cp850": {
"type": "_sbcs",
"chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø׃áíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈıÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´±‗¾¶§÷¸°¨·¹³²■ "
},
"ibm850": "cp850",
"csibm850": "cp850",
"cp852": {
"type": "_sbcs",
"chars": "ÇüéâäůćçłëŐőîŹÄĆÉĹĺôöĽľŚśÖÜŤťŁ×čáíóúĄąŽžĘ꬟Ⱥ«»░▒▓│┤ÁÂĚŞ╣║╗╝Żż┐└┴┬├─┼Ăă╚╔╩╦╠═╬¤đĐĎËďŇÍÎě┘┌█▄ŢŮ▀ÓßÔŃńňŠšŔÚŕŰýÝţ´˝˛ˇ˘§÷¸°¨˙űŘř■ "
},
"ibm852": "cp852",
"csibm852": "cp852",
"cp855": {
"type": "_sbcs",
"chars": "ђЂѓЃёЁєЄѕЅіІїЇјЈљЉњЊћЋќЌўЎџЏюЮъЪаАбБцЦдДеЕфФгГ«»░▒▓│┤хХиИ╣║╗╝йЙ┐└┴┬├─┼кК╚╔╩╦╠═╬¤лЛмМнНоОп┘┌█▄Пя▀ЯрРсСтТуУжЖвВьЬ№ыЫзЗшШэЭщЩчЧ§■ "
},
"ibm855": "cp855",
"csibm855": "cp855",
"cp856": {
"type": "_sbcs",
"chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת�£�×����������®¬½¼�«»░▒▓│┤���©╣║╗╝¢¥┐└┴┬├─┼��╚╔╩╦╠═╬¤���������┘┌█▄¦�▀������µ�������¯´±‗¾¶§÷¸°¨·¹³²■ "
},
"ibm856": "cp856",
"csibm856": "cp856",
"cp857": {
"type": "_sbcs",
"chars": "ÇüéâäàåçêëèïîıÄÅÉæÆôöòûùİÖÜø£ØŞşáíóúñÑĞ𿮬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ºªÊËÈ�ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµ�×ÚÛÙìÿ¯´±�¾¶§÷¸°¨·¹³²■ "
},
"ibm857": "cp857",
"csibm857": "cp857",
"cp858": {
"type": "_sbcs",
"chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø׃áíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈ€ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´±‗¾¶§÷¸°¨·¹³²■ "
},
"ibm858": "cp858",
"csibm858": "cp858",
"cp860": {
"type": "_sbcs",
"chars": "ÇüéâãàÁçêÊèÍÔìÃÂÉÀÈôõòÚùÌÕÜ¢£Ù₧ÓáíóúñѪº¿Ò¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"ibm860": "cp860",
"csibm860": "cp860",
"cp861": {
"type": "_sbcs",
"chars": "ÇüéâäàåçêëèÐðÞÄÅÉæÆôöþûÝýÖÜø£Ø₧ƒáíóúÁÍÓÚ¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"ibm861": "cp861",
"csibm861": "cp861",
"cp862": {
"type": "_sbcs",
"chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"ibm862": "cp862",
"csibm862": "cp862",
"cp863": {
"type": "_sbcs",
"chars": "ÇüéâÂà¶çêëèïî‗À§ÉÈÊôËÏûù¤ÔÜ¢£ÙÛƒ¦´óú¨¸³¯Î⌐¬½¼¾«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"ibm863": "cp863",
"csibm863": "cp863",
"cp864": {
"type": "_sbcs",
"chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$٪&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~°·∙√▒─│┼┤┬├┴┐┌└┘β∞φ±½¼≈«»ﻷﻸ��ﻻﻼ� ﺂ£¤ﺄ��ﺎﺏﺕﺙ،ﺝﺡﺥ٠١٢٣٤٥٦٧٨٩ﻑ؛ﺱﺵﺹ؟¢ﺀﺁﺃﺅﻊﺋﺍﺑﺓﺗﺛﺟﺣﺧﺩﺫﺭﺯﺳﺷﺻﺿﻁﻅﻋﻏ¦¬÷×ﻉـﻓﻗﻛﻟﻣﻧﻫﻭﻯﻳﺽﻌﻎﻍﻡﹽّﻥﻩﻬﻰﻲﻐﻕﻵﻶﻝﻙﻱ■�"
},
"ibm864": "cp864",
"csibm864": "cp864",
"cp865": {
"type": "_sbcs",
"chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø₧ƒáíóúñѪº¿⌐¬½¼¡«¤░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"ibm865": "cp865",
"csibm865": "cp865",
"cp866": {
"type": "_sbcs",
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№¤■ "
},
"ibm866": "cp866",
"csibm866": "cp866",
"cp869": {
"type": "_sbcs",
"chars": "������Ά�·¬¦‘’Έ―ΉΊΪΌ��ΎΫ©Ώ²³ά£έήίϊΐόύΑΒΓΔΕΖΗ½ΘΙ«»░▒▓│┤ΚΛΜΝ╣║╗╝ΞΟ┐└┴┬├─┼ΠΡ╚╔╩╦╠═╬ΣΤΥΦΧΨΩαβγ┘┌█▄δε▀ζηθικλμνξοπρσςτ΄±υφχ§ψ΅°¨ωϋΰώ■ "
},
"ibm869": "cp869",
"csibm869": "cp869",
"cp922": {
"type": "_sbcs",
"chars": "
¡¢£¤¥¦§¨©ª«¬®‾°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŠÑÒÓÔÕÖ×ØÙÚÛÜÝŽßàáâãäåæçèéêëìíîïšñòóôõö÷øùúûüýžÿ"
},
"ibm922": "cp922",
"csibm922": "cp922",
"cp1046": {
"type": "_sbcs",
"chars": "ﺈ×÷ﹱ■│─┐┌└┘ﹹﹻﹽﹿﹷﺊﻰﻳﻲﻎﻏﻐﻶﻸﻺﻼ ¤ﺋﺑﺗﺛﺟﺣ،ﺧﺳ٠١٢٣٤٥٦٧٨٩ﺷ؛ﺻﺿﻊ؟ﻋءآأؤإئابةتثجحخدذرزسشصضطﻇعغﻌﺂﺄﺎﻓـفقكلمنهوىيًٌٍَُِّْﻗﻛﻟﻵﻷﻹﻻﻣﻧﻬﻩ�"
},
"ibm1046": "cp1046",
"csibm1046": "cp1046",
"cp1124": {
"type": "_sbcs",
"chars": "
ЁЂҐЄЅІЇЈЉЊЋЌЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђґєѕіїјљњћќ§ўџ"
},
"ibm1124": "cp1124",
"csibm1124": "cp1124",
"cp1125": {
"type": "_sbcs",
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёҐґЄєІіЇї·√№¤■ "
},
"ibm1125": "cp1125",
"csibm1125": "cp1125",
"cp1129": {
"type": "_sbcs",
"chars": "
¡¢£¤¥¦§œ©ª«¬®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ×ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ"
},
"ibm1129": "cp1129",
"csibm1129": "cp1129",
"cp1133": {
"type": "_sbcs",
"chars": "
ກຂຄງຈສຊຍດຕຖທນບປຜຝພຟມຢຣລວຫອຮ���ຯະາຳິີຶືຸູຼັົຽ���ເແໂໃໄ່້໊໋໌ໍໆ�ໜໝ₭����������������໐໑໒໓໔໕໖໗໘໙��¢¬¦�"
},
"ibm1133": "cp1133",
"csibm1133": "cp1133",
"cp1161": {
"type": "_sbcs",
"chars": "��������������������������������่กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู้๊๋€฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛¢¬¦ "
},
"ibm1161": "cp1161",
"csibm1161": "cp1161",
"cp1162": {
"type": "_sbcs",
"chars": "€…‘’“”•–— กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����"
},
"ibm1162": "cp1162",
"csibm1162": "cp1162",
"cp1163": {
"type": "_sbcs",
"chars": "
¡¢£€¥¦§œ©ª«¬®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ×ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ"
},
"ibm1163": "cp1163",
"csibm1163": "cp1163",
"maccroatian": {
"type": "_sbcs",
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®Š™´¨≠ŽØ∞±≤≥∆µ∂∑∏š∫ªºΩžø¿¡¬√ƒ≈Ć«Č… ÀÃÕŒœĐ—“”‘’÷◊�©⁄¤‹›Æ»–·‚„‰ÂćÁčÈÍÎÏÌÓÔđÒÚÛÙıˆ˜¯πË˚¸Êæˇ"
},
"maccyrillic": {
"type": "_sbcs",
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°¢£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµ∂ЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤"
},
"macgreek": {
"type": "_sbcs",
"chars": "Ĺ²É³ÖÜ΅àâä΄¨çéèê룙î‰ôö¦ùûü†ΓΔΘΛΞΠß®©ΣΪ§≠°·Α±≤≥¥ΒΕΖΗΙΚΜΦΫΨΩάΝ¬ΟΡ≈Τ«»… ΥΧΆΈœ–―“”‘’÷ΉΊΌΎέήίόΏύαβψδεφγηιξκλμνοπώρστθωςχυζϊϋΐΰ�"
},
"maciceland": {
"type": "_sbcs",
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûüÝ°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤ÐðÞþý·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ"
},
"macroman": {
"type": "_sbcs",
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ"
},
"macromania": {
"type": "_sbcs",
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ĂŞ∞±≤≥¥µ∂∑∏π∫ªºΩăş¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›Ţţ‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ"
},
"macthai": {
"type": "_sbcs",
"chars": "«»…“”�•‘’� กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู–—฿เแโใไๅๆ็่้๊๋์ํ™๏๐๑๒๓๔๕๖๗๘๙®©����"
},
"macturkish": {
"type": "_sbcs",
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸĞğİıŞş‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙ�ˆ˜¯˘˙˚¸˝˛ˇ"
},
"macukraine": {
"type": "_sbcs",
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°Ґ£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµґЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤"
},
"koi8r": {
"type": "_sbcs",
"chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ё╓╔╕╖╗╘╙╚╛╜╝╞╟╠╡Ё╢╣╤╥╦╧╨╩╪╫╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
},
"koi8u": {
"type": "_sbcs",
"chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґ╝╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪Ґ╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
},
"koi8ru": {
"type": "_sbcs",
"chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґў╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪ҐЎ©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
},
"koi8t": {
"type": "_sbcs",
"chars": "қғ‚Ғ„…†‡�‰ҳ‹ҲҷҶ�Қ‘’“”•–—�™�›�����ӯӮё¤ӣ¦§���«¬®�°±²Ё�Ӣ¶·�№�»���©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
},
"armscii8": {
"type": "_sbcs",
"chars": "
�և։)(»«—.՝,-֊…՜՛՞ԱաԲբԳգԴդԵեԶզԷէԸըԹթԺժԻիԼլԽխԾծԿկՀհՁձՂղՃճՄմՅյՆնՇշՈոՉչՊպՋջՌռՍսՎվՏտՐրՑցՒւՓփՔքՕօՖֆ՚�"
},
"rk1048": {
"type": "_sbcs",
"chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊҚҺЏђ‘’“”•–—�™љ›њқһџ ҰұӘ¤Ө¦§Ё©Ғ«¬®Ү°±Ііөµ¶·ё№ғ»әҢңүАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя"
},
"tcvn": {
"type": "_sbcs",
"chars": "\u0000ÚỤ\u0003ỪỬỮ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010ỨỰỲỶỸÝỴ\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ÀẢÃÁẠẶẬÈẺẼÉẸỆÌỈĨÍỊÒỎÕÓỌỘỜỞỠỚỢÙỦŨ ĂÂÊÔƠƯĐăâêôơưđẶ̀̀̉̃́àảãáạẲằẳẵắẴẮẦẨẪẤỀặầẩẫấậèỂẻẽéẹềểễếệìỉỄẾỒĩíịòỔỏõóọồổỗốộờởỡớợùỖủũúụừửữứựỳỷỹýỵỐ"
},
"georgianacademy": {
"type": "_sbcs",
"chars": "‚ƒ„…†‡ˆ‰Š‹Œ‘’“”•–—˜™š›œŸ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზთიკლმნოპჟრსტუფქღყშჩცძწჭხჯჰჱჲჳჴჵჶçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
},
"georgianps": {
"type": "_sbcs",
"chars": "‚ƒ„…†‡ˆ‰Š‹Œ‘’“”•–—˜™š›œŸ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზჱთიკლმნჲოპჟრსტჳუფქღყშჩცძწჭხჴჯჰჵæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
},
"pt154": {
"type": "_sbcs",
"chars": "ҖҒӮғ„…ҶҮҲүҠӢҢҚҺҸҗ‘’“”•–—ҳҷҡӣңқһҹ ЎўЈӨҘҰ§Ё©Ә«¬ӯ®Ҝ°ұІіҙө¶·ё№ә»јҪҫҝАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя"
},
"viscii": {
"type": "_sbcs",
"chars": "\u0000\u0001Ẳ\u0003\u0004ẴẪ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013Ỷ\u0015\u0016\u0017\u0018Ỹ\u001a\u001b\u001c\u001dỴ\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ẠẮẰẶẤẦẨẬẼẸẾỀỂỄỆỐỒỔỖỘỢỚỜỞỊỎỌỈỦŨỤỲÕắằặấầẩậẽẹếềểễệốồổỗỠƠộờởịỰỨỪỬơớƯÀÁÂÃẢĂẳẵÈÉÊẺÌÍĨỳĐứÒÓÔạỷừửÙÚỹỵÝỡưàáâãảăữẫèéêẻìíĩỉđựòóôõỏọụùúũủýợỮ"
},
"iso646cn": {
"type": "_sbcs",
"chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#¥%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}‾��������������������������������������������������������������������������������������������������������������������������������"
},
"iso646jp": {
"type": "_sbcs",
"chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[¥]^_`abcdefghijklmnopqrstuvwxyz{|}‾��������������������������������������������������������������������������������������������������������������������������������"
},
"hproman8": {
"type": "_sbcs",
"chars": "
ÀÂÈÊËÎÏ´ˋˆ¨˜ÙÛ₤¯Ýý°ÇçÑñ¡¿¤£¥§ƒ¢âêôûáéóúàèòùäëöüÅîØÆåíøæÄìÖÜÉïßÔÁÃãÐðÍÌÓÒÕõŠšÚŸÿÞþ·µ¶¾—¼½ªº«■»±�"
},
"macintosh": {
"type": "_sbcs",
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ"
},
"ascii": {
"type": "_sbcs",
"chars": "��������������������������������������������������������������������������������������������������������������������������������"
},
"tis620": {
"type": "_sbcs",
"chars": "���������������������������������กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����"
}
}
/***/ }),
/***/ 35768:
/***/ ((module) => {
"use strict";
// Manually added data to be used by sbcs codec in addition to generated one.
module.exports = {
// Not supported by iconv, not sure why.
"10029": "maccenteuro",
"maccenteuro": {
"type": "_sbcs",
"chars": "ÄĀāÉĄÖÜáąČäčĆć鏟ĎíďĒēĖóėôöõúĚěü†°Ę£§•¶ß®©™ę¨≠ģĮįĪ≤≥īĶ∂∑łĻļĽľĹĺŅņѬ√ńŇ∆«»… ňŐÕőŌ–—“”‘’÷◊ōŔŕŘ‹›řŖŗŠ‚„šŚśÁŤťÍŽžŪÓÔūŮÚůŰűŲųÝýķŻŁżĢˇ"
},
"808": "cp808",
"ibm808": "cp808",
"cp808": {
"type": "_sbcs",
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№€■ "
},
"mik": {
"type": "_sbcs",
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя└┴┬├─┼╣║╚╔╩╦╠═╬┐░▒▓│┤№§╗╝┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"cp720": {
"type": "_sbcs",
"chars": "\x80\x81éâ\x84à\x86çêëèïî\x8d\x8e\x8f\x90\u0651\u0652ô¤ـûùءآأؤ£إئابةتثجحخدذرزسشص«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀ضطظعغفµقكلمنهوىي≡\u064b\u064c\u064d\u064e\u064f\u0650≈°∙·√ⁿ²■\u00a0"
},
// Aliases of generated encodings.
"ascii8bit": "ascii",
"usascii": "ascii",
"ansix34": "ascii",
"ansix341968": "ascii",
"ansix341986": "ascii",
"csascii": "ascii",
"cp367": "ascii",
"ibm367": "ascii",
"isoir6": "ascii",
"iso646us": "ascii",
"iso646irv": "ascii",
"us": "ascii",
"latin1": "iso88591",
"latin2": "iso88592",
"latin3": "iso88593",
"latin4": "iso88594",
"latin5": "iso88599",
"latin6": "iso885910",
"latin7": "iso885913",
"latin8": "iso885914",
"latin9": "iso885915",
"latin10": "iso885916",
"csisolatin1": "iso88591",
"csisolatin2": "iso88592",
"csisolatin3": "iso88593",
"csisolatin4": "iso88594",
"csisolatincyrillic": "iso88595",
"csisolatinarabic": "iso88596",
"csisolatingreek" : "iso88597",
"csisolatinhebrew": "iso88598",
"csisolatin5": "iso88599",
"csisolatin6": "iso885910",
"l1": "iso88591",
"l2": "iso88592",
"l3": "iso88593",
"l4": "iso88594",
"l5": "iso88599",
"l6": "iso885910",
"l7": "iso885913",
"l8": "iso885914",
"l9": "iso885915",
"l10": "iso885916",
"isoir14": "iso646jp",
"isoir57": "iso646cn",
"isoir100": "iso88591",
"isoir101": "iso88592",
"isoir109": "iso88593",
"isoir110": "iso88594",
"isoir144": "iso88595",
"isoir127": "iso88596",
"isoir126": "iso88597",
"isoir138": "iso88598",
"isoir148": "iso88599",
"isoir157": "iso885910",
"isoir166": "tis620",
"isoir179": "iso885913",
"isoir199": "iso885914",
"isoir203": "iso885915",
"isoir226": "iso885916",
"cp819": "iso88591",
"ibm819": "iso88591",
"cyrillic": "iso88595",
"arabic": "iso88596",
"arabic8": "iso88596",
"ecma114": "iso88596",
"asmo708": "iso88596",
"greek" : "iso88597",
"greek8" : "iso88597",
"ecma118" : "iso88597",
"elot928" : "iso88597",
"hebrew": "iso88598",
"hebrew8": "iso88598",
"turkish": "iso88599",
"turkish8": "iso88599",
"thai": "iso885911",
"thai8": "iso885911",
"celtic": "iso885914",
"celtic8": "iso885914",
"isoceltic": "iso885914",
"tis6200": "tis620",
"tis62025291": "tis620",
"tis62025330": "tis620",
"10000": "macroman",
"10006": "macgreek",
"10007": "maccyrillic",
"10079": "maciceland",
"10081": "macturkish",
"cspc8codepage437": "cp437",
"cspc775baltic": "cp775",
"cspc850multilingual": "cp850",
"cspcp852": "cp852",
"cspc862latinhebrew": "cp862",
"cpgr": "cp869",
"msee": "cp1250",
"mscyrl": "cp1251",
"msansi": "cp1252",
"msgreek": "cp1253",
"msturk": "cp1254",
"mshebr": "cp1255",
"msarab": "cp1256",
"winbaltrim": "cp1257",
"cp20866": "koi8r",
"20866": "koi8r",
"ibm878": "koi8r",
"cskoi8r": "koi8r",
"cp21866": "koi8u",
"21866": "koi8u",
"ibm1168": "koi8u",
"strk10482002": "rk1048",
"tcvn5712": "tcvn",
"tcvn57121": "tcvn",
"gb198880": "iso646cn",
"cn": "iso646cn",
"csiso14jisc6220ro": "iso646jp",
"jisc62201969ro": "iso646jp",
"jp": "iso646jp",
"cshproman8": "hproman8",
"r8": "hproman8",
"roman8": "hproman8",
"xroman8": "hproman8",
"ibm1051": "hproman8",
"mac": "macintosh",
"csmacintosh": "macintosh",
};
/***/ }),
/***/ 64189:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
var Buffer = __webpack_require__(2399).Buffer;
// Note: UTF16-LE (or UCS2) codec is Node.js native. See encodings/internal.js
// == UTF16-BE codec. ==========================================================
// UTF16-BE codec: big-endian UTF-16. Node only implements UTF-16LE ('ucs2')
// natively, so the encoder/decoder below byte-swap to/from the native form.
exports.utf16be = Utf16BECodec;
function Utf16BECodec() {
}
Utf16BECodec.prototype.encoder = Utf16BEEncoder;
Utf16BECodec.prototype.decoder = Utf16BEDecoder;
// bomAware flag — presumably consumed by iconv's generic BOM handling; confirm in bom-handling module.
Utf16BECodec.prototype.bomAware = true;
// -- Encoding
// -- Encoding: encode natively as UTF-16LE, then swap each byte pair to BE.
function Utf16BEEncoder() {
}

Utf16BEEncoder.prototype.write = function(str) {
    var buf = Buffer.from(str, 'ucs2');
    for (var lo = 0, hi = 1; hi < buf.length; lo += 2, hi += 2) {
        var b = buf[lo];
        buf[lo] = buf[hi];
        buf[hi] = b;
    }
    return buf;
}

// Stateless encoder: nothing to flush at end of stream.
Utf16BEEncoder.prototype.end = function() {
}
// -- Decoding
// -- Decoding: swap incoming big-endian byte pairs into UTF-16LE and decode
// natively. A trailing unpaired byte is carried over between chunks.
function Utf16BEDecoder() {
    this.overflowByte = -1;  // high byte of a code unit split across chunks
}

Utf16BEDecoder.prototype.write = function(buf) {
    if (buf.length == 0)
        return '';

    var swapped = Buffer.alloc(buf.length + 1);
    var src = 0;
    var dst = 0;

    // Complete the code unit left over from the previous chunk.
    if (this.overflowByte !== -1) {
        swapped[0] = buf[0];              // low byte (just arrived)
        swapped[1] = this.overflowByte;   // high byte (carried over)
        src = 1;
        dst = 2;
    }

    // Swap whole big-endian pairs into little-endian order.
    while (src < buf.length - 1) {
        swapped[dst] = buf[src + 1];
        swapped[dst + 1] = buf[src];
        src += 2;
        dst += 2;
    }

    // Remember the dangling high byte, if the remaining length was odd.
    this.overflowByte = (src == buf.length - 1) ? buf[buf.length - 1] : -1;

    return swapped.slice(0, dst).toString('ucs2');
}

Utf16BEDecoder.prototype.end = function() {
    this.overflowByte = -1;
}
// == UTF-16 codec =============================================================
// Decoder chooses automatically from UTF-16LE and UTF-16BE using BOM and space-based heuristic.
// Defaults to UTF-16LE, as it's prevalent and default in Node.
// http://en.wikipedia.org/wiki/UTF-16 and http://encoding.spec.whatwg.org/#utf-16le
// Decoder default can be changed: iconv.decode(buf, 'utf16', {defaultEncoding: 'utf-16be'});
// Encoder uses UTF-16LE and prepends BOM (which can be overridden with addBOM: false).
// General UTF-16 codec: encodes as UTF-16LE (BOM-prefixed by default); decodes
// by sniffing the byte order first (see Utf16Decoder / detectEncoding below).
exports.utf16 = Utf16Codec;
function Utf16Codec(codecOptions, iconv) {
    this.iconv = iconv; // kept so encoder/decoder can obtain the concrete LE/BE codecs
}
Utf16Codec.prototype.encoder = Utf16Encoder;
Utf16Codec.prototype.decoder = Utf16Decoder;
// -- Encoding (pass-through)
// -- Encoding (pass-through): always emits UTF-16LE, adding a BOM unless the
// caller explicitly set options.addBOM = false.
function Utf16Encoder(options, codec) {
    var opts = options || {};
    if (opts.addBOM === undefined)
        opts.addBOM = true;
    this.encoder = codec.iconv.getEncoder('utf-16le', opts);
}

// Delegate to the underlying UTF-16LE encoder.
Utf16Encoder.prototype.write = function(str) {
    return this.encoder.write(str);
}

// Delegate end-of-stream flushing to the underlying encoder.
Utf16Encoder.prototype.end = function() {
    return this.encoder.end();
}
// -- Decoding
// -- Decoding: buffers the first 16 bytes, sniffs endianness (BOM or space
// heuristic in detectEncoding), then delegates to the chosen LE/BE decoder.
function Utf16Decoder(options, codec) {
    this.decoder = null;     // chosen after endianness detection
    this.initialBufs = [];   // chunks accumulated before detection
    this.initialBufsLen = 0;
    this.options = options || {};
    this.iconv = codec.iconv;
}

Utf16Decoder.prototype.write = function(buf) {
    if (this.decoder)
        return this.decoder.write(buf);

    // Codec is not chosen yet -> accumulate initial bytes.
    this.initialBufs.push(buf);
    this.initialBufsLen += buf.length;

    if (this.initialBufsLen < 16) // Need more bytes for the space heuristic.
        return '';

    // Enough data -> pick endianness and replay what we buffered.
    var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding);
    this.decoder = this.iconv.getDecoder(encoding, this.options);

    var res = '';
    for (var k = 0; k < this.initialBufs.length; k++)
        res += this.decoder.write(this.initialBufs[k]);
    this.initialBufs.length = this.initialBufsLen = 0;
    return res;
}

Utf16Decoder.prototype.end = function() {
    if (this.decoder)
        return this.decoder.end();

    // Stream ended before detection: decide with whatever bytes we have.
    var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding);
    this.decoder = this.iconv.getDecoder(encoding, this.options);

    var res = '';
    for (var k = 0; k < this.initialBufs.length; k++)
        res += this.decoder.write(this.initialBufs[k]);

    var trail = this.decoder.end();
    if (trail)
        res += trail;

    this.initialBufs.length = this.initialBufsLen = 0;
    return res;
}
// Guess UTF-16 endianness from up to 100 characters of input.
// Priority: explicit BOM; otherwise count which byte order yields more
// ASCII (U+00xx) characters, since U+xx00 codepoints are rare in practice.
function detectEncoding(bufs, defaultEncoding) {
    var pair = [];
    var charsProcessed = 0;
    var asciiCharsLE = 0, asciiCharsBE = 0;

    scan:
    for (var i = 0; i < bufs.length; i++) {
        var buf = bufs[i];
        for (var j = 0; j < buf.length; j++) {
            pair.push(buf[j]);
            if (pair.length !== 2)
                continue;

            // A BOM at the very start settles it immediately.
            if (charsProcessed === 0) {
                if (pair[0] === 0xFF && pair[1] === 0xFE) return 'utf-16le';
                if (pair[0] === 0xFE && pair[1] === 0xFF) return 'utf-16be';
            }

            if (pair[0] === 0 && pair[1] !== 0) asciiCharsBE++;
            if (pair[0] !== 0 && pair[1] === 0) asciiCharsLE++;

            pair.length = 0;
            charsProcessed++;
            if (charsProcessed >= 100)
                break scan;
        }
    }

    // Whichever interpretation produced more ASCII characters wins.
    if (asciiCharsBE > asciiCharsLE) return 'utf-16be';
    if (asciiCharsBE < asciiCharsLE) return 'utf-16le';

    // Couldn't decide (likely all zeros or not enough data).
    return defaultEncoding || 'utf-16le';
}
/***/ }),
/***/ 57437:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
var Buffer = __webpack_require__(2399).Buffer;
// == UTF32-LE/BE codec. ==========================================================
// Concrete UTF-32 codec; instantiated via the utf32le/utf32be definitions
// below, which select the byte order through codecOptions.isLE.
exports._utf32 = Utf32Codec;
function Utf32Codec(codecOptions, iconv) {
    this.iconv = iconv;
    // bomAware flag — presumably consumed by iconv's generic BOM handling; confirm in bom-handling module.
    this.bomAware = true;
    this.isLE = codecOptions.isLE;
}
// Codec definitions resolved by iconv through the `type: '_utf32'` indirection above.
exports.utf32le = { type: '_utf32', isLE: true };
exports.utf32be = { type: '_utf32', isLE: false };
// Aliases
exports.ucs4le = 'utf32le';
exports.ucs4be = 'utf32be';
Utf32Codec.prototype.encoder = Utf32Encoder;
Utf32Codec.prototype.decoder = Utf32Decoder;
// -- Encoding
// -- Encoding
function Utf32Encoder(options, codec) {
    this.isLE = codec.isLE;
    this.highSurrogate = 0; // pending high surrogate carried over from a previous write()
}

// Encodes `str` to UTF-32 (LE or BE per the codec). Surrogate pairs are
// combined into one 32-bit codepoint; unpaired surrogates are written as-is
// (technically invalid, but expected by some applications, e.g. Windows file names).
Utf32Encoder.prototype.write = function(str) {
    var src = Buffer.from(str, 'ucs2');
    // Worst case output: every UCS-2 unit expands to 4 bytes, PLUS 4 more when
    // a high surrogate pending from the previous chunk is flushed first.
    // BUGFIX: the previous `src.length * 2` allocation was 4 bytes short in
    // that case, making write32 throw out-of-range.
    var dst = Buffer.alloc(src.length * 2 + 4);
    var write32 = this.isLE ? dst.writeUInt32LE : dst.writeUInt32BE;
    var offset = 0;

    for (var i = 0; i < src.length; i += 2) {
        var code = src.readUInt16LE(i);
        var isHighSurrogate = (0xD800 <= code && code < 0xDC00);
        var isLowSurrogate = (0xDC00 <= code && code < 0xE000);

        if (this.highSurrogate) {
            if (isHighSurrogate || !isLowSurrogate) {
                // There shouldn't be two high surrogates in a row, nor a high surrogate which isn't followed by a low
                // surrogate. If this happens, keep the pending high surrogate as a stand-alone semi-invalid character
                // (technically wrong, but expected by some applications, like Windows file names).
                write32.call(dst, this.highSurrogate, offset);
                offset += 4;
                this.highSurrogate = 0;
            }
            else {
                // Create 32-bit value from high and low surrogates.
                var codepoint = (((this.highSurrogate - 0xD800) << 10) | (code - 0xDC00)) + 0x10000;
                write32.call(dst, codepoint, offset);
                offset += 4;
                this.highSurrogate = 0;
                continue;
            }
        }

        if (isHighSurrogate)
            this.highSurrogate = code; // defer: may pair with the next unit
        else {
            // Even if the current character is a low surrogate, with no previous high surrogate, we'll
            // encode it as a semi-invalid stand-alone character for the same reasons expressed above for
            // unpaired high surrogates.
            write32.call(dst, code, offset);
            offset += 4;
            this.highSurrogate = 0;
        }
    }

    if (offset < dst.length)
        dst = dst.slice(0, offset);

    return dst;
};

// Flushes a pending unpaired high surrogate, if any, as a stand-alone char.
Utf32Encoder.prototype.end = function() {
    if (!this.highSurrogate)
        return;

    var buf = Buffer.alloc(4);
    if (this.isLE)
        buf.writeUInt32LE(this.highSurrogate, 0);
    else
        buf.writeUInt32BE(this.highSurrogate, 0);
    this.highSurrogate = 0;

    return buf;
};
// -- Decoding
// -- Decoding
function Utf32Decoder(options, codec) {
    this.isLE = codec.isLE;
    this.badChar = codec.iconv.defaultCharUnicode.charCodeAt(0); // replacement for invalid codepoints
    this.overflow = []; // 0-3 bytes of an incomplete codepoint from the previous chunk
}

// Decodes a chunk of UTF-32 bytes into a JS (UTF-16) string. Bytes that don't
// complete a 4-byte codepoint are kept in `this.overflow` for the next call.
Utf32Decoder.prototype.write = function(src) {
    if (src.length === 0)
        return '';

    var i = 0;
    var codepoint = 0;
    var dst = Buffer.alloc(src.length + 4);
    var offset = 0;
    var isLE = this.isLE;
    var overflow = this.overflow;
    var badChar = this.badChar;

    if (overflow.length > 0) {
        for (; i < src.length && overflow.length < 4; i++)
            overflow.push(src[i]);

        if (overflow.length === 4) {
            // NOTE: codepoint is a signed int32 and can be negative.
            // BUGFIX: the completed codepoint lives at overflow[0..3]; the old
            // code indexed it with `i` (an index into src, always > 0 here),
            // reading past the array and decoding garbage.
            if (isLE) {
                codepoint = overflow[0] | (overflow[1] << 8) | (overflow[2] << 16) | (overflow[3] << 24);
            } else {
                codepoint = overflow[3] | (overflow[2] << 8) | (overflow[1] << 16) | (overflow[0] << 24);
            }
            overflow.length = 0;

            offset = _writeCodepoint(dst, offset, codepoint, badChar);
        }
    }

    // Main loop. Should be as optimized as possible.
    for (; i < src.length - 3; i += 4) {
        // NOTE: codepoint is a signed int32 and can be negative.
        if (isLE) {
            codepoint = src[i] | (src[i+1] << 8) | (src[i+2] << 16) | (src[i+3] << 24);
        } else {
            codepoint = src[i+3] | (src[i+2] << 8) | (src[i+1] << 16) | (src[i] << 24);
        }
        offset = _writeCodepoint(dst, offset, codepoint, badChar);
    }

    // Keep overflowing bytes for the next chunk.
    for (; i < src.length; i++) {
        overflow.push(src[i]);
    }

    return dst.slice(0, offset).toString('ucs2');
};

// Appends `codepoint` to `dst` (UTF-16LE bytes) at `offset`, splitting
// astral-plane codepoints into a surrogate pair; out-of-range codepoints are
// replaced with `badChar`. Returns the new offset.
function _writeCodepoint(dst, offset, codepoint, badChar) {
    // NOTE: codepoint is signed int32 and can be negative. We keep it that way to help V8 with optimizations.
    if (codepoint < 0 || codepoint > 0x10FFFF) {
        // Not a valid Unicode codepoint
        codepoint = badChar;
    }

    // Astral planes: write the high surrogate, then fall through to write the
    // low surrogate as a BMP unit below.
    if (codepoint >= 0x10000) {
        codepoint -= 0x10000;
        var high = 0xD800 | (codepoint >> 10);
        dst[offset++] = high & 0xff;
        dst[offset++] = high >> 8;
        codepoint = 0xDC00 | (codepoint & 0x3FF); // (was a redundant shadowing `var`)
    }

    // Write BMP char or low surrogate.
    dst[offset++] = codepoint & 0xff;
    dst[offset++] = codepoint >> 8;

    return offset;
};

// Discards any incomplete trailing codepoint at end of stream.
Utf32Decoder.prototype.end = function() {
    this.overflow.length = 0;
};
// == UTF-32 Auto codec =============================================================
// Decoder chooses automatically from UTF-32LE and UTF-32BE using BOM and space-based heuristic.
// Defaults to UTF-32LE. http://en.wikipedia.org/wiki/UTF-32
// Encoder/decoder default can be changed: iconv.decode(buf, 'utf32', {defaultEncoding: 'utf-32be'});
// Encoder prepends BOM (which can be overridden with (addBOM: false}).
// Auto codec: decoder picks LE/BE at decode time (see Utf32AutoDecoder);
// encoder delegates to a concrete UTF-32 encoder per options.
exports.utf32 = Utf32AutoCodec;
// Alias.
exports.ucs4 = 'utf32';
function Utf32AutoCodec(options, iconv) {
    this.iconv = iconv; // kept so encoder/decoder can obtain the concrete LE/BE codecs
}
Utf32AutoCodec.prototype.encoder = Utf32AutoEncoder;
Utf32AutoCodec.prototype.decoder = Utf32AutoDecoder;
// -- Encoding
// -- Encoding (pass-through): delegates to a concrete UTF-32 encoder, chosen
// by options.defaultEncoding (default 'utf-32le'); a BOM is added unless the
// caller explicitly set options.addBOM = false.
function Utf32AutoEncoder(options, codec) {
    var opts = options || {};
    if (opts.addBOM === undefined)
        opts.addBOM = true;
    this.encoder = codec.iconv.getEncoder(opts.defaultEncoding || 'utf-32le', opts);
}

// Delegate to the underlying concrete encoder.
Utf32AutoEncoder.prototype.write = function(str) {
    return this.encoder.write(str);
};

// Delegate end-of-stream flushing to the underlying encoder.
Utf32AutoEncoder.prototype.end = function() {
    return this.encoder.end();
};
// -- Decoding
// -- Decoding: buffers the first 32 bytes, sniffs endianness (BOM or the
// BMP-vs-invalid heuristic in detectEncoding), then delegates to the chosen
// LE/BE decoder.
function Utf32AutoDecoder(options, codec) {
    this.decoder = null;     // chosen after endianness detection
    this.initialBufs = [];   // chunks accumulated before detection
    this.initialBufsLen = 0;
    this.options = options || {};
    this.iconv = codec.iconv;
}

Utf32AutoDecoder.prototype.write = function(buf) {
    if (this.decoder)
        return this.decoder.write(buf);

    // Codec is not chosen yet -> accumulate initial bytes.
    this.initialBufs.push(buf);
    this.initialBufsLen += buf.length;

    if (this.initialBufsLen < 32) // Need more bytes for the heuristic.
        return '';

    // Enough data -> pick endianness and replay what we buffered.
    var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding);
    this.decoder = this.iconv.getDecoder(encoding, this.options);

    var res = '';
    for (var k = 0; k < this.initialBufs.length; k++)
        res += this.decoder.write(this.initialBufs[k]);
    this.initialBufs.length = this.initialBufsLen = 0;
    return res;
};

Utf32AutoDecoder.prototype.end = function() {
    if (this.decoder)
        return this.decoder.end();

    // Stream ended before detection: decide with whatever bytes we have.
    var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding);
    this.decoder = this.iconv.getDecoder(encoding, this.options);

    var res = '';
    for (var k = 0; k < this.initialBufs.length; k++)
        res += this.decoder.write(this.initialBufs[k]);

    var trail = this.decoder.end();
    if (trail)
        res += trail;

    this.initialBufs.length = this.initialBufsLen = 0;
    return res;
};
// Decide between 'utf-32le' and 'utf-32be' for the given array of byte
// buffers. A BOM in the very first 4-byte unit wins outright; otherwise up
// to 100 units are scored per endianness (invalid code points vs. plain BMP
// characters) and the better interpretation is chosen. Ties fall back to
// defaultEncoding, then 'utf-32le'.
function detectEncoding(bufs, defaultEncoding) {
    var unit = [];                       // 4-byte unit being assembled.
    var charsProcessed = 0;
    var invalidLE = 0, invalidBE = 0;    // Units that cannot be valid code points.
    var bmpCharsLE = 0, bmpCharsBE = 0;  // Units that look like BMP chars.
    scan:
    for (var i = 0; i < bufs.length; i++) {
        var buf = bufs[i];
        for (var j = 0; j < buf.length; j++) {
            unit.push(buf[j]);
            if (unit.length !== 4)
                continue;
            if (charsProcessed === 0) {
                // BOM check applies only to the very first unit.
                if (unit[0] === 0xFF && unit[1] === 0xFE && unit[2] === 0 && unit[3] === 0)
                    return 'utf-32le';
                if (unit[0] === 0 && unit[1] === 0 && unit[2] === 0xFE && unit[3] === 0xFF)
                    return 'utf-32be';
            }
            // Code points are <= 0x10FFFF, so the two high bytes must be small.
            if (unit[0] !== 0 || unit[1] > 0x10) invalidBE++;
            if (unit[3] !== 0 || unit[2] > 0x10) invalidLE++;
            if (unit[0] === 0 && unit[1] === 0 && (unit[2] !== 0 || unit[3] !== 0)) bmpCharsBE++;
            if ((unit[0] !== 0 || unit[1] !== 0) && unit[2] === 0 && unit[3] === 0) bmpCharsLE++;
            unit.length = 0;
            if (++charsProcessed >= 100)
                break scan;
        }
    }
    // Pick whichever interpretation scores better.
    if (bmpCharsBE - invalidBE > bmpCharsLE - invalidLE) return 'utf-32be';
    if (bmpCharsBE - invalidBE < bmpCharsLE - invalidLE) return 'utf-32le';
    // Couldn't decide (likely all zeros or not enough data).
    return defaultEncoding || 'utf-32le';
}
/***/ }),
/***/ 62636:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
var Buffer = __webpack_require__(2399).Buffer;
// UTF-7 codec, according to https://tools.ietf.org/html/rfc2152
// A UTF-7-IMAP variant (RFC 3501 section 5.1.3) is defined further below.
function Utf7Codec(codecOptions, iconv) {
    this.iconv = iconv;
}
Utf7Codec.prototype.encoder = Utf7Encoder;
Utf7Codec.prototype.decoder = Utf7Decoder;
Utf7Codec.prototype.bomAware = true;
exports.utf7 = Utf7Codec;
exports.unicode11utf7 = 'utf7'; // Alias for UNICODE-1-1-UTF-7.
// -- Encoding
// Characters that may appear directly in UTF-7 output (RFC 2152 direct
// characters plus whitespace); every other run goes through base64.
var nonDirectChars = /[^A-Za-z0-9'\(\),-\.\/:\? \n\r\t]+/g;
function Utf7Encoder(options, codec) {
    this.iconv = codec.iconv;
}
// Naive implementation: each run of non-direct chars becomes "+<base64>-"
// (UTF-16BE, padding stripped); a lone "+" is encoded as "+-".
Utf7Encoder.prototype.write = function (str) {
    var self = this;
    var escaped = str.replace(nonDirectChars, function (chunk) {
        if (chunk === '+')
            return '+-';
        var b64 = self.iconv.encode(chunk, 'utf16-be').toString('base64');
        return '+' + b64.replace(/=+$/, '') + '-';
    });
    return Buffer.from(escaped);
};
Utf7Encoder.prototype.end = function () {
};
// -- Decoding
// Streaming UTF-7 decoder. State between chunks: inBase64 (are we inside a
// "+...-" base64 section?) and base64Accum (undecodable base64 tail carried
// over to the next write() call).
function Utf7Decoder(options, codec) {
this.iconv = codec.iconv;
this.inBase64 = false;
this.base64Accum = '';
}
// Precomputed byte -> "is a valid base64 character" lookup table.
var base64Regex = /[A-Za-z0-9\/+]/;
var base64Chars = [];
for (var i = 0; i < 256; i++)
base64Chars[i] = base64Regex.test(String.fromCharCode(i));
var plusChar = '+'.charCodeAt(0),
minusChar = '-'.charCodeAt(0),
andChar = '&'.charCodeAt(0);
// Decode one chunk: direct spans are decoded as ASCII; "+...-" spans are
// base64-decoded as UTF-16BE. An incomplete trailing base64 run is kept in
// base64Accum for the next call.
Utf7Decoder.prototype.write = function(buf) {
var res = "", lastI = 0,
inBase64 = this.inBase64,
base64Accum = this.base64Accum;
// The decoder is more involved as we must handle chunks in stream.
for (var i = 0; i < buf.length; i++) {
if (!inBase64) { // We're in direct mode.
// Write direct chars until '+'
if (buf[i] == plusChar) {
res += this.iconv.decode(buf.slice(lastI, i), "ascii"); // Write direct chars.
lastI = i+1;
inBase64 = true;
}
} else { // We decode base64.
if (!base64Chars[buf[i]]) { // Base64 ended.
if (i == lastI && buf[i] == minusChar) {// "+-" -> "+"
res += "+";
} else {
var b64str = base64Accum + this.iconv.decode(buf.slice(lastI, i), "ascii");
res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be");
}
if (buf[i] != minusChar) // Minus is absorbed after base64.
i--; // Re-process this char in direct mode.
lastI = i+1;
inBase64 = false;
base64Accum = '';
}
}
}
if (!inBase64) {
res += this.iconv.decode(buf.slice(lastI), "ascii"); // Write direct chars.
} else {
// Chunk ended mid-base64: decode only whole 8-char groups (which map to
// exact UTF-16 code units) and carry the remainder to the next chunk.
var b64str = base64Accum + this.iconv.decode(buf.slice(lastI), "ascii");
var canBeDecoded = b64str.length - (b64str.length % 8); // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars.
base64Accum = b64str.slice(canBeDecoded); // The rest will be decoded in future.
b64str = b64str.slice(0, canBeDecoded);
res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be");
}
this.inBase64 = inBase64;
this.base64Accum = base64Accum;
return res;
}
// Flush: decode whatever base64 tail remains and reset the decoder state.
Utf7Decoder.prototype.end = function() {
var res = "";
if (this.inBase64 && this.base64Accum.length > 0)
res = this.iconv.decode(Buffer.from(this.base64Accum, 'base64'), "utf16-be");
this.inBase64 = false;
this.base64Accum = '';
return res;
}
// UTF-7-IMAP codec.
// RFC3501 Sec. 5.1.3 Modified UTF-7 (http://tools.ietf.org/html/rfc3501#section-5.1.3)
// Differences from RFC 2152 UTF-7:
// * Base64 sections are introduced by "&" instead of "+"
// * Direct characters are 0x20-0x7E, except "&" (0x26)
// * In base64, "," is used instead of "/"
// * Base64 must not be used to represent direct characters.
// * No implicit shift back from base64 (should always end with '-')
// * The string must end in a non-shifted position.
// * "-&" while in base64 is not allowed.
function Utf7IMAPCodec(codecOptions, iconv) {
    this.iconv = iconv;
}
Utf7IMAPCodec.prototype.encoder = Utf7IMAPEncoder;
Utf7IMAPCodec.prototype.decoder = Utf7IMAPDecoder;
Utf7IMAPCodec.prototype.bomAware = true;
exports.utf7imap = Utf7IMAPCodec;
// -- Encoding
// Streaming modified-UTF-7 (IMAP) encoder. base64Accum buffers UTF-16BE
// bytes of the currently open "&...-" section; 6 bytes (3 code units) form
// whole base64 quads, so no '=' padding is ever produced mid-stream.
function Utf7IMAPEncoder(options, codec) {
this.iconv = codec.iconv;
this.inBase64 = false;
this.base64Accum = Buffer.alloc(6);
this.base64AccumIdx = 0;
}
Utf7IMAPEncoder.prototype.write = function(str) {
var inBase64 = this.inBase64,
base64Accum = this.base64Accum,
base64AccumIdx = this.base64AccumIdx,
buf = Buffer.alloc(str.length*5 + 10), bufIdx = 0; // Oversized scratch buffer; trimmed via slice() below.
for (var i = 0; i < str.length; i++) {
var uChar = str.charCodeAt(i);
if (0x20 <= uChar && uChar <= 0x7E) { // Direct character or '&'.
if (inBase64) {
// Close the open base64 section: flush the partial accumulator
// using IMAP's ','-for-'/' alphabet, with padding stripped.
if (base64AccumIdx > 0) {
bufIdx += buf.write(base64Accum.slice(0, base64AccumIdx).toString('base64').replace(/\//g, ',').replace(/=+$/, ''), bufIdx);
base64AccumIdx = 0;
}
buf[bufIdx++] = minusChar; // Write '-', then go to direct mode.
inBase64 = false;
}
if (!inBase64) {
buf[bufIdx++] = uChar; // Write direct character
if (uChar === andChar) // Ampersand -> '&-'
buf[bufIdx++] = minusChar;
}
} else { // Non-direct character
if (!inBase64) {
buf[bufIdx++] = andChar; // Write '&', then go to base64 mode.
inBase64 = true;
}
if (inBase64) {
// Accumulate UTF-16BE bytes; flush whenever 3 full code units
// (6 bytes -> exactly 8 base64 chars) are available.
base64Accum[base64AccumIdx++] = uChar >> 8;
base64Accum[base64AccumIdx++] = uChar & 0xFF;
if (base64AccumIdx == base64Accum.length) {
bufIdx += buf.write(base64Accum.toString('base64').replace(/\//g, ','), bufIdx);
base64AccumIdx = 0;
}
}
}
}
// Persist section state for the next chunk; a dangling base64 section is
// closed by end().
this.inBase64 = inBase64;
this.base64AccumIdx = base64AccumIdx;
return buf.slice(0, bufIdx);
}
// Close any open base64 section and return the final bytes.
Utf7IMAPEncoder.prototype.end = function() {
var buf = Buffer.alloc(10), bufIdx = 0;
if (this.inBase64) {
if (this.base64AccumIdx > 0) {
bufIdx += buf.write(this.base64Accum.slice(0, this.base64AccumIdx).toString('base64').replace(/\//g, ',').replace(/=+$/, ''), bufIdx);
this.base64AccumIdx = 0;
}
buf[bufIdx++] = minusChar; // Write '-', then go to direct mode.
this.inBase64 = false;
}
return buf.slice(0, bufIdx);
}
// -- Decoding
// IMAP's base64 alphabet replaces '/' with ','.
var base64IMAPChars = base64Chars.slice();
base64IMAPChars[','.charCodeAt(0)] = true;
function Utf7IMAPDecoder(options, codec) {
    this.iconv = codec.iconv;
    this.inBase64 = false;  // Inside an "&...-" base64 section?
    this.base64Accum = '';  // Base64 tail carried between chunks.
}
// Decode one chunk of modified UTF-7 (IMAP). Like the standard decoder
// above, but sections start with '&' and ',' replaces '/' in base64.
Utf7IMAPDecoder.prototype.write = function(buf) {
var res = "", lastI = 0,
inBase64 = this.inBase64,
base64Accum = this.base64Accum;
// The decoder is more involved as we must handle chunks in stream.
// It is forgiving, closer to standard UTF-7 (for example, '-' is optional at the end).
for (var i = 0; i < buf.length; i++) {
if (!inBase64) { // We're in direct mode.
// Write direct chars until '&'
if (buf[i] == andChar) {
res += this.iconv.decode(buf.slice(lastI, i), "ascii"); // Write direct chars.
lastI = i+1;
inBase64 = true;
}
} else { // We decode base64.
if (!base64IMAPChars[buf[i]]) { // Base64 ended.
if (i == lastI && buf[i] == minusChar) { // "&-" -> "&"
res += "&";
} else {
// Map IMAP's ',' back to '/' before base64-decoding as UTF-16BE.
var b64str = base64Accum + this.iconv.decode(buf.slice(lastI, i), "ascii").replace(/,/g, '/');
res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be");
}
if (buf[i] != minusChar) // Minus may be absorbed after base64.
i--; // Re-process this char in direct mode.
lastI = i+1;
inBase64 = false;
base64Accum = '';
}
}
}
if (!inBase64) {
res += this.iconv.decode(buf.slice(lastI), "ascii"); // Write direct chars.
} else {
// Chunk ended mid-base64: decode only whole 8-char groups and carry the
// remainder over to the next write().
var b64str = base64Accum + this.iconv.decode(buf.slice(lastI), "ascii").replace(/,/g, '/');
var canBeDecoded = b64str.length - (b64str.length % 8); // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars.
base64Accum = b64str.slice(canBeDecoded); // The rest will be decoded in future.
b64str = b64str.slice(0, canBeDecoded);
res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be");
}
this.inBase64 = inBase64;
this.base64Accum = base64Accum;
return res;
}
// Flush any remaining base64 tail as UTF-16BE text and reset the state.
Utf7IMAPDecoder.prototype.end = function () {
    var tail = "";
    if (this.inBase64 && this.base64Accum.length > 0)
        tail = this.iconv.decode(Buffer.from(this.base64Accum, 'base64'), "utf16-be");
    this.inBase64 = false;
    this.base64Accum = '';
    return tail;
};
/***/ }),
/***/ 84938:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
// U+FEFF, the Unicode byte-order mark.
var BOMChar = '\uFEFF';
// Encoder wrapper that prepends a BOM to the first chunk written.
function PrependBOMWrapper(encoder, options) {
    this.encoder = encoder;
    this.addBOM = true; // Cleared once the BOM has been emitted.
}
PrependBOMWrapper.prototype.write = function (str) {
    if (this.addBOM) {
        this.addBOM = false;
        str = BOMChar + str;
    }
    return this.encoder.write(str);
};
PrependBOMWrapper.prototype.end = function () {
    return this.encoder.end();
};
exports.PrependBOM = PrependBOMWrapper;
//------------------------------------------------------------------------------
// Decoder wrapper that strips a leading U+FEFF from the first non-empty
// decoded chunk, then passes everything through untouched.
function StripBOMWrapper(decoder, options) {
    this.decoder = decoder;
    this.pass = false; // Once true, chunks flow through as-is.
    this.options = options || {};
}
StripBOMWrapper.prototype.write = function (buf) {
    var res = this.decoder.write(buf);
    if (this.pass || !res)
        return res;
    if (res[0] === BOMChar) {
        res = res.slice(1);
        // Notify the caller that a BOM was found and removed.
        if (typeof this.options.stripBOM === 'function')
            this.options.stripBOM();
    }
    this.pass = true;
    return res;
};
StripBOMWrapper.prototype.end = function () {
    return this.decoder.end();
};
exports.StripBOM = StripBOMWrapper;
/***/ }),
/***/ 62649:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
var Buffer = __webpack_require__(2399).Buffer;
var bomHandling = __webpack_require__(84938),
iconv = module.exports;
// All codecs and aliases are kept here, keyed by encoding name/alias.
// They are lazy loaded in `iconv.getCodec` from `encodings/index.js`.
iconv.encodings = null;
// Characters emitted in case of error.
iconv.defaultCharUnicode = '�';   // U+FFFD replacement character.
iconv.defaultCharSingleByte = '?'; // Presumably used by single-byte codecs — see encoders.
// Public API.
// Encode a string into a Buffer using the given encoding.
iconv.encode = function encode(str, encoding, options) {
    str = "" + (str || ""); // Ensure string.
    var encoder = iconv.getEncoder(encoding, options);
    var body = encoder.write(str);
    var trail = encoder.end(); // Encoders may buffer bytes internally.
    if (trail && trail.length > 0)
        return Buffer.concat([body, trail]);
    return body;
}
// Decode a Buffer into a string using the given encoding.
iconv.decode = function decode(buf, encoding, options) {
    if (typeof buf === 'string') {
        // Decoding strings is deprecated; warn once per process.
        if (!iconv.skipDecodeWarning) {
            console.error('Iconv-lite warning: decode()-ing strings is deprecated. Refer to https://github.com/ashtuchkin/iconv-lite/wiki/Use-Buffers-when-decoding');
            iconv.skipDecodeWarning = true;
        }
        buf = Buffer.from("" + (buf || ""), "binary"); // Ensure buffer.
    }
    var decoder = iconv.getDecoder(encoding, options);
    var body = decoder.write(buf);
    var trail = decoder.end(); // Decoders may hold partial characters.
    return trail ? (body + trail) : body;
}
// True when `enc` resolves to a known encoding or alias.
iconv.encodingExists = function encodingExists(enc) {
    try {
        iconv.getCodec(enc);
    } catch (e) {
        return false;
    }
    return true;
}
// Legacy aliases to convert functions
iconv.toEncoding = iconv.encode;
iconv.fromEncoding = iconv.decode;
// Search for a codec in iconv.encodings. Cache codec data in iconv._codecDataCache.
iconv._codecDataCache = {};
iconv.getCodec = function getCodec(encoding) {
if (!iconv.encodings)
iconv.encodings = __webpack_require__(54078); // Lazy load all encoding definitions.
// Canonicalize encoding name: strip all non-alphanumeric chars and appended year.
var enc = iconv._canonicalizeEncoding(encoding);
// Traverse iconv.encodings to find actual codec. Options accumulate across
// alias hops; encodingName is pinned to the first alias that set it.
var codecOptions = {};
while (true) {
var codec = iconv._codecDataCache[enc];
if (codec)
return codec;
var codecDef = iconv.encodings[enc];
switch (typeof codecDef) {
case "string": // Direct alias to other encoding.
enc = codecDef;
break;
case "object": // Alias with options. Can be layered.
for (var key in codecDef)
codecOptions[key] = codecDef[key];
if (!codecOptions.encodingName)
codecOptions.encodingName = enc;
enc = codecDef.type;
break;
case "function": // Codec itself.
if (!codecOptions.encodingName)
codecOptions.encodingName = enc;
// The codec function must load all tables and return object with .encoder and .decoder methods.
// It'll be called only once (for each different options object).
codec = new codecDef(codecOptions, iconv);
iconv._codecDataCache[codecOptions.encodingName] = codec; // Save it to be reused later.
return codec;
default:
throw new Error("Encoding not recognized: '" + encoding + "' (searched as: '"+enc+"')");
}
}
}
// Canonicalize an encoding name: lowercase, then strip any appended
// ":<year>" suffix and all non-alphanumeric chars ("UTF-8" -> "utf8").
iconv._canonicalizeEncoding = function(encoding) {
    var name = ('' + encoding).toLowerCase();
    return name.replace(/:\d{4}$|[^0-9a-z]/g, "");
}
// Create an encoder for `encoding`; BOM-aware codecs get wrapped with a
// BOM-prepender when options.addBOM is set.
iconv.getEncoder = function getEncoder(encoding, options) {
    var codec = iconv.getCodec(encoding);
    var encoder = new codec.encoder(options, codec);
    if (codec.bomAware && options && options.addBOM)
        encoder = new bomHandling.PrependBOM(encoder, options);
    return encoder;
}
// Create a decoder for `encoding`; BOM-aware codecs strip a leading BOM
// unless options.stripBOM === false.
iconv.getDecoder = function getDecoder(encoding, options) {
    var codec = iconv.getCodec(encoding);
    var decoder = new codec.decoder(options, codec);
    if (codec.bomAware && !(options && options.stripBOM === false))
        decoder = new bomHandling.StripBOM(decoder, options);
    return decoder;
}
// Streaming API
// NOTE: Streaming API naturally depends on 'stream' module from Node.js. Unfortunately in browser environments this module can add
// up to 100Kb to the output bundle. To avoid unnecessary code bloat, we don't enable Streaming API in browser by default.
// If you would like to enable it explicitly, please add the following code to your app:
// > iconv.enableStreamingAPI(require('stream'));
iconv.enableStreamingAPI = function enableStreamingAPI(stream_module) {
    // Idempotent: a second call is a no-op.
    if (iconv.supportsStreams)
        return;
    // Dependency-inject stream module to create IconvLite stream classes.
    var streams = __webpack_require__(34638)(stream_module);
    // Not public API yet, but expose the stream classes.
    iconv.IconvLiteEncoderStream = streams.IconvLiteEncoderStream;
    iconv.IconvLiteDecoderStream = streams.IconvLiteDecoderStream;
    // Streaming API.
    iconv.encodeStream = function encodeStream(encoding, options) {
        return new iconv.IconvLiteEncoderStream(iconv.getEncoder(encoding, options), options);
    };
    iconv.decodeStream = function decodeStream(encoding, options) {
        return new iconv.IconvLiteDecoderStream(iconv.getDecoder(encoding, options), options);
    };
    iconv.supportsStreams = true;
}
// Enable Streaming API automatically if 'stream' module is available and non-empty (the majority of environments).
var stream_module;
try {
stream_module = __webpack_require__(92413);
} catch (e) {}
if (stream_module && stream_module.Transform) {
iconv.enableStreamingAPI(stream_module);
} else {
// In rare cases where 'stream' module is not available by default, throw a helpful exception.
iconv.encodeStream = iconv.decodeStream = function() {
throw new Error("iconv-lite Streaming API is not enabled. Use iconv.enableStreamingAPI(require('stream')); to enable it.");
};
}
// Dead branch eliminated by the bundler.
if (false) {}
/***/ }),
/***/ 34638:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
var Buffer = __webpack_require__(2399).Buffer;
// NOTE: Due to 'stream' module being pretty large (~100Kb, significant in browser environments),
// we opt to dependency-inject it instead of creating a hard dependency.
// Factory: given an injected stream module, returns the two Transform
// subclasses backing iconv's streaming API.
module.exports = function(stream_module) {
var Transform = stream_module.Transform;
// == Encoder stream =======================================================
// Transform stream: string chunks in, encoded Buffer chunks out.
function IconvLiteEncoderStream(conv, options) {
this.conv = conv; // Per-stream encoder instance.
options = options || {};
options.decodeStrings = false; // We accept only strings, so we don't need to decode them.
Transform.call(this, options);
}
IconvLiteEncoderStream.prototype = Object.create(Transform.prototype, {
constructor: { value: IconvLiteEncoderStream }
});
IconvLiteEncoderStream.prototype._transform = function(chunk, encoding, done) {
if (typeof chunk != 'string')
return done(new Error("Iconv encoding stream needs strings as its input."));
try {
var res = this.conv.write(chunk);
if (res && res.length) this.push(res);
done();
}
catch (e) {
done(e);
}
}
// Flush any bytes the encoder buffered internally.
IconvLiteEncoderStream.prototype._flush = function(done) {
try {
var res = this.conv.end();
if (res && res.length) this.push(res);
done();
}
catch (e) {
done(e);
}
}
// Convenience: gather the entire encoded output into a single Buffer.
IconvLiteEncoderStream.prototype.collect = function(cb) {
var chunks = [];
this.on('error', cb);
this.on('data', function(chunk) { chunks.push(chunk); });
this.on('end', function() {
cb(null, Buffer.concat(chunks));
});
return this;
}
// == Decoder stream =======================================================
// Transform stream: Buffer/Uint8Array chunks in, utf8 string chunks out.
function IconvLiteDecoderStream(conv, options) {
this.conv = conv; // Per-stream decoder instance.
options = options || {};
options.encoding = this.encoding = 'utf8'; // We output strings.
Transform.call(this, options);
}
IconvLiteDecoderStream.prototype = Object.create(Transform.prototype, {
constructor: { value: IconvLiteDecoderStream }
});
IconvLiteDecoderStream.prototype._transform = function(chunk, encoding, done) {
if (!Buffer.isBuffer(chunk) && !(chunk instanceof Uint8Array))
return done(new Error("Iconv decoding stream needs buffers as its input."));
try {
var res = this.conv.write(chunk);
if (res && res.length) this.push(res, this.encoding);
done();
}
catch (e) {
done(e);
}
}
// Flush any partial characters buffered by the decoder.
IconvLiteDecoderStream.prototype._flush = function(done) {
try {
var res = this.conv.end();
if (res && res.length) this.push(res, this.encoding);
done();
}
catch (e) {
done(e);
}
}
// Convenience: gather the entire decoded output into a single string.
IconvLiteDecoderStream.prototype.collect = function(cb) {
var res = '';
this.on('error', cb);
this.on('data', function(chunk) { res += chunk; });
this.on('end', function() {
cb(null, res);
});
return this;
}
return {
IconvLiteEncoderStream: IconvLiteEncoderStream,
IconvLiteDecoderStream: IconvLiteDecoderStream,
};
};
/***/ }),
/***/ 21893:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var createHash = __webpack_require__(76417).createHash;
// Build an Authorization header value for the given WWW-Authenticate
// challenge and [user, pass] credentials. Supports 'Basic' and 'Digest';
// returns undefined for any other scheme.
function get_header(header, credentials, opts) {
    var scheme = header.split(' ')[0];
    if (scheme == 'Digest')
        return digest.generate(header, credentials[0], credentials[1], opts.method, opts.path);
    if (scheme == 'Basic')
        return basic(credentials[0], credentials[1]);
}
////////////////////
// basic
// Hex-encoded MD5 of the given string (used by the digest algorithm).
function md5(string) {
    var hash = createHash('md5');
    hash.update(string);
    return hash.digest('hex');
}
// RFC 7617 Basic auth value: base64 of "user:pass". When pass is omitted,
// only the user string is encoded (single-token credentials).
function basic(user, pass) {
    var credentials = typeof pass == 'undefined' ? user : user + ':' + pass;
    return 'Basic ' + Buffer.from(credentials).toString('base64');
}
////////////////////
// digest
// logic inspired from https://github.com/simme/node-http-digest-client
var digest = {};
// Parse a WWW-Authenticate digest challenge ('realm="x", nonce="y", ...')
// into a plain {key: value} object; surrounding quotes are stripped.
// A header with no recognizable pairs yields an empty object (the original
// crashed on the null match result).
digest.parse_header = function(header) {
    var challenge = {};
    var matches = header.match(/([a-z0-9_-]+)="?([a-z0-9_=\/\.@\s-\+)()]+)"?/gi) || [];
    matches.forEach(function(match) {
        var parts = match.split('=');
        var key = parts.shift();
        // Re-join in case the value itself contained '=', then unquote.
        var val = parts.join('=').replace(/^"/, '').replace(/"$/, '');
        challenge[key] = val;
    });
    return challenge;
}
// Increment the nonce count and return it as a zero-padded 8-digit string
// (the "nc" digest parameter), wrapping back to 1 after 99999999.
digest.update_nc = function(nc) {
    var max = 99999999;
    nc++;
    if (nc > max)
        nc = 1;
    return String(nc).padStart(8, '0');
}
// Build an Authorization header value answering an RFC 2617 digest challenge.
digest.generate = function(header, user, pass, method, path) {
var nc = 1,
cnonce = null,
challenge = digest.parse_header(header);
// HA1 = md5(user:realm:pass), HA2 = md5(METHOD:path).
var ha1 = md5(user + ':' + challenge.realm + ':' + pass),
ha2 = md5(method.toUpperCase() + ':' + path),
resp = [ha1, challenge.nonce];
if (typeof challenge.qop === 'string') {
// With qop, the response hash also covers nc, cnonce and qop.
// NOTE(review): cnonce is derived from Math.random(), which is not
// cryptographically secure.
cnonce = md5(Math.random().toString(36)).substr(0, 8);
nc = digest.update_nc(nc);
resp = resp.concat(nc, cnonce);
resp = resp.concat(challenge.qop, ha2);
} else {
resp = resp.concat(ha2);
}
var params = {
uri : path,
realm : challenge.realm,
nonce : challenge.nonce,
username : user,
response : md5(resp.join(':'))
}
// Echo optional challenge parameters back to the server.
if (challenge.qop) {
params.qop = challenge.qop;
}
if (challenge.opaque) {
params.opaque = challenge.opaque;
}
if (cnonce) {
params.nc = nc;
params.cnonce = cnonce;
}
// Serialize as: Digest k1="v1", k2="v2", ...
header = []
for (var k in params)
header.push(k + '="' + params[k] + '"')
return 'Digest ' + header.join(', ');
}
// Public API: header() dispatches on the challenge scheme ('Basic'/'Digest');
// basic() and digest() expose the individual generators.
module.exports = {
header : get_header,
basic : basic,
digest : digest.generate
}
/***/ }),
/***/ 27208:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
// Simple cookie handling implementation based on the standard RFC 6265.
//
// This module just has two functionalities:
// - Parse a set-cookie-header as a key value object
// - Write a cookie-string from a key value object
//
// All cookie attributes are ignored.
var unescape = __webpack_require__(71191).unescape;
// name=value pair; group 2 captures an optional opening quote and the \2
// backreference requires the matching closing quote.
var COOKIE_PAIR = /^([^=\s]+)\s*=\s*("?)\s*(.*)\s*\2\s*$/;
// Characters percent-encoded in names/values: controls, DEL, ';' (\x3B),
// whitespace, '"', ',', '\' and '%'.
var EXCLUDED_CHARS = /[\x00-\x1F\x7F\x3B\x3B\s\"\,\\"%]/g;
var TRAILING_SEMICOLON = /\x3B+$/;
var SEP_SEMICOLON = /\s*\x3B\s*/;
// i know these should be 'const', but I'd like to keep
// supporting earlier node.js versions as long as I can. :)
var KEY_INDEX = 1; // index of key from COOKIE_PAIR match
var VALUE_INDEX = 3; // index of value from COOKIE_PAIR match
// Returns a copy of str, trimmed and without trailing semicolons.
function cleanCookieString(str) {
    return str.trim().replace(/\x3B+$/, '');
}
// Returns the first "name=value" pair (everything before the first ';'),
// or the whole string when it contains no ';'.
function getFirstPair(str) {
    var semicolonAt = str.indexOf('\x3B');
    if (semicolonAt === -1)
        return str;
    return str.substr(0, semicolonAt);
}
// Returns an encoded copy of str per RFC 6265 S4.1.1: characters outside
// the allowed cookie-octet set are percent-encoded.
function encodeCookieComponent(str) {
    var s = str.toString();
    return s.replace(EXCLUDED_CHARS, encodeURIComponent);
}
// Parses a set-cookie string per RFC 6265 S4.1.1 into { name, value },
// or null when no valid name/value pair can be extracted.
function parseSetCookieString(str) {
    var pair = getFirstPair(cleanCookieString(str));
    var match = COOKIE_PAIR.exec(pair);
    if (!match || !match[VALUE_INDEX])
        return null;
    return {
        name: unescape(match[KEY_INDEX]),
        value: unescape(match[VALUE_INDEX])
    };
}
// Parses a set-cookie header (a string or an array of strings) into a
// { name: value } object, one entry per successfully parsed cookie.
function parseSetCookieHeader(header) {
    if (!header) return {};
    var list = Array.isArray(header) ? header : [header];
    var res = {};
    list.forEach(function(str) {
        var cookie = parseSetCookieString(str);
        if (cookie)
            res[cookie.name] = cookie.value;
    });
    return res;
}
// Serializes a { name: value } object into a cookie header string
// ("a=1; b=2"), percent-encoding unsafe characters per RFC 6265 S4.1.1.
function writeCookieString(obj) {
    var parts = [];
    Object.keys(obj).forEach(function(name) {
        parts.push(encodeCookieComponent(name) + '=' + encodeCookieComponent(obj[name]));
    });
    return parts.join('; ');
}
// returns a key/val object from an array of cookie strings
exports.read = parseSetCookieHeader;
// writes a cookie string header
exports.write = writeCookieString;
/***/ }),
/***/ 30365:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var iconv,
inherits = __webpack_require__(31669).inherits,
stream = __webpack_require__(92413);
// Matches charset/encoding declarations inside a body, e.g.
// <meta charset="..."> or <?xml encoding="..."?>.
var regex = /(?:charset|encoding)\s*=\s*['"]? *([\w\-]+)/i;
// Must run before prototype methods are attached below: inherits() replaces
// StreamDecoder.prototype.
inherits(StreamDecoder, stream.Transform);
// Transform stream converting a response body via iconv-lite. When the
// given charset is utf-8, the first chunk is still sniffed for an embedded
// charset declaration (see _transform).
function StreamDecoder(charset) {
if (!(this instanceof StreamDecoder))
return new StreamDecoder(charset);
stream.Transform.call(this, charset);
this.charset = charset;
this.parsed_chunk = false; // Has the first chunk been sniffed yet?
}
// Sniff the first chunk for an embedded charset declaration (only when the
// header-provided charset is utf-8), then either pass data through (utf-8)
// or pipe it through an iconv-lite decoding stream.
StreamDecoder.prototype._transform = function(chunk, encoding, done) {
// try to get charset from chunk, but just once
if (!this.parsed_chunk && (this.charset == 'utf-8' || this.charset == 'utf8')) {
this.parsed_chunk = true;
var matches = regex.exec(chunk.toString());
if (matches) {
var found = matches[1].toLowerCase().replace('utf8', 'utf-8'); // canonicalize;
// set charset, but only if iconv can handle it
if (iconv.encodingExists(found)) this.charset = found;
}
}
if (this.charset == 'utf-8') { // no need to decode, just pass through
this.push(chunk);
return done();
}
// initialize stream decoder if not present
var self = this;
if (!this.decoder) {
this.decoder = iconv.decodeStream(this.charset);
// Forward decoded output as this stream's readable data.
this.decoder.on('data', function(decoded_chunk) {
self.push(decoded_chunk);
});
};
this.decoder.write(chunk);
done();
}
module.exports = function(charset) {
try {
if (!iconv) iconv = __webpack_require__(62649);
} catch(e) {
/* iconv not found */
}
if (iconv)
return new StreamDecoder(charset);
else
return new stream.PassThrough;
}
/***/ }),
/***/ 13396:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
var readFile = __webpack_require__(35747).readFile,
basename = __webpack_require__(85622).basename;
// Builds a multipart/form-data body string from a key/value object and
// invokes callback(err, body). Nested objects are flattened into
// "parent[child]" field names; the body ends with "--boundary--".
// NOTE(review): file parts are read asynchronously, so sections are appended
// in completion order, which may differ from key order.
exports.build = function(data, boundary, callback) {
if (typeof data != 'object' || typeof data.pipe == 'function')
return callback(new Error('Multipart builder expects data as key/val object.'));
var body = '',
object = flatten(data),
count = Object.keys(object).length;
if (count === 0)
return callback(new Error('Empty multipart body. Invalid data.'))
// Countdown collector: fires the final callback once every key (including
// skipped null/undefined ones) has reported in, or on the first error.
function done(err, section) {
if (err) return callback(err);
if (section) body += section;
--count || callback(null, body + '--' + boundary + '--');
};
for (var key in object) {
var value = object[key];
if (value === null || typeof value == 'undefined') {
done();
} else if (Buffer.isBuffer(value)) {
// Raw buffers are sent as binary octet-streams.
var part = { buffer: value, content_type: 'application/octet-stream' };
generate_part(key, part, boundary, done);
} else {
// Part descriptors ({ buffer/file/content_type }) pass through as-is;
// anything else is wrapped as a plain value field.
var part = (value.buffer || value.file || value.content_type) ? value : { value: value };
generate_part(key, part, boundary, done);
}
}
}
// Renders one multipart section for `part` under field `name` and hands it
// to callback(err, section). Three part shapes are handled: { buffer }
// (in-memory data), { file } (read from disk), and { value } (plain field).
function generate_part(name, part, boundary, callback) {
var return_part = '--' + boundary + '\r\n';
return_part += 'Content-Disposition: form-data; name="' + name + '"';
// Appends file/buffer contents (with filename and content-type headers)
// and finishes the section.
function append(data, filename) {
if (data) {
var binary = part.content_type.indexOf('text') == -1;
return_part += '; filename="' + encodeURIComponent(filename) + '"\r\n';
if (binary) return_part += 'Content-Transfer-Encoding: binary\r\n';
return_part += 'Content-Type: ' + part.content_type + '\r\n\r\n';
return_part += binary ? data.toString('binary') : data.toString('utf8');
}
callback(null, return_part + '\r\n');
};
if ((part.file || part.buffer) && part.content_type) {
// Prefer an explicit filename, else the file's basename, else the field name.
var filename = part.filename ? part.filename : part.file ? basename(part.file) : name;
if (part.buffer) return append(part.buffer, filename);
readFile(part.file, function(err, data) {
if (err) return callback(err);
append(data, filename);
});
} else {
if (typeof part.value == 'object')
return callback(new Error('Object received for ' + name + ', expected string.'))
if (part.content_type) {
return_part += '\r\n';
return_part += 'Content-Type: ' + part.content_type;
}
return_part += '\r\n\r\n';
// NOTE(review): utf8 -> 'binary' round-trip presumably matches how the
// final body is written out — confirm in the caller.
return_part += Buffer.from(String(part.value), 'utf8').toString('binary');
append();
}
}
// flattens nested objects for the multipart body, e.g. {a: {b: 1}} becomes
// {'a[b]': 1}. Objects that look like part descriptors (with .buffer, .file
// or .content_type) are treated as leaves and kept intact.
function flatten(object, into, prefix) {
    into = into || {};
    for (var key in object) {
        var flatKey = prefix ? prefix + '[' + key + ']' : key;
        var prop = object[key];
        var isPart = prop && (prop.buffer || prop.file || prop.content_type);
        if (prop && typeof prop === 'object' && !isPart)
            flatten(prop, into, flatKey);
        else
            into[flatKey] = prop;
    }
    return into;
}
/***/ }),
/***/ 57441:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
//////////////////////////////////////////
// Needle -- HTTP Client for Node.js
// Written by Tomás Pollak
// (c) 2012-2020 - Fork Ltd.
// MIT Licensed
//////////////////////////////////////////
var fs = __webpack_require__(35747),
http = __webpack_require__(98605),
https = __webpack_require__(57211),
url = __webpack_require__(78835),
stream = __webpack_require__(92413),
debug = __webpack_require__(4993)('needle'),
stringify = __webpack_require__(10558)/* .build */ .J,
multipart = __webpack_require__(13396),
auth = __webpack_require__(21893),
cookies = __webpack_require__(27208),
parsers = __webpack_require__(55256),
decoder = __webpack_require__(30365);
//////////////////////////////////////////
// variabilia
var version = __webpack_require__(71551)/* .version */ .i8;
// Default User-Agent, e.g. "Needle/2.x (Node.js v16.x; linux x64)".
var user_agent = 'Needle/' + version;
user_agent += ' (Node.js ' + process.version + '; ' + process.platform + ' ' + process.arch + ')';
// TLS-related option names (presumably copied into https request options —
// confirm against usage further down).
var tls_options = 'pfx key passphrase cert ca ciphers rejectUnauthorized secureProtocol checkServerIdentity family';
// older versions of node (< 0.11.4) prevent the runtime from exiting
// because of connections in keep-alive state. so if this is the case
// we'll default new requests to set a Connection: close header.
var close_by_default = !http.Agent || http.Agent.defaultMaxSockets != Infinity;
// see if we have Object.assign. otherwise fall back to util._extend
var extend = Object.assign ? Object.assign : __webpack_require__(31669)._extend;
// these are the status codes that Needle interprets as redirects.
var redirect_codes = [301, 302, 303, 307, 308];
//////////////////////////////////////////
// decompressors for gzip/deflate/br bodies
// Partially applies `options` as fn's first argument. Uses Function#bind,
// so the result is still constructible with `new` (a plain wrapper closure
// would not be).
function bind_opts(fn, options) {
return fn.bind(null, options);
}
// Content-Encoding value -> zlib stream factory preconfigured with the
// flush options below.
var decompressors = {};
try {
var zlib = __webpack_require__(78761);
// Enable Z_SYNC_FLUSH to avoid Z_BUF_ERROR errors (Node PR #2595)
var zlib_options = {
flush: zlib.Z_SYNC_FLUSH,
finishFlush: zlib.Z_SYNC_FLUSH
};
// Same idea for brotli streams.
var br_options = {
flush: zlib.BROTLI_OPERATION_FLUSH,
finishFlush: zlib.BROTLI_OPERATION_FLUSH
};
decompressors['x-deflate'] = bind_opts(zlib.Inflate, zlib_options);
decompressors['deflate'] = bind_opts(zlib.Inflate, zlib_options);
decompressors['x-gzip'] = bind_opts(zlib.Gunzip, zlib_options);
decompressors['gzip'] = bind_opts(zlib.Gunzip, zlib_options);
// Brotli support depends on the Node version.
if (typeof zlib.BrotliDecompress === 'function') {
decompressors['br'] = bind_opts(zlib.BrotliDecompress, br_options);
}
} catch(e) { /* zlib not available */ }
//////////////////////////////////////////
// options and aliases
// Built-in defaults for every request; user-supplied options are layered on
// top of these (short aliases are defined in `aliased` below).
var defaults = {
// data
boundary : '--------------------NODENEEDLEHTTPCLIENT',
encoding : 'utf8',
parse_response : 'all', // same as true. valid options: 'json', 'xml' or false/null
proxy : null,
// agent & headers
agent : null,
headers : {},
accept : '*/*',
user_agent : user_agent,
// numbers
open_timeout : 10000,
response_timeout : 0,
read_timeout : 0,
follow_max : 0,
stream_length : -1,
// booleans
compressed : false,
decode_response : true,
parse_cookies : true,
follow_set_cookies : false,
follow_set_referer : false,
follow_keep_method : false,
follow_if_same_host : false,
follow_if_same_protocol : false,
follow_if_same_location : false
}
// Short option aliases accepted in the options object, mapped to their
// canonical names (e.g. { timeout: 1000 } -> open_timeout).
var aliased = {
    options: {
        decode : 'decode_response',
        parse : 'parse_response',
        timeout : 'open_timeout',
        follow : 'follow_max'
    },
    inverted: {}
}
// only once, invert aliased keys so we can get passed options.
// (forEach, not map: we only want the side effect, not a result array.)
Object.keys(aliased.options).forEach(function(k) {
    aliased.inverted[aliased.options[k]] = k;
});
//////////////////////////////////////////
// helpers
// Returns the first non-empty environment variable among `keys`, optionally
// also trying each key in lowercase (e.g. HTTP_PROXY then http_proxy).
function get_env_var(keys, try_lower) {
    var env = process.env;
    var val;
    for (var i = 0; i < keys.length && !val; i++) {
        val = env[keys[i]];
        if (!val && try_lower)
            val = env[keys[i].toLowerCase()];
    }
    return val;
}
// Lists the names of all `defaults` entries whose value has the given
// constructor (e.g. Boolean, Number). Entries whose value is null are skipped.
function keys_by_type(type) {
  var result = [];
  Object.keys(defaults).forEach(function(name) {
    var value = defaults[name];
    if (value !== null && value.constructor == type)
      result.push(name);
  });
  return result;
}
// Splits a Content-Type header into { type, charset }. The charset defaults
// to 'utf8' and is only read from the first parameter after the media type.
// Returns an empty object for a missing/empty header.
function parse_content_type(header) {
  if (!header) return {};
  var parts = header.split(';');
  var charset = 'utf8';
  if (parts.length > 1) {
    var match = parts[1].match(/charset=(.+)/);
    if (match) charset = match[1];
  }
  return { type: parts[0], charset: charset };
}
// Duck-typed stream check: anything exposing a pipe() function counts.
function is_stream(obj) {
  var candidate = obj.pipe;
  return typeof candidate === 'function';
}
// Determines the byte length of a request-body stream and passes it to cb.
// Preference order: an explicitly given length, then the start/end byte range
// of a fs.createReadStream, then a stat() of the stream's backing file
// (cb receives null when the stat fails).
function get_stream_length(stream, given_length, cb) {
  // Caller already knows the length.
  if (given_length > 0)
    return cb(given_length);

  // fs read stream opened with an explicit, finite byte range.
  var has_range = stream.end !== undefined && stream.end !== Infinity && stream.start !== undefined;
  if (has_range)
    return cb((stream.end + 1) - (stream.start || 0));

  // Last resort: stat the file behind the stream.
  fs.stat(stream.path, function(err, stat) {
    cb(stat ? stat.size - (stream.start || 0) : null);
  });
}
// Resolves `href` against `base`, preferring the WHATWG URL class when the
// runtime provides it (returns a URL object in that case).
function resolve_url(href, base) {
  if (url.URL) return new url.URL(href, base);
  // older Node version (< v6.13): legacy resolver, or pass-through when no base
  if (base) return url.resolve(base, href);
  return href;
}
// True when both URLs point at the same host AND the same effective port.
// Inputs without a scheme get 'http://' prepended so url.parse can see the
// host; a missing port falls back to the protocol's standard port (80/443).
function host_and_ports_match(url1, url2) {
  var first = url1.indexOf('http') < 0 ? 'http://' + url1 : url1;
  var second = url2.indexOf('http') < 0 ? 'http://' + url2 : url2;
  var a = url.parse(first);
  var b = url.parse(second);

  function effective_port(parsed) {
    return String(parsed.port || (parsed.protocol == 'https:' ? 443 : 80));
  }

  return a.host == b.host && effective_port(a) == effective_port(b);
}
// Decides whether a request to `url` should go through the configured proxy.
// Returns false when any comma-separated entry of the NO_PROXY env var
// matches the URL's host and port; true otherwise (including when NO_PROXY
// is unset).
function should_proxy_to(url) {
  var no_proxy = get_env_var(['NO_PROXY'], true);
  if (!no_proxy) return true;

  var matched = no_proxy.split(',').some(function(entry) {
    return host_and_ports_match(entry, url);
  });
  return !matched;
}
// Connects a chain of streams and invokes cb once, either on completion or
// on the first error. Uses stream.pipeline when available (Node 10+); older
// runtimes fall back to manual .pipe() chaining with a one-shot error handler.
function pump_streams(streams, cb) {
  if (stream.pipeline)
    return stream.pipeline.apply(null, streams.concat(cb));

  var chained = streams.shift();
  streams.forEach(function(next) {
    chained = chained.pipe(next);
    chained.once('error', function(err) {
      if (cb) cb(err);
      cb = null; // make sure cb only ever fires once
    });
  });
}
//////////////////////////////////////////
// the main act
// Per-request state holder. The `options` argument may be omitted by passing
// the callback in its place (classic optional-argument shuffling).
// Throws a TypeError when `uri` is not a string.
function Needle(method, uri, data, options, callback) {
  if (typeof uri !== 'string')
    throw new TypeError('URL must be a string, not ' + uri);

  this.method = method.toLowerCase();
  this.uri = uri;
  this.data = data;

  var options_is_callback = typeof options == 'function';
  this.callback = options_is_callback ? options : callback;
  this.options = options_is_callback ? {} : options;
}
// Builds the per-request config object by merging the passed options with the
// library defaults: http.request() options, headers, cookies, auth and proxy.
// Note: mutates `options` in a few places (username/password parsed out of
// the URI, proxy_user/proxy_pass parsed out of the proxy URL).
Needle.prototype.setup = function(uri, options) {
// Looks up `key` in options, falling back to its short alias, then `fallback`.
function get_option(key, fallback) {
// if original is in options, return that value
if (typeof options[key] != 'undefined') return options[key];
// otherwise, return value from alias or fallback/undefined
return typeof options[aliased.inverted[key]] != 'undefined'
? options[aliased.inverted[key]] : fallback;
}
// Type-checks an option value; undefined falls back to the default,
// any other type mismatch throws.
function check_value(expected, key) {
var value = get_option(key),
type = typeof value;
if (type != 'undefined' && type != expected)
throw new TypeError(type + ' received for ' + key + ', but expected a ' + expected);
return (type == expected) ? value : defaults[key];
}
//////////////////////////////////////////////////
// the basics
var config = {
http_opts : {
agent: get_option('agent', defaults.agent),
localAddress: get_option('localAddress', undefined),
lookup: get_option('lookup', undefined)
}, // passed later to http.request() directly
headers : {},
output : options.output,
proxy : get_option('proxy', defaults.proxy),
parser : get_option('parse_response', defaults.parse_response),
encoding : options.encoding || (options.multipart ? 'binary' : defaults.encoding)
}
// copy all boolean/numeric defaults into config, type-checking overrides.
keys_by_type(Boolean).forEach(function(key) {
config[key] = check_value('boolean', key);
})
keys_by_type(Number).forEach(function(key) {
config[key] = check_value('number', key);
})
// populate http_opts with given TLS options
tls_options.split(' ').forEach(function(key) {
if (typeof options[key] != 'undefined') {
if (config.http_opts.agent) { // pass option to existing agent
config.http_opts.agent.options[key] = options[key];
} else {
config.http_opts[key] = options[key];
}
}
});
//////////////////////////////////////////////////
// headers, cookies
for (var key in defaults.headers)
config.headers[key] = defaults.headers[key];
config.headers['accept'] = options.accept || defaults.accept;
config.headers['user-agent'] = options.user_agent || defaults.user_agent;
if (options.content_type)
config.headers['content-type'] = options.content_type;
// set connection header if opts.connection was passed, or if node < 0.11.4 (close)
if (options.connection || close_by_default)
config.headers['connection'] = options.connection || 'close';
// advertise br only when a brotli decompressor is actually available.
if ((options.compressed || defaults.compressed) && typeof zlib != 'undefined')
config.headers['accept-encoding'] = decompressors['br'] ? 'gzip, deflate, br' : 'gzip, deflate';
if (options.cookies)
config.headers['cookie'] = cookies.write(options.cookies);
//////////////////////////////////////////////////
// basic/digest auth
if (uri.match(/[^\/]@/)) { // url contains user:pass@host, so parse it.
var parts = (url.parse(uri).auth || '').split(':');
options.username = parts[0];
options.password = parts[1];
}
if (options.username) {
if (options.auth && (options.auth == 'auto' || options.auth == 'digest')) {
// digest/auto auth is resolved later, after the 401 challenge arrives.
config.credentials = [options.username, options.password];
} else {
config.headers['authorization'] = auth.basic(options.username, options.password);
}
}
// fall back to the HTTP(S)_PROXY environment variables when no proxy option was given.
var env_proxy = get_env_var(['HTTP_PROXY', 'HTTPS_PROXY'], true);
if (!config.proxy && env_proxy) config.proxy = env_proxy;
// if proxy is present, set auth header from either url or proxy_user option.
if (config.proxy) {
if (should_proxy_to(uri)) {
if (config.proxy.indexOf('http') === -1)
config.proxy = 'http://' + config.proxy;
if (config.proxy.indexOf('@') !== -1) {
var proxy = (url.parse(config.proxy).auth || '').split(':');
options.proxy_user = proxy[0];
options.proxy_pass = proxy[1];
}
if (options.proxy_user)
config.headers['proxy-authorization'] = auth.basic(options.proxy_user, options.proxy_pass);
} else {
// NO_PROXY matched this host: drop the proxy for this request.
delete config.proxy;
}
}
// now that all our headers are set, overwrite them if instructed.
for (var h in options.headers)
config.headers[h.toLowerCase()] = options.headers[h];
config.uri_modifier = get_option('uri_modifier', null);
return config;
}
// Kicks off the request: normalizes the URI, prepares the request body
// (multipart, stream, buffer, querystring or JSON) and returns a PassThrough
// stream that will emit the response. `next()` runs once any asynchronous
// body preparation has finished.
Needle.prototype.start = function() {
var out = new stream.PassThrough({ objectMode: false }),
uri = this.uri,
data = this.data,
method = this.method,
callback = (typeof this.options == 'function') ? this.options : this.callback,
options = this.options || {};
// if no 'http' is found on URL, prepend it.
if (uri.indexOf('http') === -1)
uri = uri.replace(/^(\/\/)?/, 'http://');
var self = this, body, waiting = false, config = this.setup(uri, options);
// unless options.json was set to false, assume boss also wants JSON if content-type matches.
var json = options.json || (options.json !== false && config.headers['content-type'] == 'application/json');
if (data) {
if (options.multipart) { // boss says we do multipart. so we do it.
var boundary = options.boundary || defaults.boundary;
// multipart body is built asynchronously; defer sending until done.
waiting = true;
multipart.build(data, boundary, function(err, parts) {
if (err) throw(err);
config.headers['content-type'] = 'multipart/form-data; boundary=' + boundary;
next(parts);
});
} else if (is_stream(data)) {
if (method == 'get')
throw new Error('Refusing to pipe() a stream via GET. Did you mean .post?');
if (config.stream_length > 0 || (config.stream_length === 0 && data.path)) {
// ok, let's get the stream's length and set it as the content-length header.
// this prevents some servers from cutting us off before all the data is sent.
waiting = true;
get_stream_length(data, config.stream_length, function(length) {
data.length = length;
next(data);
})
} else {
// if the boss doesn't want us to get the stream's length, or if it doesn't
// have a file descriptor for that purpose, then just head on.
body = data;
}
} else if (Buffer.isBuffer(data)) {
body = data; // use the raw buffer as request body.
} else if (method == 'get' && !json) {
// append the data to the URI as a querystring.
uri = uri.replace(/\?.*|$/, '?' + stringify(data));
} else { // string or object data, no multipart.
// if string, leave it as it is, otherwise, stringify.
body = (typeof(data) === 'string') ? data
: json ? JSON.stringify(data) : stringify(data);
// ensure we have a buffer so bytecount is correct.
body = Buffer.from(body, config.encoding);
}
}
// Finalizes headers (content-length, content-type, accept) and fires the
// actual request with the prepared body.
function next(body) {
if (body) {
if (body.length) config.headers['content-length'] = body.length;
// if no content-type was passed, determine if json or not.
if (!config.headers['content-type']) {
config.headers['content-type'] = json
? 'application/json; charset=utf-8'
: 'application/x-www-form-urlencoded'; // no charset says W3 spec.
}
}
// unless a specific accept header was set, assume json: true wants JSON back.
if (options.json && (!options.accept && !(options.headers || {}).accept))
config.headers['accept'] = 'application/json';
self.send_request(1, method, uri, config, body, out, callback);
}
if (!waiting) next(body);
return out;
}
// Builds the options object handed to http(s).request() for this request.
// When a proxy is configured, the connection is made to the proxy host and
// the request path carries the absolute target URI.
Needle.prototype.get_request_opts = function(method, uri, config) {
  var opts = config.http_opts,
      proxy = config.proxy,
      remote = proxy ? url.parse(proxy) : url.parse(uri);

  opts.protocol = remote.protocol;
  opts.host = remote.hostname;
  opts.port = remote.port || (remote.protocol == 'https:' ? 443 : 80);
  opts.path = proxy ? uri : remote.pathname + (remote.search || '');
  opts.method = method;
  opts.headers = config.headers;

  if (!opts.headers['host']) {
    // if using proxy, make sure the host header shows the final destination
    var target = proxy ? url.parse(uri) : remote;
    opts.headers['host'] = target.hostname;

    // and if a non-standard port was passed, append it to the host header.
    // url.parse() returns the port as a string, so it must be cast to a
    // number before comparing against 80/443 — otherwise standard ports
    // would always be (incorrectly) appended.
    if (target.port && [80, 443].indexOf(Number(target.port)) === -1) {
      opts.headers['host'] += ':' + target.port;
    }
  }

  return opts;
}
// Decides whether a redirect to `location` should be followed, according to
// the follow_if_same_* flags in `config`. `original` is the URI of the
// request that produced the redirect.
Needle.prototype.should_follow = function(location, config, original) {
if (!location) return false;
// returns true if location contains matching property (host or protocol)
// NOTE(review): `original` is a URI *string* here (see send_request), so
// `original[property]` yields undefined and indexOf() ends up searching for
// the literal text "undefined" — the follow_if_same_host/protocol checks
// look broken; confirm against upstream needle before relying on them.
function matches(property) {
var property = original[property];
return location.indexOf(property) !== -1;
}
// first, check whether the requested location is actually different from the original
if (!config.follow_if_same_location && location === original)
return false;
if (config.follow_if_same_host && !matches('host'))
return false; // host does not match, so not following
if (config.follow_if_same_protocol && !matches('protocol'))
return false; // protocol does not match, so not following
return true;
}
// Performs a single HTTP request attempt (`count` tracks redirect/auth
// retries). Wires up open/response/read timeouts, redirect following, 401
// digest retry, decompression, response parsing, optional file output and
// body aggregation, then writes `post_data` (buffer or stream) and resolves
// via `callback` or by emitting 'done' on the `out` stream.
Needle.prototype.send_request = function(count, method, uri, config, post_data, out, callback) {
if (typeof config.uri_modifier === 'function') {
var modified_uri = config.uri_modifier(uri);
debug('Modifying request URI', uri + ' => ' + modified_uri);
uri = modified_uri;
}
var request,
timer,
returned = 0,
self = this,
request_opts = this.get_request_opts(method, uri, config),
protocol = request_opts.protocol == 'https:' ? https : http;
// Terminates the request exactly once (guarded by `returned`): clears the
// timer, fires the callback or the 'done' event, and propagates 'done' to
// any streams we are piped into.
function done(err, resp) {
if (returned++ > 0)
return debug('Already finished, stopping here.');
if (timer) clearTimeout(timer);
request.removeListener('error', had_error);
out.done = true;
// An error can still be fired after closing. In particular, on macOS.
// See also:
// - https://github.com/tomas/needle/issues/391
// - https://github.com/less/less.js/issues/3693
// - https://github.com/nodejs/node/issues/27916
request.once('error', function() {});
if (callback)
return callback(err, resp, resp ? resp.body : undefined);
// NOTE: this event used to be called 'end', but the behaviour was confusing
// when errors occurred, because the stream would still emit an 'end' event.
out.emit('done', err);
// trigger the 'done' event on streams we're being piped to, if any
var pipes = out._readableState.pipes || [];
if (!pipes.forEach) pipes = [pipes];
pipes.forEach(function(st) { st.emit('done', err); })
}
// Funnels any transport-level error into done().
function had_error(err) {
debug('Request error', err);
out.emit('err', err);
done(err || new Error('Unknown error when making request.'));
}
// (Re)arms the single shared timer; a 'read' timeout also terminates the job.
function set_timeout(type, milisecs) {
if (timer) clearTimeout(timer);
if (milisecs <= 0) return;
timer = setTimeout(function() {
out.emit('timeout', type);
request.abort();
// also invoke done() to terminate job on read_timeout
if (type == 'read') done(new Error(type + ' timeout'));
}, milisecs);
}
// handle errors on the underlying socket, that may be closed while writing
// for an example case, see test/long_string_spec.js. we make sure this
// scenario occurred by verifying the socket's writable & destroyed states.
function on_socket_end() {
if (returned && !this.writable && this.destroyed === false) {
this.destroy();
had_error(new Error('Remote end closed socket abruptly.'))
}
}
debug('Making request #' + count, request_opts);
request = protocol.request(request_opts, function(resp) {
var headers = resp.headers;
debug('Got response', resp.statusCode, headers);
out.emit('response', resp);
set_timeout('read', config.read_timeout);
// if we got cookies, parse them unless we were instructed not to. make sure to include any
// cookies that might have been set on previous redirects.
if (config.parse_cookies && (headers['set-cookie'] || config.previous_resp_cookies)) {
resp.cookies = extend(config.previous_resp_cookies || {}, cookies.read(headers['set-cookie']));
debug('Got cookies', resp.cookies);
}
// if redirect code is found, determine if we should follow it according to the given options.
if (redirect_codes.indexOf(resp.statusCode) !== -1 && self.should_follow(headers.location, config, uri)) {
// clear timer before following redirects to prevent unexpected setTimeout consequence
clearTimeout(timer);
if (count <= config.follow_max) {
out.emit('redirect', headers.location);
// unless 'follow_keep_method' is true, rewrite the request to GET before continuing.
if (!config.follow_keep_method) {
method = 'GET';
post_data = null;
delete config.headers['content-length']; // in case the original was a multipart POST request.
}
// if follow_set_cookies is true, insert cookies in the next request's headers.
// we set both the original request cookies plus any response cookies we might have received.
if (config.follow_set_cookies && host_and_ports_match(headers.location, uri)) {
var request_cookies = cookies.read(config.headers['cookie']);
config.previous_resp_cookies = resp.cookies;
if (Object.keys(request_cookies).length || Object.keys(resp.cookies || {}).length) {
config.headers['cookie'] = cookies.write(extend(request_cookies, resp.cookies));
}
} else if (config.headers['cookie']) {
// never leak cookies to a different host.
debug('Clearing original request cookie', config.headers['cookie']);
delete config.headers['cookie'];
}
if (config.follow_set_referer)
config.headers['referer'] = encodeURI(uri); // the original, not the destination URL.
config.headers['host'] = null; // clear previous Host header to avoid conflicts.
var redirect_url = resolve_url(headers.location, uri);
debug('Redirecting to ' + redirect_url.toString());
// recurse into the redirect target; note the same `out` stream is reused.
return self.send_request(++count, method, redirect_url.toString(), config, post_data, out, callback);
} else if (config.follow_max > 0) {
return done(new Error('Max redirects reached. Possible loop in: ' + headers.location));
}
}
// if auth is requested and credentials were not passed, resend request, provided we have user/pass.
if (resp.statusCode == 401 && headers['www-authenticate'] && config.credentials) {
if (!config.headers['authorization']) { // only if authentication hasn't been sent
var auth_header = auth.header(headers['www-authenticate'], config.credentials, request_opts);
if (auth_header) {
config.headers['authorization'] = auth_header;
return self.send_request(count, method, uri, config, post_data, out, callback);
}
}
}
// ok, so we got a valid (non-redirect & authorized) response. let's notify the stream guys.
out.emit('header', resp.statusCode, headers);
out.emit('headers', headers);
var pipeline = [],
mime = parse_content_type(headers['content-type']),
text_response = mime.type && (mime.type.indexOf('text/') != -1 || !!mime.type.match(/(\/|\+)(xml|json)$/));
// To start, if our body is compressed and we're able to inflate it, do it.
if (headers['content-encoding'] && decompressors[headers['content-encoding']]) {
var decompressor = decompressors[headers['content-encoding']]();
// make sure we catch errors triggered by the decompressor.
decompressor.on('error', had_error);
pipeline.push(decompressor);
}
// If parse is enabled and we have a parser for it, then go for it.
if (config.parser && parsers[mime.type]) {
// If a specific parser was requested, make sure we don't parse other types.
var parser_name = config.parser.toString().toLowerCase();
if (['xml', 'json'].indexOf(parser_name) == -1 || parsers[mime.type].name == parser_name) {
// OK, so either we're parsing all content types or the one requested matches.
out.parser = parsers[mime.type].name;
pipeline.push(parsers[mime.type].fn());
// Set objectMode on out stream to improve performance.
out._writableState.objectMode = true;
out._readableState.objectMode = true;
}
// If we're not parsing, and unless decoding was disabled, we'll try
// decoding non UTF-8 bodies to UTF-8, using the iconv-lite library.
} else if (text_response && config.decode_response && mime.charset) {
pipeline.push(decoder(mime.charset));
}
// And `out` is the stream we finally push the decoded/parsed output to.
pipeline.push(out);
// Now, release the kraken!
pump_streams([resp].concat(pipeline), function(err) {
if (err) debug(err)
// on node v8.x, if an error occurs on the receiving end,
// then we want to abort the request to avoid having dangling sockets
if (err && err.message == 'write after end') request.destroy();
});
// If the user has requested an output file, pipe the output stream to it.
// In stream mode, we will still get the response stream to play with.
if (config.output && resp.statusCode == 200) {
// for some reason, simply piping resp to the writable stream doesn't
// work all the time (stream gets cut in the middle with no warning).
// so we'll manually need to do the readable/write(chunk) trick.
var file = fs.createWriteStream(config.output);
file.on('error', had_error);
out.on('end', function() {
if (file.writable) file.end();
});
file.on('close', function() {
delete out.file;
})
out.on('readable', function() {
var chunk;
while ((chunk = this.read()) !== null) {
if (file.writable) file.write(chunk);
// if callback was requested, also push it to resp.body
if (resp.body) resp.body.push(chunk);
}
})
out.file = file;
}
// Only aggregate the full body if a callback was requested.
if (callback) {
resp.raw = [];
resp.body = [];
resp.bytes = 0;
// Gather and count the amount of (raw) bytes using a PassThrough stream.
var clean_pipe = new stream.PassThrough();
clean_pipe.on('readable', function() {
var chunk;
while ((chunk = this.read()) != null) {
resp.bytes += chunk.length;
resp.raw.push(chunk);
}
})
pump_streams([resp, clean_pipe], function(err) {
if (err) debug(err);
});
// Listen on the 'readable' event to aggregate the chunks, but only if
// file output wasn't requested. Otherwise we'd have two stream readers.
if (!config.output || resp.statusCode != 200) {
out.on('readable', function() {
var chunk;
while ((chunk = this.read()) !== null) {
// We're either pushing buffers or objects, never strings.
if (typeof chunk == 'string') chunk = Buffer.from(chunk);
// Push all chunks to resp.body. We'll bind them in resp.end().
resp.body.push(chunk);
}
})
}
}
// And set the .body property once all data is in.
out.on('end', function() {
if (resp.body) { // callback mode
// we want to be able to access to the raw data later, so keep a reference.
resp.raw = Buffer.concat(resp.raw);
// if parse was successful, we should have an array with one object
if (resp.body[0] !== undefined && !Buffer.isBuffer(resp.body[0])) {
// that's our body right there.
resp.body = resp.body[0];
// set the parser property on our response. we may want to check.
if (out.parser) resp.parser = out.parser;
} else { // we got one or several buffers. string or binary.
resp.body = Buffer.concat(resp.body);
// if we're here and parsed is true, it means we tried to but it didn't work.
// so given that we got a text response, let's stringify it.
if (text_response || out.parser) {
resp.body = resp.body.toString();
}
}
}
// if an output file is being written to, make sure the callback
// is triggered after all data has been written to it.
if (out.file) {
out.file.on('close', function() {
done(null, resp);
})
} else { // elvis has left the building.
done(null, resp);
}
});
// out.on('error', function(err) {
// had_error(err);
// if (err.code == 'ERR_STREAM_DESTROYED' || err.code == 'ERR_STREAM_PREMATURE_CLOSE') {
// request.abort();
// }
// })
}); // end request call
// unless open_timeout was disabled, set a timeout to abort the request.
set_timeout('open', config.open_timeout);
// handle errors on the request object. things might get bumpy.
request.on('error', had_error);
// make sure timer is cleared if request is aborted (issue #257)
request.once('abort', function() {
if (timer) clearTimeout(timer);
})
// handle socket 'end' event to ensure we don't get delayed EPIPE errors.
request.once('socket', function(socket) {
if (socket.connecting) {
socket.once('connect', function() {
set_timeout('response', config.response_timeout);
})
} else {
// reused (keep-alive) socket: already connected, arm the timer now.
set_timeout('response', config.response_timeout);
}
// socket.once('close', function(e) {
// console.log('socket closed!', e);
// })
if (!socket.on_socket_end) {
socket.on_socket_end = on_socket_end;
socket.once('end', function() { process.nextTick(on_socket_end.bind(socket)) });
}
})
// finally, write the request body (stream or buffer/string) and finish.
if (post_data) {
if (is_stream(post_data)) {
pump_streams([post_data, request], function(err) {
if (err) debug(err);
});
} else {
request.write(post_data, config.encoding);
request.end();
}
} else {
request.end();
}
out.abort = function() { request.abort() }; // easier access
out.request = request;
return out;
}
//////////////////////////////////////////
// exports
if (typeof Promise !== 'undefined') {
module.exports = function() {
var verb, args = [].slice.call(arguments);
if (args[0].match(/\.|\//)) // first argument looks like a URL
verb = (args.length > 2) ? 'post' : 'get';
else
verb = args.shift();
if (verb.match(/get|head/i) && args.length == 2)
args.splice(1, 0, null); // assume no data if head/get with two args (url, options)
return new Promise(function(resolve, reject) {
module.exports.request(verb, args[0], args[1], args[2], function(err, resp) {
return err ? reject(err) : resolve(resp);
});
})
}
}
module.exports.version = version;

// Overrides library-wide default options. Accepts aliased keys (e.g. `parse`
// for `parse_response`) and type-checks each value against the original
// default, except for parse_response/proxy/agent which may legitimately
// change type (null/bool/string). Returns the updated defaults object;
// throws on unknown keys or mismatched types.
module.exports.defaults = function(obj) {
  for (var key in obj) {
    var target_key = aliased.options[key] || key;

    if (!defaults.hasOwnProperty(target_key) || typeof obj[key] == 'undefined')
      throw new Error('Invalid property for defaults:' + target_key);

    if (target_key != 'parse_response' && target_key != 'proxy' && target_key != 'agent') {
      // ensure type matches the original default
      var valid_type = defaults[target_key].constructor.name;
      if (obj[key].constructor.name != valid_type)
        throw new TypeError('Invalid type for ' + key + ', should be ' + valid_type);
    }

    defaults[target_key] = obj[key];
  }
  return defaults;
}
// Verb shortcuts: head/get take (uri, options, callback); post/put/patch/
// delete additionally take a data payload before the options.
['head', 'get'].forEach(function(method) {
  module.exports[method] = function(uri, options, callback) {
    return new Needle(method, uri, null, options, callback).start();
  }
});

['post', 'put', 'patch', 'delete'].forEach(function(method) {
  module.exports[method] = function(uri, data, options, callback) {
    return new Needle(method, uri, data, options, callback).start();
  }
});

// Generic form; also used internally by the promise wrapper above.
module.exports.request = function(method, uri, data, opts, callback) {
  return new Needle(method, uri, data, opts, callback).start();
};
/***/ }),
/***/ 55256:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
//////////////////////////////////////////
// Defines mappings between content-type
// and the appropriate parsers.
//////////////////////////////////////////
var Transform = __webpack_require__(92413).Transform;
var sax = __webpack_require__(36099);
// Parses an XML string into a plain object tree of the shape
// { name, value, attributes, children } using a strict sax parser.
// cb(err, rootObject) fires exactly once: `done` detaches itself from the
// parser so a trailing onend after an onerror is ignored.
function parseXML(str, cb) {
var obj, current, parser = sax.parser(true, { trim: true, lowercase: true })
parser.onerror = parser.onend = done;
function done(err) {
// detach so only the first error/end event reaches the callback.
parser.onerror = parser.onend = function() { }
cb(err, obj)
}
// Creates an empty element node.
function newElement(name, attributes) {
return {
name: name || '',
value: '',
attributes: attributes || {},
children: []
}
}
// Text and CDATA content accumulates on the current element's value.
parser.oncdata = parser.ontext = function(t) {
if (current) current.value += t
}
// Push a new element, linking it to its parent via a temporary .parent ref.
parser.onopentag = function(node) {
var element = newElement(node.name, node.attributes)
if (current) {
element.parent = current
current.children.push(element)
} else { // root object
obj = element
}
current = element
};
// Pop back to the parent, removing the temporary back-reference so the
// resulting tree is not cyclic.
parser.onclosetag = function() {
if (typeof current.parent !== 'undefined') {
var just_closed = current
current = current.parent
delete just_closed.parent
}
}
parser.write(str).close()
}
// Wraps a buffer-parsing function into a factory of object-mode Transform
// streams. Each stream buffers its entire input, then parses it in one go on
// flush. If parsing fails, the raw concatenated data is pushed through
// unchanged. Returns { fn: factory, name }.
function parserFactory(name, fn) {
  function parser() {
    var collected = [];
    var out = new Transform({ objectMode: true });

    // Buffer every incoming chunk until the stream ends.
    out._transform = function(chunk, encoding, done) {
      collected.push(chunk);
      done();
    }

    // Parse the full payload once everything has arrived.
    out._flush = function(done) {
      var self = this;
      var data = Buffer.concat(collected);
      try {
        fn(data, function(err, result) {
          if (err) throw err;
          self.push(result);
        });
      } catch (err) {
        self.push(data); // parsing failed: pass the original data through
      } finally {
        done();
      }
    }

    return out;
  }

  return { fn: parser, name: name };
}
// Registry of parser factories, keyed by content-type.
var parsers = {}

// Registers a named parser for each of the given content-types.
function buildParser(name, types, fn) {
  var parser = parserFactory(name, fn);
  for (var i = 0; i < types.length; i++) {
    parsers[types[i]] = parser;
  }
}
// JSON parser: covers application/json and friends. Parse errors are passed
// to the callback (and the raw body is then forwarded by parserFactory).
buildParser('json', [
  'application/json',
  'text/javascript',
  'application/vnd.api+json'
], function(buffer, cb) {
  var err, data;
  try {
    data = JSON.parse(buffer);
  } catch (e) {
    err = e;
  }
  cb(err, data);
});

// XML parser: covers the common XML-based content types.
buildParser('xml', [
  'text/xml',
  'application/xml',
  'application/rdf+xml',
  'application/rss+xml',
  'application/atom+xml'
], function(buffer, cb) {
  parseXML(buffer.toString(), function(err, obj) {
    cb(err, obj);
  });
});

module.exports = parsers;
module.exports.use = buildParser;
/***/ }),
/***/ 10558:
/***/ ((__unused_webpack_module, exports) => {
// based on the qs module, but handles null objects as expected
// fixes by Tomas Pollak.
var toString = Object.prototype.toString;

// Serializes a value into querystring form. Arrays become repeated
// `prefix[]=` entries, objects become `prefix[key]=` entries, dates are
// ISO-formatted, and other primitives are URI-encoded under their prefix.
// A bare string containing '=' passes through as-is; any other bare value
// throws a TypeError.
function stringify(obj, prefix) {
  if (prefix && (obj === null || typeof obj == 'undefined'))
    return prefix + '=';

  var tag = toString.call(obj);

  if (tag == '[object Array]')
    return stringifyArray(obj, prefix);

  if (tag == '[object Object]')
    return stringifyObject(obj, prefix);

  if (tag == '[object Date]')
    return obj.toISOString();

  if (prefix) // string inside array or hash
    return prefix + '=' + encodeURIComponent(String(obj));

  if (String(obj).indexOf('=') !== -1) // string with equal sign
    return String(obj);

  throw new TypeError('Cannot build a querystring out of: ' + obj);
};
// Serializes every array element (holes included) under `prefix[]`, joined
// with '&'. Without a prefix, elements are serialized bare.
function stringifyArray(arr, prefix) {
  var parts = [];
  for (var idx = 0; idx < arr.length; idx++)
    parts.push(stringify(arr[idx], prefix ? prefix + '[]' : undefined));
  return parts.join('&');
}
// Serializes each own key of `obj`, nesting keys under `prefix[key]` when a
// prefix is present, and joins the parts with '&'.
function stringifyObject(obj, prefix) {
  var parts = [];
  Object.keys(obj).forEach(function(key) {
    var encoded = encodeURIComponent(key);
    var child_prefix = prefix ? prefix + '[' + encoded + ']' : encoded;
    parts.push(stringify(obj[key], child_prefix));
  });
  return parts.join('&');
}
exports.J = stringify;
/***/ }),
/***/ 54336:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
var qs = __webpack_require__(71191)
, url = __webpack_require__(78835)
, xtend = __webpack_require__(47529);
// Maximum accepted Link-header length in characters, overridable via env.
// parseInt is given an explicit radix to avoid legacy octal/hex parsing.
const PARSE_LINK_HEADER_MAXLEN = parseInt(process.env.PARSE_LINK_HEADER_MAXLEN, 10) || 2000;
// When this env var is set (to anything), oversized headers throw instead of
// being silently rejected.
const PARSE_LINK_HEADER_THROW_ON_MAXLEN_EXCEEDED = process.env.PARSE_LINK_HEADER_THROW_ON_MAXLEN_EXCEEDED != null
// Truthy when x exists and carries a rel attribute (returns the rel itself).
function hasRel(x) {
  if (!x) return x;
  return x.rel;
}
// Expands a parsed link whose rel may contain several whitespace-separated
// values into one accumulator entry per rel value.
function intoRels (acc, x) {
  x.rel.split(/\s+/).forEach(function(rel) {
    acc[rel] = xtend(x, { rel: rel });
  });
  return acc;
}
// Folds a single `key="value"` attribute pair into the accumulator.
// rel="next" => 1: rel 2: next
function createObjects (acc, p) {
  var match = p.match(/\s*(.+)\s*=\s*"?([^"]+)"?/);
  if (match) acc[match[1]] = match[2];
  return acc;
}
// Parses one `<url>; rel="x"; ...` segment of a Link header into an info
// object. Query-string params from the URL are merged in (explicit link
// attributes win), and the URL itself is stored under `url`.
// Returns null on any parse failure.
function parseLink(link) {
  try {
    // Note: the optional leading `<?` was restored here — it was missing in
    // this bundle versus upstream parse-link-header, which would leave a
    // stray '<' on the first entry's URL.
    var m = link.match(/<?([^>]*)>(.*)/)
      , linkUrl = m[1]
      , parts = m[2].split(';')
      , parsedUrl = url.parse(linkUrl)
      , qry = qs.parse(parsedUrl.query);

    parts.shift();

    var info = parts
      .reduce(createObjects, {});

    info = xtend(qry, info);
    info.url = linkUrl;
    return info;
  } catch (e) {
    return null;
  }
}
// Validates presence and size of a Link header before parsing. Oversized
// headers either throw (when configured via env) or are rejected quietly.
function checkHeader(linkHeader){
  if (!linkHeader) return false;

  if (linkHeader.length <= PARSE_LINK_HEADER_MAXLEN) return true;

  if (PARSE_LINK_HEADER_THROW_ON_MAXLEN_EXCEEDED)
    throw new Error('Input string too long, it should be under ' + PARSE_LINK_HEADER_MAXLEN + ' characters.');

  return false;
}
module.exports = function (linkHeader) {
if (!checkHeader(linkHeader)) return null;
return linkHeader.split(/,\s*)
.map(parseLink)
.filter(hasRel)
.reduce(intoRels, {});
};
/***/ }),
/***/ 8381:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
const taskManager = __webpack_require__(32972);
const async_1 = __webpack_require__(64002);
const stream_1 = __webpack_require__(3480);
const sync_1 = __webpack_require__(62512);
const settings_1 = __webpack_require__(95879);
const utils = __webpack_require__(73290);
// Asynchronous entry point: validates the patterns, runs every generated
// task with the async provider in parallel, and flattens the per-task
// results into a single array.
async function FastGlob(source, options) {
  assertPatternsInput(source);
  const entries = await Promise.all(getWorks(source, async_1.default, options));
  return utils.array.flatten(entries);
}
// https://github.com/typescript-eslint/typescript-eslint/issues/60
// eslint-disable-next-line no-redeclare
(function (FastGlob) {
  // Synchronous variant of the main entry point.
  function sync(source, options) {
    assertPatternsInput(source);
    return utils.array.flatten(getWorks(source, sync_1.default, options));
  }
  FastGlob.sync = sync;

  // Streaming variant.
  function stream(source, options) {
    assertPatternsInput(source);
    const works = getWorks(source, stream_1.default, options);
    /**
     * The stream returned by the provider cannot work with an asynchronous iterator.
     * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams.
     * This affects performance (+25%). I don't see best solution right now.
     */
    return utils.stream.merge(works);
  }
  FastGlob.stream = stream;

  // Exposes the internal task planning (mostly for tests/advanced usage).
  function generateTasks(source, options) {
    assertPatternsInput(source);
    const settings = new settings_1.default(options);
    return taskManager.generate([].concat(source), settings);
  }
  FastGlob.generateTasks = generateTasks;

  // True when the pattern contains glob magic under the given settings.
  function isDynamicPattern(source, options) {
    assertPatternsInput(source);
    return utils.pattern.isDynamicPattern(source, new settings_1.default(options));
  }
  FastGlob.isDynamicPattern = isDynamicPattern;

  // Escapes glob-special characters in a literal path.
  function escapePath(source) {
    assertPatternsInput(source);
    return utils.path.escape(source);
  }
  FastGlob.escapePath = escapePath;
})(FastGlob || (FastGlob = {}));
// Builds one provider.read() invocation per generated task, binding the
// provider as the receiver.
function getWorks(source, _Provider, options) {
  const patterns = [].concat(source);
  const settings = new settings_1.default(options);
  const provider = new _Provider(settings);
  return taskManager.generate(patterns, settings).map(provider.read, provider);
}
// Rejects anything that is not a non-empty string or an array of non-empty
// strings.
function assertPatternsInput(input) {
  const items = [].concat(input);
  const allValid = items.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item));
  if (!allValid) {
    throw new TypeError('Patterns must be a string (non empty) or an array of strings');
  }
}
module.exports = FastGlob;
/***/ }),
/***/ 32972:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0;
const utils = __webpack_require__(73290);
/**
 * Splits input patterns into static and dynamic crawl tasks.
 * @param {string[]} patterns - Raw input patterns (positive and negative).
 * @param {Object} settings - fast-glob Settings instance.
 * @returns {Array} Static tasks followed by dynamic tasks.
 */
function generate(patterns, settings) {
  const positive = getPositivePatterns(patterns);
  const negative = getNegativePatternsAsPositive(patterns, settings.ignore);
  const staticPatterns = [];
  const dynamicPatterns = [];
  // Partition positives: a pattern is either static or dynamic.
  for (const pattern of positive) {
    if (utils.pattern.isStaticPattern(pattern, settings)) {
      staticPatterns.push(pattern);
    } else {
      dynamicPatterns.push(pattern);
    }
  }
  return [
    ...convertPatternsToTasks(staticPatterns, negative, /* dynamic */ false),
    ...convertPatternsToTasks(dynamicPatterns, negative, /* dynamic */ true)
  ];
}
exports.generate = generate;
/**
 * Groups positive patterns by base directory and produces one task per
 * group — unless a global ('.') group exists, which subsumes the rest.
 */
function convertPatternsToTasks(positive, negative, dynamic) {
  const groups = groupPatternsByBaseDirectory(positive);
  // A '.' group means some pattern starts at the cwd, so a single global
  // task covering every positive pattern is sufficient.
  if ('.' in groups) {
    return [convertPatternGroupToTask('.', positive, negative, dynamic)];
  }
  return convertPatternGroupsToTasks(groups, negative, dynamic);
}
exports.convertPatternsToTasks = convertPatternsToTasks;
/** Thin re-export: keeps only patterns without a '!' negation prefix. */
function getPositivePatterns(patterns) {
  const positive = utils.pattern.getPositivePatterns(patterns);
  return positive;
}
exports.getPositivePatterns = getPositivePatterns;
/**
 * Collects negative patterns (plus the `ignore` option) and strips their
 * '!' prefix so they can be matched like positive patterns.
 */
function getNegativePatternsAsPositive(patterns, ignore) {
  const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore);
  return negative.map(utils.pattern.convertToPositivePattern);
}
exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive;
/**
 * Groups patterns by their static base directory.
 *
 * Fixed to use an own-property check instead of the `in` operator:
 * `in` also sees inherited keys, so a base directory named e.g.
 * "constructor" or "toString" would resolve to an Object.prototype
 * member and crash on `collection[base].push(...)`.
 * @param {string[]} patterns
 * @returns {Object} Map of base directory -> patterns sharing it.
 */
function groupPatternsByBaseDirectory(patterns) {
  const group = {};
  return patterns.reduce((collection, pattern) => {
    const base = utils.pattern.getBaseDirectory(pattern);
    if (Object.prototype.hasOwnProperty.call(collection, base)) {
      collection[base].push(pattern);
    }
    else {
      collection[base] = [pattern];
    }
    return collection;
  }, group);
}
exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory;
/** Produces one crawl task per base-directory group. */
function convertPatternGroupsToTasks(positive, negative, dynamic) {
  const bases = Object.keys(positive);
  return bases.map((base) => convertPatternGroupToTask(base, positive[base], negative, dynamic));
}
exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks;
/**
 * Assembles a single crawl task. `patterns` is the combined list the
 * matcher consumes: positives followed by re-negated negatives.
 */
function convertPatternGroupToTask(base, positive, negative, dynamic) {
  const patterns = [].concat(positive, negative.map(utils.pattern.convertToNegativePattern));
  return { dynamic, positive, negative, base, patterns };
}
exports.convertPatternGroupToTask = convertPatternGroupToTask;
/***/ }),
/***/ 64002:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const stream_1 = __webpack_require__(3986);
const provider_1 = __webpack_require__(29834);
/**
 * Provider backing the promise-based API: buffers all entries from the
 * underlying reader stream and resolves them as a single array.
 */
class ProviderAsync extends provider_1.default {
constructor() {
super(...arguments);
// Stream-based reader; its output is collected into an array below.
this._reader = new stream_1.default(this._settings);
}
/**
 * Executes one task and resolves with its transformed entries.
 * Rejects on the first 'error' event emitted by the reader stream.
 */
read(task) {
const root = this._getRootDirectory(task);
const options = this._getReaderOptions(task);
const entries = [];
return new Promise((resolve, reject) => {
const stream = this.api(root, task, options);
stream.once('error', reject);
// Entries are transformed eagerly as they arrive.
stream.on('data', (entry) => entries.push(options.transform(entry)));
stream.once('end', () => resolve(entries));
});
}
// Dynamic patterns walk the filesystem; static ones stat literal paths.
api(root, task, options) {
if (task.dynamic) {
return this._reader.dynamic(root, options);
}
return this._reader.static(task.patterns, options);
}
}
exports.default = ProviderAsync;
/***/ }),
/***/ 52744:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const utils = __webpack_require__(73290);
const partial_1 = __webpack_require__(1340);
/**
 * Builds the predicate that decides whether a directory should be
 * entered while walking (depth limit, symlink policy, and partial
 * pattern matching). Directories rejected here are never read.
 */
class DeepFilter {
constructor(_settings, _micromatchOptions) {
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
}
/**
 * Returns the directory filter for one task.
 * basePath is '' when the task base is the cwd.
 */
getFilter(basePath, positive, negative) {
const matcher = this._getMatcher(positive);
const negativeRe = this._getNegativePatternsRe(negative);
return (entry) => this._filter(basePath, entry, matcher, negativeRe);
}
// Partial matcher answers: can this directory still lead to a match?
_getMatcher(patterns) {
return new partial_1.default(patterns, this._settings, this._micromatchOptions);
}
// Only negative patterns that constrain reading depth are compiled.
_getNegativePatternsRe(patterns) {
const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern);
return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);
}
// Cheap checks (depth, symlink) run before pattern matching.
_filter(basePath, entry, matcher, negativeRe) {
if (this._isSkippedByDeep(basePath, entry.path)) {
return false;
}
if (this._isSkippedSymbolicLink(entry)) {
return false;
}
const filepath = utils.path.removeLeadingDotSegment(entry.path);
if (this._isSkippedByPositivePatterns(filepath, matcher)) {
return false;
}
return this._isSkippedByNegativePatterns(filepath, negativeRe);
}
_isSkippedByDeep(basePath, entryPath) {
/**
 * Avoid unnecessary depth calculations when it doesn't matter.
 */
if (this._settings.deep === Infinity) {
return false;
}
return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;
}
// Depth is measured in '/'-separated segments relative to the task base.
_getEntryLevel(basePath, entryPath) {
const entryPathDepth = entryPath.split('/').length;
if (basePath === '') {
return entryPathDepth;
}
const basePathDepth = basePath.split('/').length;
return entryPathDepth - basePathDepth;
}
_isSkippedSymbolicLink(entry) {
return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();
}
// With baseNameMatch the partial matcher cannot be trusted, so never skip.
_isSkippedByPositivePatterns(entryPath, matcher) {
return !this._settings.baseNameMatch && !matcher.match(entryPath);
}
// NOTE(review): returns true when NO depth-affecting negative pattern
// matches — i.e. "keep", despite the "skipped" name; callers rely on this.
_isSkippedByNegativePatterns(entryPath, patternsRe) {
return !utils.pattern.matchAny(entryPath, patternsRe);
}
}
exports.default = DeepFilter;
/***/ }),
/***/ 93579:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const utils = __webpack_require__(73290);
/**
 * Builds the predicate applied to every discovered entry: uniqueness,
 * file/directory-only modes, and positive/negative pattern matching.
 */
class EntryFilter {
constructor(_settings, _micromatchOptions) {
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
// Paths already emitted; consulted only when `unique` is enabled.
this.index = new Map();
}
/** Compiles the task's patterns and returns the entry predicate. */
getFilter(positive, negative) {
const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions);
const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions);
return (entry) => this._filter(entry, positiveRe, negativeRe);
}
_filter(entry, positiveRe, negativeRe) {
if (this._settings.unique && this._isDuplicateEntry(entry)) {
return false;
}
if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {
return false;
}
if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) {
return false;
}
// baseNameMatch compares only the entry name, not the full path.
const filepath = this._settings.baseNameMatch ? entry.name : entry.path;
const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe);
// Only matches are recorded: a rejected path may legitimately reappear.
if (this._settings.unique && isMatched) {
this._createIndexRecord(entry);
}
return isMatched;
}
_isDuplicateEntry(entry) {
return this.index.has(entry.path);
}
_createIndexRecord(entry) {
// The Map is used as a Set: only key presence matters.
this.index.set(entry.path, undefined);
}
_onlyFileFilter(entry) {
return this._settings.onlyFiles && !entry.dirent.isFile();
}
_onlyDirectoryFilter(entry) {
return this._settings.onlyDirectories && !entry.dirent.isDirectory();
}
// In absolute mode, negative patterns may target absolute paths too.
_isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {
if (!this._settings.absolute) {
return false;
}
const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath);
return utils.pattern.matchAny(fullpath, patternsRe);
}
_isMatchToPatterns(entryPath, patternsRe) {
// Patterns are written without a leading './'; strip it before matching.
const filepath = utils.path.removeLeadingDotSegment(entryPath);
return utils.pattern.matchAny(filepath, patternsRe);
}
}
exports.default = EntryFilter;
/***/ }),
/***/ 8520:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const utils = __webpack_require__(73290);
/**
 * Decides which reader errors are survivable: ENOENT is always
 * non-fatal; anything else only with the `suppressErrors` setting.
 */
class ErrorFilter {
  constructor(_settings) {
    this._settings = _settings;
  }
  /** Returns a predicate: true -> swallow the error, false -> fatal. */
  getFilter() {
    return (error) => this._isNonFatalError(error);
  }
  _isNonFatalError(error) {
    if (utils.errno.isEnoentCodeError(error)) {
      return true;
    }
    return this._settings.suppressErrors;
  }
}
exports.default = ErrorFilter;
/***/ }),
/***/ 45177:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const utils = __webpack_require__(73290);
/**
 * Pre-processes positive patterns for partial matching: expands braces,
 * splits each pattern into slash-separated segments, and groups the
 * segments into sections delimited by globstar ('**') segments.
 */
class Matcher {
constructor(_patterns, _settings, _micromatchOptions) {
this._patterns = _patterns;
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
this._storage = [];
this._fillStorage();
}
_fillStorage() {
/**
 * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level).
 * So, before expand patterns with brace expansion into separated patterns.
 */
const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns);
for (const pattern of patterns) {
const segments = this._getPatternSegments(pattern);
const sections = this._splitSegmentsIntoSections(segments);
this._storage.push({
// 'complete' = no globstar in the pattern (exactly one section).
complete: sections.length <= 1,
pattern,
segments,
sections
});
}
}
// Static segments are compared with ===; dynamic ones get a RegExp.
_getPatternSegments(pattern) {
const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions);
return parts.map((part) => {
const dynamic = utils.pattern.isDynamicPattern(part, this._settings);
if (!dynamic) {
return {
dynamic: false,
pattern: part
};
}
return {
dynamic: true,
pattern: part,
patternRe: utils.pattern.makeRe(part, this._micromatchOptions)
};
});
}
// A new section starts at every globstar segment.
_splitSegmentsIntoSections(segments) {
return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern));
}
}
exports.default = Matcher;
/***/ }),
/***/ 1340:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const matcher_1 = __webpack_require__(45177);
/**
 * Answers whether a directory path could still lead to a match for any
 * stored pattern, so the walker knows whether to descend into it.
 */
class PartialMatcher extends matcher_1.default {
match(filepath) {
const parts = filepath.split('/');
const levels = parts.length;
// Candidates: globstar patterns, or patterns deeper than this path.
const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels);
for (const pattern of patterns) {
const section = pattern.sections[0];
/**
 * In this case, the pattern has a globstar and we must read all directories unconditionally,
 * but only if the level has reached the end of the first group.
 *
 * fixtures/{a,b}/**
 * ^ true/false ^ always true
 */
if (!pattern.complete && levels > section.length) {
return true;
}
// Otherwise every path part must match its pattern segment:
// regex test for dynamic segments, equality for static ones.
const match = parts.every((part, index) => {
const segment = pattern.segments[index];
if (segment.dynamic && segment.patternRe.test(part)) {
return true;
}
if (!segment.dynamic && segment.pattern === part) {
return true;
}
return false;
});
if (match) {
return true;
}
}
return false;
}
}
exports.default = PartialMatcher;
/***/ }),
/***/ 29834:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const path = __webpack_require__(85622);
const deep_1 = __webpack_require__(52744);
const entry_1 = __webpack_require__(93579);
const error_1 = __webpack_require__(8520);
const entry_2 = __webpack_require__(54081);
/**
 * Base provider: wires the settings into the filter/transformer helpers
 * and builds the per-task reader options shared by all strategies
 * (async, stream, sync).
 */
class Provider {
constructor(_settings) {
this._settings = _settings;
this.errorFilter = new error_1.default(this._settings);
this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions());
this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions());
this.entryTransformer = new entry_2.default(this._settings);
}
/** Absolute directory the walk starts from for a task. */
_getRootDirectory(task) {
return path.resolve(this._settings.cwd, task.base);
}
/** Options consumed by the readers (filters, transform, fs adapter). */
_getReaderOptions(task) {
// '.' means "the cwd"; the matching code expects '' in that case.
const basePath = task.base === '.' ? '' : task.base;
return {
basePath,
pathSegmentSeparator: '/',
concurrency: this._settings.concurrency,
deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative),
entryFilter: this.entryFilter.getFilter(task.positive, task.negative),
errorFilter: this.errorFilter.getFilter(),
followSymbolicLinks: this._settings.followSymbolicLinks,
fs: this._settings.fs,
stats: this._settings.stats,
throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink,
transform: this.entryTransformer.getTransformer()
};
}
// Translates fast-glob settings into micromatch option names
// (note the negated flags: nobrace, nocase, noext, noglobstar).
_getMicromatchOptions() {
return {
dot: this._settings.dot,
matchBase: this._settings.baseNameMatch,
nobrace: !this._settings.braceExpansion,
nocase: !this._settings.caseSensitiveMatch,
noext: !this._settings.extglob,
noglobstar: !this._settings.globstar,
posix: true,
strictSlashes: false
};
}
}
exports.default = Provider;
/***/ }),
/***/ 3480:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const stream_1 = __webpack_require__(92413);
const stream_2 = __webpack_require__(3986);
const provider_1 = __webpack_require__(29834);
/**
 * Provider backing the stream API: proxies reader output through a
 * fresh Readable so consumers receive transformed entries and can
 * abort the walk by closing the stream.
 */
class ProviderStream extends provider_1.default {
constructor() {
super(...arguments);
this._reader = new stream_2.default(this._settings);
}
read(task) {
const root = this._getRootDirectory(task);
const options = this._getReaderOptions(task);
const source = this.api(root, task, options);
// Data is forwarded via emit; the read() callback is intentionally a no-op.
const destination = new stream_1.Readable({ objectMode: true, read: () => { } });
source
.once('error', (error) => destination.emit('error', error))
.on('data', (entry) => destination.emit('data', options.transform(entry)))
.once('end', () => destination.emit('end'));
// Closing the consumer stream tears down the underlying walk.
destination
.once('close', () => source.destroy());
return destination;
}
// Dynamic patterns walk the filesystem; static ones stat literal paths.
api(root, task, options) {
if (task.dynamic) {
return this._reader.dynamic(root, options);
}
return this._reader.static(task.patterns, options);
}
}
exports.default = ProviderStream;
/***/ }),
/***/ 62512:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const sync_1 = __webpack_require__(94311);
const provider_1 = __webpack_require__(29834);
/**
 * Provider backing the synchronous API: reads entries eagerly with the
 * sync reader and returns them already transformed.
 */
class ProviderSync extends provider_1.default {
  constructor() {
    super(...arguments);
    this._reader = new sync_1.default(this._settings);
  }
  /** Runs one task and returns its transformed entries. */
  read(task) {
    const root = this._getRootDirectory(task);
    const options = this._getReaderOptions(task);
    return this.api(root, task, options).map((entry) => options.transform(entry));
  }
  // Dynamic patterns walk the filesystem; static ones stat literal paths.
  api(root, task, options) {
    return task.dynamic
      ? this._reader.dynamic(root, options)
      : this._reader.static(task.patterns, options);
  }
}
/***/ }),
/***/ 54081:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const utils = __webpack_require__(73290);
/**
 * Builds the per-entry transform applied before results are emitted:
 * optional absolute paths, a trailing '/' on marked directories, and
 * unwrapping to a plain path string unless objectMode is enabled.
 */
class EntryTransformer {
  constructor(_settings) {
    this._settings = _settings;
  }
  /** Returns the transform callback used by the readers. */
  getTransformer() {
    return (entry) => this._transform(entry);
  }
  _transform(entry) {
    let filepath = entry.path;
    if (this._settings.absolute) {
      // Absolute paths are normalized to forward slashes on all platforms.
      filepath = utils.path.makeAbsolute(this._settings.cwd, filepath);
      filepath = utils.path.unixify(filepath);
    }
    if (this._settings.markDirectories && entry.dirent.isDirectory()) {
      filepath = `${filepath}/`;
    }
    if (!this._settings.objectMode) {
      return filepath;
    }
    // objectMode: return a copy of the entry carrying the adjusted path.
    return Object.assign(Object.assign({}, entry), { path: filepath });
  }
}
exports.default = EntryTransformer;
/***/ }),
/***/ 35346:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const path = __webpack_require__(85622);
const fsStat = __webpack_require__(66203);
const utils = __webpack_require__(73290);
/**
 * Base class for the stream and sync readers: shared fs-stat settings
 * plus helpers for turning a static pattern into an entry.
 */
class Reader {
constructor(_settings) {
this._settings = _settings;
// Broken-symlink errors are raised only when links are followed.
this._fsStatSettings = new fsStat.Settings({
followSymbolicLink: this._settings.followSymbolicLinks,
fs: this._settings.fs,
throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks
});
}
// Resolves a pattern-relative path against the configured cwd.
_getFullEntryPath(filepath) {
return path.resolve(this._settings.cwd, filepath);
}
// The entry's name/path keep the original pattern text on purpose:
// static patterns are reported as written, not as resolved paths.
_makeEntry(stats, pattern) {
const entry = {
name: pattern,
path: pattern,
dirent: utils.fs.createDirentFromStats(pattern, stats)
};
if (this._settings.stats) {
entry.stats = stats;
}
return entry;
}
// ENOENT is never fatal; other errors are fatal unless suppressed.
_isFatalError(error) {
return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors;
}
}
exports.default = Reader;
/***/ }),
/***/ 3986:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const stream_1 = __webpack_require__(92413);
const fsStat = __webpack_require__(66203);
const fsWalk = __webpack_require__(45439);
const reader_1 = __webpack_require__(35346);
/**
 * Stream-based reader. Dynamic patterns delegate to fsWalk's walk
 * stream; static patterns are stat'ed one by one through a PassThrough
 * used as a lightweight work queue.
 */
class ReaderStream extends reader_1.default {
constructor() {
super(...arguments);
this._walkStream = fsWalk.walkStream;
this._stat = fsStat.stat;
}
dynamic(root, options) {
return this._walkStream(root, options);
}
static(patterns, options) {
const filepaths = patterns.map(this._getFullEntryPath, this);
const stream = new stream_1.PassThrough({ objectMode: true });
// _write is overridden so each queued index is processed with
// back-pressure: the next write proceeds only after done() is called.
stream._write = (index, _enc, done) => {
return this._getEntry(filepaths[index], patterns[index], options)
.then((entry) => {
if (entry !== null && options.entryFilter(entry)) {
stream.push(entry);
}
// Last index processed -> no more data will be pushed.
if (index === filepaths.length - 1) {
stream.end();
}
done();
})
.catch(done);
};
// Queue every index; _write above consumes them sequentially.
for (let i = 0; i < filepaths.length; i++) {
stream.write(i);
}
return stream;
}
// Resolves to null for errors the errorFilter tolerates (e.g. ENOENT);
// rethrows everything else so the stream errors out.
_getEntry(filepath, pattern, options) {
return this._getStat(filepath)
.then((stats) => this._makeEntry(stats, pattern))
.catch((error) => {
if (options.errorFilter(error)) {
return null;
}
throw error;
});
}
// Promisified fsStat.stat using this reader's stat settings.
_getStat(filepath) {
return new Promise((resolve, reject) => {
this._stat(filepath, this._fsStatSettings, (error, stats) => {
return error === null ? resolve(stats) : reject(error);
});
});
}
}
exports.default = ReaderStream;
/***/ }),
/***/ 94311:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const fsStat = __webpack_require__(66203);
const fsWalk = __webpack_require__(45439);
const reader_1 = __webpack_require__(35346);
/**
 * Synchronous reader: walks directories (dynamic patterns) or stats
 * literal paths (static patterns) without streams or promises.
 */
class ReaderSync extends reader_1.default {
  constructor() {
    super(...arguments);
    this._walkSync = fsWalk.walkSync;
    this._statSync = fsStat.statSync;
  }
  /** Recursively walks `root`, honoring the reader options. */
  dynamic(root, options) {
    return this._walkSync(root, options);
  }
  /** Stats each literal pattern; missing or filtered entries are dropped. */
  static(patterns, options) {
    const entries = [];
    for (const pattern of patterns) {
      const entry = this._getEntry(this._getFullEntryPath(pattern), pattern, options);
      if (entry !== null && options.entryFilter(entry)) {
        entries.push(entry);
      }
    }
    return entries;
  }
  // Returns null for errors the errorFilter tolerates (e.g. ENOENT);
  // rethrows everything else.
  _getEntry(filepath, pattern, options) {
    try {
      return this._makeEntry(this._getStat(filepath), pattern);
    }
    catch (error) {
      if (options.errorFilter(error)) {
        return null;
      }
      throw error;
    }
  }
  _getStat(filepath) {
    return this._statSync(filepath, this._fsStatSettings);
  }
}
exports.default = ReaderSync;
/***/ }),
/***/ 95879:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;
const fs = __webpack_require__(35747);
const os = __webpack_require__(12087);
/**
* The `os.cpus` method can return zero. We expect the number of cores to be greater than zero.
* https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107
*/
const CPU_COUNT = Math.max(os.cpus().length, 1);
exports.DEFAULT_FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
lstatSync: fs.lstatSync,
stat: fs.stat,
statSync: fs.statSync,
readdir: fs.readdir,
readdirSync: fs.readdirSync
};
/**
 * Normalizes raw fast-glob options into a fully-populated settings
 * object, applying defaults and reconciling dependent flags.
 */
class Settings {
constructor(_options = {}) {
this._options = _options;
this.absolute = this._getValue(this._options.absolute, false);
this.baseNameMatch = this._getValue(this._options.baseNameMatch, false);
this.braceExpansion = this._getValue(this._options.braceExpansion, true);
this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true);
this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT);
this.cwd = this._getValue(this._options.cwd, process.cwd());
this.deep = this._getValue(this._options.deep, Infinity);
this.dot = this._getValue(this._options.dot, false);
this.extglob = this._getValue(this._options.extglob, true);
this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true);
this.fs = this._getFileSystemMethods(this._options.fs);
this.globstar = this._getValue(this._options.globstar, true);
this.ignore = this._getValue(this._options.ignore, []);
this.markDirectories = this._getValue(this._options.markDirectories, false);
this.objectMode = this._getValue(this._options.objectMode, false);
this.onlyDirectories = this._getValue(this._options.onlyDirectories, false);
this.onlyFiles = this._getValue(this._options.onlyFiles, true);
this.stats = this._getValue(this._options.stats, false);
this.suppressErrors = this._getValue(this._options.suppressErrors, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false);
this.unique = this._getValue(this._options.unique, true);
// Directory-only mode overrides the files-only default.
if (this.onlyDirectories) {
this.onlyFiles = false;
}
// Stats can only be attached to object-mode entries.
if (this.stats) {
this.objectMode = true;
}
}
// `undefined` means "not provided"; null/false/0 are real values.
_getValue(option, value) {
return option === undefined ? value : option;
}
// Merges a partial fs adapter over the default node fs methods.
_getFileSystemMethods(methods = {}) {
return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods);
}
}
exports.default = Settings;
/***/ }),
/***/ 86465:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.splitWhen = exports.flatten = void 0;
/**
 * Flattens an array one level deep; non-array items are kept as-is.
 * Rewritten from reduce+concat, which rebuilt the accumulator array on
 * every step (O(n^2)), to a single-pass O(n) loop.
 * @param {Array} items
 * @returns {Array} A new flat array; the input is not mutated.
 */
function flatten(items) {
  const result = [];
  for (const item of items) {
    if (Array.isArray(item)) {
      result.push(...item);
    } else {
      result.push(item);
    }
  }
  return result;
}
exports.flatten = flatten;
/**
 * Splits `items` into groups, starting a new group whenever `predicate`
 * matches; matching items act as delimiters and are not kept.
 * Always returns at least one (possibly empty) group.
 */
function splitWhen(items, predicate) {
  const groups = [[]];
  for (const item of items) {
    if (predicate(item)) {
      groups.push([]);
    } else {
      groups[groups.length - 1].push(item);
    }
  }
  return groups;
}
exports.splitWhen = splitWhen;
/***/ }),
/***/ 66049:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.isEnoentCodeError = void 0;
/** True when the error is a "no such file or directory" (ENOENT) error. */
function isEnoentCodeError(error) {
  const { code } = error;
  return code === 'ENOENT';
}
exports.isEnoentCodeError = isEnoentCodeError;
/***/ }),
/***/ 4906:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.createDirentFromStats = void 0;
/**
 * Minimal fs.Dirent look-alike backed by an fs.Stats object: exposes
 * the same type-probing methods a real Dirent instance has.
 */
class DirentFromStats {
  constructor(name, stats) {
    this.name = name;
    // Delegate every type probe to the underlying Stats object.
    const probes = ['isBlockDevice', 'isCharacterDevice', 'isDirectory', 'isFIFO', 'isFile', 'isSocket', 'isSymbolicLink'];
    for (const probe of probes) {
      this[probe] = stats[probe].bind(stats);
    }
  }
}
/** Factory wrapper around DirentFromStats (the class is not exported). */
function createDirentFromStats(name, stats) {
  const dirent = new DirentFromStats(name, stats);
  return dirent;
}
exports.createDirentFromStats = createDirentFromStats;
/***/ }),
/***/ 73290:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0;
const array = __webpack_require__(86465);
exports.array = array;
const errno = __webpack_require__(66049);
exports.errno = errno;
const fs = __webpack_require__(4906);
exports.fs = fs;
const path = __webpack_require__(55830);
exports.path = path;
const pattern = __webpack_require__(10038);
exports.pattern = pattern;
const stream = __webpack_require__(61726);
exports.stream = stream;
const string = __webpack_require__(63757);
exports.string = string;
/***/ }),
/***/ 55830:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0;
const path = __webpack_require__(85622);
const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\
const UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\())/g;
/**
 * Converts every backslash to a forward slash.
 * Designed to work only with simple paths: `dir\\file`.
 */
function unixify(filepath) {
  return filepath.split('\\').join('/');
}
exports.unixify = unixify;
/** Resolves `filepath` against `cwd` into an absolute path. */
function makeAbsolute(cwd, filepath) {
  const resolved = path.resolve(cwd, filepath);
  return resolved;
}
exports.makeAbsolute = makeAbsolute;
/**
 * Backslash-escapes any unescaped special glob character so the
 * resulting pattern is matched literally.
 */
function escape(pattern) {
  const escaped = pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\$2');
  return escaped;
}
exports.escape = escape;
/**
 * Strips a leading './' (or '.\') segment from a path, if present.
 * charAt is used instead of startsWith because it measured ~10x faster
 * here for some cases.
 */
function removeLeadingDotSegment(entry) {
  // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with
  if (entry.charAt(0) !== '.') {
    return entry;
  }
  const separator = entry.charAt(1);
  if (separator === '/' || separator === '\\') {
    return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT);
  }
  return entry;
}
exports.removeLeadingDotSegment = removeLeadingDotSegment;
/***/ }),
/***/ 10038:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0;
const path = __webpack_require__(85622);
const globParent = __webpack_require__(29807);
const micromatch = __webpack_require__(70850);
const picomatch = __webpack_require__(59444);
const GLOBSTAR = '**';
const ESCAPE_SYMBOL = '\\';
const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/;
const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[.*]/;
const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\(.*\|.*\)/;
const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\(.*\)/;
const BRACE_EXPANSIONS_SYMBOLS_RE = /{.*(?:,|\.\.).*}/;
/** A pattern is static when it contains no dynamic glob syntax at all. */
function isStaticPattern(pattern, options = {}) {
  return isDynamicPattern(pattern, options) === false;
}
exports.isStaticPattern = isStaticPattern;
/**
 * Detects whether a pattern needs glob matching (dynamic) or can be
 * treated as a literal path (static).
 * @param {string} pattern
 * @param {Object} [options] - May disable case sensitivity, extglob, or brace expansion.
 * @returns {boolean}
 */
function isDynamicPattern(pattern, options = {}) {
  // An empty string can never be dynamic. It appears when patterns that
  // start with a forward slash (e.g. `/lib/*`) are split into parts: '', 'lib', '*'.
  if (pattern === '') {
    return false;
  }
  // With case-insensitive matching we cannot compare file paths directly
  // (without reading the directory), so every pattern must be treated as
  // dynamic. Escape sequences also force dynamic handling.
  if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) {
    return true;
  }
  if (COMMON_GLOB_SYMBOLS_RE.test(pattern)) {
    return true;
  }
  if (REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) {
    return true;
  }
  if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) {
    return true;
  }
  return options.braceExpansion !== false && BRACE_EXPANSIONS_SYMBOLS_RE.test(pattern);
}
exports.isDynamicPattern = isDynamicPattern;
/** Strips the leading '!' from a negative pattern; others pass through. */
function convertToPositivePattern(pattern) {
  if (isNegativePattern(pattern)) {
    return pattern.slice(1);
  }
  return pattern;
}
exports.convertToPositivePattern = convertToPositivePattern;
/** Prefixes a pattern with '!' to negate it. */
function convertToNegativePattern(pattern) {
  return `!${pattern}`;
}
exports.convertToNegativePattern = convertToNegativePattern;
/**
 * True for '!'-prefixed patterns. '!(...)' is an extglob group,
 * not a negation, and is excluded.
 */
function isNegativePattern(pattern) {
  return pattern[0] === '!' && pattern[1] !== '(';
}
exports.isNegativePattern = isNegativePattern;
/** A pattern is positive when it is not a negation. */
function isPositivePattern(pattern) {
  return isNegativePattern(pattern) === false;
}
exports.isPositivePattern = isPositivePattern;
/** Keeps only '!'-prefixed (negative) patterns. */
function getNegativePatterns(patterns) {
  return patterns.filter((pattern) => isNegativePattern(pattern));
}
exports.getNegativePatterns = getNegativePatterns;
/** Keeps only patterns without a '!' negation prefix. */
function getPositivePatterns(patterns) {
  return patterns.filter((pattern) => isPositivePattern(pattern));
}
exports.getPositivePatterns = getPositivePatterns;
/** Static (non-glob) base directory of a pattern, per glob-parent. */
function getBaseDirectory(pattern) {
  // Backslashes here are escape characters, not path separators.
  const options = { flipBackslashes: false };
  return globParent(pattern, options);
}
exports.getBaseDirectory = getBaseDirectory;
/** True when the pattern contains a globstar ('**'). */
function hasGlobStar(pattern) {
  return pattern.indexOf(GLOBSTAR) !== -1;
}
exports.hasGlobStar = hasGlobStar;
/** True for patterns that end in '/**'. */
function endsWithSlashGlobStar(pattern) {
  return pattern.endsWith(`/${GLOBSTAR}`);
}
exports.endsWithSlashGlobStar = endsWithSlashGlobStar;
/**
 * True when the pattern constrains how deep directories must be read:
 * it either ends with '/**' or has a static basename.
 */
function isAffectDepthOfReadingPattern(pattern) {
  if (endsWithSlashGlobStar(pattern)) {
    return true;
  }
  return isStaticPattern(path.basename(pattern));
}
exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;
/** Expands every pattern's brace expressions and concatenates the results. */
function expandPatternsWithBraceExpansion(patterns) {
  const expanded = [];
  for (const pattern of patterns) {
    expanded.push(...expandBraceExpansion(pattern));
  }
  return expanded;
}
exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;
/** Expands `{a,b}`-style braces into separate, deduplicated patterns. */
function expandBraceExpansion(pattern) {
  const options = { expand: true, nodupes: true };
  return micromatch.braces(pattern, options);
}
exports.expandBraceExpansion = expandBraceExpansion;
/**
 * Splits a pattern into slash-separated parts using picomatch's
 * scanner, working around two picomatch quirks (micromatch/picomatch#58).
 */
function getPatternParts(pattern, options) {
  const scanOptions = Object.assign(Object.assign({}, options), { parts: true });
  let { parts } = picomatch.scan(pattern, scanOptions);
  // Quirk 1: scan may return no parts at all; fall back to the raw pattern.
  if (parts.length === 0) {
    parts = [pattern];
  }
  // Quirk 2: a leading slash is glued onto the first part instead of
  // producing an empty leading part; restore that empty part here.
  if (parts[0].startsWith('/')) {
    parts[0] = parts[0].slice(1);
    parts.unshift('');
  }
  return parts;
}
exports.getPatternParts = getPatternParts;
/** Compiles one glob pattern into a RegExp via micromatch. */
function makeRe(pattern, options) {
  const regex = micromatch.makeRe(pattern, options);
  return regex;
}
exports.makeRe = makeRe;
/** Compiles each pattern in the list into its RegExp. */
function convertPatternsToRe(patterns, options) {
  const regexps = [];
  for (const pattern of patterns) {
    regexps.push(makeRe(pattern, options));
  }
  return regexps;
}
exports.convertPatternsToRe = convertPatternsToRe;
/** True when at least one compiled pattern matches the entry path. */
function matchAny(entry, patternsRe) {
  for (const patternRe of patternsRe) {
    if (patternRe.test(entry)) {
      return true;
    }
  }
  return false;
}
exports.matchAny = matchAny;
/***/ }),
/***/ 61726:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.merge = void 0;
const merge2 = __webpack_require__(60155);
/**
 * Merges several readable streams into one via merge2, re-emitting the
 * first 'error' from any source and propagating teardown both ways.
 */
function merge(streams) {
const mergedStream = merge2(streams);
streams.forEach((stream) => {
// Forward source errors: merge2 does not re-emit them itself.
stream.once('error', (error) => mergedStream.emit('error', error));
});
// When the merged stream finishes (or is closed early), tell every
// source to stop by emitting 'close' on it.
mergedStream.once('close', () => propagateCloseEventToSources(streams));
mergedStream.once('end', () => propagateCloseEventToSources(streams));
return mergedStream;
}
exports.merge = merge;
/** Emits 'close' on every source stream so each one stops reading. */
function propagateCloseEventToSources(streams) {
  for (const stream of streams) {
    stream.emit('close');
  }
}
/***/ }),
/***/ 63757:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.isEmpty = exports.isString = void 0;
/** Type guard: true only for primitive strings. */
function isString(input) {
  const isStringPrimitive = typeof input === 'string';
  return isStringPrimitive;
}
exports.isString = isString;
/** True only for the empty string (non-strings are never "empty"). */
function isEmpty(input) {
  return input === '';
}
exports.isEmpty = isEmpty;
/***/ }),
/***/ 10635:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
const fs = __webpack_require__(35747);
const path = __webpack_require__(85622);
const assert = __webpack_require__(42357);
const GEMFILE_DEFAULT_LOCATION = path.resolve(process.cwd(), 'Gemfile.lock');
const WHITESPACE = /^(\s*)/;
const GEMFILE_KEY_VALUE = /^\s*([^:(]*)\s*\:*\s*(.*)/;
const ORIGINS = ['GEM', 'GIT', 'PATH'];
const RUBY = /^ruby\s(.*)/;
module.exports = {
interpret,
parse,
parseSync
};
/**
 * Parse the raw text of a Gemfile.lock into a nested object keyed by its
 * section headings (GEM, GIT, PATH, DEPENDENCIES, PLATFORMS, ...).
 * Indentation drives nesting; values are classified as path/version/sha.
 *
 * @param {string} string - UTF-8 contents of a Gemfile.lock.
 * @param {boolean} [extractMeta] - When truthy, return a condensed summary
 *   ({bundledWith, rubyVersion, platforms, dependencies, specs}) instead
 *   of the full tree.
 * @returns {Object} Parsed lockfile tree, or the meta summary.
 */
function interpret(string, extractMeta) {
assert(
typeof string === 'string',
'gemfile.interpret expects a UTF-8 Gemfile.lock string source.'
);
const gemfileMeta = {};
let line;
let index = 0;
// Indentation depth (in spaces) of the previously processed line.
let previousWhitespace = -1;
// Counts repeats of a duplicated key so earlier blocks are preserved.
let keyCount = {};
let gemfile = {};
let lines = string.split('\n');
// Stack of {key, depth} entries describing the path from the root down to
// the current nesting level; indentation drives pushes and pops.
let stack = [];
while((line = lines[index++]) !== undefined) {
// Handle depth stack changes: pop entries at least as deep as this line.
let whitespace = WHITESPACE.exec(line)[1].length;
if (whitespace <= previousWhitespace) {
let stackIndex = stack.length - 1;
while(stack[stackIndex] && (whitespace <= stack[stackIndex].depth)) {
stack.pop();
stackIndex--;
}
}
// Make note of line's whitespace depth
previousWhitespace = whitespace;
// Handle new key/value leaf. GEMFILE_KEY_VALUE matches every line (all of
// its groups are optional), so `parts` is never null; blank lines yield an
// empty key and are skipped by the `if (key)` guard below.
let parts = GEMFILE_KEY_VALUE.exec(line);
let key = parts[1].trim();
let value = parts[2] || '';
if (key) {
// Handle path traversal: descend from the root along the stacked keys.
let level = gemfile;
for (let stackIndex = 0; stackIndex < stack.length; stackIndex++) {
if (level[stack[stackIndex].key]) {
level = level[stack[stackIndex].key];
}
}
// Handle data type inference: a value containing '/' is a path, a
// parenthesized value is a version spec ('!' suffix marks an outsourced
// gem), and a 7-40 character hex word is treated as a git sha.
let data = {};
if (value.indexOf('/') > -1) {
data.path = value;
} else if (value.indexOf('(') > -1) {
if (value[value.length - 1] === '!') {
value = value.substring(0, value.length - 1);
data.outsourced = true;
}
if (value[1] !== ')') {
data.version = value.substring(1, value.length - 1);
}
} else if (/\b[0-9a-f]{7,40}\b/.test(value)) {
data.sha = value;
}
// Set key at current level
// Do not throw away additional top-level key entries
// e.g. multiple GIT/GEM blocks: an existing entry is renamed to
// key0, key1, ... before the fresh entry takes the plain key.
if (level[key]) {
if (keyCount[key] === undefined) {
keyCount[key] = 0;
} else {
keyCount[key]++;
}
level[key + keyCount[key]] = level[key];
}
level[key] = data;
// Push key on stack
stack.push({key, depth: whitespace});
}
}
// Sanity-check that the standard lockfile sections exist; warn (but still
// return whatever was parsed) when they do not.
let keys = Object.keys(gemfile);
let hasGemKey = keys.indexOf('GEM') > -1;
let hasDependenciesKey = keys.indexOf('DEPENDENCIES') > -1;
let hasPlatformsKey = keys.indexOf('PLATFORMS') > -1;
if (!hasGemKey || !hasDependenciesKey || !hasPlatformsKey) {
console.warn([
'Are you sure this a Gemfile.lock?',
'If it is, please file an issue on Github: https://github.com/treycordova/gemfile/issues.',
'Regardless, gemfile parsed whatever you gave it.'
].join('\n'));
}
// Collapse the single-child BUNDLED WITH / RUBY VERSION sections into
// their scalar values.
if (gemfile['BUNDLED WITH']) {
gemfile['BUNDLED WITH'] = Object.keys(gemfile['BUNDLED WITH'])[0];
}
if (gemfile['RUBY VERSION']) {
const rawVersion= Object.keys(gemfile['RUBY VERSION'])[0];
const version = RUBY.exec(rawVersion)[1];
gemfile['RUBY VERSION'] = version;
}
if (extractMeta) {
gemfileMeta.bundledWith = gemfile['BUNDLED WITH'];
gemfileMeta.rubyVersion = gemfile['RUBY VERSION'];
gemfileMeta.platforms = gemfile['PLATFORMS'];
gemfileMeta.dependencies = gemfile['DEPENDENCIES'];
// Flatten every GEM/GIT/PATH block's specs into one gem-name -> spec map,
// tagging each spec with its origin block's metadata and type.
gemfileMeta.specs = Object.keys(gemfile)
.filter(key =>
ORIGINS.some(origin => key.startsWith(origin)))
.reduce((specs, key) => {
const type = key.match(/[A-Z]+/)[0];
const meta = Object.assign({ type }, gemfile[key]);
delete meta.specs;
Object.assign(specs, Object.keys(gemfile[key].specs).reduce((specs, gem) => {
specs[gem] = Object.assign({}, gemfile[key].specs[gem], meta);
return specs;
}, {}));
return specs;
}, {});
return gemfileMeta;
}
return gemfile;
}
/**
 * Asynchronously read and interpret a Gemfile.lock.
 *
 * @param {string} [path] - Lockfile location; defaults to ./Gemfile.lock
 *   in the current working directory.
 * @param {boolean} [extractMeta] - Forwarded to `interpret`; when truthy,
 *   resolves with the condensed metadata shape.
 * @returns {Promise<Object>} Resolves with the parsed gemfile. Rejects with
 *   a descriptive string when the file cannot be read (kept as a string for
 *   backward compatibility), or with the underlying error when interpreting
 *   fails.
 */
function parse(path, extractMeta) {
  path = typeof path === 'string' ?
    path :
    GEMFILE_DEFAULT_LOCATION;
  return new Promise(function(resolve, reject) {
    // Bug fix: fs.readFile returns undefined, so the old `let file = ...`
    // binding was dead code and has been removed.
    fs.readFile(path, {encoding: 'utf8'}, function(error, gemfile) {
      if (error) {
        return reject(`Couldn't find a Gemfile at the specified location: ${path}.`);
      }
      // Bug fix: a synchronous throw from interpret() inside this callback
      // previously escaped the Promise as an uncaught exception; reject
      // instead so callers can handle parse failures.
      try {
        resolve(interpret(gemfile, extractMeta));
      } catch (interpretError) {
        reject(interpretError);
      }
    });
  });
}
/**
 * Synchronously read and interpret a Gemfile.lock.
 * Falls back to ./Gemfile.lock when no path string is given.
 */
function parseSync(path, extractMeta) {
  const target = typeof path === 'string' ? path : GEMFILE_DEFAULT_LOCATION;
  const source = fs.readFileSync(target, 'utf8');
  return interpret(source, extractMeta);
}
/***/ }),
/***/ 29807:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
var isGlob = __webpack_require__(46830);
var pathPosixDirname = __webpack_require__(85622).posix.dirname;
var isWin32 = __webpack_require__(12087).platform() === 'win32';
var slash = '/';
var backslash = /\\/g;
var enclosure = /[\{\[].*\/.*[\}\]]$/;
var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/;
var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g;
/**
* @param {string} str
* @param {Object} opts
* @param {boolean} [opts.flipBackslashes=true]
* @returns {string}
*/
module.exports = function globParent(str, opts) {
var options = Object.assign({ flipBackslashes: true }, opts);
// flip windows path separators
if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) {
str = str.replace(backslash, slash);
}
// special case for strings ending in enclosure containing path separator
if (enclosure.test(str)) {
str += slash;
}
// preserves full path in case of trailing path separator
str += 'a';
// remove path parts that are globby
do {
str = pathPosixDirname(str);
} while (isGlob(str) || globby.test(str));
// remove escape chars and return result
return str.replace(escaped, '$1');
};
/***/ }),
/***/ 39322:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
/**
* Copyright (c) 2014, Chris Pettitt
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
var lib = __webpack_require__(19943);
module.exports = {
Graph: lib.Graph,
json: __webpack_require__(40584),
alg: __webpack_require__(67550),
version: lib.version
};
/***/ }),
/***/ 94686:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var _ = __webpack_require__(8570);
module.exports = components;
// Connected components of g, treating every edge as undirected
// (both successors and predecessors are followed).
function components(g) {
  var visited = {};
  var cmpts = [];
  function dfs(component, v) {
    if (_.has(visited, v)) return;
    visited[v] = true;
    component.push(v);
    _.each(g.successors(v), function(w) { dfs(component, w); });
    _.each(g.predecessors(v), function(w) { dfs(component, w); });
  }
  _.each(g.nodes(), function(v) {
    var component = [];
    dfs(component, v);
    if (component.length) {
      cmpts.push(component);
    }
  });
  return cmpts;
}
/***/ }),
/***/ 1647:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var _ = __webpack_require__(8570);
module.exports = dfs;
/*
* A helper that preforms a pre- or post-order traversal on the input graph
* and returns the nodes in the order they were visited. If the graph is
* undirected then this algorithm will navigate using neighbors. If the graph
* is directed then this algorithm will navigate using successors.
*
* Order must be one of "pre" or "post".
*/
/*
 * A helper that performs a pre- or post-order traversal on the input graph
 * and returns the nodes in the order they were visited. Undirected graphs
 * navigate via neighbors, directed graphs via successors.
 *
 * Order must be one of "pre" or "post".
 */
function dfs(g, vs, order) {
  var roots = _.isArray(vs) ? vs : [vs];
  var navigation = (g.isDirected() ? g.successors : g.neighbors).bind(g);
  var acc = [];
  var visited = {};
  roots.forEach(function(v) {
    if (!g.hasNode(v)) {
      throw new Error("Graph does not have node: " + v);
    }
    doDfs(g, v, order === "post", visited, navigation, acc);
  });
  return acc;
}
// Recursive worker for dfs(): appends v before (pre-order) or after
// (post-order) visiting its unvisited navigable neighbors.
function doDfs(g, v, postorder, visited, navigation, acc) {
  if (_.has(visited, v)) {
    return;
  }
  visited[v] = true;
  if (!postorder) {
    acc.push(v);
  }
  _.each(navigation(v), function(w) {
    doDfs(g, w, postorder, visited, navigation, acc);
  });
  if (postorder) {
    acc.push(v);
  }
}
/***/ }),
/***/ 16342:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var dijkstra = __webpack_require__(70785);
var _ = __webpack_require__(8570);
module.exports = dijkstraAll;
// Run dijkstra from every node; returns source id -> dijkstra result map.
function dijkstraAll(g, weightFunc, edgeFunc) {
  var results = {};
  _.each(g.nodes(), function(v) {
    results[v] = dijkstra(g, v, weightFunc, edgeFunc);
  });
  return results;
}
/***/ }),
/***/ 70785:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var _ = __webpack_require__(8570);
var PriorityQueue = __webpack_require__(85474);
module.exports = dijkstra;
var DEFAULT_WEIGHT_FUNC = _.constant(1);
// Public entry point: defaults weights to 1 and edges to outEdges, then
// delegates to runDijkstra with a stringified source id.
function dijkstra(g, source, weightFn, edgeFn) {
  var weight = weightFn || DEFAULT_WEIGHT_FUNC;
  var edges = edgeFn || function(v) { return g.outEdges(v); };
  return runDijkstra(g, String(source), weight, edges);
}
/**
 * Core single-source shortest-path computation (Dijkstra).
 *
 * @param {Graph} g - Graph being traversed.
 * @param {string} source - Start node id (already stringified).
 * @param {Function} weightFn - Maps an edge object to a non-negative weight.
 * @param {Function} edgeFn - Maps a node id to the edges relaxed from it.
 * @returns {Object} node id -> { distance, [predecessor] }.
 */
function runDijkstra(g, source, weightFn, edgeFn) {
var results = {};
var pq = new PriorityQueue();
// `v` / `vEntry` are shared with the closure below: they always refer to
// the node currently being settled by the main loop.
var v, vEntry;
var updateNeighbors = function(edge) {
// The edge may be stored in either orientation; pick the far endpoint.
var w = edge.v !== v ? edge.v : edge.w;
var wEntry = results[w];
var weight = weightFn(edge);
var distance = vEntry.distance + weight;
if (weight < 0) {
throw new Error("dijkstra does not allow negative edge weights. " +
"Bad edge: " + edge + " Weight: " + weight);
}
// Standard relaxation step.
if (distance < wEntry.distance) {
wEntry.distance = distance;
wEntry.predecessor = v;
pq.decrease(w, distance);
}
};
// Initialize every distance to +Infinity except the source's 0.
g.nodes().forEach(function(v) {
var distance = v === source ? 0 : Number.POSITIVE_INFINITY;
results[v] = { distance: distance };
pq.add(v, distance);
});
while (pq.size() > 0) {
v = pq.removeMin();
vEntry = results[v];
// Once the minimum is infinite, the remaining nodes are unreachable.
if (vEntry.distance === Number.POSITIVE_INFINITY) {
break;
}
edgeFn(v).forEach(updateNeighbors);
}
return results;
}
/***/ }),
/***/ 81046:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var _ = __webpack_require__(8570);
var tarjan = __webpack_require__(61511);
module.exports = findCycles;
// Strongly connected components that form cycles: either more than one
// node, or a single node with a self-loop.
function findCycles(g) {
  return _.filter(tarjan(g), function(cmpt) {
    if (cmpt.length > 1) {
      return true;
    }
    return cmpt.length === 1 && g.hasEdge(cmpt[0], cmpt[0]);
  });
}
/***/ }),
/***/ 71786:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var _ = __webpack_require__(8570);
module.exports = floydWarshall;
var DEFAULT_WEIGHT_FUNC = _.constant(1);
// Public entry point: defaults weights to 1 and edges to outEdges, then
// delegates to runFloydWarshall.
function floydWarshall(g, weightFn, edgeFn) {
  var weight = weightFn || DEFAULT_WEIGHT_FUNC;
  var edges = edgeFn || function(v) { return g.outEdges(v); };
  return runFloydWarshall(g, weight, edges);
}
/**
 * All-pairs shortest paths via Floyd-Warshall.
 *
 * @returns {Object} results[i][j] = { distance, [predecessor] }.
 */
function runFloydWarshall(g, weightFn, edgeFn) {
var results = {};
var nodes = g.nodes();
// Initialize the distance matrix: 0 on the diagonal, the edge weight where
// an edge exists, +Infinity everywhere else.
nodes.forEach(function(v) {
results[v] = {};
results[v][v] = { distance: 0 };
nodes.forEach(function(w) {
if (v !== w) {
results[v][w] = { distance: Number.POSITIVE_INFINITY };
}
});
edgeFn(v).forEach(function(edge) {
var w = edge.v === v ? edge.w : edge.v;
var d = weightFn(edge);
results[v][w] = { distance: d, predecessor: v };
});
});
// Relax every pair (i, j) through each intermediate node k.
nodes.forEach(function(k) {
var rowK = results[k];
nodes.forEach(function(i) {
var rowI = results[i];
nodes.forEach(function(j) {
var ik = rowI[k];
var kj = rowK[j];
var ij = rowI[j];
var altDistance = ik.distance + kj.distance;
if (altDistance < ij.distance) {
ij.distance = altDistance;
ij.predecessor = kj.predecessor;
}
});
});
});
return results;
}
/***/ }),
/***/ 67550:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
module.exports = {
components: __webpack_require__(94686),
dijkstra: __webpack_require__(70785),
dijkstraAll: __webpack_require__(16342),
findCycles: __webpack_require__(81046),
floydWarshall: __webpack_require__(71786),
isAcyclic: __webpack_require__(57200),
postorder: __webpack_require__(26877),
preorder: __webpack_require__(80573),
prim: __webpack_require__(8472),
tarjan: __webpack_require__(61511),
topsort: __webpack_require__(95626)
};
/***/ }),
/***/ 57200:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var topsort = __webpack_require__(95626);
module.exports = isAcyclic;
// True when g has no cycles: a successful topological sort proves it,
// a CycleException disproves it, any other error is re-thrown.
function isAcyclic(g) {
  try {
    topsort(g);
    return true;
  } catch (e) {
    if (e instanceof topsort.CycleException) {
      return false;
    }
    throw e;
  }
}
/***/ }),
/***/ 26877:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var dfs = __webpack_require__(1647);
module.exports = postorder;
// Nodes of g reachable from vs, listed in post-order.
function postorder(g, vs) {
  var traversalOrder = "post";
  return dfs(g, vs, traversalOrder);
}
/***/ }),
/***/ 80573:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var dfs = __webpack_require__(1647);
module.exports = preorder;
// Nodes of g reachable from vs, listed in pre-order.
function preorder(g, vs) {
  var traversalOrder = "pre";
  return dfs(g, vs, traversalOrder);
}
/***/ }),
/***/ 8472:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var _ = __webpack_require__(8570);
var Graph = __webpack_require__(6583);
var PriorityQueue = __webpack_require__(85474);
module.exports = prim;
/**
 * Minimum spanning tree of g via Prim's algorithm.
 * Throws when the input graph is not connected.
 *
 * @param {Graph} g
 * @param {Function} weightFunc - Maps an edge object to its weight.
 * @returns {Graph} New graph containing the MST edges.
 */
function prim(g, weightFunc) {
var result = new Graph();
var parents = {};
var pq = new PriorityQueue();
// Shared with updateNeighbors: the node currently being added to the tree.
var v;
function updateNeighbors(edge) {
var w = edge.v === v ? edge.w : edge.v;
var pri = pq.priority(w);
// Only nodes still in the queue (not yet in the tree) are candidates.
if (pri !== undefined) {
var edgeWeight = weightFunc(edge);
if (edgeWeight < pri) {
parents[w] = v;
pq.decrease(w, edgeWeight);
}
}
}
if (g.nodeCount() === 0) {
return result;
}
_.each(g.nodes(), function(v) {
pq.add(v, Number.POSITIVE_INFINITY);
result.setNode(v);
});
// Start from an arbitrary node
pq.decrease(g.nodes()[0], 0);
// `init` flips to true once the first node is settled; any later node
// popped without a recorded parent means the graph is disconnected.
var init = false;
while (pq.size() > 0) {
v = pq.removeMin();
if (_.has(parents, v)) {
result.setEdge(v, parents[v]);
} else if (init) {
throw new Error("Input graph is not connected: " + g);
} else {
init = true;
}
g.nodeEdges(v).forEach(updateNeighbors);
}
return result;
}
/***/ }),
/***/ 61511:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var _ = __webpack_require__(8570);
module.exports = tarjan;
/**
 * Tarjan's algorithm for strongly connected components.
 *
 * @param {Graph} g - Directed graph.
 * @returns {Array<Array<string>>} Components, each a list of node ids.
 */
function tarjan(g) {
var index = 0;
var stack = [];
var visited = {}; // node id -> { onStack, lowlink, index }
var results = [];
function dfs(v) {
var entry = visited[v] = {
onStack: true,
lowlink: index,
index: index++
};
stack.push(v);
g.successors(v).forEach(function(w) {
if (!_.has(visited, w)) {
// Tree edge: recurse, then inherit the child's low-link.
dfs(w);
entry.lowlink = Math.min(entry.lowlink, visited[w].lowlink);
} else if (visited[w].onStack) {
// Back edge to a node still on the stack.
entry.lowlink = Math.min(entry.lowlink, visited[w].index);
}
});
// v is the root of an SCC: pop the stack down to and including v.
if (entry.lowlink === entry.index) {
var cmpt = [];
var w;
do {
w = stack.pop();
visited[w].onStack = false;
cmpt.push(w);
} while (v !== w);
results.push(cmpt);
}
}
g.nodes().forEach(function(v) {
if (!_.has(visited, v)) {
dfs(v);
}
});
return results;
}
/***/ }),
/***/ 95626:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var _ = __webpack_require__(8570);
module.exports = topsort;
topsort.CycleException = CycleException;
/**
 * Topologically sort g; throws CycleException when a cycle exists.
 * Traversal starts from the sinks and follows predecessors, so results
 * end up ordered from sources to sinks.
 */
function topsort(g) {
var visited = {};
// Nodes on the current DFS path; revisiting one means a cycle.
var stack = {};
var results = [];
function visit(node) {
if (_.has(stack, node)) {
throw new CycleException();
}
if (!_.has(visited, node)) {
stack[node] = true;
visited[node] = true;
_.each(g.predecessors(node), visit);
delete stack[node];
results.push(node);
}
}
_.each(g.sinks(), visit);
// Nodes never reached from any sink must sit on a cycle (they have no
// cycle-free path to a sink), so an incomplete visit set is also a cycle.
if (_.size(visited) !== g.nodeCount()) {
throw new CycleException();
}
return results;
}
// Sentinel error thrown when a cycle prevents a topological sort.
function CycleException() {}
CycleException.prototype = new Error(); // must be an instance of Error to pass testing
/***/ }),
/***/ 85474:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var _ = __webpack_require__(8570);
module.exports = PriorityQueue;
/**
* A min-priority queue data structure. This algorithm is derived from Cormen,
* et al., "Introduction to Algorithms". The basic idea of a min-priority
* queue is that you can efficiently (in O(1) time) get the smallest key in
* the queue. Adding and removing elements takes O(log n) time. A key can
* have its priority decreased in O(log n) time.
*/
// Backing array of {key, priority} entries plus a key -> index lookup so a
// key's priority can be updated without scanning the array.
function PriorityQueue() {
this._arr = [];
this._keyIndices = {};
}
/**
 * Returns the number of elements in the queue. Takes `O(1)` time.
 */
PriorityQueue.prototype.size = function() {
return this._arr.length;
};
/**
 * Returns the keys that are in the queue. Takes `O(n)` time.
 */
PriorityQueue.prototype.keys = function() {
return this._arr.map(function(x) { return x.key; });
};
/**
 * Returns `true` if **key** is in the queue and `false` if not.
 */
PriorityQueue.prototype.has = function(key) {
return _.has(this._keyIndices, key);
};
/**
 * Returns the priority for **key**. If **key** is not present in the queue
 * then this function returns `undefined`. Takes `O(1)` time.
 *
 * @param {Object} key
 */
PriorityQueue.prototype.priority = function(key) {
var index = this._keyIndices[key];
if (index !== undefined) {
return this._arr[index].priority;
}
};
/**
 * Returns the key for the minimum element in this queue. If the queue is
 * empty this function throws an Error. Takes `O(1)` time.
 */
PriorityQueue.prototype.min = function() {
if (this.size() === 0) {
throw new Error("Queue underflow");
}
return this._arr[0].key;
};
/**
 * Inserts a new key into the priority queue. If the key already exists in
 * the queue this function returns `false`; otherwise it will return `true`.
 * Takes `O(n)` time.
 *
 * @param {Object} key the key to add
 * @param {Number} priority the initial priority for the key
 */
PriorityQueue.prototype.add = function(key, priority) {
var keyIndices = this._keyIndices;
key = String(key);
if (!_.has(keyIndices, key)) {
var arr = this._arr;
var index = arr.length;
keyIndices[key] = index;
arr.push({key: key, priority: priority});
// Bubble the new entry up to restore the heap property.
this._decrease(index);
return true;
}
return false;
};
/**
 * Removes and returns the smallest key in the queue. Takes `O(log n)` time.
 */
PriorityQueue.prototype.removeMin = function() {
// Swap the root with the last entry, pop it off, then sift the new root down.
this._swap(0, this._arr.length - 1);
var min = this._arr.pop();
delete this._keyIndices[min.key];
this._heapify(0);
return min.key;
};
/**
 * Decreases the priority for **key** to **priority**. If the new priority is
 * greater than the previous priority, this function will throw an Error.
 *
 * @param {Object} key the key for which to raise priority
 * @param {Number} priority the new priority for the key
 */
PriorityQueue.prototype.decrease = function(key, priority) {
var index = this._keyIndices[key];
if (priority > this._arr[index].priority) {
throw new Error("New priority is greater than current priority. " +
"Key: " + key + " Old: " + this._arr[index].priority + " New: " + priority);
}
this._arr[index].priority = priority;
this._decrease(index);
};
// Sift the entry at i downwards. NOTE(review): despite the name, `largest`
// tracks the index with the SMALLEST priority (this is a min-heap). The
// child indices use 2*i / 2*i+1 with the root at 0 (so the root's computed
// left child is index 0 itself), which matches upstream graphlib; confirm
// the heap invariant before altering this arithmetic.
PriorityQueue.prototype._heapify = function(i) {
var arr = this._arr;
var l = 2 * i;
var r = l + 1;
var largest = i;
if (l < arr.length) {
largest = arr[l].priority < arr[largest].priority ? l : largest;
if (r < arr.length) {
largest = arr[r].priority < arr[largest].priority ? r : largest;
}
if (largest !== i) {
this._swap(i, largest);
this._heapify(largest);
}
}
};
// Bubble the entry at `index` upwards while its parent (index >> 1, the
// counterpart of the 2*i child math above) has a larger priority.
PriorityQueue.prototype._decrease = function(index) {
var arr = this._arr;
var priority = arr[index].priority;
var parent;
while (index !== 0) {
parent = index >> 1;
if (arr[parent].priority < priority) {
break;
}
this._swap(index, parent);
index = parent;
}
};
// Exchange entries i and j and keep the key -> index map in sync.
PriorityQueue.prototype._swap = function(i, j) {
var arr = this._arr;
var keyIndices = this._keyIndices;
var origArrI = arr[i];
var origArrJ = arr[j];
arr[i] = origArrJ;
arr[j] = origArrI;
keyIndices[origArrJ.key] = i;
keyIndices[origArrI.key] = j;
};
/***/ }),
/***/ 6583:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
var _ = __webpack_require__(8570);
module.exports = Graph;
var DEFAULT_EDGE_NAME = "\x00";
var GRAPH_NODE = "\x00";
var EDGE_KEY_DELIM = "\x01";
// Implementation notes:
//
// * Node id query functions should return string ids for the nodes
// * Edge id query functions should return an "edgeObj", edge object, that is
// composed of enough information to uniquely identify an edge: {v, w, name}.
// * Internally we use an "edgeId", a stringified form of the edgeObj, to
// reference edges. This is because we need a performant way to look these
// edges up and, object properties, which have string keys, are the closest
// we're going to get to a performant hashtable in JavaScript.
/**
 * Graph supporting directed/undirected, multigraph, and compound variants.
 *
 * @param {Object} [opts]
 * @param {boolean} [opts.directed=true] - Treat edges as directed.
 * @param {boolean} [opts.multigraph=false] - Allow multiple named edges
 *   between the same pair of nodes.
 * @param {boolean} [opts.compound=false] - Allow nodes to be nested via
 *   parent/child relationships.
 */
function Graph(opts) {
this._isDirected = _.has(opts, "directed") ? opts.directed : true;
this._isMultigraph = _.has(opts, "multigraph") ? opts.multigraph : false;
this._isCompound = _.has(opts, "compound") ? opts.compound : false;
// Label for the graph itself
this._label = undefined;
// Defaults to be set when creating a new node
this._defaultNodeLabelFn = _.constant(undefined);
// Defaults to be set when creating a new edge
this._defaultEdgeLabelFn = _.constant(undefined);
// v -> label
this._nodes = {};
if (this._isCompound) {
// v -> parent
this._parent = {};
// v -> children
this._children = {};
// GRAPH_NODE is the implicit root that parents all top-level nodes.
this._children[GRAPH_NODE] = {};
}
// v -> edgeObj
this._in = {};
// u -> v -> Number
this._preds = {};
// v -> edgeObj
this._out = {};
// v -> w -> Number
this._sucs = {};
// e -> edgeObj
this._edgeObjs = {};
// e -> label
this._edgeLabels = {};
}
/* Number of nodes in the graph. Should only be changed by the implementation. */
Graph.prototype._nodeCount = 0;
/* Number of edges in the graph. Should only be changed by the implementation. */
Graph.prototype._edgeCount = 0;
/* === Graph functions ========= */
Graph.prototype.isDirected = function() {
return this._isDirected;
};
Graph.prototype.isMultigraph = function() {
return this._isMultigraph;
};
Graph.prototype.isCompound = function() {
return this._isCompound;
};
// Set the label attached to the graph as a whole.
Graph.prototype.setGraph = function(label) {
this._label = label;
return this;
};
// Read the graph-level label.
Graph.prototype.graph = function() {
return this._label;
};
/* === Node functions ========== */
// Set the label (or label factory) used for nodes created without an
// explicit value; non-function defaults are wrapped in a constant fn.
Graph.prototype.setDefaultNodeLabel = function(newDefault) {
if (!_.isFunction(newDefault)) {
newDefault = _.constant(newDefault);
}
this._defaultNodeLabelFn = newDefault;
return this;
};
Graph.prototype.nodeCount = function() {
return this._nodeCount;
};
Graph.prototype.nodes = function() {
return _.keys(this._nodes);
};
// Nodes with no incoming edges.
Graph.prototype.sources = function() {
var self = this;
return _.filter(this.nodes(), function(v) {
return _.isEmpty(self._in[v]);
});
};
// Nodes with no outgoing edges.
Graph.prototype.sinks = function() {
var self = this;
return _.filter(this.nodes(), function(v) {
return _.isEmpty(self._out[v]);
});
};
// Bulk setNode; the shared `value` is only forwarded when provided.
Graph.prototype.setNodes = function(vs, value) {
var args = arguments;
var self = this;
_.each(vs, function(v) {
if (args.length > 1) {
self.setNode(v, value);
} else {
self.setNode(v);
}
});
return this;
};
// Create node v (or update its label). Distinguishes "no value passed"
// from "value === undefined" via arguments.length.
Graph.prototype.setNode = function(v, value) {
if (_.has(this._nodes, v)) {
if (arguments.length > 1) {
this._nodes[v] = value;
}
return this;
}
this._nodes[v] = arguments.length > 1 ? value : this._defaultNodeLabelFn(v);
if (this._isCompound) {
// New nodes are parented to the implicit root.
this._parent[v] = GRAPH_NODE;
this._children[v] = {};
this._children[GRAPH_NODE][v] = true;
}
this._in[v] = {};
this._preds[v] = {};
this._out[v] = {};
this._sucs[v] = {};
++this._nodeCount;
return this;
};
// Label attached to node v (undefined when absent).
Graph.prototype.node = function(v) {
return this._nodes[v];
};
Graph.prototype.hasNode = function(v) {
return _.has(this._nodes, v);
};
// Remove v plus all incident edges; in compound graphs, v's children are
// re-parented to the implicit root.
Graph.prototype.removeNode = function(v) {
var self = this;
if (_.has(this._nodes, v)) {
var removeEdge = function(e) { self.removeEdge(self._edgeObjs[e]); };
delete this._nodes[v];
if (this._isCompound) {
this._removeFromParentsChildList(v);
delete this._parent[v];
_.each(this.children(v), function(child) {
self.setParent(child);
});
delete this._children[v];
}
_.each(_.keys(this._in[v]), removeEdge);
delete this._in[v];
delete this._preds[v];
_.each(_.keys(this._out[v]), removeEdge);
delete this._out[v];
delete this._sucs[v];
--this._nodeCount;
}
return this;
};
// Make `parent` the parent of v (compound graphs only). Passing undefined
// re-parents v to the implicit root. Rejects parent chains that would
// make v its own ancestor.
Graph.prototype.setParent = function(v, parent) {
if (!this._isCompound) {
throw new Error("Cannot set parent in a non-compound graph");
}
if (_.isUndefined(parent)) {
parent = GRAPH_NODE;
} else {
// Coerce parent to string
parent += "";
// Walk up the ancestor chain to make sure v is not already an ancestor.
for (var ancestor = parent;
!_.isUndefined(ancestor);
ancestor = this.parent(ancestor)) {
if (ancestor === v) {
throw new Error("Setting " + parent+ " as parent of " + v +
" would create a cycle");
}
}
this.setNode(parent);
}
this.setNode(v);
this._removeFromParentsChildList(v);
this._parent[v] = parent;
this._children[parent][v] = true;
return this;
};
// Detach v from its current parent's child list.
Graph.prototype._removeFromParentsChildList = function(v) {
delete this._children[this._parent[v]][v];
};
// Parent of v; undefined for top-level nodes and non-compound graphs.
Graph.prototype.parent = function(v) {
if (this._isCompound) {
var parent = this._parent[v];
if (parent !== GRAPH_NODE) {
return parent;
}
}
};
// Children of v; with no argument, the top-level nodes.
Graph.prototype.children = function(v) {
if (_.isUndefined(v)) {
v = GRAPH_NODE;
}
if (this._isCompound) {
var children = this._children[v];
if (children) {
return _.keys(children);
}
} else if (v === GRAPH_NODE) {
return this.nodes();
} else if (this.hasNode(v)) {
return [];
}
};
Graph.prototype.predecessors = function(v) {
var predsV = this._preds[v];
if (predsV) {
return _.keys(predsV);
}
};
Graph.prototype.successors = function(v) {
var sucsV = this._sucs[v];
if (sucsV) {
return _.keys(sucsV);
}
};
// Union of predecessors and successors; undefined for unknown nodes.
Graph.prototype.neighbors = function(v) {
var preds = this.predecessors(v);
if (preds) {
return _.union(preds, this.successors(v));
}
};
Graph.prototype.isLeaf = function (v) {
var neighbors;
if (this.isDirected()) {
neighbors = this.successors(v);
} else {
neighbors = this.neighbors(v);
}
return neighbors.length === 0;
};
// Copy of the graph containing only nodes accepted by `filter`, the edges
// between them, and (for compound graphs) each node's nearest surviving
// ancestor as its new parent.
Graph.prototype.filterNodes = function(filter) {
var copy = new this.constructor({
directed: this._isDirected,
multigraph: this._isMultigraph,
compound: this._isCompound
});
copy.setGraph(this.graph());
var self = this;
_.each(this._nodes, function(value, v) {
if (filter(v)) {
copy.setNode(v, value);
}
});
// Keep only edges whose endpoints both survived the filter.
_.each(this._edgeObjs, function(e) {
if (copy.hasNode(e.v) && copy.hasNode(e.w)) {
copy.setEdge(e, self.edge(e));
}
});
var parents = {};
// Nearest ancestor of v present in the copy (memoized in `parents`).
function findParent(v) {
var parent = self.parent(v);
if (parent === undefined || copy.hasNode(parent)) {
parents[v] = parent;
return parent;
} else if (parent in parents) {
return parents[parent];
} else {
return findParent(parent);
}
}
if (this._isCompound) {
_.each(copy.nodes(), function(v) {
copy.setParent(v, findParent(v));
});
}
return copy;
};
/* === Edge functions ========== */
// Set the label (or label factory) used for edges created without a value.
Graph.prototype.setDefaultEdgeLabel = function(newDefault) {
if (!_.isFunction(newDefault)) {
newDefault = _.constant(newDefault);
}
this._defaultEdgeLabelFn = newDefault;
return this;
};
Graph.prototype.edgeCount = function() {
return this._edgeCount;
};
Graph.prototype.edges = function() {
return _.values(this._edgeObjs);
};
// Create an edge between each consecutive pair of nodes in vs.
Graph.prototype.setPath = function(vs, value) {
var self = this;
var args = arguments;
_.reduce(vs, function(v, w) {
if (args.length > 1) {
self.setEdge(v, w, value);
} else {
self.setEdge(v, w);
}
return w;
});
return this;
};
/*
 * setEdge(v, w, [value, [name]])
 * setEdge({ v, w, [name] }, [value])
 */
Graph.prototype.setEdge = function() {
var v, w, name, value;
var valueSpecified = false;
var arg0 = arguments[0];
// Dispatch on call style: edge object vs positional arguments.
if (typeof arg0 === "object" && arg0 !== null && "v" in arg0) {
v = arg0.v;
w = arg0.w;
name = arg0.name;
if (arguments.length === 2) {
value = arguments[1];
valueSpecified = true;
}
} else {
v = arg0;
w = arguments[1];
name = arguments[3];
if (arguments.length > 2) {
value = arguments[2];
valueSpecified = true;
}
}
v = "" + v;
w = "" + w;
if (!_.isUndefined(name)) {
name = "" + name;
}
var e = edgeArgsToId(this._isDirected, v, w, name);
// Existing edge: only the label may change.
if (_.has(this._edgeLabels, e)) {
if (valueSpecified) {
this._edgeLabels[e] = value;
}
return this;
}
if (!_.isUndefined(name) && !this._isMultigraph) {
throw new Error("Cannot set a named edge when isMultigraph = false");
}
// It didn't exist, so we need to create it.
// First ensure the nodes exist.
this.setNode(v);
this.setNode(w);
this._edgeLabels[e] = valueSpecified ? value : this._defaultEdgeLabelFn(v, w, name);
var edgeObj = edgeArgsToObj(this._isDirected, v, w, name);
// Ensure we add undirected edges in a consistent way.
v = edgeObj.v;
w = edgeObj.w;
Object.freeze(edgeObj);
this._edgeObjs[e] = edgeObj;
incrementOrInitEntry(this._preds[w], v);
incrementOrInitEntry(this._sucs[v], w);
this._in[w][e] = edgeObj;
this._out[v][e] = edgeObj;
this._edgeCount++;
return this;
};
// Label for an edge, addressed positionally or by an edge object.
Graph.prototype.edge = function(v, w, name) {
var e = (arguments.length === 1
? edgeObjToId(this._isDirected, arguments[0])
: edgeArgsToId(this._isDirected, v, w, name));
return this._edgeLabels[e];
};
Graph.prototype.hasEdge = function(v, w, name) {
var e = (arguments.length === 1
? edgeObjToId(this._isDirected, arguments[0])
: edgeArgsToId(this._isDirected, v, w, name));
return _.has(this._edgeLabels, e);
};
// Remove an edge and update all adjacency bookkeeping.
Graph.prototype.removeEdge = function(v, w, name) {
var e = (arguments.length === 1
? edgeObjToId(this._isDirected, arguments[0])
: edgeArgsToId(this._isDirected, v, w, name));
var edge = this._edgeObjs[e];
if (edge) {
v = edge.v;
w = edge.w;
delete this._edgeLabels[e];
delete this._edgeObjs[e];
decrementOrRemoveEntry(this._preds[w], v);
decrementOrRemoveEntry(this._sucs[v], w);
delete this._in[w][e];
delete this._out[v][e];
this._edgeCount--;
}
return this;
};
// Edges into v, optionally restricted to those coming from u.
Graph.prototype.inEdges = function(v, u) {
var inV = this._in[v];
if (inV) {
var edges = _.values(inV);
if (!u) {
return edges;
}
return _.filter(edges, function(edge) { return edge.v === u; });
}
};
// Edges out of v, optionally restricted to those going into w.
Graph.prototype.outEdges = function(v, w) {
var outV = this._out[v];
if (outV) {
var edges = _.values(outV);
if (!w) {
return edges;
}
return _.filter(edges, function(edge) { return edge.w === w; });
}
};
// All edges incident on v (in + out), optionally filtered by the other
// endpoint w; undefined when v is not in the graph.
Graph.prototype.nodeEdges = function(v, w) {
var inEdges = this.inEdges(v, w);
if (inEdges) {
return inEdges.concat(this.outEdges(v, w));
}
};
// Bump the counter stored at map[k], creating it at 1 on first sight.
function incrementOrInitEntry(map, k) {
  map[k] = (map[k] || 0) + 1;
}
// Decrement map[k]; delete the key entirely once the count reaches zero.
function decrementOrRemoveEntry(map, k) {
  map[k] -= 1;
  if (!map[k]) {
    delete map[k];
  }
}
// Internal string id for an edge. Undirected edges are normalized so the
// lexicographically smaller endpoint comes first; unnamed edges get the
// DEFAULT_EDGE_NAME sentinel.
function edgeArgsToId(isDirected, v_, w_, name) {
  var v = "" + v_;
  var w = "" + w_;
  if (!isDirected && v > w) {
    var swap = v;
    v = w;
    w = swap;
  }
  var suffix = _.isUndefined(name) ? DEFAULT_EDGE_NAME : name;
  return v + EDGE_KEY_DELIM + w + EDGE_KEY_DELIM + suffix;
}
// Public edge object {v, w, [name]}. Undirected edges are normalized so
// the lexicographically smaller endpoint is stored as v.
function edgeArgsToObj(isDirected, v_, w_, name) {
  var v = "" + v_;
  var w = "" + w_;
  if (!isDirected && v > w) {
    var swap = v;
    v = w;
    w = swap;
  }
  var edgeObj = { v: v, w: w };
  if (name) {
    edgeObj.name = name;
  }
  return edgeObj;
}
// Derive the internal edge id from an edge object.
function edgeObjToId(isDirected, edgeObj) {
  var v = edgeObj.v;
  var w = edgeObj.w;
  return edgeArgsToId(isDirected, v, w, edgeObj.name);
}
/***/ }),
/***/ 19943:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
// Includes only the "core" of graphlib
module.exports = {
Graph: __webpack_require__(6583),
version: __webpack_require__(79554)
};
/***/ }),
/***/ 40584:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var _ = __webpack_require__(8570);
var Graph = __webpack_require__(6583);
module.exports = {
write: write,
read: read
};
// Serialize a graph into a plain JSON-compatible object.
function write(g) {
  var json = {
    options: {
      directed: g.isDirected(),
      multigraph: g.isMultigraph(),
      compound: g.isCompound()
    },
    nodes: writeNodes(g),
    edges: writeEdges(g)
  };
  var label = g.graph();
  if (!_.isUndefined(label)) {
    json.value = _.clone(label);
  }
  return json;
}
// Serialize every node as { v, [value], [parent] }.
function writeNodes(g) {
  return _.map(g.nodes(), function(v) {
    var node = { v: v };
    var nodeValue = g.node(v);
    var parent = g.parent(v);
    if (!_.isUndefined(nodeValue)) {
      node.value = nodeValue;
    }
    if (!_.isUndefined(parent)) {
      node.parent = parent;
    }
    return node;
  });
}
// Serialize every edge as { v, w, [name], [value] }.
function writeEdges(g) {
  return _.map(g.edges(), function(e) {
    var edge = { v: e.v, w: e.w };
    var edgeValue = g.edge(e);
    if (!_.isUndefined(e.name)) {
      edge.name = e.name;
    }
    if (!_.isUndefined(edgeValue)) {
      edge.value = edgeValue;
    }
    return edge;
  });
}
// Rebuild a graph from the object produced by write().
function read(json) {
  var g = new Graph(json.options).setGraph(json.value);
  _.each(json.nodes, function(entry) {
    g.setNode(entry.v, entry.value);
    if (entry.parent) {
      g.setParent(entry.v, entry.parent);
    }
  });
  _.each(json.edges, function(entry) {
    var edgeObj = { v: entry.v, w: entry.w, name: entry.name };
    g.setEdge(edgeObj, entry.value);
  });
  return g;
}
/***/ }),
/***/ 8570:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
// eslint-disable-next-line no-redeclare
/* global window */
// Minimal lodash facade: graphlib only needs a handful of helpers, so this
// pulls in individual lodash modules instead of the whole library.
var lodash;
// Bundler-evaluated environment check (original source tests for CommonJS).
if (true) {
try {
lodash = {
clone: __webpack_require__(33543),
constant: __webpack_require__(4254),
each: __webpack_require__(43590),
filter: __webpack_require__(4474),
has: __webpack_require__(38394),
isArray: Array.isArray,
isEmpty: __webpack_require__(99245),
isFunction: __webpack_require__(98423),
isUndefined: __webpack_require__(28801),
keys: __webpack_require__(44799),
map: __webpack_require__(80820),
reduce: __webpack_require__(34766),
size: __webpack_require__(27946),
transform: __webpack_require__(10684),
union: __webpack_require__(96744),
values: __webpack_require__(17720)
};
} catch (e) {
// continue regardless of error
}
}
// Fall back to a page-global lodash (`window._`) when the requires failed.
if (!lodash) {
lodash = window._;
}
module.exports = lodash;
/***/ }),
/***/ 79554:
/***/ ((module) => {
// Version string exposed as require('graphlib').version by the core bundle.
module.exports = '2.1.9-patch.3';
/***/ }),
/***/ 65619:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.buildCallGraph = void 0;
const graphlib_1 = __webpack_require__(39322);
const class_parsing_1 = __webpack_require__(64417);
function getNodeLabel(functionCall, classPerJarMapping) {
  // A call-graph node id looks like "com.ibm.wala.FakeRootClass:fakeRootMethod";
  // the part before the first ':' is the class, the part after is the method.
  const parts = functionCall.split(':');
  const className = parts[0];
  return {
    className,
    functionName: parts[1],
    // Undefined when the class is not provided by a jar on the classpath.
    jarName: classPerJarMapping[className],
  };
}
function buildCallGraph(input, classPerJarMapping) {
  const graph = new graphlib_1.Graph();
  // Each line of the generator output is "<caller> -> <callee>" in WALA's
  // slash/descriptor notation; both sides are normalised (descriptor stripped,
  // slashes turned into dots) before being inserted into the graph.
  for (const line of input.trim().split('\n')) {
    const endpoints = line
      .trim()
      .split(' -> ')
      .map((part) => class_parsing_1.toFQclassName(class_parsing_1.removeParams(part)));
    const [caller, callee] = endpoints;
    graph.setNode(caller, getNodeLabel(caller, classPerJarMapping));
    graph.setNode(callee, getNodeLabel(callee, classPerJarMapping));
    graph.setEdge(caller, callee);
  }
  return graph;
}
exports.buildCallGraph = buildCallGraph;
//# sourceMappingURL=call-graph.js.map
/***/ }),
/***/ 64417:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toFQclassName = exports.removeParams = void 0;
function removeParams(functionCall) {
  // Drop the method descriptor after the first ':', e.g.
  // "com/ibm/wala/FakeRootClass.fakeRootMethod:()V" -> "com/ibm/wala/FakeRootClass.fakeRootMethod"
  const colonIndex = functionCall.indexOf(':');
  return colonIndex === -1 ? functionCall : functionCall.slice(0, colonIndex);
}
exports.removeParams = removeParams;
function toFQclassName(functionCall) {
  // "com/ibm/wala/FakeRootClass.fakeRootMethod" -> "com.ibm.wala.FakeRootClass:fakeRootMethod"
  // Only the FIRST '.' is the class/method separator; '/' separates packages.
  const withMethodSeparator = functionCall.replace('.', ':');
  return withMethodSeparator.split('/').join('.');
}
exports.toFQclassName = toFQclassName;
//# sourceMappingURL=class-parsing.js.map
/***/ }),
/***/ 35034:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.ClassPath = void 0;
const path = __webpack_require__(85622);
// Strips surrounding whitespace and any leading/trailing classpath
// delimiters (':' on POSIX, ';' on Windows) from a raw classpath string.
function canonicalize(rawClasspath) {
  let cleaned = rawClasspath.trim();
  while (cleaned.startsWith(path.delimiter)) {
    cleaned = cleaned.slice(1);
  }
  while (cleaned.endsWith(path.delimiter)) {
    cleaned = cleaned.slice(0, -1);
  }
  return cleaned;
}
// Value object wrapping a canonicalised Java classpath string.
class ClassPath {
  constructor(classPath) {
    this.value = canonicalize(classPath);
  }
  isEmpty() {
    return this.value.length === 0;
  }
  // Set-union of both element lists, preserving first-seen order.
  concat(other) {
    const merged = new Set([
      ...this.value.split(path.delimiter),
      ...other.value.split(path.delimiter),
    ]);
    return new ClassPath([...merged].join(path.delimiter));
  }
  toString() {
    return this.value;
  }
}
exports.ClassPath = ClassPath;
//# sourceMappingURL=classpath.js.map
/***/ }),
/***/ 60725:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
const snykConfig = __webpack_require__(8658);
const path = __webpack_require__(85622);
// Loads package configuration (e.g. CALL_GRAPH_GENERATOR_URL / _CHECKSUM used
// by the java wrapper) from the package root, one directory above this file.
const config = snykConfig.loadConfig(path.join(__dirname, '..'));
module.exports = config;
//# sourceMappingURL=config.js.map
/***/ }),
/***/ 1187:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.debug = void 0;
const debugModule = __webpack_require__(15158);
// To enable debugging output, use `snyk -d`
function debug(s) {
  // DEBUG is read on every call so enabling it mid-process takes effect.
  if (process.env.DEBUG) {
    debugModule.enable(process.env.DEBUG);
  }
  return debugModule(`snyk-java-call-graph-builder`)(s);
}
exports.debug = debug;
//# sourceMappingURL=debug.js.map
/***/ }),
/***/ 38133:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.MalformedModulesSpecError = exports.SubprocessError = exports.SubprocessTimeoutError = exports.MissingTargetFolderError = exports.EmptyClassPathError = exports.ClassPathGenerationError = exports.CallGraphGenerationError = void 0;
class CallGraphGenerationError extends Error {
  /**
   * Top-level failure wrapper for call-graph generation.
   * @param {string} msg user-facing message
   * @param {Error} innerError the underlying cause, kept for diagnostics
   */
  constructor(msg, innerError) {
    super(msg);
    // Restore the prototype chain: transpiled subclasses of built-ins lose it.
    Object.setPrototypeOf(this, CallGraphGenerationError.prototype);
    this.innerError = innerError;
  }
}
exports.CallGraphGenerationError = CallGraphGenerationError;
// Raised when classpath resolution fails (both Maven strategies, or the
// Gradle init-script flow). The original failure is kept as innerError.
class ClassPathGenerationError extends Error {
constructor(innerError) {
super('Class path generation error');
// User-facing remediation text surfaced by the CLI.
this.userMessage = "Could not determine the project's class path. Please contact our support or submit an issue at https://github.com/snyk/java-call-graph-builder/issues. Re-running the command with the `-d` flag will provide useful information for the support engineers.";
// Restore the prototype chain broken by transpiled Error subclassing.
Object.setPrototypeOf(this, ClassPathGenerationError.prototype);
this.innerError = innerError;
}
}
exports.ClassPathGenerationError = ClassPathGenerationError;
// Raised when a classpath command succeeded but produced an empty classpath.
class EmptyClassPathError extends Error {
constructor(command) {
super(`The command "${command}" returned an empty class path`);
// User-facing remediation text surfaced by the CLI.
this.userMessage = 'The class path for the project is empty. Please contact our support or submit an issue at https://github.com/snyk/java-call-graph-builder/issues. Re-running the command with the `-d` flag will provide useful information for the support engineers.';
// Restore the prototype chain broken by transpiled Error subclassing.
Object.setPrototypeOf(this, EmptyClassPathError.prototype);
}
}
exports.EmptyClassPathError = EmptyClassPathError;
// Raised when no build-output directory ("target" for mvn, "build" for
// gradle) exists under the scanned path; the userMessage is chosen per
// package manager.
class MissingTargetFolderError extends Error {
constructor(targetPath, packageManager) {
super(`Could not find the target folder starting in "${targetPath}"`);
this.errorMessagePerPackageManager = {
mvn: "Could not find the project's output directory. Please build your project and try again. " +
'The reachable vulnerabilities feature only supports the default Maven project layout, ' +
"where the output directory is named 'target'.",
gradle: "Could not find the project's target folder. Please compile your code and try again.",
};
// Restore the prototype chain broken by transpiled Error subclassing.
Object.setPrototypeOf(this, MissingTargetFolderError.prototype);
// undefined for an unknown packageManager key.
this.userMessage = this.errorMessagePerPackageManager[packageManager];
}
}
exports.MissingTargetFolderError = MissingTargetFolderError;
// Raised when a spawned command exceeded its timeout (reported in seconds).
class SubprocessTimeoutError extends Error {
constructor(command, args, timeout) {
super(`The command "${command} ${args}" timed out after ${timeout / 1000}s`);
// User-facing remediation text surfaced by the CLI.
this.userMessage = 'Scanning for reachable vulnerabilities took too long. Please use the --reachable-timeout flag to increase the timeout for finding reachable vulnerabilities.';
// Restore the prototype chain broken by transpiled Error subclassing.
Object.setPrototypeOf(this, SubprocessTimeoutError.prototype);
}
}
exports.SubprocessTimeoutError = SubprocessTimeoutError;
class SubprocessError extends Error {
  /**
   * Raised when a spawned command exits with a non-zero code.
   * @param {string} command executable that was run
   * @param {string} args space-joined argument string
   * @param {number} exitCode the process exit code
   * @param {string} [stdError] captured stderr, appended only when non-empty
   */
  constructor(command, args, exitCode, stdError) {
    const stdErrSuffix = stdError ? ', Standard Error Output: ' + stdError : '';
    super(`The command "${command} ${args}" exited with code ${exitCode}${stdErrSuffix}`);
    // Restore the prototype chain: transpiled subclasses of built-ins lose it.
    Object.setPrototypeOf(this, SubprocessError.prototype);
  }
}
exports.SubprocessError = SubprocessError;
class MalformedModulesSpecError extends Error {
  // Raised when the XML listing a Maven project's modules cannot be parsed
  // into the expected shape.
  constructor(modulesXml) {
    super(`Malformed modules XML: ${modulesXml}`);
    // Restore the prototype chain: transpiled subclasses of built-ins lose it.
    Object.setPrototypeOf(this, MalformedModulesSpecError.prototype);
  }
}
exports.MalformedModulesSpecError = MalformedModulesSpecError;
//# sourceMappingURL=errors.js.map
/***/ }),
/***/ 6026:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.fetch = exports.JAR_NAME = void 0;
const tslib_1 = __webpack_require__(70655);
const fs = __webpack_require__(35747);
const path = __webpack_require__(85622);
const needle = __webpack_require__(64484);
const ciInfo = __webpack_require__(25943);
const ProgressBar = __webpack_require__(15157);
const tempDir = __webpack_require__(21661);
const crypto = __webpack_require__(76417);
const debug_1 = __webpack_require__(1187);
const metrics = __webpack_require__(17040);
const promisifedFs = __webpack_require__(12959);
exports.JAR_NAME = 'java-call-graph-generator.jar';
const LOCAL_PATH = path.join(tempDir, 'call-graph-generator', exports.JAR_NAME);
function createProgressBar(total, name) {
  // Progress is ticked in kilobyte units, hence total / 1000 (see the
  // 'data' handler that ticks chunk.length / 1000).
  const template = `downloading ${name} [:bar] :rate/Kbps :percent :etas remaining`;
  return new ProgressBar(template, {
    complete: '=',
    incomplete: '.',
    width: 20,
    total: total / 1000,
    clear: true,
  });
}
// Streams the generator jar from `url` to `localPath`, verifying its sha256
// against `expectedChecksum`. The download goes to "<localPath>.part" first
// and is renamed into place only after the checksum matches.
function downloadAnalyzer(url, localPath, expectedChecksum) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
return new Promise((resolve, reject) => {
const fsStream = fs.createWriteStream(localPath + '.part');
try {
let progressBar;
debug_1.debug(`fetching java graph generator from ${url}`);
const req = needle.get(url);
let matchChecksum;
let hasError = false;
// TODO: Try pump (https://www.npmjs.com/package/pump) for more organised flow
req
.on('response', (res) => tslib_1.__awaiter(this, void 0, void 0, function* () {
if (res.statusCode >= 400) {
const err = new Error('Bad HTTP response for snyk-call-graph-generator download');
// TODO: add custom error for status code => err.statusCode = res.statusCode;
fsStream.destroy();
hasError = true;
return reject(err);
}
// Hash the response stream in parallel with piping it to disk; the
// promise is awaited in the 'finish' handler below.
matchChecksum = verifyChecksum(req, expectedChecksum);
debug_1.debug(`downloading ${exports.JAR_NAME} ...`);
// No progress bar on CI, where a TTY is typically unavailable.
if (!ciInfo.isCI) {
const total = parseInt(res.headers['content-length'], 10);
progressBar = createProgressBar(total, exports.JAR_NAME);
}
}))
.on('data', (chunk) => {
if (progressBar) {
progressBar.tick(chunk.length / 1000);
}
})
.on('error', (err) => {
return reject(err);
})
.pipe(fsStream)
.on('error', (err) => {
fsStream.destroy();
return reject(err);
})
.on('finish', () => tslib_1.__awaiter(this, void 0, void 0, function* () {
if (hasError) {
// The HTTP error path already rejected; just drop the partial file.
yield promisifedFs.unlink(localPath + '.part');
}
else {
if (!(yield matchChecksum)) {
return reject(new Error('Wrong checksum of downloaded call-graph-generator.'));
}
// Move the verified download into its final location.
yield promisifedFs.rename(localPath + '.part', localPath);
resolve(localPath);
}
}));
}
catch (err) {
reject(err);
}
});
});
}
// Resolves true when the sha256 of everything read from `localPathStream`
// equals `expectedChecksum`. Consumes the stream; rejects only on stream
// errors, never on a mere checksum mismatch.
function verifyChecksum(localPathStream, expectedChecksum) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
return new Promise((resolve, reject) => {
const hash = crypto.createHash('sha256');
localPathStream
.on('error', reject)
.on('data', (chunk) => {
hash.update(chunk);
})
.on('end', () => {
resolve(hash.digest('hex') === expectedChecksum);
});
});
});
}
// Returns a local path to the call-graph generator jar, downloading it only
// when the cached copy is missing or its sha256 no longer matches
// `expectedChecksum`. The download itself is timed under the
// 'fetchCallGraphBuilder' metric.
function fetch(url, expectedChecksum) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
const localPath = LOCAL_PATH;
// Reuse a previously downloaded jar when its checksum still matches.
if (yield promisifedFs.exists(localPath)) {
if (yield verifyChecksum(fs.createReadStream(localPath), expectedChecksum)) {
return localPath;
}
debug_1.debug(`new version of ${exports.JAR_NAME} available`);
}
if (!(yield promisifedFs.exists(path.dirname(localPath)))) {
yield promisifedFs.mkdir(path.dirname(localPath));
}
return yield metrics.timeIt('fetchCallGraphBuilder', () => downloadAnalyzer(url, localPath, expectedChecksum));
});
}
exports.fetch = fetch;
//# sourceMappingURL=fetch-snyk-java-call-graph-generator.js.map
/***/ }),
/***/ 85562:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getClassPathFromGradle = exports.getGradleCommandArgs = void 0;
const tslib_1 = __webpack_require__(70655);
__webpack_require__(20406);
const sub_process_1 = __webpack_require__(28219);
const path = __webpack_require__(85622);
const os_1 = __webpack_require__(12087);
const errors_1 = __webpack_require__(38133);
const fs = __webpack_require__(35747);
const tmp = __webpack_require__(21086);
function getGradleCommandArgs(targetPath, initScript, confAttrs) {
  // For binary releases, the original file would be in the binary build and
  // inaccessible to gradle, so the bundled init.gradle is copied to a
  // temporary file first.
  const originalPath = path.join(__dirname, ...'../bin/init.gradle'.split('/'));
  const tmpFilePath = tmp.fileSync().name;
  fs.copyFileSync(originalPath, tmpFilePath);
  const gradleArgs = ['printClasspath', '-I', tmpFilePath, '-q'];
  if (targetPath) {
    gradleArgs.push('-p', targetPath);
  }
  if (initScript) {
    gradleArgs.push('--init-script', initScript);
  }
  if (confAttrs) {
    // Quote the attribute list so the shell passes it through as one token;
    // the quote character differs between Windows and POSIX shells.
    const quote = /^win/.test(os_1.platform()) ? '"' : "'";
    gradleArgs.push(`-PconfAttrs=${quote}${confAttrs}${quote}`);
  }
  return gradleArgs;
}
exports.getGradleCommandArgs = getGradleCommandArgs;
// Resolves the project's classpath by running gradle with the bundled
// printClasspath init script; the classpath is the last line of the output.
// @throws ClassPathGenerationError wrapping any gradle failure.
function getClassPathFromGradle(targetPath, gradlePath, initScript, confAttrs) {
    return tslib_1.__awaiter(this, void 0, void 0, function* () {
        const args = getGradleCommandArgs(targetPath, initScript, confAttrs);
        try {
            const output = yield sub_process_1.execute(gradlePath, args, { cwd: targetPath });
            const lines = output.trim().split(os_1.EOL);
            const lastLine = lines[lines.length - 1];
            return lastLine.trim();
        }
        catch (e) {
            // Fix: removed a leftover `console.log(e)` that polluted stdout of the
            // consuming CLI; the failure is preserved as the error's innerError.
            throw new errors_1.ClassPathGenerationError(e);
        }
    });
}
exports.getClassPathFromGradle = getClassPathFromGradle;
//# sourceMappingURL=gradle-wrapper.js.map
/***/ }),
/***/ 16623:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.findBuildDirs = exports.runtimeMetrics = exports.getCallGraphGradle = exports.getCallGraphMvn = exports.getCallGraphMvnLegacy = void 0;
const tslib_1 = __webpack_require__(70655);
__webpack_require__(20406);
const mvn_wrapper_legacy_1 = __webpack_require__(49505);
const gradle_wrapper_1 = __webpack_require__(85562);
const java_wrapper_1 = __webpack_require__(53042);
const metrics_1 = __webpack_require__(17040);
const errors_1 = __webpack_require__(38133);
const promisified_fs_glob_1 = __webpack_require__(12959);
const path = __webpack_require__(85622);
const mvn_wrapper_1 = __webpack_require__(71418);
const debug_1 = __webpack_require__(1187);
const tmp = __webpack_require__(21086);
tmp.setGracefulCleanup();
// Legacy Maven flow: resolve the dependency classpath and the build
// directories in parallel, then run the java call-graph generator on both.
// Every failure is wrapped in CallGraphGenerationError.
function getCallGraphMvnLegacy(targetPath, timeout, customMavenArgs) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
try {
const [classPath, targets] = yield Promise.all([
metrics_1.timeIt('getMvnClassPath', () => mvn_wrapper_legacy_1.getClassPathFromMvn(targetPath, customMavenArgs)),
metrics_1.timeIt('getEntrypoints', () => findBuildDirs(targetPath, 'mvn')),
]);
return yield metrics_1.timeIt('getCallGraph', () => java_wrapper_1.getCallGraph(classPath, targetPath, targets, timeout));
}
catch (e) {
// Prefer the inner error's user-facing text when it carries one.
throw new errors_1.CallGraphGenerationError(e.userMessage ||
'Failed to scan for reachable vulnerabilities. Please contact our support or submit an issue at https://github.com/snyk/java-call-graph-builder/issues. Re-running the command with the `-d` flag will provide useful information for the support engineers.', e);
}
});
}
exports.getCallGraphMvnLegacy = getCallGraphMvnLegacy;
// Builds the call graph for a Maven project. Prefers the project-model based
// flow (handles multi-module builds); on any failure it logs and falls back
// to the legacy single-classpath flow.
function getCallGraphMvn(targetPath, timeout, customMavenArgs) {
    return tslib_1.__awaiter(this, void 0, void 0, function* () {
        try {
            const project = yield mvn_wrapper_1.makeMavenProject(targetPath, customMavenArgs);
            const classPath = project.getClassPath();
            const buildDirectories = yield Promise.all(project.modules.map((m) => m.buildDirectory));
            return yield metrics_1.timeIt('getCallGraph', () => java_wrapper_1.getCallGraph(classPath, targetPath, buildDirectories, timeout));
        }
        catch (e) {
            // Fix: the debug message previously contained a stray `' + '` fragment
            // left over from converting string concatenation to a template literal.
            debug_1.debug(`Failed to get the call graph for the Maven project in: ${targetPath}. Falling back to the legacy method.`);
            return getCallGraphMvnLegacy(targetPath, timeout, customMavenArgs);
        }
    });
}
exports.getCallGraphMvn = getCallGraphMvn;
// Builds the call graph for a Gradle project: resolves the classpath (via the
// bundled init script) and the build directories in parallel, then runs the
// java generator against both.
function getCallGraphGradle(targetPath, gradlePath = 'gradle', initScript, confAttrs, timeout) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
const [classPath, targets] = yield Promise.all([
metrics_1.timeIt('getGradleClassPath', () => gradle_wrapper_1.getClassPathFromGradle(targetPath, gradlePath, initScript, confAttrs)),
metrics_1.timeIt('getEntrypoints', () => findBuildDirs(targetPath, 'gradle')),
]);
debug_1.debug(`got class path: ${classPath}`);
debug_1.debug(`got targets: ${targets}`);
return yield metrics_1.timeIt('getCallGraph', () => java_wrapper_1.getCallGraph(classPath, targetPath, targets, timeout));
});
}
exports.getCallGraphGradle = getCallGraphGradle;
function runtimeMetrics() {
  // Snapshot of the per-phase timings recorded via the metrics module.
  return metrics_1.getMetrics();
}
exports.runtimeMetrics = runtimeMetrics;
// Globs for every build-output directory under targetPath: "target" for
// Maven, "build" for Gradle (at any depth).
// @throws MissingTargetFolderError when none exist (project not built yet).
function findBuildDirs(targetPath, packageManager) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
const targetFoldersByPackageManager = {
mvn: 'target',
gradle: 'build',
};
const targetDirs = yield promisified_fs_glob_1.glob(path.join(targetPath, `**/${targetFoldersByPackageManager[packageManager]}`));
if (!targetDirs.length) {
throw new errors_1.MissingTargetFolderError(targetPath, packageManager);
}
return targetDirs;
});
}
exports.findBuildDirs = findBuildDirs;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 53042:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getCallGraph = exports.getClassPerJarMapping = exports.getCallGraphGenCommandArgs = void 0;
const tslib_1 = __webpack_require__(70655);
__webpack_require__(20406);
const jszip = __webpack_require__(66085);
const path = __webpack_require__(85622);
const config = __webpack_require__(60725);
const sub_process_1 = __webpack_require__(28219);
const fetch_snyk_java_call_graph_generator_1 = __webpack_require__(6026);
const call_graph_1 = __webpack_require__(65619);
const promisifedFs = __webpack_require__(12959);
const promisified_fs_glob_1 = __webpack_require__(12959);
const class_parsing_1 = __webpack_require__(64417);
const metrics_1 = __webpack_require__(17040);
const tempDir = __webpack_require__(21661);
function getCallGraphGenCommandArgs(classPath, jarPath, targets) {
  // Shape: java -cp <generator jar> io.snyk.callgraph.app.App
  //   --application-classpath-file <file> --dirs-to-get-entrypoints <d1,d2,...>
  const args = ['-cp', jarPath, 'io.snyk.callgraph.app.App'];
  args.push('--application-classpath-file', classPath);
  args.push('--dirs-to-get-entrypoints', targets.join(','));
  return args;
}
exports.getCallGraphGenCommandArgs = getCallGraphGenCommandArgs;
// Builds a map from fully-qualified class name to the jar (classpath entry)
// that provides it, by listing every .class entry inside each jar.
function getClassPerJarMapping(classPath) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
const classPerJarMapping = {};
for (const classPathItem of classPath.split(path.delimiter)) {
// classpath can also contain local directories with classes - we don't need them for package mapping
if (!classPathItem.endsWith('.jar')) {
continue;
}
const jarFileContent = yield promisified_fs_glob_1.readFile(classPathItem);
const jarContent = yield jszip.loadAsync(jarFileContent);
for (const classFile of Object.keys(jarContent.files).filter((name) => name.endsWith('.class'))) {
const className = class_parsing_1.toFQclassName(classFile.replace('.class', '')); // removing .class from name
// Later jars on the classpath overwrite earlier entries for the same class.
classPerJarMapping[className] = classPathItem;
}
}
return classPerJarMapping;
});
}
exports.getClassPerJarMapping = getClassPerJarMapping;
// Orchestrates a call-graph run: fetches the generator jar and writes the
// classpath file in parallel, runs the java generator alongside the
// class-to-jar mapping, and parses the output into a graphlib Graph.
function getCallGraph(classPath, targetPath, targets, timeout) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
const [jarPath, { tmpDir, classPathFile }] = yield Promise.all([
fetch_snyk_java_call_graph_generator_1.fetch(config.CALL_GRAPH_GENERATOR_URL, config.CALL_GRAPH_GENERATOR_CHECKSUM),
writeClassPathToTempDir(classPath),
]);
const callgraphGenCommandArgs = getCallGraphGenCommandArgs(classPathFile, jarPath, targets);
try {
const [javaOutput, classPerJarMapping] = yield Promise.all([
metrics_1.timeIt('generateCallGraph', () => sub_process_1.execute('java', callgraphGenCommandArgs, {
cwd: targetPath,
timeout,
})),
metrics_1.timeIt('mapClassesPerJar', () => getClassPerJarMapping(classPath)),
]);
return call_graph_1.buildCallGraph(javaOutput, classPerJarMapping);
}
finally {
// Fire and forget - we don't have to wait for a deletion of a temporary file
cleanupTempDir(classPathFile, tmpDir);
}
});
}
exports.getCallGraph = getCallGraph;
// Writes the (potentially very long) classpath into a file inside a fresh
// temp directory so it can be passed via --application-classpath-file
// instead of on the command line.
function writeClassPathToTempDir(classPath) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
const tmpDir = yield promisifedFs.mkdtemp(path.join(tempDir, 'call-graph-generator'));
const classPathFile = path.join(tmpDir, 'callgraph-classpath');
yield promisifedFs.writeFile(classPathFile, classPath);
return { tmpDir, classPathFile };
});
}
// Best-effort removal of the temporary classpath file and its directory;
// failures are deliberately swallowed (see the caller's fire-and-forget note).
function cleanupTempDir(classPathFile, tmpDir) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
try {
yield promisifedFs.unlink(classPathFile);
yield promisifedFs.rmdir(tmpDir);
}
catch (_a) {
// we couldn't delete temporary data in temporary folder, no big deal
}
});
}
//# sourceMappingURL=java-wrapper.js.map
/***/ }),
/***/ 17040:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getMetrics = exports.timeIt = void 0;
const tslib_1 = __webpack_require__(70655);
// Per-metric timing store. While a metric is running its entry holds the
// start timestamp written by start(); after stop() it holds the elapsed
// { seconds, nanoseconds }. Additional metric keys are added dynamically.
const metricsState = {
getEntrypoints: { seconds: 0, nanoseconds: 0 },
generateCallGraph: { seconds: 0, nanoseconds: 0 },
mapClassesPerJar: { seconds: 0, nanoseconds: 0 },
getCallGraph: { seconds: 0, nanoseconds: 0 },
};
function start(metric) {
  // Stash the current hrtime; stop() turns it into an elapsed duration.
  const [startSeconds, startNanoseconds] = process.hrtime();
  metricsState[metric] = { seconds: startSeconds, nanoseconds: startNanoseconds };
}
// Replaces the stored start timestamp for `metric` with the elapsed time:
// process.hrtime(prev) returns the [seconds, nanoseconds] delta since prev.
// Falls back to a zero timestamp when start() was never called.
function stop(metric) {
const { seconds, nanoseconds } = metricsState[metric] || {
seconds: 0,
nanoseconds: 0,
};
const [secs, nsecs] = process.hrtime([seconds, nanoseconds]);
metricsState[metric] = { seconds: secs, nanoseconds: nsecs };
}
function getMetrics() {
  // Convert each accumulated { seconds, nanoseconds } pair into fractional
  // seconds; falsy entries are skipped.
  const metrics = {};
  Object.entries(metricsState).forEach(([metric, value]) => {
    if (value) {
      metrics[metric] = value.seconds + value.nanoseconds / 1e9;
    }
  });
  return metrics;
}
exports.getMetrics = getMetrics;
// Runs `fn` and records its duration under `metric`.
// @returns whatever fn() resolves to; rejections propagate unchanged.
function timeIt(metric, fn) {
    return tslib_1.__awaiter(this, void 0, void 0, function* () {
        start(metric);
        try {
            return yield fn();
        }
        finally {
            // Fix: stop the timer even when fn() rejects. Previously a rejection
            // left the raw start timestamp in metricsState, so getMetrics()
            // reported a bogus, huge value for this metric.
            stop(metric);
        }
    });
}
exports.timeIt = timeIt;
//# sourceMappingURL=metrics.js.map
/***/ }),
/***/ 49505:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getClassPathFromMvn = exports.mergeMvnClassPaths = exports.parseMvnExecCommandOutput = exports.parseMvnDependencyPluginCommandOutput = exports.getMvnCommandArgsForMvnExec = void 0;
const tslib_1 = __webpack_require__(70655);
__webpack_require__(20406);
const sub_process_1 = __webpack_require__(28219);
const errors_1 = __webpack_require__(38133);
const path = __webpack_require__(85622);
const os = __webpack_require__(12087);
function getMvnCommandArgsForMvnExec(targetPath) {
  // Ask Maven to echo %classpath via exec:exec; only the echo executable and
  // its argument quoting differ between Windows and POSIX.
  const isWindows = process.platform === 'win32';
  const echoArgs = isWindows
    ? ['-Dexec.executable="cmd"', '-Dexec.args="/c echo %classpath"']
    : ['-Dexec.executable="echo"', '-Dexec.args="%classpath"'];
  return [
    '-q',
    'exec:exec',
    '-Dexec.classpathScope="compile"',
    ...echoArgs,
    '-f',
    targetPath,
  ];
}
exports.getMvnCommandArgsForMvnExec = getMvnCommandArgsForMvnExec;
function getMvnCommandArgsForDependencyPlugin(targetPath) {
  // Fallback strategy: the dependency plugin prints the classpath to the log.
  const args = ['dependency:build-classpath'];
  args.push('-f', targetPath);
  return args;
}
// Extracts the classpath lines that follow each "[INFO] Dependencies
// classpath:" marker in the dependency:build-classpath log output.
// @returns the unique classpath lines, sorted for deterministic ordering.
function parseMvnDependencyPluginCommandOutput(mvnCommandOutput) {
    const marker = '[INFO] Dependencies classpath:';
    const outputLines = mvnCommandOutput.split(os.EOL);
    const uniqueClassPaths = new Set();
    let i = outputLines.indexOf(marker);
    while (i > -1) {
        const classPathLine = outputLines[i + 1];
        // Fix: guard against a marker on the very last line — outputLines[i + 1]
        // is undefined there and was previously added to the result set.
        if (typeof classPathLine === 'string' && classPathLine !== '') {
            uniqueClassPaths.add(classPathLine);
        }
        i = outputLines.indexOf(marker, i + 2);
    }
    return Array.from(uniqueClassPaths.values()).sort();
}
exports.parseMvnDependencyPluginCommandOutput = parseMvnDependencyPluginCommandOutput;
function parseMvnExecCommandOutput(mvnCommandOutput) {
  // With -q, the exec output is one classpath per line; sort for a
  // deterministic result.
  const trimmed = mvnCommandOutput.trim();
  return trimmed.split(os.EOL).sort();
}
exports.parseMvnExecCommandOutput = parseMvnExecCommandOutput;
function mergeMvnClassPaths(classPaths) {
  // Join all classpaths with the platform delimiter, re-split into elements,
  // dedupe, sort, and re-join so the result is one well-formed classpath.
  const allElements = classPaths.join(path.delimiter).split(path.delimiter);
  const uniqueElements = [...new Set(allElements)];
  return uniqueElements.sort().join(path.delimiter);
}
exports.mergeMvnClassPaths = mergeMvnClassPaths;
// Resolves the Maven project's dependency classpath, preferring `mvn exec`
// and falling back to dependency:build-classpath.
// @throws ClassPathGenerationError when both strategies fail
// @throws EmptyClassPathError when a strategy succeeds but yields nothing
function getClassPathFromMvn(targetPath, customMavenArgs = []) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
let classPaths = [];
let args = [];
try {
try {
// there are two ways of getting classpath - either from maven plugin or by exec command
// try `mvn exec` for classpath
args = getMvnCommandArgsForMvnExec(targetPath).concat(customMavenArgs);
const output = yield sub_process_1.execute('mvn', args, { cwd: targetPath });
classPaths = parseMvnExecCommandOutput(output);
}
catch (e) {
// if it fails, try mvn dependency:build-classpath
// TODO send error message for further analysis
args = getMvnCommandArgsForDependencyPlugin(targetPath).concat(customMavenArgs);
const output = yield sub_process_1.execute('mvn', args, { cwd: targetPath });
classPaths = parseMvnDependencyPluginCommandOutput(output);
}
}
catch (e) {
throw new errors_1.ClassPathGenerationError(e);
}
if (classPaths.length === 0) {
// args still holds the last attempted command for the error message.
throw new errors_1.EmptyClassPathError(`mvn ${args.join(' ')}`);
}
return mergeMvnClassPaths(classPaths);
});
}
exports.getClassPathFromMvn = getClassPathFromMvn;
//# sourceMappingURL=mvn-wrapper-legacy.js.map
/***/ }),
/***/ 71418:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.makeMavenProject = exports.makeMavenModule = exports.MavenProject = exports.MavenModule = exports.parseModuleNames = exports.getDepsClassPath = exports.getOutputDir = exports.getBuildDir = exports.withOutputToTemporaryFile = void 0;
const tslib_1 = __webpack_require__(70655);
__webpack_require__(20406);
const path = __webpack_require__(85622);
const fs = __webpack_require__(35747);
const xmlJs = __webpack_require__(7888);
const classpath_1 = __webpack_require__(35034);
const tmp = __webpack_require__(21086);
const sub_process_1 = __webpack_require__(28219);
const errors_1 = __webpack_require__(38133);
const metrics_1 = __webpack_require__(17040);
const debug_1 = __webpack_require__(1187);
// Low level helper functions
// Runs `f` with the name of a fresh temporary file and returns the file's
// contents afterwards.
function withOutputToTemporaryFile(f) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
// NOTE(alexmu): We have to do this little dance with output written to files
// because that seems to be the only way to get the output without having to
// parse maven logs
const file = tmp.fileSync({ discardDescriptor: true });
try {
yield f(file.name);
}
catch (e) {
debug_1.debug(`Failed to execute command with temporary file: ${e}`);
throw e;
}
try {
return fs.readFileSync(file.name, 'utf8');
}
catch (e) {
debug_1.debug(`Failed to read temporary file: ${e}`);
throw e;
}
});
}
exports.withOutputToTemporaryFile = withOutputToTemporaryFile;
function runCommand(projectDirectory, args) {
  // Every mvn invocation targets the project's pom via -f and runs from the
  // project directory itself.
  const fullArgs = [...args, '-f', projectDirectory];
  return sub_process_1.execute('mvn', fullArgs, { cwd: projectDirectory });
}
// Domain specific helpers
// Evaluates a Maven expression (e.g. "project.build.directory") via
// help:evaluate, capturing the value through -Doutput into a temporary file
// instead of parsing the build log.
function evaluateExpression(projectDirectory, expression, customMavenArgs = []) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
return yield withOutputToTemporaryFile((outputFile) => tslib_1.__awaiter(this, void 0, void 0, function* () {
yield runCommand(projectDirectory, [
'help:evaluate',
`-Dexpression="${expression}"`,
`-Doutput=${outputFile}`,
...customMavenArgs,
]);
}));
});
}
// Resolves the module's project.build.directory (typically <baseDir>/target).
function getBuildDir(baseDir, customMavenArgs) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
return yield evaluateExpression(baseDir, 'project.build.directory', customMavenArgs);
});
}
exports.getBuildDir = getBuildDir;
// Resolves the module's project.build.outputDirectory (compiled classes).
function getOutputDir(baseDir, customMavenArgs) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
return yield evaluateExpression(baseDir, 'project.build.outputDirectory', customMavenArgs);
});
}
exports.getOutputDir = getOutputDir;
// Resolves the module's dependency classpath via dependency:build-classpath,
// written to a temp file through -Dmdep.outputFile, wrapped as a ClassPath.
function getDepsClassPath(baseDir, customMavenArgs = []) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
const classPath = yield withOutputToTemporaryFile((outputFile) => tslib_1.__awaiter(this, void 0, void 0, function* () {
yield runCommand(baseDir, [
'dependency:build-classpath',
`-Dmdep.outputFile=${outputFile}`,
...customMavenArgs,
]);
}));
return new classpath_1.ClassPath(classPath);
});
}
exports.getDepsClassPath = getDepsClassPath;
// Parses the XML produced by evaluating "project.modules" into the list of
// module names; an empty <modules/> element yields [].
// @throws MalformedModulesSpecError for any other shape.
function parseModuleNames(modulesXml) {
    const modulesSpec = xmlJs.xml2js(modulesXml, { compact: true });
    if ('strings' in modulesSpec && 'string' in modulesSpec['strings']) {
        debug_1.debug(`Found 'strings' in the modules XML`);
        // Fix: with { compact: true }, xml-js yields a plain object for a single
        // <string> child and an array only for repeated children — normalise so
        // single-module projects don't crash on .map().
        const entries = modulesSpec['strings']['string'];
        const entryList = Array.isArray(entries) ? entries : [entries];
        return entryList.map((s) => s['_text']);
    }
    else if ('modules' in modulesSpec) {
        debug_1.debug(`Empty modules XML`);
        return [];
    }
    else {
        throw new errors_1.MalformedModulesSpecError(modulesXml);
    }
}
exports.parseModuleNames = parseModuleNames;
// Maven model
class MavenModule {
  /**
   * One Maven module: its directory layout plus its dependency classpath.
   * @param {string} baseDir module root directory
   * @param {string} buildDirectory resolved project.build.directory
   * @param {string} outputDirectory resolved project.build.outputDirectory
   * @param {ClassPath} dependenciesClassPath the module's dependency classpath
   * @throws {Error} when any input is present but empty
   */
  constructor(baseDir, buildDirectory, outputDirectory, dependenciesClassPath) {
    if (buildDirectory != null && buildDirectory.length === 0) {
      throw new Error(`Empty build directory for the project in: ${baseDir}`);
    }
    if (outputDirectory != null && outputDirectory.length === 0) {
      throw new Error(`Empty output directory for the project in: ${baseDir}`);
    }
    if (dependenciesClassPath != null && dependenciesClassPath.isEmpty()) {
      throw new Error(`Empty dependencies for the project in: ${baseDir}`);
    }
    this.baseDirectory = baseDir;
    this.buildDirectory = buildDirectory;
    this.outputDirectory = outputDirectory;
    this.dependenciesClassPath = dependenciesClassPath;
  }
  // The module's full classpath: dependencies plus its own compiled classes.
  getClassPath() {
    debug_1.debug(`Dependencies class path: ${this.dependenciesClassPath}`);
    debug_1.debug(`Output directory: ${this.outputDirectory}`);
    return this.dependenciesClassPath.concat(new classpath_1.ClassPath(this.outputDirectory));
  }
}
exports.MavenModule = MavenModule;
class MavenProject {
  /**
   * A Maven reactor: the root project plus its resolved module list.
   * @throws {Error} when an explicitly empty module list is supplied
   */
  constructor(baseDir, modules) {
    if (modules != null && modules.length === 0) {
      throw new Error(`Empty module list for the project in: ${baseDir}`);
    }
    this.baseDir = baseDir;
    this.modules = modules;
  }
  // Concatenates every module's classpath into one delimiter-joined string.
  getClassPath() {
    const moduleClassPaths = this.modules.map((mod) => mod.getClassPath());
    const combined = moduleClassPaths.reduce((left, right) => left.concat(right));
    debug_1.debug(`Project class path: ${combined}`);
    return combined.toString();
  }
}
exports.MavenProject = MavenProject;
// Factories that deal with the low level details
// Resolves one module's build dir, output dir and dependency classpath (the
// classpath resolution is timed under the 'getMvnClassPath' metric).
function makeMavenModule(baseDir, args) {
return tslib_1.__awaiter(this, void 0, void 0, function* () {
const buildDir = yield getBuildDir(baseDir, args);
const outputDir = yield getOutputDir(baseDir, args);
const depsClassPath = yield metrics_1.timeIt('getMvnClassPath', () => tslib_1.__awaiter(this, void 0, void 0, function* () {
return yield getDepsClassPath(baseDir, args);
}));
return new MavenModule(baseDir, buildDir, outputDir, depsClassPath);
});
}
exports.makeMavenModule = makeMavenModule;
// Builds the project model: the root module plus one module per entry of
// "project.modules", keeping only modules whose build directory exists on
// disk (aggregator-only modules are skipped).
function makeMavenProject(baseDir, customMavenArgs) {
    return tslib_1.__awaiter(this, void 0, void 0, function* () {
        const modulesXml = yield evaluateExpression(baseDir, 'project.modules', customMavenArgs);
        const moduleNames = parseModuleNames(modulesXml);
        const modules = [yield makeMavenModule(baseDir, customMavenArgs)];
        // Fix: forward customMavenArgs to the submodules too; they were
        // previously dropped, so custom flags only applied to the root module.
        const submodules = yield Promise.all(moduleNames.map((name) => makeMavenModule(path.join(baseDir, name), customMavenArgs)));
        modules.push(...submodules);
        const validModules = modules.filter((module) => fs.existsSync(module.buildDirectory));
        return new MavenProject(baseDir, validModules);
    });
}
exports.makeMavenProject = makeMavenProject;
//# sourceMappingURL=mvn-wrapper.js.map
/***/ }),
/***/ 12959:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
// Promisified wrappers over the callback-style fs API plus glob, so callers
// can use async/await instead of callbacks.
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.glob = exports.rmdir = exports.writeFile = exports.readFile = exports.mkdtemp = exports.mkdir = exports.unlink = exports.rename = exports.exists = void 0;
const util_1 = __webpack_require__(31669);
const fs = __webpack_require__(35747);
const globOrig = __webpack_require__(12884);
// fs.exists does not follow the (err, result) callback convention; promisify
// still produces a working wrapper because fs.exists defines
// util.promisify.custom.
exports.exists = util_1.promisify(fs.exists);
exports.rename = util_1.promisify(fs.rename);
exports.unlink = util_1.promisify(fs.unlink);
exports.mkdir = util_1.promisify(fs.mkdir);
exports.mkdtemp = util_1.promisify(fs.mkdtemp);
exports.readFile = util_1.promisify(fs.readFile);
exports.writeFile = util_1.promisify(fs.writeFile);
exports.rmdir = util_1.promisify(fs.rmdir);
exports.glob = util_1.promisify(globOrig);
//# sourceMappingURL=promisified-fs-glob.js.map
/***/ }),
/***/ 28219:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.execute = void 0;
const childProcess = __webpack_require__(63129);
const debug_1 = __webpack_require__(1187);
const errors_1 = __webpack_require__(38133);
/**
 * Runs `command args...` through a shell and resolves with its stdout.
 *
 * Rejects with SubprocessError when the process exits non-zero (carrying a
 * shortened stderr excerpt), or with SubprocessTimeoutError when
 * options.timeout milliseconds elapse first (the process is killed).
 *
 * @param {string} command executable or shell command
 * @param {string[]} args arguments, joined for logging and error messages
 * @param {{cwd?: string, timeout?: number}} [options]
 * @returns {Promise<string>} the accumulated stdout
 */
function execute(command, args, options) {
    const spawnOptions = { shell: true };
    if (options && options.cwd) {
        spawnOptions.cwd = options.cwd;
    }
    return new Promise((resolve, reject) => {
        let collectedStdout = '';
        let collectedStderr = '';
        debug_1.debug(`executing command: "${command} ${args.join(' ')}"`);
        const child = childProcess.spawn(command, args, spawnOptions);
        // Optional watchdog: kill the subprocess and reject once the timeout fires.
        let watchdog = null;
        if (options?.timeout) {
            watchdog = setTimeout(() => {
                child.kill();
                const timeoutErr = new errors_1.SubprocessTimeoutError(command, args.join(' '), options.timeout || 0);
                debug_1.debug(timeoutErr.message);
                reject(timeoutErr);
            }, options.timeout);
        }
        child.stdout.on('data', (chunk) => {
            collectedStdout += chunk;
        });
        child.stderr.on('data', (chunk) => {
            collectedStderr += chunk;
        });
        child.on('close', (exitCode) => {
            if (watchdog !== null) {
                clearTimeout(watchdog);
            }
            if (exitCode !== 0) {
                // Keep only the first few stack-trace lines so messages stay short.
                const trimmedStackTrace = collectedStderr
                    .replace(/\t/g, '')
                    .split('\n')
                    .slice(0, 5)
                    .join(', ');
                const subprocessErr = new errors_1.SubprocessError(command, args.join(' '), exitCode, trimmedStackTrace);
                debug_1.debug(subprocessErr.message);
                return reject(subprocessErr);
            }
            resolve(collectedStdout);
        });
    });
}
exports.execute = execute;
//# sourceMappingURL=sub-process.js.map
/***/ }),
/***/ 25943:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
// ci-info: exposes booleans describing whether the current process appears to
// run on a known CI service, based solely on environment variables declared in
// the vendors list.
var vendors = __webpack_require__(38184)
var env = process.env
// Used for testing only
Object.defineProperty(exports, "_vendors", ({
value: vendors.map(function (v) { return v.constant })
}))
// Defaults when no vendor matches: name stays null, PR status unknown.
exports.name = null
exports.isPR = null
vendors.forEach(function (vendor) {
var envs = Array.isArray(vendor.env) ? vendor.env : [vendor.env]
// A vendor matches only when every one of its env requirements is satisfied.
var isCI = envs.every(function (obj) {
return checkEnv(obj)
})
exports[vendor.constant] = isCI
if (isCI) {
exports.name = vendor.name
// The vendor's "pr" descriptor comes in several shapes; each shape maps to a
// different way of detecting a pull-request build.
switch (typeof vendor.pr) {
case 'string':
// "pr": "CIRRUS_PR"
exports.isPR = !!env[vendor.pr]
break
case 'object':
if ('env' in vendor.pr) {
// "pr": { "env": "BUILDKITE_PULL_REQUEST", "ne": "false" }
exports.isPR = vendor.pr.env in env && env[vendor.pr.env] !== vendor.pr.ne
} else if ('any' in vendor.pr) {
// "pr": { "any": ["ghprbPullId", "CHANGE_ID"] }
exports.isPR = vendor.pr.any.some(function (key) {
return !!env[key]
})
} else {
// "pr": { "DRONE_BUILD_EVENT": "pull_request" }
exports.isPR = checkEnv(vendor.pr)
}
break
default:
// PR detection not supported for this vendor
exports.isPR = null
}
}
})
// Generic fallback: common CI env vars even when no specific vendor matched.
exports.isCI = !!(
env.CI || // Travis CI, CircleCI, Cirrus CI, Gitlab CI, Appveyor, CodeShip, dsari
env.CONTINUOUS_INTEGRATION || // Travis CI, Cirrus CI
env.BUILD_NUMBER || // Jenkins, TeamCity
env.RUN_ID || // TaskCluster, dsari
exports.name ||
false
)
// A string requirement means "the variable is set"; an object means every
// listed variable must equal the given value.
function checkEnv (obj) {
if (typeof obj === 'string') return !!env[obj]
return Object.keys(obj).every(function (k) {
return env[k] === obj[k]
})
}
/***/ }),
/***/ 39445:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const assert = __webpack_require__(42357)
const path = __webpack_require__(85622)
const fs = __webpack_require__(35747)
let glob = undefined
try {
glob = __webpack_require__(12884)
} catch (_err) {
// treat glob as optional.
}
const defaultGlobOpts = {
nosort: true,
silent: true
}
// for EMFILE handling
let timeout = 0
const isWindows = (process.platform === "win32")
// Fills in missing fs methods, retry limits and glob settings on the caller's
// options object (mutated in place, as callers expect).
const defaults = options => {
    for (const base of ['unlink', 'chmod', 'stat', 'lstat', 'rmdir', 'readdir']) {
        options[base] = options[base] || fs[base]
        const syncName = base + 'Sync'
        options[syncName] = options[syncName] || fs[syncName]
    }
    options.maxBusyTries = options.maxBusyTries || 3
    options.emfileWait = options.emfileWait || 1000
    // `glob: false` is shorthand for disabling glob expansion entirely.
    if (options.glob === false) {
        options.disableGlob = true
    }
    if (options.disableGlob !== true && glob === undefined) {
        throw Error('glob dependency not found, set `options.disableGlob = true` if intentional')
    }
    options.disableGlob = options.disableGlob || false
    options.glob = options.glob || defaultGlobOpts
}
// Asynchronously removes p, which may be a literal path or (unless
// options.disableGlob) a glob pattern. Every match is removed independently;
// the first error seen, if any, is reported to cb after all matches finish.
const rimraf = (p, options, cb) => {
if (typeof options === 'function') {
cb = options
options = {}
}
assert(p, 'rimraf: missing path')
assert.equal(typeof p, 'string', 'rimraf: path should be a string')
assert.equal(typeof cb, 'function', 'rimraf: callback function required')
assert(options, 'rimraf: invalid options argument provided')
assert.equal(typeof options, 'object', 'rimraf: options should be object')
defaults(options)
let busyTries = 0
let errState = null
let n = 0
// Invoked once per match; fires cb with the first recorded error once the
// last match has completed.
const next = (er) => {
errState = errState || er
if (--n === 0)
cb(errState)
}
const afterGlob = (er, results) => {
if (er)
return cb(er)
n = results.length
if (n === 0)
return cb()
results.forEach(p => {
const CB = (er) => {
if (er) {
// Busy/permission errors are retried with linear backoff, up to
// options.maxBusyTries attempts.
if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") &&
busyTries < options.maxBusyTries) {
busyTries ++
// try again, with the same exact callback as this one.
return setTimeout(() => rimraf_(p, options, CB), busyTries * 100)
}
// this one won't happen if graceful-fs is used.
if (er.code === "EMFILE" && timeout < options.emfileWait) {
return setTimeout(() => rimraf_(p, options, CB), timeout ++)
}
// already gone
if (er.code === "ENOENT") er = null
}
// Reset the module-level EMFILE backoff once an attempt completes.
timeout = 0
next(er)
}
rimraf_(p, options, CB)
})
}
// Fast path: a plain path (or globbing disabled) skips glob entirely, and an
// lstat hit means p exists literally even if it contains magic characters.
if (options.disableGlob || !glob.hasMagic(p))
return afterGlob(null, [p])
options.lstat(p, (er, stat) => {
if (!er)
return afterGlob(null, [p])
glob(p, options.glob, afterGlob)
})
}
// Two possible strategies.
// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
//
// Both result in an extra syscall when you guess wrong. However, there
// are likely far more normal files in the world than directories. This
// is based on the assumption that the average number of files per
// directory is >= 1.
//
// If anyone ever complains about this, then I guess the strategy could
// be made configurable somehow. But until then, YAGNI.
// Removes a single path. Strategy: assume a file and unlink first, falling
// back to rmdir on the error codes that indicate a directory. cb receives
// null on success (including "already gone") or the failing error.
const rimraf_ = (p, options, cb) => {
    assert(p)
    assert(options)
    assert(typeof cb === 'function')
    // sunos lets the root user unlink directories, which is... weird.
    // so we have to lstat here and make sure it's not a dir.
    options.lstat(p, (er, st) => {
        if (er && er.code === "ENOENT")
            return cb(null)
        // Windows can EPERM on stat. Life is suffering.
        // Return here: previously this fell through after fixWinEPERM, letting
        // the unlink path below invoke cb a second time.
        if (er && er.code === "EPERM" && isWindows)
            return fixWinEPERM(p, options, er, cb)
        if (st && st.isDirectory())
            return rmdir(p, options, er, cb)
        options.unlink(p, er => {
            if (er) {
                if (er.code === "ENOENT")
                    return cb(null)
                if (er.code === "EPERM")
                    return (isWindows)
                        ? fixWinEPERM(p, options, er, cb)
                        : rmdir(p, options, er, cb)
                if (er.code === "EISDIR")
                    return rmdir(p, options, er, cb)
            }
            return cb(er)
        })
    })
}
// Windows EPERM workaround: make the entry writable, re-stat it, then remove
// it as a directory or a file. er is the original error to surface when the
// workaround itself fails for a reason other than "already gone".
const fixWinEPERM = (p, options, er, cb) => {
    assert(p)
    assert(options)
    assert(typeof cb === 'function')
    options.chmod(p, 0o666, chmodErr => {
        if (chmodErr) {
            cb(chmodErr.code === "ENOENT" ? null : er)
            return
        }
        options.stat(p, (statErr, stats) => {
            if (statErr) {
                cb(statErr.code === "ENOENT" ? null : er)
            } else if (stats.isDirectory()) {
                rmdir(p, options, er, cb)
            } else {
                options.unlink(p, cb)
            }
        })
    })
}
// Synchronous twin of fixWinEPERM: chmod the entry writable, stat it, then
// rmdirSync or unlinkSync. ENOENT at any step means "already gone" (no-op);
// any other failure rethrows the original error er.
const fixWinEPERMSync = (p, options, er) => {
    assert(p)
    assert(options)
    try {
        options.chmodSync(p, 0o666)
    } catch (chmodErr) {
        if (chmodErr.code === "ENOENT")
            return
        throw er
    }
    let stats
    try {
        stats = options.statSync(p)
    } catch (statErr) {
        if (statErr.code === "ENOENT")
            return
        throw er
    }
    if (stats.isDirectory()) {
        rmdirSync(p, options, er)
    } else {
        options.unlinkSync(p)
    }
}
// Removes a directory. Tries rmdir first; a not-empty style error triggers a
// recursive child removal via rmkids, while ENOTDIR means the earlier guess
// was wrong and the original error is surfaced instead.
const rmdir = (p, options, originalEr, cb) => {
    assert(p)
    assert(options)
    assert(typeof cb === 'function')
    options.rmdir(p, rmdirErr => {
        if (!rmdirErr) {
            cb(rmdirErr)
            return
        }
        switch (rmdirErr.code) {
            case "ENOTEMPTY":
            case "EEXIST": // SunOS reports a non-empty dir as EEXIST
            case "EPERM":
                rmkids(p, options, cb)
                break
            case "ENOTDIR":
                cb(originalEr)
                break
            default:
                cb(rmdirErr)
        }
    })
}
// Removes every child of directory p in parallel via rimraf, then removes the
// directory itself. The first child error wins and is reported exactly once.
const rmkids = (p, options, cb) => {
    assert(p)
    assert(options)
    assert(typeof cb === 'function')
    options.readdir(p, (readdirErr, entries) => {
        if (readdirErr)
            return cb(readdirErr)
        let pending = entries.length
        // An already-empty directory can be removed right away.
        if (pending === 0)
            return options.rmdir(p, cb)
        let firstError
        entries.forEach(entry => {
            rimraf(path.join(p, entry), options, childErr => {
                // After the first error, later completions are ignored.
                if (firstError)
                    return
                if (childErr)
                    return cb(firstError = childErr)
                // Remove the directory itself once the last child is gone.
                if (--pending === 0)
                    options.rmdir(p, cb)
            })
        })
    })
}
// this looks simpler, and is strictly *faster*, but will
// tie up the JavaScript thread and fail on excessively
// deep directory trees.
// Synchronously removes p (path or glob pattern). Simpler and faster than the
// async version, but ties up the JS thread and can overflow the stack on very
// deep trees (see comment above).
const rimrafSync = (p, options) => {
options = options || {}
defaults(options)
assert(p, 'rimraf: missing path')
assert.equal(typeof p, 'string', 'rimraf: path should be a string')
assert(options, 'rimraf: missing options')
assert.equal(typeof options, 'object', 'rimraf: options should be object')
let results
// Same fast path as the async version: literal paths (or disabled globbing,
// or an existing path that merely looks like a pattern) skip glob.sync.
if (options.disableGlob || !glob.hasMagic(p)) {
results = [p]
} else {
try {
options.lstatSync(p)
results = [p]
} catch (er) {
results = glob.sync(p, options.glob)
}
}
if (!results.length)
return
for (let i = 0; i < results.length; i++) {
const p = results[i]
let st
try {
st = options.lstatSync(p)
} catch (er) {
// Already gone: stop (mirrors the upstream rimraf behavior of returning,
// not continuing with the remaining matches).
if (er.code === "ENOENT")
return
// Windows can EPERM on stat. Life is suffering.
if (er.code === "EPERM" && isWindows)
fixWinEPERMSync(p, options, er)
}
try {
// sunos lets the root user unlink directories, which is... weird.
if (st && st.isDirectory())
rmdirSync(p, options, null)
else
options.unlinkSync(p)
} catch (er) {
if (er.code === "ENOENT")
return
if (er.code === "EPERM")
return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
if (er.code !== "EISDIR")
throw er
// EISDIR: the file guess was wrong, remove as a directory instead.
rmdirSync(p, options, er)
}
}
}
// Synchronously removes directory p. ENOENT is ignored, ENOTDIR rethrows the
// caller's original error, and not-empty style errors fall back to clearing
// the children first via rmkidsSync.
const rmdirSync = (p, options, originalEr) => {
    assert(p)
    assert(options)
    try {
        options.rmdirSync(p)
        return
    } catch (er) {
        const code = er.code
        if (code === "ENOENT") {
            // Already gone: nothing to do.
            return
        }
        if (code === "ENOTDIR") {
            // The directory guess was wrong: surface the original error.
            throw originalEr
        }
        if (code === "ENOTEMPTY" || code === "EEXIST" || code === "EPERM") {
            rmkidsSync(p, options)
        }
    }
}
// Synchronously removes every child of p, then removes p itself, retrying the
// final rmdir on Windows where handle-release lag causes spurious ENOTEMPTY.
const rmkidsSync = (p, options) => {
assert(p)
assert(options)
options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))
// We only end up here once we got ENOTEMPTY at least once, and
// at this point, we are guaranteed to have removed all the kids.
// So, we know that it won't be ENOENT or ENOTDIR or anything else.
// try really hard to delete stuff on windows, because it has a
// PROFOUNDLY annoying habit of not closing handles promptly when
// files are deleted, resulting in spurious ENOTEMPTY errors.
const retries = isWindows ? 100 : 1
let i = 0
do {
let threw = true
try {
// NOTE(review): options is passed as rmdirSync's second argument; fs.rmdirSync
// ignores it, but a custom options.rmdirSync might not — confirm intent.
const ret = options.rmdirSync(p, options)
threw = false
return ret
} finally {
// `continue` inside finally swallows the pending exception and retries the
// loop while attempts remain; the last failure propagates normally.
if (++i < retries && threw)
continue
}
} while (true)
}
module.exports = rimraf
rimraf.sync = rimrafSync
/***/ }),
/***/ 21086:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
/*!
* Tmp
*
* Copyright (c) 2011-2017 KARASZI Istvan
*
* MIT Licensed
*/
/*
* Module dependencies.
*/
const fs = __webpack_require__(35747);
const os = __webpack_require__(12087);
const path = __webpack_require__(85622);
const crypto = __webpack_require__(76417);
const _c = { fs: fs.constants, os: os.constants };
const rimraf = __webpack_require__(39445);
/*
* The working inner variables.
*/
const
// the random characters to choose from
RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',
TEMPLATE_PATTERN = /XXXXXX/,
DEFAULT_TRIES = 3,
CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR),
// constants are off on the windows platform and will not match the actual errno codes
IS_WIN32 = os.platform() === 'win32',
EBADF = _c.EBADF || _c.os.errno.EBADF,
ENOENT = _c.ENOENT || _c.os.errno.ENOENT,
DIR_MODE = 0o700 /* 448 */,
FILE_MODE = 0o600 /* 384 */,
EXIT = 'exit',
// this will hold the objects need to be removed on exit
_removeObjects = [],
// API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback
FN_RMDIR_SYNC = fs.rmdirSync.bind(fs),
FN_RIMRAF_SYNC = rimraf.sync;
let
_gracefulCleanup = false;
/**
* Gets a temporary file name.
*
* @param {(Options|tmpNameCallback)} options options or callback
* @param {?tmpNameCallback} callback the callback function
*/
function tmpName(options, callback) {
const
args = _parseArguments(options, callback),
opts = args[0],
cb = args[1];
// Option validation failures are reported through the callback, not thrown.
try {
_assertAndSanitizeOptions(opts);
} catch (err) {
return cb(err);
}
let tries = opts.tries;
// Generate candidate names and stat each one until a name is found that does
// not exist yet, or the tries budget is exhausted.
(function _getUniqueName() {
try {
const name = _generateTmpName(opts);
// check whether the path exists then retry if needed
fs.stat(name, function (err) {
/* istanbul ignore else */
if (!err) {
// The path already exists; retry with a fresh name while tries remain.
/* istanbul ignore else */
if (tries-- > 0) return _getUniqueName();
return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name));
}
// stat failed, so the name is unused and can be handed to the caller.
cb(null, name);
});
} catch (err) {
cb(err);
}
}());
}
/**
* Synchronous version of tmpName.
*
* @param {Object} options
* @returns {string} the generated random name
* @throws {Error} if the options are invalid or could not generate a filename
*/
/**
 * Synchronous version of tmpName.
 *
 * Makes opts.tries + 1 attempts (matching the async variant's post-decrement
 * loop) to find a name whose statSync fails, i.e. an unused path.
 *
 * @param {Object} options
 * @returns {string} the generated random name
 * @throws {Error} if the options are invalid or no free name was found
 */
function tmpNameSync(options) {
    const opts = _parseArguments(options)[0];
    _assertAndSanitizeOptions(opts);
    for (let remaining = opts.tries; remaining >= 0; remaining--) {
        const candidate = _generateTmpName(opts);
        try {
            fs.statSync(candidate);
        } catch (e) {
            // stat failed: the path does not exist, so the name is free.
            return candidate;
        }
    }
    throw new Error('Could not get a unique tmp filename, max tries reached');
}
/**
* Creates and opens a temporary file.
*
* @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined
* @param {?fileCallback} callback
*/
function file(options, callback) {
const
args = _parseArguments(options, callback),
opts = args[0],
cb = args[1];
// gets a temporary filename
tmpName(opts, function _tmpNameCreated(err, name) {
/* istanbul ignore else */
if (err) return cb(err);
// create and open the file exclusively (O_CREAT | O_EXCL | O_RDWR)
fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) {
/* istanbul ignore else */
if (err) return cb(err);
if (opts.discardDescriptor) {
// Caller does not want the descriptor: close it and report fd=undefined.
return fs.close(fd, function _discardCallback(possibleErr) {
// the chance of getting an error on close here is rather low and might occur in the most edgiest cases only
return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false));
});
} else {
// detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care
// about the descriptor
const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false));
}
});
});
}
/**
* Synchronous version of file.
*
* @param {Options} options
* @returns {FileSyncObject} object consists of name, fd and removeCallback
* @throws {Error} if cannot create a file
*/
function fileSync(options) {
const
args = _parseArguments(options),
opts = args[0];
const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
const name = tmpNameSync(opts);
// Create the file exclusively; throws if the path unexpectedly exists.
var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE);
/* istanbul ignore else */
if (opts.discardDescriptor) {
// Caller does not want the descriptor: close it right away.
fs.closeSync(fd);
fd = undefined;
}
return {
name: name,
fd: fd,
removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true)
};
}
/**
* Creates a temporary directory.
*
* @param {(Options|dirCallback)} options the options or the callback function
* @param {?dirCallback} callback
*/
function dir(options, callback) {
const
args = _parseArguments(options, callback),
opts = args[0],
cb = args[1];
// gets a temporary filename
tmpName(opts, function _tmpNameCreated(err, name) {
/* istanbul ignore else */
if (err) return cb(err);
// create the directory (default mode 0o700: owner-only access)
fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) {
/* istanbul ignore else */
if (err) return cb(err);
cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false));
});
});
}
/**
* Synchronous version of dir.
*
* @param {Options} options
* @returns {DirSyncObject} object consists of name and removeCallback
* @throws {Error} if it cannot create a directory
*/
function dirSync(options) {
const
args = _parseArguments(options),
opts = args[0];
const name = tmpNameSync(opts);
// create the directory (default mode 0o700: owner-only access)
fs.mkdirSync(name, opts.mode || DIR_MODE);
return {
name: name,
removeCallback: _prepareTmpDirRemoveCallback(name, opts, true)
};
}
/**
* Removes files asynchronously.
*
* @param {Object} fdPath
* @param {Function} next
* @private
*/
// Asynchronously removes a temp file described by fdPath = [fd, path].
// A non-negative fd is closed first; ENOENT on unlink means the file is
// already gone and is not treated as an error.
function _removeFileAsync(fdPath, next) {
    const done = function (err) {
        if (err && !_isENOENT(err)) {
            // reraise any unanticipated error
            return next(err);
        }
        next();
    };
    const fd = fdPath[0];
    const filePath = fdPath[1];
    if (fd >= 0) {
        fs.close(fd, function () {
            fs.unlink(filePath, done);
        });
    } else {
        fs.unlink(filePath, done);
    }
}
/**
* Removes files synchronously.
*
* @param {Object} fdPath
* @private
*/
function _removeFileSync(fdPath) {
let rethrownException = null;
// Close the descriptor (if any) and unlink the file; the unlink runs in a
// finally block so it happens even when closing throws.
try {
if (0 <= fdPath[0]) fs.closeSync(fdPath[0]);
} catch (e) {
// reraise any unanticipated error
if (!_isEBADF(e) && !_isENOENT(e)) throw e;
} finally {
try {
fs.unlinkSync(fdPath[1]);
}
catch (e) {
// reraise any unanticipated error
if (!_isENOENT(e)) rethrownException = e;
}
}
// Deferred rethrow so the unlink above is never skipped by an early throw.
if (rethrownException !== null) {
throw rethrownException;
}
}
/**
* Prepares the callback for removal of the temporary file.
*
* Returns either a sync callback or a async callback depending on whether
* fileSync or file was called, which is expressed by the sync parameter.
*
* @param {string} name the path of the file
* @param {number} fd file descriptor
* @param {Object} opts
* @param {boolean} sync
* @returns {fileCallback | fileCallbackSync}
* @private
*/
function _prepareTmpFileRemoveCallback(name, fd, opts, sync) {
// Both variants are prepared; the sync one is what the exit-time garbage
// collector stores, since async cleanup cannot complete during process exit.
const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync);
const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync);
if (!opts.keep) _removeObjects.unshift(removeCallbackSync);
return sync ? removeCallbackSync : removeCallback;
}
/**
* Prepares the callback for removal of the temporary directory.
*
* Returns either a sync callback or a async callback depending on whether
* tmpFileSync or tmpFile was called, which is expressed by the sync parameter.
*
* @param {string} name
* @param {Object} opts
* @param {boolean} sync
* @returns {Function} the callback
* @private
*/
function _prepareTmpDirRemoveCallback(name, opts, sync) {
// unsafeCleanup removes the directory tree recursively via rimraf; otherwise
// fs.rmdir is used, which only succeeds on an empty directory.
const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs);
const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC;
const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync);
const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync);
if (!opts.keep) _removeObjects.unshift(removeCallbackSync);
return sync ? removeCallbackSync : removeCallback;
}
/**
* Creates a guarded function wrapping the removeFunction call.
*
* The cleanup callback is safe to be called multiple times.
* Subsequent invocations will be ignored.
*
* @param {Function} removeFunction
* @param {string} fileOrDirName
* @param {boolean} sync
* @param {cleanupCallbackSync?} cleanupCallbackSync
* @returns {cleanupCallback | cleanupCallbackSync}
* @private
*/
function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) {
// Guard so repeated invocations of the returned callback are no-ops.
let called = false;
// if sync is true, the next parameter will be ignored
return function _cleanupCallback(next) {
/* istanbul ignore else */
if (!called) {
// remove cleanupCallback from cache
const toRemove = cleanupCallbackSync || _cleanupCallback;
const index = _removeObjects.indexOf(toRemove);
/* istanbul ignore else */
if (index >= 0) _removeObjects.splice(index, 1);
called = true;
// Synchronous removal functions take no completion callback.
if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) {
return removeFunction(fileOrDirName);
} else {
return removeFunction(fileOrDirName, next || function() {});
}
}
};
}
/**
* The garbage collector.
*
* @private
*/
function _garbageCollector() {
/* istanbul ignore else */
if (!_gracefulCleanup) return;
// the function being called removes itself from _removeObjects,
// loop until _removeObjects is empty
while (_removeObjects.length) {
try {
_removeObjects[0]();
} catch (e) {
// already removed? Errors are swallowed: this runs on process exit and
// must never throw.
}
}
}
/**
* Random name generator based on crypto.
* Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript
*
* @param {number} howMany
* @returns {string} the generated random name
* @private
*/
// Builds a random name fragment of `howMany` characters drawn from
// RANDOM_CHARS, preferring cryptographically strong bytes.
function _randomChars(howMany) {
    let bytes = null;
    // make sure that we do not fail because we ran out of entropy
    try {
        bytes = crypto.randomBytes(howMany);
    } catch (e) {
        bytes = crypto.pseudoRandomBytes(howMany);
    }
    const chars = [];
    for (let i = 0; i < howMany; i++) {
        chars.push(RANDOM_CHARS[bytes[i] % RANDOM_CHARS.length]);
    }
    return chars.join('');
}
/**
* Helper which determines whether a string s is blank, that is undefined, or empty or null.
*
* @private
* @param {string} s
* @returns {Boolean} true whether the string s is blank, false otherwise
*/
// A string is blank when it is null, undefined, or contains only whitespace.
function _isBlank(s) {
    if (s === null) return true;
    if (_isUndefined(s)) return true;
    return !s.trim();
}
/**
* Checks whether the `obj` parameter is defined or not.
*
* @param {Object} obj
* @returns {boolean} true if the object is undefined
* @private
*/
// True when obj is undefined (the parameter is always declared, so a strict
// comparison is equivalent to the typeof check).
function _isUndefined(obj) {
    return obj === void 0;
}
/**
* Parses the function arguments.
*
* This function helps to have optional arguments.
*
* @param {(Options|null|undefined|Function)} options
* @param {?Function} callback
* @returns {Array} parsed arguments
* @private
*/
/**
 * Normalizes the (options, callback) pair so options may be omitted.
 *
 * @param {(Options|null|undefined|Function)} options
 * @param {?Function} callback
 * @returns {Array} [options, callback]
 * @private
 */
function _parseArguments(options, callback) {
    // A lone function argument is the callback.
    if (typeof options === 'function') {
        return [{}, options];
    }
    if (_isUndefined(options)) {
        return [{}, callback];
    }
    // Copy options so internal mutation does not leak to the caller; own
    // non-enumerable properties are copied too, matching getOwnPropertyNames.
    const copied = {};
    Object.getOwnPropertyNames(options).forEach((key) => {
        copied[key] = options[key];
    });
    return [copied, callback];
}
/**
* Generates a new temporary name.
*
* @param {Object} opts
* @returns {string} the new random name according to opts
* @private
*/
function _generateTmpName(opts) {
const tmpDir = opts.tmpdir;
// A fixed name takes precedence over everything else.
/* istanbul ignore else */
if (!_isUndefined(opts.name))
return path.join(tmpDir, opts.dir, opts.name);
// An mkstemp-style template has its XXXXXX placeholder replaced with six
// random characters.
/* istanbul ignore else */
if (!_isUndefined(opts.template))
return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6));
// prefix and postfix
const name = [
opts.prefix ? opts.prefix : 'tmp',
'-',
process.pid,
'-',
_randomChars(12),
opts.postfix ? '-' + opts.postfix : ''
].join('');
return path.join(tmpDir, opts.dir, name);
}
/**
* Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing
* options.
*
* @param {Options} options
* @private
*/
function _assertAndSanitizeOptions(options) {
// Resolve the effective tmp root first; all relative checks use it.
options.tmpdir = _getTmpDir(options);
const tmpDir = options.tmpdir;
/* istanbul ignore else */
if (!_isUndefined(options.name))
_assertIsRelative(options.name, 'name', tmpDir);
/* istanbul ignore else */
if (!_isUndefined(options.dir))
_assertIsRelative(options.dir, 'dir', tmpDir);
/* istanbul ignore else */
if (!_isUndefined(options.template)) {
_assertIsRelative(options.template, 'template', tmpDir);
if (!options.template.match(TEMPLATE_PATTERN))
throw new Error(`Invalid template, found "${options.template}".`);
}
/* istanbul ignore else */
if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0)
throw new Error(`Invalid tries, found "${options.tries}".`);
// if a name was specified we will try once
options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1;
options.keep = !!options.keep;
options.detachDescriptor = !!options.detachDescriptor;
options.discardDescriptor = !!options.discardDescriptor;
options.unsafeCleanup = !!options.unsafeCleanup;
// sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to
options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir));
options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir));
// sanitize further if template is relative to options.dir
options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template);
// for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to
options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name);
options.prefix = _isUndefined(options.prefix) ? '' : options.prefix;
options.postfix = _isUndefined(options.postfix) ? '' : options.postfix;
}
/**
* Resolve the specified path name in respect to tmpDir.
*
* The specified name might include relative path components, e.g. ../
* so we need to resolve in order to be sure that it is located inside tmpDir
*
* @param name
* @param tmpDir
* @returns {string}
* @private
*/
// Resolves name against tmpDir: names already under tmpDir resolve as-is,
// anything else is joined beneath tmpDir before resolving.
function _resolvePath(name, tmpDir) {
    const cleaned = _sanitizeName(name);
    return cleaned.startsWith(tmpDir)
        ? path.resolve(cleaned)
        : path.resolve(path.join(tmpDir, cleaned));
}
/**
* Sanitize the specified path name by removing all quote characters.
*
* @param name
* @returns {string}
* @private
*/
// Strips all single and double quote characters from name; blank names
// (null/undefined/whitespace-only) pass through untouched.
function _sanitizeName(name) {
    return _isBlank(name) ? name : name.replace(/["']/g, '');
}
/**
* Asserts whether specified name is relative to the specified tmpDir.
*
* @param {string} name
* @param {string} option
* @param {string} tmpDir
* @throws {Error}
* @private
*/
function _assertIsRelative(name, option, tmpDir) {
if (option === 'name') {
// assert that name is not absolute and does not contain a path
if (path.isAbsolute(name))
throw new Error(`${option} option must not contain an absolute path, found "${name}".`);
// must not fail on valid . or .. or similar such constructs
// (basename differing from name means a separator was present)
let basename = path.basename(name);
if (basename === '..' || basename === '.' || basename !== name)
throw new Error(`${option} option must not contain a path, found "${name}".`);
}
else { // if (option === 'dir' || option === 'template') {
// assert that dir or template are relative to tmpDir
if (path.isAbsolute(name) && !name.startsWith(tmpDir)) {
throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`);
}
// Also reject relative paths that escape tmpDir via .. segments.
let resolvedPath = _resolvePath(name, tmpDir);
if (!resolvedPath.startsWith(tmpDir))
throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`);
}
}
/**
* Helper for testing against EBADF to compensate changes made to Node 7.x under Windows.
*
* @private
*/
function _isEBADF(error) {
// errno values are negated on the supported node versions.
return _isExpectedError(error, -EBADF, 'EBADF');
}
/**
* Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows.
*
* @private
*/
function _isENOENT(error) {
// errno values are negated on the supported node versions.
return _isExpectedError(error, -ENOENT, 'ENOENT');
}
/**
* Helper to determine whether the expected error code matches the actual code and errno,
* which will differ between the supported node versions.
*
* - Node >= 7.0:
* error.code {string}
* error.errno {number} any numerical value will be negated
*
* CAVEAT
*
* On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT
* is no different here.
*
* @param {SystemError} error
* @param {number} errno
* @param {string} code
* @private
*/
// Matches a SystemError against an expected code/errno pair. On Windows the
// errno check is skipped (see the CAVEAT in the comment above: the numeric
// errno differs from os.constants.errno there).
function _isExpectedError(error, errno, code) {
    if (IS_WIN32) {
        return error.code === code;
    }
    return error.code === code && error.errno === errno;
}
/**
* Sets the graceful cleanup.
*
* If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the
* temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary
* object removals.
*/
function setGracefulCleanup() {
// One-way switch read by _garbageCollector on process exit.
_gracefulCleanup = true;
}
/**
* Returns the currently configured tmp dir from os.tmpdir().
*
* @private
* @param {?Options} options
* @returns {string} the currently configured tmp dir
*/
// Returns the effective tmp root: options.tmpdir when provided, otherwise
// os.tmpdir(), sanitized and resolved to an absolute path.
function _getTmpDir(options) {
    const configured = (options && options.tmpdir) || os.tmpdir();
    return path.resolve(_sanitizeName(configured));
}
// Install process exit listener
process.addListener(EXIT, _garbageCollector);
/**
* Configuration options.
*
* @typedef {Object} Options
* @property {?boolean} keep the temporary object (file or dir) will not be garbage collected
* @property {?number} tries the number of tries before give up the name generation
* @property {?number} mode the access mode, defaults are 0o700 for directories and 0o600 for files
* @property {?string} template the "mkstemp" like filename template
* @property {?string} name fixed name relative to tmpdir or the specified dir option
* @property {?string} dir tmp directory relative to the root tmp directory in use
* @property {?string} prefix prefix for the generated name
* @property {?string} postfix postfix for the generated name
* @property {?string} tmpdir the root tmp directory which overrides the os tmpdir
* @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty
* @property {?boolean} detachDescriptor detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection
* @property {?boolean} discardDescriptor discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection
*/
/**
* @typedef {Object} FileSyncObject
* @property {string} name the name of the file
* @property {?number} fd the file descriptor or -1 if the fd has been discarded
* @property {fileCallback} removeCallback the callback function to remove the file
*/
/**
* @typedef {Object} DirSyncObject
* @property {string} name the name of the directory
* @property {fileCallback} removeCallback the callback function to remove the directory
*/
/**
* @callback tmpNameCallback
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary file name
*/
/**
* @callback fileCallback
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary file name
* @param {number} fd the file descriptor or -1 if the fd had been discarded
* @param {cleanupCallback} fn the cleanup callback function
*/
/**
* @callback fileCallbackSync
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary file name
* @param {number} fd the file descriptor or -1 if the fd had been discarded
* @param {cleanupCallbackSync} fn the cleanup callback function
*/
/**
* @callback dirCallback
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary file name
* @param {cleanupCallback} fn the cleanup callback function
*/
/**
* @callback dirCallbackSync
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary file name
* @param {cleanupCallbackSync} fn the cleanup callback function
*/
/**
* Removes the temporary created file or directory.
*
* @callback cleanupCallback
* @param {simpleCallback} [next] function to call whenever the tmp object needs to be removed
*/
/**
* Removes the temporary created file or directory.
*
* @callback cleanupCallbackSync
*/
/**
* Callback function for function composition.
* @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57}
*
* @callback simpleCallback
*/
// exporting all the needed methods
// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will
// allow users to reconfigure the temporary directory
Object.defineProperty(module.exports, "tmpdir", ({
    enumerable: true,
    configurable: false,
    get: function () {
        return _getTmpDir();
    }
}));
// Async/sync pairs for creating temp dirs and files, name-only generation,
// and the global graceful-cleanup switch.
module.exports.dir = dir;
module.exports.dirSync = dirSync;
module.exports.file = file;
module.exports.fileSync = fileSync;
module.exports.tmpName = tmpName;
module.exports.tmpNameSync = tmpNameSync;
module.exports.setGracefulCleanup = setGracefulCleanup;
/***/ }),
/***/ 59468:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.buildDepGraphs = void 0;
const dep_graph_1 = __webpack_require__(71479);
const out_of_sync_error_1 = __webpack_require__(71757);
const is_dev_dependency_1 = __webpack_require__(92700);
const get_non_hex_version_1 = __webpack_require__(3749);
/**
 * Build one dependency graph per Mix project from parsed mix.exs/mix.lock data.
 * @param mixJsonResult parsed output of the Mix project inspection
 * @param includeDev when false, dev-only dependencies are excluded
 * @param strict when true, a dep missing from mix.lock throws OutOfSyncError
 * @param allProjects when true, umbrella child apps are not folded into this result
 * @returns map of project key ('root' or app name) to its dep-graph
 * @throws Error when no manifest or lock file data is present
 */
function buildDepGraphs(mixJsonResult, includeDev = false, strict = true, allProjects = false) {
    const manifest = mixJsonResult === null || mixJsonResult === void 0 ? void 0 : mixJsonResult.manifest;
    if (!manifest)
        throw new Error('No manifest found');
    // NOTE(review): `lock` appears to be a one-element array wrapping the parsed
    // mix.lock map — confirm against the mix parser's output shape.
    const lock = mixJsonResult.lock[0];
    if (!lock)
        throw new Error('No lock file found');
    // Normalize each mix.lock tuple into a keyed record. Tuple layout used here:
    // [packageManager, name, version, hash, <skipped>, dependencies, ...].
    const lockDepMap = Object.entries(lock).reduce((acc, [key, dep]) => {
        const [packageManager, name, version, hash, , dependencies] = dep;
        acc[key] = {
            packageManager,
            name,
            version,
            hash,
            dependencies,
        };
        return acc;
    }, {});
    // Umbrella child app: qualify graph names with the umbrella's apps path.
    if (mixJsonResult.parent_umbrella_manifest) {
        const umbrella = mixJsonResult.parent_umbrella_manifest;
        const prefix = `${getManifestName(umbrella)}/${umbrella.apps_path}/`;
        const name = `${umbrella.apps_path}/${getManifestName(manifest)}`;
        return {
            [name]: getDepGraph(prefix, manifest, includeDev, lockDepMap, strict),
        };
    }
    // When scanning all projects individually, child apps are not merged here.
    const apps = allProjects || isEmpty(mixJsonResult.apps) ? {} : mixJsonResult.apps;
    const projects = { root: manifest, ...apps };
    const appsPrefix = `${getManifestName(manifest)}/${manifest.apps_path}/`;
    return Object.entries(projects).reduce((acc, [key, manifest]) => {
        const prefix = key === 'root' ? '' : appsPrefix;
        acc[key] = getDepGraph(prefix, manifest, includeDev, lockDepMap, strict);
        return acc;
    }, {});
}
exports.buildDepGraphs = buildDepGraphs;
/** True when `obj` is null/undefined or has no own enumerable keys. */
function isEmpty(obj) {
    if (!obj) {
        return true;
    }
    return Object.keys(obj).length === 0;
}
/**
 * Build the dep-graph for a single Mix project.
 * Top-level deps come from the manifest; transitive deps come from the
 * mix.lock map and are expanded breadth-first via `transitivesQueue`.
 * @param prefix prepended to the root package name (used for umbrella apps)
 * @param manifest parsed mix.exs contents for this project
 * @param includeDev when false, dev-only dependencies are skipped
 * @param lockDepMap mix.lock entries keyed by dependency name
 * @param strict when true, a hex dep missing from the lock file throws OutOfSyncError
 */
function getDepGraph(prefix, manifest, includeDev, lockDepMap, strict) {
    const builder = new dep_graph_1.DepGraphBuilder({ name: 'hex' }, getRootPkg(prefix, manifest));
    if (!manifest.deps)
        return builder.build();
    const transitivesQueue = [];
    const deps = getTopLevelDeps(manifest);
    for (const topLevelDep of deps) {
        // eslint-disable-next-line prefer-const
        let [depName, depVersionSpec, options] = topLevelDep;
        // Two-element form {name, options}: the version spec was omitted and the
        // second element is actually the options map.
        if (typeof depVersionSpec === 'object') {
            options = depVersionSpec;
            depVersionSpec = undefined;
        }
        const isDev = is_dev_dependency_1.isDevDependency(options);
        if (!includeDev && isDev)
            continue;
        const scope = isDev ? 'dev' : 'prod';
        const parentNodeId = builder.rootNodeId;
        let dep = lockDepMap[depName];
        let labels;
        if (!dep) {
            // Not in mix.lock: either a non-Hex dep (path/git) or out of sync.
            const nonHexVersion = get_non_hex_version_1.getNonHexVersion(options);
            if (!nonHexVersion && strict)
                throw new out_of_sync_error_1.OutOfSyncError(depName);
            labels = {
                missingLockFileEntry: 'true',
                ...((nonHexVersion === null || nonHexVersion === void 0 ? void 0 : nonHexVersion.labels) || {}),
            };
            dep = { name: depName, version: (nonHexVersion === null || nonHexVersion === void 0 ? void 0 : nonHexVersion.title) || depVersionSpec };
        }
        transitivesQueue.push({ dep, parentNodeId, scope, labels });
    }
    // Breadth-first expansion of the queued deps and their lock-file children.
    while (transitivesQueue.length > 0) {
        const { dep, parentNodeId, scope, labels } = transitivesQueue.shift();
        const nodeId = addNode(dep.name, dep.version, scope, labels);
        builder.connectDep(parentNodeId, nodeId);
        if (!dep.dependencies)
            continue;
        for (const [childName, , childOptions] of dep.dependencies) {
            const lockDep = lockDepMap[childName];
            // A missing non-optional child means manifest and lock file disagree.
            if (!lockDep && childOptions && !childOptions.optional)
                throw new out_of_sync_error_1.OutOfSyncError(childName);
            else if (!lockDep)
                continue;
            // Children inherit the scope of their top-level ancestor.
            transitivesQueue.push({ parentNodeId: nodeId, dep: lockDep, scope });
        }
    }
    return builder.build();
    // Register a package node in the graph and return its id ("name@version").
    function addNode(name, version, scope, labels) {
        const nodeInfo = {
            labels: {
                scope,
                ...(labels || {}),
            },
        };
        const nodeId = `${name}@${version || ''}`;
        builder.addPkgNode({ name, version }, nodeId, nodeInfo);
        return nodeId;
    }
}
/**
 * Build the root package descriptor for a dep-graph: the (optionally
 * prefixed) manifest name plus the manifest version, defaulting to '0.0.0'.
 */
function getRootPkg(prefix, manifest) {
    const version = manifest.version || '0.0.0';
    return { name: `${prefix}${getManifestName(manifest)}`, version };
}
/**
 * Derive a project name from a Mix manifest: prefer the :app atom, then the
 * module name with a trailing ".Mix*" suffix stripped and lower-cased,
 * falling back to 'no_name'.
 */
function getManifestName(manifest) {
    if (manifest.app) {
        return manifest.app;
    }
    const moduleName = manifest.module_name;
    if (moduleName !== null && moduleName !== undefined) {
        const derived = moduleName.replace(/\.Mix\w{4,}$/, '').toLowerCase();
        if (derived) {
            return derived;
        }
    }
    return 'no_name';
}
/**
 * Normalize manifest.deps into a list of tuples [name, versionSpec?, options?].
 * Accepts either the list form or the map form of Mix dependency declarations.
 */
function getTopLevelDeps(manifest) {
    const deps = manifest.deps;
    if (Array.isArray(deps)) {
        return deps;
    }
    const tuples = [];
    for (const [name, value] of Object.entries(deps)) {
        tuples.push(Array.isArray(value) ? [name, ...value] : [name, value]);
    }
    return tuples;
}
//# sourceMappingURL=build-dep-graphs.js.map
/***/ }),
/***/ 3749:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getNonHexVersion = void 0;
const PATH_OPTION_PROPS = ['path', 'in_umbrella'];
const GIT_OPTION_PROPS = [
'git',
'github',
'ref',
'branch',
'tag',
'submodules',
'sparse',
];
/**
 * Derive a synthetic version descriptor for dependencies not fetched from
 * Hex: path/umbrella deps and git/github deps. Returns undefined for plain
 * Hex dependencies.
 */
function getNonHexVersion(options) {
    if (hasPathOptionProps(options)) {
        return generatePathDepVersion(options);
    }
    if (hasGitOptionProps(options)) {
        return generateGitDepVersion(options);
    }
    return undefined;
}
exports.getNonHexVersion = getNonHexVersion;
/** True when any path-style dependency option ('path', 'in_umbrella') is truthy. */
function hasPathOptionProps(options) {
    if (options === null || options === undefined) {
        return false;
    }
    return PATH_OPTION_PROPS.some((key) => options[key]);
}
/**
 * Build the synthetic version descriptor for a path-style dependency.
 * Returns undefined when neither in_umbrella nor path is set.
 */
function generatePathDepVersion(options) {
    if (options.in_umbrella) {
        return { title: 'in_umbrella', labels: {} };
    }
    if (options.path) {
        return { title: 'path', labels: { path: options.path } };
    }
    return undefined;
}
/** True when any git-style dependency option (git, github, ref, ...) is truthy. */
function hasGitOptionProps(options) {
    if (options === null || options === undefined) {
        return false;
    }
    return GIT_OPTION_PROPS.some((key) => options[key]);
}
/**
 * Build the synthetic version descriptor for a git-style dependency:
 * "<clone-url>@<ref>" plus labels for every git option that is set.
 */
function generateGitDepVersion(options) {
    // A "github" shorthand expands to the full GitHub clone URL.
    const gitAddress = options.github
        ? `https://github.com/${options.github}.git`
        : options.git;
    // Prefer the most specific reference available, defaulting to HEAD.
    const ref = options.branch || options.tag || options.ref || 'HEAD';
    const labels = {};
    for (const prop of GIT_OPTION_PROPS) {
        if (options[prop]) {
            labels[prop] = options[prop];
        }
    }
    return { title: `${gitAddress}@${ref}`, labels };
}
//# sourceMappingURL=get-non-hex-version.js.map
/***/ }),
/***/ 72559:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
var build_dep_graphs_1 = __webpack_require__(59468);
Object.defineProperty(exports, "buildDepGraphs", ({ enumerable: true, get: function () { return build_dep_graphs_1.buildDepGraphs; } }));
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 92700:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.isDevDependency = void 0;
/*
* A dependency can be limited to more than one environment (e.g. :dev and :test)
* The logic to decide the scope (dev/prod):
* If the dependency is not whitelisted, or if :prod is in the `only` whitelist, we consider it as a prod dependency
* Otherwise it's a dev dependency.
* */
/**
 * Decide whether a dependency is dev-only: it must carry an `only` whitelist
 * that does not include :prod. Unrestricted deps are always prod.
 */
function isDevDependency(options) {
    if (!options || !options.only) {
        return false;
    }
    const environments = Array.isArray(options.only) ? options.only : [options.only];
    return !environments.includes('prod');
}
exports.isDevDependency = isDevDependency;
//# sourceMappingURL=is-dev-dependency.js.map
/***/ }),
/***/ 71757:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.OutOfSyncError = void 0;
/**
 * Raised when a dependency declared in mix.exs has no entry in mix.lock,
 * i.e. the two files are out of sync.
 */
class OutOfSyncError extends Error {
    /** @param dependencyName name of the dependency missing from mix.lock */
    constructor(dependencyName) {
        super(`Dependency ${dependencyName} was not found in mix.lock. Your mix.exs and mix.lock are probably out of sync. Please run "mix deps.get" and try again.`);
        this.dependencyName = dependencyName;
        this.name = 'OutOfSyncError';
        Error.captureStackTrace(this, OutOfSyncError);
    }
}
exports.OutOfSyncError = OutOfSyncError;
//# sourceMappingURL=out-of-sync-error.js.map
/***/ }),
/***/ 78217:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const types_1 = __webpack_require__(45776);
const types_2 = __webpack_require__(98590);
/**
* Extract the values from a hash index, which is stored in a Hash DB page.
* The hash index contains key/value pairs. The key is usually some number
* which is not relevant to the application. This function returns only the
* values stored in the index.
* @param page A database page.
* @param entries How many entries are stored in the index.
*/
function bufferToHashIndexValues(page, entries) {
    // Hash table entries are stored as key/value pairs, so the count must be even.
    if (entries % 2 !== 0) {
        const pageNumber = page.readUInt32LE(8);
        throw new types_2.ParserError('The number of entries must be a multiple of 2', {
            entries,
            pageNumber,
        });
    }
    // The index follows the page header; each entry is a 2-byte offset that
    // points somewhere inside the current database page.
    const hashIndexSize = entries * types_1.HASH_INDEX_ENTRY_SIZE;
    const hashIndex = page.slice(types_1.DATABASE_PAGE_HEADER_SIZE, types_1.DATABASE_PAGE_HEADER_SIZE + hashIndexSize);
    // Only the values are wanted: start at the second entry (the first value)
    // and step over every key/value pair.
    const keyValuePairSize = 2 * types_1.HASH_INDEX_ENTRY_SIZE;
    const hashIndexValues = [];
    for (let byteOffset = types_1.HASH_INDEX_ENTRY_SIZE; byteOffset < hashIndex.length; byteOffset += keyValuePairSize) {
        hashIndexValues.push(hashIndex.readInt16LE(byteOffset));
    }
    return hashIndexValues;
}
exports.bufferToHashIndexValues = bufferToHashIndexValues;
//# sourceMappingURL=database-pages.js.map
/***/ }),
/***/ 85959:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const types_1 = __webpack_require__(45776);
const types_2 = __webpack_require__(98590);
/**
* Traverse the data (overflow) pages and extract the data.
* The data may be spread over multiple pages, so for every page we need to:
* 1. Strip the page header of every page.
* 2. Collate with data collected so far.
* 3. Make sure to read the right data offset if we reach the last page.
* The pages are not in order, so we may have to jump all across the BerkeleyDB file.
* This is why we also need a Buffer to the database contents.
* @param berkeleydb The contents of the database.
* @param page Which page to start looking from. This should be an Overflow page.
* @param pageStartOffset Which byte in the BerkeleyDB points to the start of the page.
* @param pageSizeBytes How big is every page (typically it would be 4096 bytes).
*/
function bufferToHashValueContent(berkeleydb, page, pageStartOffset, pageSizeBytes) {
    // The byte offset that describes the page type is the same regardless of the page type.
    // Note there may be 5 different page types of varying length, but we are interested only one.
    const pageType = page.readUInt8(pageStartOffset);
    if (pageType !== types_1.HashPageType.H_OFFPAGE) {
        throw new types_2.ParserError('Unsupported page type', { pageType });
    }
    // H_OFFPAGE entry layout: bytes +4..7 hold the first overflow page number,
    // bytes +8..11 hold the total length of the stored data.
    const startPageNumber = page.readUInt32LE(pageStartOffset + 4);
    const dataLengthBytes = page.readUInt32LE(pageStartOffset + 8);
    const result = Buffer.alloc(dataLengthBytes);
    let bytesWritten = 0;
    // Traverse the pages, using "nextPageNumber" in the page metadata to see if we've reached the end.
    for (let currentPageNumber = startPageNumber; currentPageNumber !== 0;) {
        const pageStart = pageSizeBytes * currentPageNumber;
        const pageEnd = pageStart + pageSizeBytes;
        const currentPage = berkeleydb.slice(pageStart, pageEnd);
        // Page header fields: next-page pointer at byte 16, free-area offset at byte 22.
        const nextPageNumber = currentPage.readUInt32LE(16);
        const freeAreaOffset = currentPage.readUInt16LE(22);
        const isLastPage = nextPageNumber === 0;
        const bytesToWrite = isLastPage
            ? // The last page points to where the data ends.
                currentPage.slice(types_1.DATABASE_PAGE_HEADER_SIZE, freeAreaOffset)
            : // Otherwise the whole page is filled with content.
                currentPage.slice(types_1.DATABASE_PAGE_HEADER_SIZE, currentPage.length);
        const byteOffset = bytesWritten;
        result.set(bytesToWrite, byteOffset);
        bytesWritten += bytesToWrite.length;
        currentPageNumber = nextPageNumber;
    }
    return result;
}
exports.bufferToHashValueContent = bufferToHashValueContent;
//# sourceMappingURL=hash-pages.js.map
/***/ }),
/***/ 40218:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const event_loop_spinner_1 = __webpack_require__(77158);
const database_pages_1 = __webpack_require__(78217);
exports.bufferToHashIndexValues = database_pages_1.bufferToHashIndexValues;
const hash_pages_1 = __webpack_require__(85959);
exports.bufferToHashValueContent = hash_pages_1.bufferToHashValueContent;
const types_1 = __webpack_require__(45776);
const types_2 = __webpack_require__(98590);
// BerkeleyDB page sizes are powers of two between 512 bytes and 64 KiB.
const validPageSizes = Array.from({ length: 8 }, (unused, exponent) => 512 * 2 ** exponent);
/**
* Extract the RPM package contents from a BerkeleyDB. Note that the contents
* are returned as a Buffer of data for every package and that they need to be further
* processed to extract meaningful package information.
*
* This implementation is interested in finding only data blobs of type Hash DB.
*
* The BerkeleyDB that we are interested in contains only the following pages:
* - A metadata page -- the first page (index 0) of the database.
* - A Hash DB page -- this page basically tells us where to find the data in the BerkeleyDB.
* - An Overflow page -- this page contains the data. The data may span multiple pages (hence "overflow" pages).
* @param data The contents of a BerkeleyDB database.
*/
async function bufferToHashDbValues(data) {
    validateBerkeleyDbMetadata(data);
    // Metadata header: page size at byte 20, last page number at byte 32.
    const pageSize = data.readUInt32LE(20);
    validatePageSize(pageSize);
    const lastPageNumber = data.readUInt32LE(32);
    const result = new Array();
    // The 0th index page is the database metadata page, so start from the 1st index page.
    // NOTE(review): the loop condition excludes the page numbered lastPageNumber
    // itself — confirm whether the final page is intentionally skipped.
    for (let pageNumber = 1; pageNumber < lastPageNumber; pageNumber++) {
        const pageStart = pageNumber * pageSize;
        const pageEnd = pageStart + pageSize;
        // Byte 25 of every page header holds the page type.
        const pageType = data[pageStart + 25];
        // Look only for HASH pages, we will traverse their content in subsequent steps.
        if (pageType !== types_1.DatabasePageType.P_HASH) {
            continue;
        }
        const page = data.slice(pageStart, pageEnd);
        // Byte 20 of a Hash page header holds the number of index entries.
        const entries = page.readUInt16LE(20);
        // Hash DB entries come in key/value pairs. We are only interested in the values.
        const hashIndex = database_pages_1.bufferToHashIndexValues(page, entries);
        for (const hashPage of hashIndex) {
            const valuePageType = page[hashPage];
            // Only Overflow pages contain package data, skip anything else.
            if (valuePageType !== types_1.HashPageType.H_OFFPAGE) {
                continue;
            }
            // Traverse the page to concatenate the data that may span multiple pages.
            const valueContent = hash_pages_1.bufferToHashValueContent(data, page, hashPage, pageSize);
            result.push(valueContent);
        }
        // Yield to the event loop periodically so huge databases don't block it.
        if (event_loop_spinner_1.eventLoopSpinner.isStarving()) {
            await event_loop_spinner_1.eventLoopSpinner.spin();
        }
    }
    return result;
}
exports.bufferToHashDbValues = bufferToHashDbValues;
/**
* Exported for testing
*/
function validateBerkeleyDbMetadata(data) {
    // We are only interested in Hash DB. Other types are B-Tree, Queue, Heap, etc.
    // The magic number lives at byte 12 of the metadata page.
    const magicNumber = data.readUInt32LE(12);
    if (magicNumber !== types_1.MagicNumber.DB_HASH) {
        throw new types_2.ParserError('Unexpected database magic number', { magicNumber });
    }
    // The first page of the database must be a Hash DB metadata page.
    const pageType = data.readUInt8(25);
    if (pageType !== types_1.DatabasePageType.P_HASHMETA) {
        throw new types_2.ParserError('Unexpected page type', { pageType });
    }
    // Byte 24 stores the encryption algorithm; 0 means "not encrypted".
    const encryptionAlgorithm = data.readUInt8(24);
    if (encryptionAlgorithm !== 0) {
        throw new types_2.ParserError('Encrypted databases are not supported', {
            encryptionAlgorithm,
        });
    }
    // We will be pre-allocating some memory for the entries in the database.
    // Choose a very high, possibly unrealistic number, for the number of installed
    // packages on the system. We don't want to allocate too much memory.
    // (readUInt32LE can never be negative; the < 0 check is defensive only.)
    const entriesCount = data.readUInt32LE(88);
    if (entriesCount < 0 || entriesCount > 50000) {
        throw new types_2.ParserError('Invalid number of entries in the database', {
            entriesCount,
        });
    }
}
exports.validateBerkeleyDbMetadata = validateBerkeleyDbMetadata;
/**
* Exported for testing
*/
/** Throw a ParserError unless pageSize is one of the supported BerkeleyDB page sizes. */
function validatePageSize(pageSize) {
    const isSupported = validPageSizes.includes(pageSize);
    if (!isSupported) {
        throw new types_2.ParserError('Invalid page size', { pageSize });
    }
}
exports.validatePageSize = validatePageSize;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 45776:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
/**
* Every entry in the index is a 2-byte offset that points somewhere in the current database page.
*/
exports.HASH_INDEX_ENTRY_SIZE = 2;
/**
* Every DB page has a 26 bytes header at the start of the page.
*/
exports.DATABASE_PAGE_HEADER_SIZE = 26;
/**
* Every BerkeleyDB will contain a magic number that additionally proves that
* the file is of a particular type.
*/
var MagicNumber;
(function (MagicNumber) {
MagicNumber[MagicNumber["DB_HASH"] = 398689] = "DB_HASH";
})(MagicNumber = exports.MagicNumber || (exports.MagicNumber = {}));
/**
* Every page in the database has a particular type.
* These are the only types we need for fully reading the list of packages.
*/
var DatabasePageType;
(function (DatabasePageType) {
DatabasePageType[DatabasePageType["P_OVERFLOW"] = 7] = "P_OVERFLOW";
DatabasePageType[DatabasePageType["P_HASHMETA"] = 8] = "P_HASHMETA";
DatabasePageType[DatabasePageType["P_HASH"] = 13] = "P_HASH";
})(DatabasePageType = exports.DatabasePageType || (exports.DatabasePageType = {}));
/**
* We are only interested in Hash pages of type Overflow since they are the only ones containing data.
*/
var HashPageType;
(function (HashPageType) {
HashPageType[HashPageType["H_OFFPAGE"] = 3] = "H_OFFPAGE";
})(HashPageType = exports.HashPageType || (exports.HashPageType = {}));
//# sourceMappingURL=types.js.map
/***/ }),
/***/ 76713:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const berkeleydb_1 = __webpack_require__(40218);
const rpm_1 = __webpack_require__(22604);
const sql_js_1 = __webpack_require__(26657);
/**
* Get a list of packages given a Buffer that contains an RPM database in BerkeleyDB format.
* The database is inspected as best-effort, returning all valid/readable entries.
* @param data An RPM database in BerkeleyDB format.
* @deprecated Should use snyk/dep-graph. The response format is kept for backwards compatibility with snyk/kubernetes-monitor.
*/
async function getPackages(data) {
    try {
        const berkeleyDbValues = await berkeleydb_1.bufferToHashDbValues(data);
        let packagesSkipped = 0;
        let packagesProcessed = 0;
        const rpmPackageInfos = new Array();
        for (const entry of berkeleyDbValues) {
            try {
                const packageInfo = await rpm_1.bufferToPackageInfo(entry);
                if (packageInfo !== undefined) {
                    rpmPackageInfos.push(packageInfo);
                    packagesProcessed += 1;
                }
                else {
                    packagesSkipped += 1;
                }
            }
            catch (error) {
                // Best-effort parsing: a corrupt/unreadable entry is counted and skipped.
                packagesSkipped += 1;
            }
        }
        // Legacy output format: one "name\tversion\tsize" line per package.
        const formattedPackages = formatRpmPackages(rpmPackageInfos);
        const response = formattedPackages.join('\n');
        return {
            response,
            rpmMetadata: {
                packagesProcessed,
                packagesSkipped,
            },
        };
    }
    catch (error) {
        // A failure to read the DB itself yields an empty response plus the error.
        return {
            response: '',
            error: error,
        };
    }
}
exports.getPackages = getPackages;
/** Render each package as a tab-separated line: name, version, size. */
function formatRpmPackages(packages) {
    const lines = [];
    for (const packageInfo of packages) {
        lines.push(`${packageInfo.name}\t${formatRpmPackageVersion(packageInfo)}\t${packageInfo.size}`);
    }
    return lines;
}
/**
 * Format an RPM version string as "[epoch:]version-release".
 * An epoch of 0 (or an absent epoch) is conventionally omitted.
 */
function formatRpmPackageVersion(packageInfo) {
    const base = `${packageInfo.version}-${packageInfo.release}`;
    const epoch = packageInfo.epoch;
    if (epoch === undefined || epoch === 0) {
        return base;
    }
    return `${epoch}:${base}`;
}
exports.formatRpmPackageVersion = formatRpmPackageVersion;
/**
 * Get a list of packages given a buffer containing an SQLite RPM Packages DB.
 * The database is inspected as best-effort, returning all valid/readable entries.
 * @param sqliteDbBuffer A buffer holding an RPM sqlite Packages DB.
 */
async function getPackagesSqlite(sqliteDbBuffer) {
    try {
        // Pull the raw package blobs out of the Packages table, then decode each one.
        const blobs = await getBlobsFromPackagesTableSqliteDb(sqliteDbBuffer);
        const packages = await Promise.all(blobs.map((blob) => rpm_1.bufferToPackageInfo(blob)));
        return { response: packages };
    }
    catch (error) {
        // Best-effort contract: report the error instead of throwing.
        return { response: [], error: error };
    }
}
exports.getPackagesSqlite = getPackagesSqlite;
// TODO: revisit when new version of sql.js is available
// OR we're able to use sqlite3 (Snyk CLI limitation with native modules)
/**
 * Load the raw `blob` column of every row in the Packages table of an
 * in-memory sql.js database.
 * @param sqliteDbBuffer buffer with the sqlite database contents
 * @returns one Buffer per package row (empty array for an empty table)
 */
async function getBlobsFromPackagesTableSqliteDb(sqliteDbBuffer) {
    const SQL = await sql_js_1.default();
    const db = new SQL.Database(sqliteDbBuffer);
    try {
        const dbContent = db.exec('SELECT blob FROM Packages');
        // sql.js returns no result sets for an empty table; previously this
        // crashed on dbContent[0] — treat it as "no packages" instead.
        const packagesInfoBlobs = dbContent.length > 0 ? dbContent[0].values : [];
        return packagesInfoBlobs.map((data) => Buffer.from(data[0]));
    }
    finally {
        // Always release the in-memory database, even when the query throws.
        db.close();
    }
}
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 34808:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const event_loop_spinner_1 = __webpack_require__(77158);
const types_1 = __webpack_require__(61668);
const types_2 = __webpack_require__(98590);
/**
* Iterate through RPM metadata entries to build the full package data.
* @param entries Entries that were previously extracted from a BerkeleyDB blob.
*/
async function getPackageInfo(entries) {
    const packageInfo = {};
    for (const entry of entries) {
        // For each known tag, validate the entry's declared type before decoding;
        // a mismatch indicates corrupt metadata. Unknown tags are ignored.
        switch (entry.info.tag) {
            case types_1.RpmTag.NAME:
                if (entry.info.type !== types_1.RpmType.STRING) {
                    throw new types_2.ParserError('Unexpected type for name tag', {
                        type: entry.info.type,
                    });
                }
                packageInfo.name = extractString(entry.data);
                break;
            case types_1.RpmTag.RELEASE:
                if (entry.info.type !== types_1.RpmType.STRING) {
                    throw new types_2.ParserError('Unexpected type for release tag', {
                        type: entry.info.type,
                    });
                }
                packageInfo.release = extractString(entry.data);
                break;
            case types_1.RpmTag.ARCH:
                if (entry.info.type !== types_1.RpmType.STRING) {
                    throw new types_2.ParserError('Unexpected type for arch tag', {
                        type: entry.info.type,
                    });
                }
                packageInfo.arch = extractString(entry.data);
                break;
            case types_1.RpmTag.EPOCH:
                if (entry.info.type !== types_1.RpmType.INT32) {
                    throw new types_2.ParserError('Unexpected type for epoch tag', {
                        type: entry.info.type,
                    });
                }
                // Integer values are stored big-endian in RPM headers.
                packageInfo.epoch = entry.data.readInt32BE(0);
                break;
            case types_1.RpmTag.SIZE:
                if (entry.info.type !== types_1.RpmType.INT32) {
                    throw new types_2.ParserError('Unexpected type for size tag', {
                        type: entry.info.type,
                    });
                }
                packageInfo.size = entry.data.readInt32BE(0);
                break;
            case types_1.RpmTag.VERSION:
                if (entry.info.type !== types_1.RpmType.STRING) {
                    throw new types_2.ParserError('Unexpected type for version tag', {
                        type: entry.info.type,
                    });
                }
                packageInfo.version = extractString(entry.data);
                break;
            default:
                break;
        }
        // Yield to the event loop periodically so large headers don't block it.
        if (event_loop_spinner_1.eventLoopSpinner.isStarving()) {
            await event_loop_spinner_1.eventLoopSpinner.spin();
        }
    }
    // Only return the result when all mandatory fields were found.
    return isPackageInfo(packageInfo) ? packageInfo : undefined;
}
exports.getPackageInfo = getPackageInfo;
/**
 * Extract a UTF-8 string from a (possibly) null-padded buffer.
 * The content may be padded with zeros; the first 0 byte indicates where the
 * string ends. When no terminator is present, the whole buffer is the string.
 */
function extractString(data) {
    const contentEnd = data.indexOf(0);
    // indexOf returns -1 when there is no null terminator; slice(0, -1) would
    // then silently drop the final byte, so treat -1 as "use everything".
    const end = contentEnd === -1 ? data.length : contentEnd;
    return data.slice(0, end).toString('utf-8');
}
/**
 * Narrowing check: a parsed entry counts as complete package info only when
 * all mandatory fields (name, version, release, size) were found.
 */
function isPackageInfo(packageInfo) {
    const requiredFields = [
        packageInfo.name,
        packageInfo.version,
        packageInfo.release,
        packageInfo.size,
    ];
    return requiredFields.every((field) => field !== undefined);
}
//# sourceMappingURL=extensions.js.map
/***/ }),
/***/ 65224:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const event_loop_spinner_1 = __webpack_require__(77158);
const types_1 = __webpack_require__(61668);
const types_2 = __webpack_require__(98590);
/**
* Transform a blob of metadata into addressable RPM package entries.
* The entries need to be further processed to extract package information.
* @param data A blob of RPM metadata, as stored inside BerkeleyDB.
*/
async function headerImport(data) {
    const indexLength = data.readInt32BE(0);
    const dataLength = data.readInt32BE(4);
    if (indexLength <= 0 || indexLength > 50000) {
        // Ensure we don't allocate something crazy...
        throw new types_2.ParserError('Invalid index length', { indexLength });
    }
    const entryInfos = new Array();
    // Skip the first 2 items (index and data lengths): the index starts at byte 8.
    const dataStart = 8 + indexLength * types_1.ENTRY_INFO_SIZE;
    // BUGFIX: the slice end must also account for the 8-byte header; with
    // `slice(8, indexLength * ENTRY_INFO_SIZE)` the final index entry was
    // always truncated to 8 bytes and dropped by the length guard below.
    const index = data.slice(8, 8 + indexLength * types_1.ENTRY_INFO_SIZE);
    for (let i = 0; i < indexLength; i++) {
        const entry = index.slice(i * types_1.ENTRY_INFO_SIZE, i * types_1.ENTRY_INFO_SIZE + types_1.ENTRY_INFO_SIZE);
        // A short (truncated) entry cannot be parsed; skip it.
        if (entry.length < types_1.ENTRY_INFO_SIZE) {
            continue;
        }
        // Entry layout (big-endian): tag, type, offset into the data region, count.
        const entryInfo = {
            tag: entry.readInt32BE(0),
            type: entry.readUInt32BE(4),
            offset: entry.readInt32BE(8),
            count: entry.readUInt32BE(12),
        };
        // Private/region tags carry RPM-internal bookkeeping, not package data.
        if (types_1.PRIVATE_RPM_TAGS.includes(entryInfo.tag)) {
            continue;
        }
        entryInfos.push(entryInfo);
        // Yield to the event loop periodically so large headers don't block it.
        if (event_loop_spinner_1.eventLoopSpinner.isStarving()) {
            await event_loop_spinner_1.eventLoopSpinner.spin();
        }
    }
    return regionSwab(data, entryInfos, dataStart, dataLength);
}
exports.headerImport = headerImport;
/**
 * Attach the raw data slice to each entry info.
 * Each entry's length is the distance to the next entry's offset; the last
 * entry runs to the end of the data region (dataLength).
 * @param data the full metadata blob
 * @param entryInfos parsed index entries (tag/type/offset/count)
 * @param dataStart byte offset where the data region begins
 * @param dataLength total length of the data region
 */
async function regionSwab(data, entryInfos, dataStart, dataLength) {
    const indexEntries = new Array(entryInfos.length);
    for (let i = 0; i < entryInfos.length; i++) {
        const entryInfo = entryInfos[i];
        const entryLength = i < entryInfos.length - 1
            ? entryInfos[i + 1].offset - entryInfo.offset
            : dataLength - entryInfo.offset;
        const entryStart = dataStart + entryInfo.offset;
        const entryEnd = entryStart + entryLength;
        const indexEntry = {
            info: entryInfo,
            data: data.slice(entryStart, entryEnd),
            length: entryLength,
        };
        indexEntries[i] = indexEntry;
        // Yield to the event loop periodically so large headers don't block it.
        if (event_loop_spinner_1.eventLoopSpinner.isStarving()) {
            await event_loop_spinner_1.eventLoopSpinner.spin();
        }
    }
    return indexEntries;
}
//# sourceMappingURL=header.js.map
/***/ }),
/***/ 22604:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const header_1 = __webpack_require__(65224);
const extensions_1 = __webpack_require__(34808);
/**
* Extracts as much package information as available from a blob of RPM metadata.
* Returns undefined if the package cannot be constructed due to missing or corrupt data.
* @param data A blob of RPM metadata, as stored inside BerkeleyDB.
*/
async function bufferToPackageInfo(data) {
    // Parse the header into addressable entries, then assemble package info from them.
    const entries = await header_1.headerImport(data);
    return extensions_1.getPackageInfo(entries);
}
exports.bufferToPackageInfo = bufferToPackageInfo;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 61668:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
/**
* Size of an RPM metadata entry in bytes.
*/
exports.ENTRY_INFO_SIZE = 16;
/** https://github.com/rpm-software-management/rpm/blob/ad1cad7e6a5def8b6036b90f2634297eda79dc7d/lib/rpmtag.h#L16-L25 */
exports.PRIVATE_RPM_TAGS = [
61,
62,
63,
64,
100,
256,
];
var RpmTag;
(function (RpmTag) {
RpmTag[RpmTag["NAME"] = 1000] = "NAME";
RpmTag[RpmTag["VERSION"] = 1001] = "VERSION";
RpmTag[RpmTag["RELEASE"] = 1002] = "RELEASE";
RpmTag[RpmTag["EPOCH"] = 1003] = "EPOCH";
RpmTag[RpmTag["SIZE"] = 1009] = "SIZE";
RpmTag[RpmTag["ARCH"] = 1022] = "ARCH";
})(RpmTag = exports.RpmTag || (exports.RpmTag = {}));
var RpmType;
(function (RpmType) {
RpmType[RpmType["NULL"] = 0] = "NULL";
RpmType[RpmType["CHAR"] = 1] = "CHAR";
RpmType[RpmType["INT8"] = 2] = "INT8";
RpmType[RpmType["INT16"] = 3] = "INT16";
RpmType[RpmType["INT32"] = 4] = "INT32";
RpmType[RpmType["INT64"] = 5] = "INT64";
RpmType[RpmType["STRING"] = 6] = "STRING";
RpmType[RpmType["BIN"] = 7] = "BIN";
RpmType[RpmType["STRING_ARRAY"] = 8] = "STRING_ARRAY";
RpmType[RpmType["I18NSTRING"] = 9] = "I18NSTRING";
})(RpmType = exports.RpmType || (exports.RpmType = {}));
//# sourceMappingURL=types.js.map
/***/ }),
/***/ 98590:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
/**
 * Error carrying structured context, thrown by the BerkeleyDB/RPM parsers.
 */
class ParserError extends Error {
    /**
     * @param message human-readable description of the parse failure
     * @param context structured data (offsets, values) useful for debugging
     */
    constructor(message, context) {
        super(message);
        // Report the class name instead of the default 'Error', consistent
        // with the other error classes in this bundle (e.g. OutOfSyncError).
        this.name = 'ParserError';
        Error.captureStackTrace(this, this.constructor);
        this.context = context;
    }
}
exports.ParserError = ParserError;
//# sourceMappingURL=types.js.map
/***/ }),
/***/ 49556:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.OutOfSyncError = exports.inspect = void 0;
const tslib_1 = __webpack_require__(58432);
const fs = __webpack_require__(35747);
const path = __webpack_require__(85622);
const crypto = __webpack_require__(76417);
const subProcess = __webpack_require__(85468);
const cocoapods_lockfile_parser_1 = __webpack_require__(84537);
const legacy_1 = __webpack_require__(67790);
// Compile-time check that we are implementing the plugin API properly
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const _ = {
pluginName() {
return "snyk-cocoapods-plugin";
},
inspect,
};
const MANIFEST_FILE_NAMES = [
"CocoaPods.podfile.yaml",
"CocoaPods.podfile",
"Podfile",
"Podfile.rb",
];
const LOCKFILE_NAME = "Podfile.lock";
/**
 * Entry point of the CocoaPods plugin: locate Podfile/Podfile.lock under
 * `root`, optionally verify the manifest checksum, and return the dependency
 * tree plus plugin metadata.
 * NOTE: verifyChecksum, cocoapodsVersion, getAllDeps and OutOfSyncError are
 * module-local helpers defined further down in this module.
 * @param root project root directory
 * @param targetFile optional manifest or lockfile path relative to root
 * @param options plugin options (dev, strictOutOfSync, ...)
 */
function inspect(root, targetFile, options) {
    return tslib_1.__awaiter(this, void 0, void 0, function* () {
        if (!options) {
            options = { dev: false };
        }
        // Default to non-strict lockfile checking unless the caller opted in.
        if (!("strictOutOfSync" in options)) {
            options.strictOutOfSync = false;
        }
        if (options.subProject) {
            throw new Error("The CocoaPods plugin doesn't support specifying a subProject!");
        }
        let lockfilePath;
        // Resolve the lockfile path or fail with an actionable message.
        function expectToFindLockfile(dir = '.') {
            return tslib_1.__awaiter(this, void 0, void 0, function* () {
                const discoveredLockfilePath = yield findLockfile(root, dir);
                if (!discoveredLockfilePath) {
                    throw new Error("Could not find lockfile \"Podfile.lock\"! This might be resolved by running `pod install`.");
                }
                return discoveredLockfilePath;
            });
        }
        let manifestFilePath;
        if (targetFile) {
            const { base, dir } = path.parse(targetFile);
            if (base === LOCKFILE_NAME) {
                // Target is the lockfile; look for a manifest next to it.
                lockfilePath = targetFile;
                manifestFilePath = yield findManifestFile(root, dir);
            }
            else if (MANIFEST_FILE_NAMES.indexOf(base) !== -1) {
                // Target is a manifest; the lockfile must exist next to it.
                const absTargetFilePath = path.join(root, targetFile);
                if (!(yield fsExists(absTargetFilePath))) {
                    throw new Error(`Given target file ("${targetFile}") doesn't exist!`);
                }
                manifestFilePath = targetFile;
                lockfilePath = yield expectToFindLockfile(dir);
            }
            else {
                throw new Error("Unexpected name for target file!");
            }
        }
        else {
            // No target given: discover both files starting at the project root.
            manifestFilePath = yield findManifestFile(root);
            lockfilePath = yield expectToFindLockfile();
        }
        const absLockfilePath = path.join(root, lockfilePath);
        if (options.strictOutOfSync) {
            if (!manifestFilePath) {
                throw new Error("Option `--strict-out-of-sync=true` given, but no manifest file could be found!");
            }
            const absManifestFilePath = path.join(root, manifestFilePath);
            const result = yield verifyChecksum(absManifestFilePath, absLockfilePath);
            if (result === ChecksumVerificationResult.NoChecksumInLockfile) {
                throw new Error("Option `--strict-out-of-sync=true` given, but lockfile doesn't encode checksum of Podfile! "
                    + "Try to update the CocoaPods integration via \"pod install\" or omit the option.");
            }
            if (result === ChecksumVerificationResult.Invalid) {
                throw new OutOfSyncError(manifestFilePath, lockfilePath);
            }
        }
        const plugin = {
            meta: {},
            name: 'cocoapods',
            runtime: yield cocoapodsVersion(root),
            targetFile: manifestFilePath || lockfilePath,
        };
        const depTree = yield getAllDeps(absLockfilePath);
        return {
            package: depTree,
            plugin,
        };
    });
}
exports.inspect = inspect;
/**
 * Resolve to true/false depending on whether `pathToTest` exists.
 * Replaces the deprecated `fs.exists` with `fs.access`: absence is reported
 * via the callback error, so the promise resolves rather than rejecting for
 * a missing path. The redundant tslib `__awaiter` wrapper around an already
 * promise-returning body is dropped.
 */
function fsExists(pathToTest) {
    return new Promise((resolve) => {
        fs.access(pathToTest, (err) => resolve(!err));
    });
}
/**
 * Promisified fs.readFile that decodes the file as UTF-8 text.
 * Resolves with the file contents; rejects with the fs error on failure.
 */
function fsReadFile(filename) {
    return new Promise((resolve, reject) => {
        fs.readFile(filename, 'utf8', (err, data) => {
            if (err) {
                reject(err);
            }
            else {
                resolve(data);
            }
        });
    });
}
/**
 * Probe each known manifest name under root/dir and return the first hit as
 * a path relative to `root`, or undefined when no manifest exists.
 */
async function findManifestFile(root, dir = '.') {
    for (const fileName of MANIFEST_FILE_NAMES) {
        const candidate = path.join(root, dir, fileName);
        if (await fsExists(candidate)) {
            return path.join(dir, fileName);
        }
    }
}
/**
 * Return the Podfile.lock path relative to `root` when it exists under
 * root/dir, otherwise undefined.
 */
async function findLockfile(root, dir = '.') {
    const candidate = path.join(root, dir, LOCKFILE_NAME);
    if (await fsExists(candidate)) {
        return path.join(dir, LOCKFILE_NAME);
    }
}
// TypeScript-style reverse-mapped numeric enum: name -> value and
// value -> name (Valid = 0, Invalid = 1, NoChecksumInLockfile = 2).
var ChecksumVerificationResult;
(function (enumObj) {
    const names = ["Valid", "Invalid", "NoChecksumInLockfile"];
    names.forEach((name, value) => {
        enumObj[name] = value;
        enumObj[value] = name;
    });
})(ChecksumVerificationResult || (ChecksumVerificationResult = {}));
/**
 * Compare the SHA-1 of the manifest contents against the checksum recorded
 * in the lockfile at `pod install` time.
 * Returns a ChecksumVerificationResult member.
 */
async function verifyChecksum(manifestFilePath, lockfilePath) {
    const manifestContents = await fsReadFile(manifestFilePath);
    const manifestDigest = crypto.createHash('sha1').update(manifestContents).digest('hex');
    const parser = await cocoapods_lockfile_parser_1.LockfileParser.readFile(lockfilePath);
    if (parser.podfileChecksum === undefined) {
        return ChecksumVerificationResult.NoChecksumInLockfile;
    }
    return parser.podfileChecksum === manifestDigest
        ? ChecksumVerificationResult.Valid
        : ChecksumVerificationResult.Invalid;
}
/**
 * Parse the lockfile and convert its dependency graph into the legacy
 * dep-tree shape expected by the CLI.
 * Wraps parser failures in an error naming the lockfile.
 */
async function getAllDeps(lockfilePath) {
    let parser;
    try {
        parser = await cocoapods_lockfile_parser_1.LockfileParser.readFile(lockfilePath);
    }
    catch (error) {
        throw new Error(`Error while parsing ${LOCKFILE_NAME}:\n${error.message}`);
    }
    const graph = parser.toDepGraph();
    return legacy_1.graphToDepTree(graph, "cocoapods");
}
/**
 * Determine the installed CocoaPods version, preferring the Bundler-managed
 * binary and falling back to a direct `pod` invocation. Resolves with an
 * empty string when neither works.
 */
async function cocoapodsVersion(root) {
    let versionOutput = '';
    try {
        // Prefer the Bundler-pinned CocoaPods when the project uses one.
        versionOutput = await subProcess.execute('bundle exec pod', ['--version'], { cwd: root });
    }
    catch (_a) {
        try {
            // Fall back to a globally installed CocoaPods.
            versionOutput = await subProcess.execute('pod', ['--version'], { cwd: root });
        }
        catch (_b) {
            // Neither invocation worked; report an empty version string.
        }
    }
    return versionOutput.trim();
}
/**
 * Raised when the Podfile checksum does not match the one recorded in
 * Podfile.lock. Carries HTTP-style code 422 (unprocessable).
 */
class OutOfSyncError extends Error {
    constructor(manifestFileName, lockfileName) {
        const message = `Your Podfile ("${manifestFileName}") is not in sync ` +
            `with your lockfile ("${lockfileName}"). ` +
            `Please run "pod install" and try again.`;
        super(message);
        this.name = 'OutOfSyncError';
        this.code = 422;
        Error.captureStackTrace(this, OutOfSyncError);
    }
}
exports.OutOfSyncError = OutOfSyncError;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 85468:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.execute = void 0;
const childProcess = __webpack_require__(63129);
/**
 * Run `command` with `args` through a shell, buffering stdio.
 * Resolves with stdout (or stderr when stdout is empty); rejects with an
 * Error wrapping the captured output on a non-zero exit code.
 */
function execute(command, args = [], options) {
    const spawnOptions = { shell: true };
    if (options && options.cwd) {
        spawnOptions.cwd = options.cwd;
    }
    return new Promise((resolve, reject) => {
        const proc = childProcess.spawn(command, args, spawnOptions);
        let stdout = '';
        let stderr = '';
        proc.stdout.on('data', (chunk) => {
            stdout += chunk;
        });
        proc.stderr.on('data', (chunk) => {
            stderr += chunk;
        });
        proc.on('close', (exitCode) => {
            if (exitCode !== 0) {
                reject(new Error(stdout || stderr));
                return;
            }
            resolve(stdout || stderr);
        });
    });
}
exports.execute = execute;
//# sourceMappingURL=sub-process.js.map
/***/ }),
/***/ 58432:
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "__extends": () => (/* binding */ __extends),
/* harmony export */ "__assign": () => (/* binding */ __assign),
/* harmony export */ "__rest": () => (/* binding */ __rest),
/* harmony export */ "__decorate": () => (/* binding */ __decorate),
/* harmony export */ "__param": () => (/* binding */ __param),
/* harmony export */ "__metadata": () => (/* binding */ __metadata),
/* harmony export */ "__awaiter": () => (/* binding */ __awaiter),
/* harmony export */ "__generator": () => (/* binding */ __generator),
/* harmony export */ "__createBinding": () => (/* binding */ __createBinding),
/* harmony export */ "__exportStar": () => (/* binding */ __exportStar),
/* harmony export */ "__values": () => (/* binding */ __values),
/* harmony export */ "__read": () => (/* binding */ __read),
/* harmony export */ "__spread": () => (/* binding */ __spread),
/* harmony export */ "__spreadArrays": () => (/* binding */ __spreadArrays),
/* harmony export */ "__spreadArray": () => (/* binding */ __spreadArray),
/* harmony export */ "__await": () => (/* binding */ __await),
/* harmony export */ "__asyncGenerator": () => (/* binding */ __asyncGenerator),
/* harmony export */ "__asyncDelegator": () => (/* binding */ __asyncDelegator),
/* harmony export */ "__asyncValues": () => (/* binding */ __asyncValues),
/* harmony export */ "__makeTemplateObject": () => (/* binding */ __makeTemplateObject),
/* harmony export */ "__importStar": () => (/* binding */ __importStar),
/* harmony export */ "__importDefault": () => (/* binding */ __importDefault),
/* harmony export */ "__classPrivateFieldGet": () => (/* binding */ __classPrivateFieldGet),
/* harmony export */ "__classPrivateFieldSet": () => (/* binding */ __classPrivateFieldSet)
/* harmony export */ });
/*! *****************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */
/* global Reflect, Promise */
// Prototype wiring for downlevel `class X extends Y`: lazily picks the best
// available strategy — Object.setPrototypeOf, __proto__ assignment, or
// own-property copying as a last resort.
var extendStatics = function(d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
// Establishes the inheritance chain: copies statics via extendStatics and
// links d.prototype to b.prototype (or to null for `extends null`).
function __extends(d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
}
// Downlevel for object spread / Object.assign: on first call selects the
// native Object.assign when present (replacing itself), else falls back to
// copying own enumerable properties onto the first argument.
var __assign = function() {
__assign = Object.assign || function __assign(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
}
return t;
}
return __assign.apply(this, arguments);
}
// Object rest destructuring: copies own enumerable props of `s` whose keys
// are not listed in `e`, including enumerable symbol keys.
function __rest(s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
}
// Applies decorators right-to-left; defers to Reflect.decorate when a
// metadata polyfill is loaded. `c < 3` distinguishes class decorators from
// member decorators (which receive a property descriptor).
function __decorate(decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
}
// Wraps a parameter decorator so it can run in the member-decorator
// position, forwarding the captured parameter index.
function __param(paramIndex, decorator) {
    return function (target, key) {
        decorator(target, key, paramIndex);
    };
}
// Emits design-time type metadata; a no-op (returns undefined) unless a
// Reflect.metadata polyfill is present.
function __metadata(metadataKey, metadataValue) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
}
// async/await downlevel: drives `generator`, adopting each yielded value
// into a promise of `P` (default Promise) and resuming the generator with
// its settlement until done.
function __awaiter(thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
}
// Generator downlevel: a resumable state machine where `_.label` is the
// resume point and ops are [code, value] pairs (per the visible switch:
// 0 next / 1 throw / 2 return / 4 yield / 5 yield* / 7 end-finally, with
// `_.trys` tracking try/catch/finally regions). Do not modify: the control
// flow is exactly what the TypeScript emitter targets.
function __generator(thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
}
// Re-export binding helper: defines a live getter when Object.create is
// available, otherwise copies the value once.
var __createBinding = Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
});
// `export * from`: re-exports every property of `m` except `default` and
// names already present on `o`.
function __exportStar(m, o) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);
}
// Returns an iterator over `o`: the native Symbol.iterator when available,
// otherwise a synthesized index-based iterator for array-likes.
function __values(o) {
    var symbolIterator = typeof Symbol === "function" && Symbol.iterator;
    var nativeIterator = symbolIterator && o[symbolIterator];
    if (nativeIterator) {
        return nativeIterator.call(o);
    }
    if (o && typeof o.length === "number") {
        var index = 0;
        return {
            next: function () {
                // Once past the end, drop the reference so `done` flips true.
                if (o && index >= o.length) o = void 0;
                return { value: o && o[index++], done: !o };
            }
        };
    }
    throw new TypeError(symbolIterator ? "Object is not iterable." : "Symbol.iterator is not defined.");
}
// Array destructuring from an iterable: pulls up to `n` values (all when
// n is undefined), closing the iterator on early exit; an error raised
// while iterating is re-thrown after the cleanup attempt.
function __read(o, n) {
var m = typeof Symbol === "function" && o[Symbol.iterator];
if (!m) return o;
var i = m.call(o), r, ar = [], e;
try {
while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
}
catch (error) { e = { error: error }; }
finally {
try {
if (r && !r.done && (m = i["return"])) m.call(i);
}
finally { if (e) throw e.error; }
}
return ar;
}
/** @deprecated */
// Spreads each iterable argument into one flat array via __read
// (superseded by __spreadArray in newer tslib emits).
function __spread() {
for (var ar = [], i = 0; i < arguments.length; i++)
ar = ar.concat(__read(arguments[i]));
return ar;
}
/** @deprecated */
// Concatenates all array arguments into a single pre-sized array.
function __spreadArrays() {
    var total = 0;
    for (var i = 0; i < arguments.length; i++) total += arguments[i].length;
    var result = Array(total);
    var k = 0;
    for (var i = 0; i < arguments.length; i++) {
        var source = arguments[i];
        for (var j = 0; j < source.length; j++, k++) {
            result[k] = source[j];
        }
    }
    return result;
}
// Appends `from` onto `to`. With `pack` (or the two-argument form) it
// densifies sparse inputs: holes become explicit undefined entries, copying
// lazily only once the first hole is seen.
function __spreadArray(to, from, pack) {
    var copied;
    if (pack || arguments.length === 2) {
        for (var idx = 0, len = from.length; idx < len; idx++) {
            if (copied || !(idx in from)) {
                if (!copied) copied = Array.prototype.slice.call(from, 0, idx);
                copied[idx] = from[idx];
            }
        }
    }
    return to.concat(copied || Array.prototype.slice.call(from));
}
// Marker wrapper distinguishing `await` from plain `yield` inside downlevel
// async generators; callable with or without `new`.
function __await(v) {
return this instanceof __await ? (this.v = v, this) : new __await(v);
}
// Async generator downlevel: serializes next/throw/return calls through the
// queue `q`, settling each pending promise as the underlying generator
// produces values (unwrapping __await markers via Promise.resolve).
function __asyncGenerator(thisArg, _arguments, generator) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var g = generator.apply(thisArg, _arguments || []), i, q = [];
return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
function fulfill(value) { resume("next", value); }
function reject(value) { resume("throw", value); }
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
}
// Supports `yield*` of an async iterable inside a downlevel generator body:
// alternates (via the flip-flop `p`) between wrapping values in __await and
// forwarding the resumed value.
function __asyncDelegator(o) {
var i, p;
return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; }
}
// for-await-of downlevel: uses Symbol.asyncIterator when the object has
// one, else adapts a sync iterator by resolving each result as a promise.
function __asyncValues(o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
}
// Attaches the raw strings array to the cooked strings array, producing the
// template object a tagged-template call site expects.
function __makeTemplateObject(cooked, raw) {
    if (Object.defineProperty) {
        Object.defineProperty(cooked, "raw", { value: raw });
    }
    else {
        cooked.raw = raw;
    }
    return cooked;
}
// Attaches the original module as the `default` member of a synthesized
// namespace object (non-writable when defineProperty is available).
var __setModuleDefault = Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
};
// `import * as ns` interop: ES modules pass through untouched; a CommonJS
// module is wrapped by copying its own props (except `default`) and
// exposing the module itself as `default`.
function __importStar(mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
}
// `import x from` interop: ES-module namespace objects pass through;
// CommonJS exports get wrapped so `.default` access works uniformly.
function __importDefault(mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { default: mod };
}
// Checked read of a #private member; `kind` is "m" (method), "a" (accessor)
// or field (default), with `state` the WeakMap/WeakSet backing store.
function __classPrivateFieldGet(receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
}
// Checked write of a #private member; mirrors __classPrivateFieldGet and
// returns the assigned value. Private methods ("m") are never writable.
function __classPrivateFieldSet(receiver, state, value, kind, f) {
if (kind === "m") throw new TypeError("Private method is not writable");
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
}
/***/ }),
/***/ 17603:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DockerPull = void 0;
const registryClient = __webpack_require__(28310);
const crypto = __webpack_require__(76417);
const fs = __webpack_require__(35747);
const os = __webpack_require__(12087);
const path = __webpack_require__(85622);
const tmp = __webpack_require__(29033);
const subProcess = __webpack_require__(77685);
const tar = __webpack_require__(53871);
const util_1 = __webpack_require__(31669);
const errors_1 = __webpack_require__(26518);
// Promisified fs helpers used throughout this module.
const readFile = util_1.promisify(fs.readFile);
const link = util_1.promisify(fs.link);
const stat = util_1.promisify(fs.stat);
// Legacy per-layer `json` metadata written into the docker-save tarball;
// only `id`/`parent` are filled in per layer, everything else is boilerplate.
const DEFAULT_LAYER_JSON = {
created: "0001-01-01T00:00:00Z",
// eslint-disable-next-line @typescript-eslint/camelcase
container_config: {
Hostname: "",
Domainname: "",
User: "",
AttachStdin: false,
AttachStdout: false,
AttachStderr: false,
Tty: false,
OpenStdin: false,
StdinOnce: false,
Env: null,
Cmd: null,
Image: "",
Volumes: null,
WorkingDir: "",
Entrypoint: null,
OnBuild: null,
Labels: null
}
};
/**
 * Pulls an image from a Docker registry layer-by-layer over the registry
 * HTTP API, reassembles it into a `docker save`-compatible tarball, and
 * (by default) `docker load`s it into the local daemon.
 */
class DockerPull {
    /**
     * Locate the `docker` executable via `which`.
     * @returns {Promise<string>} trimmed path to the binary.
     * @throws {Error} carrying `which`'s stderr when docker is not on PATH.
     */
    static async findDockerBinary() {
        return subProcess
            .execute("which", ["docker"], undefined, undefined, true)
            .then(cmdOutput => cmdOutput.stdout.trim())
            .catch(cmdOutput => {
            throw new Error(cmdOutput.stderr);
        });
    }
    /**
     * Pull `registryBase/repo:tag`.
     * @param opt optional settings: username/password/reqOptions for the
     *   registry client, loadImage (default true), imageSavePath, and
     *   calculateMissingLayersDigests.
     * @returns pull metadata: imageDigest (when loaded), stagingDir (when
     *   not loaded), layer digest lists, and pull duration in ms.
     * @throws InvalidManifestSchemaVersionError for non-v2 manifests.
     */
    async pull(registryBase, repo, tag, opt) {
        const loadImage = (opt === null || opt === void 0 ? void 0 : opt.loadImage) === undefined ? true : opt.loadImage;
        const manifest = await registryClient.getManifest(registryBase, repo, tag, opt === null || opt === void 0 ? void 0 : opt.username, opt === null || opt === void 0 ? void 0 : opt.password, opt === null || opt === void 0 ? void 0 : opt.reqOptions);
        if (manifest.schemaVersion !== 2) {
            throw new errors_1.InvalidManifestSchemaVersionError(manifest.schemaVersion);
        }
        const imageConfigMetadata = manifest.config;
        const imageConfig = await registryClient.getImageConfig(registryBase, repo, imageConfigMetadata.digest, opt === null || opt === void 0 ? void 0 : opt.username, opt === null || opt === void 0 ? void 0 : opt.password, opt === null || opt === void 0 ? void 0 : opt.reqOptions);
        const t0 = Date.now();
        const layersConfigs = manifest.layers;
        const missingLayers = await this.getLayers(layersConfigs, registryBase, repo, opt === null || opt === void 0 ? void 0 : opt.username, opt === null || opt === void 0 ? void 0 : opt.password, opt === null || opt === void 0 ? void 0 : opt.reqOptions);
        const pullDuration = Date.now() - t0;
        let imageDigest;
        const stagingDir = this.createDownloadedImageDestination(opt === null || opt === void 0 ? void 0 : opt.imageSavePath);
        try {
            await this.buildImage(imageConfigMetadata.digest, imageConfig, layersConfigs, missingLayers, stagingDir);
            if (loadImage) {
                imageDigest = await this.loadImage(registryBase, repo, tag, stagingDir);
            }
        }
        catch (err) {
            throw new Error(err.stderr);
        }
        finally {
            try {
                // Check if the image should be saved for debugging.
                const saveMatcher = Object.assign(Object.assign({}, opt), { registryBase,
                    repo,
                    tag });
                for (const [name, requestMatcher] of Object.entries(await this.saveRequests())) {
                    if (Object.keys(requestMatcher).every(key => requestMatcher[key] === saveMatcher[key])) {
                        await link(path.join(stagingDir.name, "image.tar"), tmp.tmpNameSync({ prefix: `${name}-`, postfix: ".tar" }));
                        break;
                    }
                }
            }
            catch (err) {
                console.error("pullSaveRequest error: ", err);
            }
            if (loadImage) {
                stagingDir.removeCallback();
            }
        }
        return {
            imageDigest,
            stagingDir: loadImage ? null : stagingDir,
            cachedLayersDigests: [],
            missingLayersDigests: missingLayers.map(layer => layer.config.digest),
            pullDuration,
            // Guard `opt` like every other access above — it may be undefined.
            missingLayersCalculatedDigests: (opt === null || opt === void 0 ? void 0 : opt.calculateMissingLayersDigests)
                ? missingLayers.map(layer => this.calculateLayerDigest(layer))
                : []
        };
    }
    /**
     * Download all layer blobs in parallel.
     * @returns array of { config, blob } pairs in manifest layer order.
     */
    async getLayers(layersConfigs, registryBase, repo, username, password,
    // weak typing on the client
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    reqOptions = {}) {
        return await Promise.all(layersConfigs.map(async (config) => {
            const blob = await registryClient.getLayer(registryBase, repo, config.digest, username, password, reqOptions);
            return { config, blob };
        }));
    }
    /**
     * Recompute a layer's digest from its blob, using the hash algorithm
     * named in the layer config's digest (e.g. "sha256:<hex>").
     */
    calculateLayerDigest(layer) {
        const hashAlgorithm = layer.config.digest.split(":")[0];
        const calculatedDigest = crypto
            .createHash(hashAlgorithm)
            .update(layer.blob)
            .digest("hex");
        return `${hashAlgorithm}:${calculatedDigest}`;
    }
    /**
     * Read the optional debug matcher file from the OS temp dir.
     * @returns the matcher map, or {} when absent or unreadable.
     */
    async saveRequests() {
        const saveRequestsPath = path.join(os.tmpdir(), "pullSaveRequest.json");
        try {
            if (await stat(saveRequestsPath)) {
                return JSON.parse((await readFile(saveRequestsPath)).toString("utf-8"));
            }
        }
        catch (err) {
            return {};
        }
        // Always return an iterable map so callers can Object.entries() it.
        return {};
    }
    /**
     * Assemble a docker-save layout tarball (per-layer dirs + image config
     * json + manifest.json) at `<stagingDir>/image.tar`.
     * @returns path to the written tarball once the stream closes.
     */
    async buildImage(imageDigest, imageConfig, layersConfigs, layers, stagingDir) {
        const pack = tar.pack();
        // write layers
        let parentDigest;
        for (const layerConfig of layersConfigs) {
            const digest = layerConfig.digest.replace("sha256:", "");
            // write layer.tar
            let blob;
            for (const layer of layers) {
                if (layerConfig.digest === layer.config.digest) {
                    blob = layer.blob;
                    break;
                }
            }
            if (!blob) {
                throw new Error(`missing blob during build: ${digest}`);
            }
            pack.entry({ name: path.join(digest, "layer.tar") }, blob);
            // write json
            let json = Object.assign({}, { id: digest }, DEFAULT_LAYER_JSON);
            if (parentDigest) {
                // Prepend the parent link while keeping the id/default fields.
                // (A single-argument Object.assign here previously discarded
                // the rest of the layer metadata.)
                json = Object.assign({ parent: parentDigest }, json);
            }
            pack.entry({ name: path.join(digest, "json") }, JSON.stringify(json));
            parentDigest = digest;
            // write version
            pack.entry({ name: path.join(digest, "VERSION") }, "1.0");
        }
        imageDigest = imageDigest.replace("sha256:", "");
        // write image json
        pack.entry({ name: `${imageDigest}.json` }, JSON.stringify(imageConfig));
        // write manifest.json
        const manifestJson = [
            {
                Config: `${imageDigest}.json`,
                RepoTags: null,
                Layers: layersConfigs.map(config => `${config.digest.replace("sha256:", "")}/layer.tar`)
            }
        ];
        pack.entry({ name: "manifest.json" }, JSON.stringify(manifestJson), () => {
            pack.finalize();
        });
        const imagePath = path.join(stagingDir.name, "image.tar");
        const file = fs.createWriteStream(imagePath);
        pack.pipe(file);
        return new Promise(resolve => {
            file.on("close", () => {
                resolve(path.join(imagePath));
            });
        });
    }
    /**
     * `docker load` the built tarball and tag it registryBase/repo:tag.
     * @returns the loaded image digest (hex, without the "sha256:" prefix).
     */
    async loadImage(registryBase, repo, tag, stagingDir) {
        const dockerBinary = await DockerPull.findDockerBinary();
        const stdout = (await subProcess.execute(dockerBinary, ["load", "-i", "image.tar"], stagingDir.name)).stdout;
        // Loaded image ID: sha256:36456e9e9cb7c4b17d97461a5aeb062a481401e3d2b559285c7083d8e7f8efa6
        const imgDigest = stdout.split("sha256:")[1].trim();
        await subProcess.execute(dockerBinary, [
            "tag",
            `${imgDigest}`,
            `${registryBase}/${repo}:${tag}`
        ]);
        return imgDigest;
    }
    /**
     * Pick the staging directory: a self-cleaning temp dir by default, or a
     * caller-supplied path with a no-op cleanup callback.
     */
    createDownloadedImageDestination(imageSavePath) {
        if (!imageSavePath) {
            return tmp.dirSync({ unsafeCleanup: true });
        }
        const dirResult = {
            name: imageSavePath,
            removeCallback: () => {
                /* do nothing */
            }
        };
        return dirResult;
    }
}
exports.DockerPull = DockerPull;
//# sourceMappingURL=docker-pull.js.map
/***/ }),
/***/ 26518:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.InvalidManifestSchemaVersionError = void 0;
/**
 * Raised when a registry manifest reports a schema version other than 2.
 * Carries HTTP-style code 422 (unprocessable).
 */
class InvalidManifestSchemaVersionError extends Error {
    constructor(version) {
        super(`Invalid manifest schema version ${version}`);
        // Expose a meaningful name (instead of the default "Error"), matching
        // the error classes elsewhere in this bundle (e.g. OutOfSyncError).
        this.name = "InvalidManifestSchemaVersionError";
        this.code = 422;
    }
}
exports.InvalidManifestSchemaVersionError = InvalidManifestSchemaVersionError;
//# sourceMappingURL=errors.js.map
/***/ }),
/***/ 39106:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
var docker_pull_1 = __webpack_require__(17603);
Object.defineProperty(exports, "DockerPull", ({ enumerable: true, get: function () { return docker_pull_1.DockerPull; } }));
var errors_1 = __webpack_require__(26518);
Object.defineProperty(exports, "InvalidManifestSchemaVersionError", ({ enumerable: true, get: function () { return errors_1.InvalidManifestSchemaVersionError; } }));
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 77685:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.execute = void 0;
const childProcess = __webpack_require__(63129);
/**
 * Spawn `command` with `args`, buffering stdout and stderr.
 * Resolves with { stdout, stderr } on exit code 0 and rejects with the same
 * shape on any non-zero exit.
 */
async function execute(command, args, cwd, env, shell = false) {
    const spawnOptions = { shell };
    if (cwd) {
        spawnOptions.cwd = cwd;
    }
    if (env) {
        spawnOptions.env = env;
    }
    return new Promise((resolve, reject) => {
        const proc = childProcess.spawn(command, args, spawnOptions);
        let stdout = "";
        let stderr = "";
        proc.stdout.on("data", chunk => {
            stdout += chunk;
        });
        proc.stderr.on("data", chunk => {
            stderr += chunk;
        });
        proc.on("close", code => {
            const output = { stdout, stderr };
            if (code === 0) {
                resolve(output);
            }
            else {
                reject(output);
            }
        });
    });
}
exports.execute = execute;
//# sourceMappingURL=sub-process.js.map
/***/ }),
/***/ 37129:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const assert = __webpack_require__(42357)
const path = __webpack_require__(85622)
const fs = __webpack_require__(35747)
// `glob` is an optional dependency: when absent, rimraf still removes
// literal paths but refuses glob patterns (enforced in `defaults`).
let glob = undefined
try {
glob = __webpack_require__(12884)
} catch (_err) {
// treat glob as optional.
}
// Options handed to glob when the caller supplies none of their own.
const defaultGlobOpts = {
nosort: true,
silent: true
}
// for EMFILE handling
let timeout = 0
const isWindows = (process.platform === "win32")
// Fill in fs-method overrides, retry tuning, and glob settings, mutating
// `options` in place (callers pass a fresh object or accept the mutation).
// Throws when glob patterns would be needed but the glob module is absent.
const defaults = options => {
    const methods = [
        'unlink',
        'chmod',
        'stat',
        'lstat',
        'rmdir',
        'readdir'
    ]
    for (const method of methods) {
        options[method] = options[method] || fs[method]
        const syncMethod = method + 'Sync'
        options[syncMethod] = options[syncMethod] || fs[syncMethod]
    }
    options.maxBusyTries = options.maxBusyTries || 3
    options.emfileWait = options.emfileWait || 1000
    if (options.glob === false) {
        options.disableGlob = true
    }
    if (options.disableGlob !== true && glob === undefined) {
        throw Error('glob dependency not found, set `options.disableGlob = true` if intentional')
    }
    options.disableGlob = options.disableGlob || false
    options.glob = options.glob || defaultGlobOpts
}
// Asynchronously remove `p` — a file, directory tree, or (when glob is
// available) a glob pattern. Retries EBUSY/ENOTEMPTY/EPERM up to
// `maxBusyTries` times with linear backoff, and backs off on EMFILE up to
// `emfileWait` before reporting the first error via `cb`.
const rimraf = (p, options, cb) => {
if (typeof options === 'function') {
cb = options
options = {}
}
assert(p, 'rimraf: missing path')
assert.equal(typeof p, 'string', 'rimraf: path should be a string')
assert.equal(typeof cb, 'function', 'rimraf: callback function required')
assert(options, 'rimraf: invalid options argument provided')
assert.equal(typeof options, 'object', 'rimraf: options should be object')
defaults(options)
let busyTries = 0
let errState = null
let n = 0
// Report the first recorded error once every pending removal finishes.
const next = (er) => {
errState = errState || er
if (--n === 0)
cb(errState)
}
// Remove every matched path in parallel, with per-path retry logic.
const afterGlob = (er, results) => {
if (er)
return cb(er)
n = results.length
if (n === 0)
return cb()
results.forEach(p => {
const CB = (er) => {
if (er) {
if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") &&
busyTries < options.maxBusyTries) {
busyTries ++
// try again, with the same exact callback as this one.
return setTimeout(() => rimraf_(p, options, CB), busyTries * 100)
}
// this one won't happen if graceful-fs is used.
if (er.code === "EMFILE" && timeout < options.emfileWait) {
return setTimeout(() => rimraf_(p, options, CB), timeout ++)
}
// already gone
if (er.code === "ENOENT") er = null
}
timeout = 0
next(er)
}
rimraf_(p, options, CB)
})
}
// Only consult glob when globbing is enabled and the pattern contains
// magic characters; a path that lstats successfully is taken verbatim.
if (options.disableGlob || !glob.hasMagic(p))
return afterGlob(null, [p])
options.lstat(p, (er, stat) => {
if (!er)
return afterGlob(null, [p])
glob(p, options.glob, afterGlob)
})
}
// Two possible strategies.
// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
//
// Both result in an extra syscall when you guess wrong. However, there
// are likely far more normal files in the world than directories. This
// is based on the assumption that a the average number of files per
// directory is >= 1.
//
// If anyone ever complains about this, then I guess the strategy could
// be made configurable somehow. But until then, YAGNI.
// Removal worker for a single path: unlink-first strategy, falling back to
// rmdir on EPERM/EISDIR (see the strategy comment above this function).
const rimraf_ = (p, options, cb) => {
assert(p)
assert(options)
assert(typeof cb === 'function')
// sunos lets the root user unlink directories, which is... weird.
// so we have to lstat here and make sure it's not a dir.
options.lstat(p, (er, st) => {
if (er && er.code === "ENOENT")
return cb(null)
// Windows can EPERM on stat. Life is suffering.
// NOTE(review): there is no `return` here, so the unlink below may also
// run after fixWinEPERM is kicked off; this mirrors the vendored rimraf
// source — confirm against upstream before changing.
if (er && er.code === "EPERM" && isWindows)
fixWinEPERM(p, options, er, cb)
if (st && st.isDirectory())
return rmdir(p, options, er, cb)
options.unlink(p, er => {
if (er) {
if (er.code === "ENOENT")
return cb(null)
if (er.code === "EPERM")
return (isWindows)
? fixWinEPERM(p, options, er, cb)
: rmdir(p, options, er, cb)
if (er.code === "EISDIR")
return rmdir(p, options, er, cb)
}
return cb(er)
})
})
}
// Windows EPERM workaround: chmod the entry writable, re-stat it, then
// remove it as a directory or file. ENOENT during the dance counts as
// success; other failures report the ORIGINAL error `er`.
const fixWinEPERM = (p, options, er, cb) => {
assert(p)
assert(options)
assert(typeof cb === 'function')
options.chmod(p, 0o666, er2 => {
if (er2)
cb(er2.code === "ENOENT" ? null : er)
else
options.stat(p, (er3, stats) => {
if (er3)
cb(er3.code === "ENOENT" ? null : er)
else if (stats.isDirectory())
rmdir(p, options, er, cb)
else
options.unlink(p, cb)
})
})
}
// Synchronous twin of fixWinEPERM: chmod writable, stat, then remove as
// dir or file. ENOENT at any step means the entry is already gone; other
// failures re-throw the ORIGINAL error `er`.
const fixWinEPERMSync = (p, options, er) => {
assert(p)
assert(options)
try {
options.chmodSync(p, 0o666)
} catch (er2) {
if (er2.code === "ENOENT")
return
else
throw er
}
let stats
try {
stats = options.statSync(p)
} catch (er3) {
if (er3.code === "ENOENT")
return
else
throw er
}
if (stats.isDirectory())
rmdirSync(p, options, er)
else
options.unlinkSync(p)
}
// Remove a directory; on ENOTEMPTY/EEXIST/EPERM recurse into the children
// first. ENOTDIR means the earlier unlink-first guess was wrong, so the
// ORIGINAL unlink error is surfaced instead.
const rmdir = (p, options, originalEr, cb) => {
assert(p)
assert(options)
assert(typeof cb === 'function')
// try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
// if we guessed wrong, and it's not a directory, then
// raise the original error.
options.rmdir(p, er => {
if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM"))
rmkids(p, options, cb)
else if (er && er.code === "ENOTDIR")
cb(originalEr)
else
cb(er)
})
}
// Remove every child of `p` in parallel, then rmdir `p` itself once the
// final child callback fires. The first child error wins and is reported
// exactly once.
const rmkids = (p, options, cb) => {
assert(p)
assert(options)
assert(typeof cb === 'function')
options.readdir(p, (er, files) => {
if (er)
return cb(er)
let n = files.length
if (n === 0)
return options.rmdir(p, cb)
let errState
files.forEach(f => {
rimraf(path.join(p, f), options, er => {
if (errState)
return
if (er)
return cb(errState = er)
if (--n === 0)
options.rmdir(p, cb)
})
})
})
}
// this looks simpler, and is strictly *faster*, but will
// tie up the JavaScript thread and fail on excessively
// deep directory trees.
// Synchronous variant of rimraf: resolves globs eagerly, then removes each
// result with the same unlink-first / rmdir-fallback strategy. Blocks the
// JS thread and can fail on extremely deep trees (see comment above).
const rimrafSync = (p, options) => {
options = options || {}
defaults(options)
assert(p, 'rimraf: missing path')
assert.equal(typeof p, 'string', 'rimraf: path should be a string')
assert(options, 'rimraf: missing options')
assert.equal(typeof options, 'object', 'rimraf: options should be object')
let results
if (options.disableGlob || !glob.hasMagic(p)) {
results = [p]
} else {
try {
options.lstatSync(p)
results = [p]
} catch (er) {
results = glob.sync(p, options.glob)
}
}
if (!results.length)
return
for (let i = 0; i < results.length; i++) {
const p = results[i]
let st
try {
st = options.lstatSync(p)
} catch (er) {
if (er.code === "ENOENT")
return
// Windows can EPERM on stat. Life is suffering.
if (er.code === "EPERM" && isWindows)
fixWinEPERMSync(p, options, er)
}
try {
// sunos lets the root user unlink directories, which is... weird.
if (st && st.isDirectory())
rmdirSync(p, options, null)
else
options.unlinkSync(p)
} catch (er) {
if (er.code === "ENOENT")
return
if (er.code === "EPERM")
return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
if (er.code !== "EISDIR")
throw er
rmdirSync(p, options, er)
}
}
}
// Synchronously remove directory `p`; when it is non-empty, recursively
// clear its children via rmkidsSync. ENOENT counts as success, ENOTDIR
// re-raises the caller-supplied original error, and any other code is
// deliberately swallowed (matching the async variant's best-effort style).
const rmdirSync = (p, options, originalEr) => {
assert(p)
assert(options)
try {
options.rmdirSync(p)
} catch (er) {
switch (er.code) {
case "ENOENT":
return
case "ENOTDIR":
throw originalEr
case "ENOTEMPTY":
case "EEXIST":
case "EPERM":
rmkidsSync(p, options)
}
}
}
// Synchronously remove all children of `p`, then retry rmdir on `p` itself.
// On Windows the rmdir is retried up to 100 times because lingering open
// handles can produce spurious ENOTEMPTY shortly after the children go away.
const rmkidsSync = (p, options) => {
assert(p)
assert(options)
options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))
// We only end up here once we got ENOTEMPTY at least once, and
// at this point, we are guaranteed to have removed all the kids.
// So, we know that it won't be ENOENT or ENOTDIR or anything else.
// try really hard to delete stuff on windows, because it has a
// PROFOUNDLY annoying habit of not closing handles promptly when
// files are deleted, resulting in spurious ENOTEMPTY errors.
const retries = isWindows ? 100 : 1
let i = 0
do {
let threw = true
try {
const ret = options.rmdirSync(p, options)
threw = false
return ret
} finally {
// `continue` inside a finally block swallows the pending exception and
// moves on to the next retry; once retries are exhausted the exception
// propagates normally.
if (++i < retries && threw)
continue
}
} while (true)
}
// Public API: rimraf (async) with the sync variant attached as rimraf.sync.
module.exports = rimraf
rimraf.sync = rimrafSync
/***/ }),
/***/ 29033:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
/*!
* Tmp
*
* Copyright (c) 2011-2017 KARASZI Istvan
*
* MIT Licensed
*/
/*
* Module dependencies.
*/
const fs = __webpack_require__(35747);
const os = __webpack_require__(12087);
const path = __webpack_require__(85622);
const crypto = __webpack_require__(76417);
const _c = { fs: fs.constants, os: os.constants };
const rimraf = __webpack_require__(37129);
/*
 * The working inner variables.
 */
const
// the random characters to choose from
RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',
TEMPLATE_PATTERN = /XXXXXX/,
DEFAULT_TRIES = 3,
// O_CREAT|O_EXCL|O_RDWR: create the file, fail if it already exists; the
// `_c.O_*` fallbacks cover node versions exposing constants on fs directly
CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR),
// constants are off on the windows platform and will not match the actual errno codes
IS_WIN32 = os.platform() === 'win32',
EBADF = _c.EBADF || _c.os.errno.EBADF,
ENOENT = _c.ENOENT || _c.os.errno.ENOENT,
DIR_MODE = 0o700 /* 448 */,
FILE_MODE = 0o600 /* 384 */,
EXIT = 'exit',
// this will hold the objects need to be removed on exit
_removeObjects = [],
// API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback
FN_RMDIR_SYNC = fs.rmdirSync.bind(fs),
FN_RIMRAF_SYNC = rimraf.sync;
let
// whether process-exit cleanup of tracked tmp objects is enabled
_gracefulCleanup = false;
/**
 * Gets a temporary file name.
 *
 * Keeps generating candidate names and stat-ing them until one does not
 * exist on disk, giving up after `opts.tries` attempts.
 *
 * @param {(Options|tmpNameCallback)} options options or callback
 * @param {?tmpNameCallback} callback the callback function
 */
function tmpName(options, callback) {
const
args = _parseArguments(options, callback),
opts = args[0],
cb = args[1];
try {
_assertAndSanitizeOptions(opts);
} catch (err) {
return cb(err);
}
let tries = opts.tries;
(function _getUniqueName() {
try {
const name = _generateTmpName(opts);
// check whether the path exists then retry if needed
fs.stat(name, function (err) {
/* istanbul ignore else */
if (!err) {
// no stat error means the path already exists -> retry
/* istanbul ignore else */
if (tries-- > 0) return _getUniqueName();
return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name));
}
cb(null, name);
});
} catch (err) {
cb(err);
}
}());
}
/**
 * Synchronous version of tmpName.
 *
 * Generates candidate names until one does not exist on disk; statSync
 * throwing is the success path (the name is free to use).
 *
 * @param {Object} options
 * @returns {string} the generated random name
 * @throws {Error} if the options are invalid or could not generate a filename
 */
function tmpNameSync(options) {
const [opts] = _parseArguments(options);
_assertAndSanitizeOptions(opts);
// opts.tries + 1 attempts, matching the original do/while post-decrement
for (let remaining = opts.tries; remaining >= 0; remaining--) {
const candidate = _generateTmpName(opts);
try {
fs.statSync(candidate);
} catch (e) {
return candidate;
}
}
throw new Error('Could not get a unique tmp filename, max tries reached');
}
/**
 * Creates and opens a temporary file.
 *
 * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined
 * @param {?fileCallback} callback
 */
function file(options, callback) {
const
args = _parseArguments(options, callback),
opts = args[0],
cb = args[1];
// gets a temporary filename
tmpName(opts, function _tmpNameCreated(err, name) {
/* istanbul ignore else */
if (err) return cb(err);
// create and open the file
fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) {
/* istanbul ignore else */
if (err) return cb(err);
if (opts.discardDescriptor) {
return fs.close(fd, function _discardCallback(possibleErr) {
// the chance of getting an error on close here is rather low and might occur in the most edgiest cases only
// fd is reported as undefined and the remove callback gets -1, so it
// will not attempt to close the already-closed descriptor
return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false));
});
} else {
// detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care
// about the descriptor
const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false));
}
});
});
}
/**
 * Synchronous version of file.
 *
 * @param {Options} options
 * @returns {FileSyncObject} object consists of name, fd and removeCallback
 * @throws {Error} if cannot create a file
 */
function fileSync(options) {
const [opts] = _parseArguments(options);
const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
const name = tmpNameSync(opts);
let fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE);
/* istanbul ignore else */
if (opts.discardDescriptor) {
// the caller asked us to close the descriptor right away
fs.closeSync(fd);
fd = undefined;
}
return {
name,
fd,
removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true)
};
}
/**
 * Creates a temporary directory.
 *
 * @param {(Options|dirCallback)} options the options or the callback function
 * @param {?dirCallback} callback
 */
function dir(options, callback) {
const [opts, cb] = _parseArguments(options, callback);
// gets a temporary filename
tmpName(opts, function _tmpNameCreated(err, name) {
/* istanbul ignore else */
if (err) return cb(err);
// create the directory
fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) {
/* istanbul ignore else */
if (err) return cb(err);
cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false));
});
});
}
/**
 * Synchronous version of dir.
 *
 * @param {Options} options
 * @returns {DirSyncObject} object consists of name and removeCallback
 * @throws {Error} if it cannot create a directory
 */
function dirSync(options) {
const [opts] = _parseArguments(options);
const name = tmpNameSync(opts);
fs.mkdirSync(name, opts.mode || DIR_MODE);
return {
name,
removeCallback: _prepareTmpDirRemoveCallback(name, opts, true)
};
}
/**
 * Removes files asynchronously.
 *
 * Closes the attached descriptor first (when fdPath[0] >= 0) and then
 * unlinks the file. ENOENT on unlink is tolerated (already gone).
 *
 * @param {Object} fdPath tuple of [fd, path]
 * @param {Function} next
 * @private
 */
function _removeFileAsync(fdPath, next) {
const done = function (err) {
if (err && !_isENOENT(err)) {
// reraise any unanticipated error
return next(err);
}
next();
};
const [fd, filePath] = fdPath;
if (fd >= 0) {
fs.close(fd, function () {
fs.unlink(filePath, done);
});
} else {
fs.unlink(filePath, done);
}
}
/**
 * Removes files synchronously.
 *
 * Closes the attached descriptor (if any) and unlinks the file. Errors that
 * merely indicate "already gone" (ENOENT) or "already closed" (EBADF) are
 * tolerated; any other unlink error is rethrown after the close attempt.
 *
 * @param {Object} fdPath tuple of [fd, path]
 * @private
 */
function _removeFileSync(fdPath) {
let rethrownException = null;
try {
if (0 <= fdPath[0]) fs.closeSync(fdPath[0]);
} catch (e) {
// reraise any unanticipated error
if (!_isEBADF(e) && !_isENOENT(e)) throw e;
} finally {
try {
fs.unlinkSync(fdPath[1]);
}
catch (e) {
// reraise any unanticipated error
// (deferred so the close handling above is not masked)
if (!_isENOENT(e)) rethrownException = e;
}
}
if (rethrownException !== null) {
throw rethrownException;
}
}
/**
 * Prepares the callback for removal of the temporary file.
 *
 * Returns either a sync callback or a async callback depending on whether
 * fileSync or file was called, which is expressed by the sync parameter.
 *
 * @param {string} name the path of the file
 * @param {number} fd file descriptor (-1 when discarded/detached)
 * @param {Object} opts
 * @param {boolean} sync
 * @returns {fileCallback | fileCallbackSync}
 * @private
 */
function _prepareTmpFileRemoveCallback(name, fd, opts, sync) {
const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync);
const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync);
// the sync variant is what the exit-time garbage collector runs
if (!opts.keep) _removeObjects.unshift(removeCallbackSync);
return sync ? removeCallbackSync : removeCallback;
}
/**
 * Prepares the callback for removal of the temporary directory.
 *
 * Returns either a sync callback or a async callback depending on whether
 * tmpFileSync or tmpFile was called, which is expressed by the sync parameter.
 *
 * @param {string} name
 * @param {Object} opts
 * @param {boolean} sync
 * @returns {Function} the callback
 * @private
 */
function _prepareTmpDirRemoveCallback(name, opts, sync) {
// unsafeCleanup removes non-empty directories recursively (rimraf)
const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs);
const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC;
const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync);
const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync);
// the sync variant is what the exit-time garbage collector runs
if (!opts.keep) _removeObjects.unshift(removeCallbackSync);
return sync ? removeCallbackSync : removeCallback;
}
/**
 * Creates a guarded function wrapping the removeFunction call.
 *
 * The cleanup callback is safe to be called multiple times.
 * Subsequent invocations will be ignored.
 *
 * @param {Function} removeFunction
 * @param {string} fileOrDirName
 * @param {boolean} sync
 * @param {cleanupCallbackSync?} cleanupCallbackSync
 * @returns {cleanupCallback | cleanupCallbackSync}
 * @private
 */
function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) {
let called = false;
// if sync is true, the next parameter will be ignored
return function _cleanupCallback(next) {
/* istanbul ignore else */
if (!called) {
// remove cleanupCallback from cache
const toRemove = cleanupCallbackSync || _cleanupCallback;
const index = _removeObjects.indexOf(toRemove);
/* istanbul ignore else */
if (index >= 0) _removeObjects.splice(index, 1);
called = true;
// the FN_* identity checks catch the sync remove functions handed in by
// _prepareTmpDirRemoveCallback, which do not take a callback argument
if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) {
return removeFunction(fileOrDirName);
} else {
return removeFunction(fileOrDirName, next || function() {});
}
}
};
}
/**
 * The garbage collector.
 *
 * Runs on process exit and removes every still-tracked temporary object,
 * unless graceful cleanup was never enabled via setGracefulCleanup().
 *
 * @private
 */
function _garbageCollector() {
/* istanbul ignore else */
if (!_gracefulCleanup) return;
// the function being called removes itself from _removeObjects,
// loop until _removeObjects is empty
while (_removeObjects.length) {
try {
_removeObjects[0]();
} catch (e) {
// already removed?
// (best effort on exit: never let cleanup errors crash the process)
}
}
}
/**
 * Random name generator based on crypto.
 * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript
 *
 * @param {number} howMany
 * @returns {string} the generated random name
 * @private
 */
function _randomChars(howMany) {
let rnd = null;
// make sure that we do not fail because we ran out of entropy
try {
rnd = crypto.randomBytes(howMany);
} catch (e) {
rnd = crypto.pseudoRandomBytes(howMany);
}
const chars = [];
for (let i = 0; i < howMany; i++) {
chars.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]);
}
return chars.join('');
}
/**
 * Helper which determines whether a string s is blank, that is undefined, or empty or null.
 *
 * @private
 * @param {string} s
 * @returns {Boolean} true whether the string s is blank, false otherwise
 */
function _isBlank(s) {
if (s === null || _isUndefined(s)) return true;
return !s.trim();
}
/**
 * Checks whether the `obj` parameter is defined or not.
 *
 * @param {Object} obj
 * @returns {boolean} true if the object is undefined
 * @private
 */
function _isUndefined(obj) {
return obj === undefined;
}
/**
 * Parses the function arguments.
 *
 * This function helps to have optional arguments.
 *
 * @param {(Options|null|undefined|Function)} options
 * @param {?Function} callback
 * @returns {Array} parsed arguments as [opts, callback]
 * @private
 */
function _parseArguments(options, callback) {
/* istanbul ignore else */
if (typeof options === 'function') {
// options was omitted entirely; the first argument is the callback
return [{}, options];
}
/* istanbul ignore else */
if (_isUndefined(options)) {
return [{}, callback];
}
// copy options so we do not leak the changes we make internally
// (getOwnPropertyNames also picks up non-enumerable properties)
const actualOptions = {};
Object.getOwnPropertyNames(options).forEach(function (key) {
actualOptions[key] = options[key];
});
return [actualOptions, callback];
}
/**
 * Generates a new temporary name.
 *
 * Precedence: an explicit `name` wins, then a `template` (with its XXXXXX
 * placeholder replaced by random characters), then a generated
 * "tmp-<pid>-<random>" name with optional prefix/postfix.
 *
 * @param {Object} opts
 * @returns {string} the new random name according to opts
 * @private
 */
function _generateTmpName(opts) {
const tmpDir = opts.tmpdir;
/* istanbul ignore else */
if (!_isUndefined(opts.name)) {
return path.join(tmpDir, opts.dir, opts.name);
}
/* istanbul ignore else */
if (!_isUndefined(opts.template)) {
return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6));
}
// prefix and postfix
const generated = (opts.prefix ? opts.prefix : 'tmp') +
'-' + process.pid +
'-' + _randomChars(12) +
(opts.postfix ? '-' + opts.postfix : '');
return path.join(tmpDir, opts.dir, generated);
}
/**
 * Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing
 * options.
 *
 * Mutates `options` in place: resolves tmpdir, validates name/dir/template
 * against it, normalizes tries, coerces boolean flags, and rewrites
 * dir/template to tmpdir-relative paths.
 *
 * @param {Options} options
 * @private
 */
function _assertAndSanitizeOptions(options) {
options.tmpdir = _getTmpDir(options);
const tmpDir = options.tmpdir;
/* istanbul ignore else */
if (!_isUndefined(options.name))
_assertIsRelative(options.name, 'name', tmpDir);
/* istanbul ignore else */
if (!_isUndefined(options.dir))
_assertIsRelative(options.dir, 'dir', tmpDir);
/* istanbul ignore else */
if (!_isUndefined(options.template)) {
_assertIsRelative(options.template, 'template', tmpDir);
if (!options.template.match(TEMPLATE_PATTERN))
throw new Error(`Invalid template, found "${options.template}".`);
}
/* istanbul ignore else */
if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0)
throw new Error(`Invalid tries, found "${options.tries}".`);
// if a name was specified we will try once
options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1;
options.keep = !!options.keep;
options.detachDescriptor = !!options.detachDescriptor;
options.discardDescriptor = !!options.discardDescriptor;
options.unsafeCleanup = !!options.unsafeCleanup;
// sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to
options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir));
options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir));
// sanitize further if template is relative to options.dir
options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template);
// for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to
options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name);
options.prefix = _isUndefined(options.prefix) ? '' : options.prefix;
options.postfix = _isUndefined(options.postfix) ? '' : options.postfix;
}
/**
 * Resolve the specified path name in respect to tmpDir.
 *
 * The specified name might include relative path components, e.g. ../
 * so we need to resolve in order to be sure that it is located inside tmpDir
 *
 * @param name
 * @param tmpDir
 * @returns {string}
 * @private
 */
function _resolvePath(name, tmpDir) {
const sanitizedName = _sanitizeName(name);
return sanitizedName.startsWith(tmpDir)
? path.resolve(sanitizedName)
: path.resolve(path.join(tmpDir, sanitizedName));
}
/**
 * Sanitize the specified path name by removing all quote characters.
 *
 * Blank values are returned unchanged (they have nothing to strip).
 *
 * @param name
 * @returns {string}
 * @private
 */
function _sanitizeName(name) {
return _isBlank(name) ? name : name.replace(/["']/g, '');
}
/**
 * Asserts whether specified name is relative to the specified tmpDir.
 *
 * @param {string} name
 * @param {string} option the option being validated ('name', 'dir' or 'template')
 * @param {string} tmpDir
 * @throws {Error}
 * @private
 */
function _assertIsRelative(name, option, tmpDir) {
if (option === 'name') {
// assert that name is not absolute and does not contain a path
if (path.isAbsolute(name))
throw new Error(`${option} option must not contain an absolute path, found "${name}".`);
// must not fail on valid . or .. or similar such constructs
let basename = path.basename(name);
if (basename === '..' || basename === '.' || basename !== name)
throw new Error(`${option} option must not contain a path, found "${name}".`);
}
else { // if (option === 'dir' || option === 'template') {
// assert that dir or template are relative to tmpDir
if (path.isAbsolute(name) && !name.startsWith(tmpDir)) {
throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`);
}
// also reject relative paths that escape tmpDir via .. segments
let resolvedPath = _resolvePath(name, tmpDir);
if (!resolvedPath.startsWith(tmpDir))
throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`);
}
}
/**
 * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows.
 *
 * @private
 */
function _isEBADF(error) {
return _isExpectedError(error, -EBADF, 'EBADF');
}
/**
 * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows.
 *
 * @private
 */
function _isENOENT(error) {
return _isExpectedError(error, -ENOENT, 'ENOENT');
}
/**
 * Helper to determine whether the expected error code matches the actual code and errno,
 * which will differ between the supported node versions.
 *
 * - Node >= 7.0:
 *   error.code {string}
 *   error.errno {number} any numerical value will be negated
 *
 * CAVEAT
 *
 * On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT
 * is no different here.
 *
 * @param {SystemError} error
 * @param {number} errno
 * @param {string} code
 * @returns {boolean} true when the error matches (code only on win32, see CAVEAT)
 * @private
 */
function _isExpectedError(error, errno, code) {
return IS_WIN32 ? error.code === code : error.code === code && error.errno === errno;
}
/**
 * Sets the graceful cleanup.
 *
 * If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the
 * temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary
 * object removals.
 */
function setGracefulCleanup() {
// checked by _garbageCollector when the process exit listener fires
_gracefulCleanup = true;
}
/**
 * Returns the currently configured tmp dir from os.tmpdir().
 *
 * An options.tmpdir override, when present, takes precedence.
 *
 * @private
 * @param {?Options} options
 * @returns {string} the currently configured tmp dir
 */
function _getTmpDir(options) {
const configured = (options && options.tmpdir) || os.tmpdir();
return path.resolve(_sanitizeName(configured));
}
// Install process exit listener
// (registered unconditionally; _garbageCollector no-ops unless
// setGracefulCleanup() was called)
process.addListener(EXIT, _garbageCollector);
/**
* Configuration options.
*
* @typedef {Object} Options
* @property {?boolean} keep the temporary object (file or dir) will not be garbage collected
* @property {?number} tries the number of tries before give up the name generation
 * @property {?number} mode the access mode, defaults are 0o700 for directories and 0o600 for files
* @property {?string} template the "mkstemp" like filename template
* @property {?string} name fixed name relative to tmpdir or the specified dir option
* @property {?string} dir tmp directory relative to the root tmp directory in use
* @property {?string} prefix prefix for the generated name
* @property {?string} postfix postfix for the generated name
* @property {?string} tmpdir the root tmp directory which overrides the os tmpdir
* @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty
* @property {?boolean} detachDescriptor detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection
* @property {?boolean} discardDescriptor discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection
*/
/**
* @typedef {Object} FileSyncObject
* @property {string} name the name of the file
* @property {string} fd the file descriptor or -1 if the fd has been discarded
* @property {fileCallback} removeCallback the callback function to remove the file
*/
/**
* @typedef {Object} DirSyncObject
* @property {string} name the name of the directory
* @property {fileCallback} removeCallback the callback function to remove the directory
*/
/**
* @callback tmpNameCallback
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary file name
*/
/**
* @callback fileCallback
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary file name
* @param {number} fd the file descriptor or -1 if the fd had been discarded
* @param {cleanupCallback} fn the cleanup callback function
*/
/**
* @callback fileCallbackSync
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary file name
* @param {number} fd the file descriptor or -1 if the fd had been discarded
* @param {cleanupCallbackSync} fn the cleanup callback function
*/
/**
* @callback dirCallback
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary file name
* @param {cleanupCallback} fn the cleanup callback function
*/
/**
* @callback dirCallbackSync
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary file name
* @param {cleanupCallbackSync} fn the cleanup callback function
*/
/**
* Removes the temporary created file or directory.
*
* @callback cleanupCallback
* @param {simpleCallback} [next] function to call whenever the tmp object needs to be removed
*/
/**
* Removes the temporary created file or directory.
*
* @callback cleanupCallbackSync
*/
/**
* Callback function for function composition.
* @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57}
*
* @callback simpleCallback
*/
// exporting all the needed methods
// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will
// allow users to reconfigure the temporary directory
Object.defineProperty(module.exports, "tmpdir", ({
enumerable: true,
configurable: false,
get: function () {
return _getTmpDir();
}
}));
module.exports.dir = dir;
module.exports.dirSync = dirSync;
module.exports.file = file;
module.exports.fileSync = fileSync;
module.exports.tmpName = tmpName;
module.exports.tmpNameSync = tmpNameSync;
module.exports.setGracefulCleanup = setGracefulCleanup;
/***/ }),
/***/ 88395:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.copyElixirCodeToTempDir = void 0;
// This file was mostly copied from snyk-python-plugin and its purpose is to support the homebrew package
const fs = __webpack_require__(35747);
const path = __webpack_require__(85287);
const tmp = __webpack_require__(36276);
/**
 * Copies the bundled Elixir helper project into a fresh temporary directory.
 *
 * @returns the tmp dir object; call its `removeCallback()` to clean it up
 */
function copyElixirCodeToTempDir() {
const tempDir = tmp.dirSync({ unsafeCleanup: true });
dumpAllFilesInTempDir(tempDir.name);
return tempDir;
}
exports.copyElixirCodeToTempDir = copyElixirCodeToTempDir;
// Copies every bundled asset file into tempDirName, preserving each file's
// path relative to the elixirsrc directory.
function dumpAllFilesInTempDir(tempDirName) {
for (const sourcePath of createAssets()) {
if (!fs.existsSync(sourcePath)) {
throw new Error('The file `' + sourcePath + '` is missing');
}
const relativePath = getFilePathRelativeToDumpDir(sourcePath);
const destinationPath = path.join(tempDirName, relativePath);
writeFile(destinationPath, fs.readFileSync(sourcePath, 'utf8'));
}
}
// Returns the absolute paths of all Elixir source files that make up the
// embedded mix project (resolved relative to this bundle's directory).
function createAssets() {
const relativePaths = [
'../elixirsrc/mix.exs',
'../elixirsrc/mix.lock',
'../elixirsrc/lib/mix/tasks/read.mix.ex',
'../elixirsrc/lib/json/mix.exs',
'../elixirsrc/lib/json/lib/json/encoder.ex',
'../elixirsrc/lib/json/lib/json/logger.ex',
'../elixirsrc/lib/json/lib/json/decoder.ex',
'../elixirsrc/lib/json/lib/json/encoder/default_implementations.ex',
'../elixirsrc/lib/json/lib/json/encoder/errors.ex',
'../elixirsrc/lib/json/lib/json/encoder/helpers.ex',
'../elixirsrc/lib/json/lib/json/parser/number.ex',
'../elixirsrc/lib/json/lib/json/parser/object.ex',
'../elixirsrc/lib/json/lib/json/parser/unicode.ex',
'../elixirsrc/lib/json/lib/json/parser/string.ex',
'../elixirsrc/lib/json/lib/json/parser/array.ex',
'../elixirsrc/lib/json/lib/json/parser.ex',
'../elixirsrc/lib/json/lib/json.ex',
'../elixirsrc/lib/json/.formatter.exs',
'../elixirsrc/lib/common.ex',
'../elixirsrc/lib/mix_project.ex',
];
return relativePaths.map((p) => path.join(__dirname, p));
}
// Returns the portion of filePath below the `elixirsrc` directory, handling
// both Windows ('\') and POSIX ('/') separators.
// Fix: previously a path with no `elixirsrc` segment returned undefined,
// which made the caller's path.join(tempDirName, undefined) throw. We now
// degrade gracefully to the file's basename.
function getFilePathRelativeToDumpDir(filePath) {
// Windows
const winParts = filePath.split('\\elixirsrc\\');
if (winParts.length > 1) {
return winParts[1];
}
// Unix
const posixParts = filePath.split('/elixirsrc/');
if (posixParts.length > 1) {
return posixParts[1];
}
// no elixirsrc segment: fall back instead of returning undefined
return path.basename(filePath);
}
// Writes `contents` to writeFilePath, creating any missing parent
// directories first.
function writeFile(writeFilePath, contents) {
const parentDir = path.dirname(writeFilePath);
if (!fs.existsSync(parentDir)) {
fs.mkdirSync(parentDir, { recursive: true });
}
fs.writeFileSync(writeFilePath, contents);
}
//# sourceMappingURL=copy-elixir-code-to-temp-dir.js.map
/***/ }),
/***/ 39410:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.init = exports.debug = void 0;
const debugLib = __webpack_require__(15158);
const PLUGIN_NAME = 'snyk-hex-plugin';
exports.debug = debugLib(PLUGIN_NAME);
// Enables or disables this plugin's debug logging at runtime.
function init(enable = false) {
if (enable) {
debugLib.enable(PLUGIN_NAME);
} else {
debugLib.disable();
}
}
exports.init = init;
//# sourceMappingURL=debug.js.map
/***/ }),
/***/ 1649:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
var scan_1 = __webpack_require__(3113);
Object.defineProperty(exports, "scan", ({ enumerable: true, get: function () { return scan_1.scan; } }));
var inspect_1 = __webpack_require__(12442);
Object.defineProperty(exports, "inspect", ({ enumerable: true, get: function () { return inspect_1.inspect; } }));
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 12442:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.inspect = void 0;
const subProcess = __webpack_require__(21977);
const scan_1 = __webpack_require__(3113);
const PLUGIN_NAME = 'snyk-hex-plugin';
/**
 * Scans an Elixir/mix project and maps the results onto the legacy
 * "scannedProjects" plugin format expected by the CLI.
 *
 * @param {string} root the project root directory
 * @param {string} targetFile path to the mix.exs manifest (optional)
 * @param {Object} options debug/dev/allProjects/'project-name' flags
 */
async function inspect(root, targetFile, options = {}) {
const { debug, dev, allProjects, 'project-name': projectName } = options;
// the scan and the version lookup are independent, so run them in parallel
const [scanResult, pluginVersion] = await Promise.all([
scan_1.scan({ debug, dev, allProjects, projectName, path: root, targetFile }),
getPluginVersion(),
]);
// each scan result carries exactly one depGraph fact (see scan())
const scannedProjects = scanResult.scanResults.map(({ identity, facts: [{ data: depGraph }], name }) => ({
packageManager: 'hex',
targetFile: identity.targetFile,
depGraph,
...(name ? { meta: { projectName: name } } : {}),
}));
return {
plugin: {
name: PLUGIN_NAME,
runtime: pluginVersion,
targetFile: 'mix.exs',
},
scannedProjects,
};
}
exports.inspect = inspect;
// Runs `mix -v` and extracts the "Mix x.y.z" portion of its output.
async function getPluginVersion() {
const output = await subProcess.execute('mix', ['-v']);
const match = output.match(/(Mix\s\d+\.\d+\.\d*)/);
return match ? match[0] : 'Unknown version';
}
//# sourceMappingURL=inspect.js.map
/***/ }),
/***/ 3113:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.scan = void 0;
const path = __webpack_require__(85287);
const fs = __webpack_require__(35747);
const mix_parser_1 = __webpack_require__(72559);
const subProcess = __webpack_require__(21977);
const debug_1 = __webpack_require__(39410);
const copy_elixir_code_to_temp_dir_1 = __webpack_require__(88395);
const MANIFEST_FILE_NAME = 'mix.exs';
/**
 * Scans the mix project rooted at options.path and returns one scan result
 * per dependency graph (the root project first, then sub-projects when
 * allProjects is set).
 *
 * @param {Object} options path/targetFile/dev/debug/allProjects/projectName
 * @throws {Error} when targetFile is not a mix.exs manifest
 */
async function scan(options) {
debug_1.init(options.debug);
const targetFile = path.parse(path.resolve(options.path, options.targetFile || MANIFEST_FILE_NAME));
if (targetFile.base !== MANIFEST_FILE_NAME) {
throw new Error("target file must be 'mix.exs'.");
}
await verifyMixInstalled();
const mixResult = await getMixResult(targetFile.dir);
const depGraphMap = mix_parser_1.buildDepGraphs(mixResult, !!options.dev, true, options.allProjects);
const scanResults = Object.entries(depGraphMap).map(([name, depGraph], index) => {
// the first entry is the root project; sub-projects are keyed by name
const isRoot = index === 0;
const relativePathToManifest = getRelativePathToManifest(options, targetFile, isRoot, name);
return {
identity: {
type: 'hex',
targetFile: relativePathToManifest,
},
facts: [
{
type: 'depGraph',
data: depGraph,
},
],
...(options.projectName
? { name: getProjectNamePath(options, relativePathToManifest) }
: {}),
};
});
return { scanResults };
}
exports.scan = scan;
// Ensures the `mix` executable is reachable, failing with a friendly
// message otherwise.
async function verifyMixInstalled() {
try {
const version = await subProcess.execute('mix', ['-v']);
debug_1.debug(`mix version: `, version);
}
catch (_a) {
throw new Error('mix is not installed. please make sure Elixir is installed and try again.');
}
}
/**
 * Runs the bundled `read.mix` task against `root` and parses its JSON output.
 *
 * The Elixir helper project is copied into a temp dir, executed there, and
 * the temp dir is removed again in all cases (best effort).
 *
 * @param {string} root directory containing the mix.exs to read
 * @returns {Promise<Object>} the parsed mix project description
 * @throws {Error} a generic parse error (details only in debug output)
 */
async function getMixResult(root) {
const elixirTmpDir = copy_elixir_code_to_temp_dir_1.copyElixirCodeToTempDir();
const cwd = elixirTmpDir.name;
let filePath;
try {
const output = await subProcess.execute('mix', ['read.mix', root], { cwd });
debug_1.debug(`read.mix output: ${output}`);
// the task prints the result file path as the last line of its output
const fileName = output.trim().split('\n').pop();
debug_1.debug(`fileName: ${fileName}`);
filePath = path.resolve(cwd, fileName);
const result = (await fs.promises.readFile(filePath, 'utf8'));
return JSON.parse(result);
}
catch (err) {
const errorMessage = `Error parsing manifest file on ${root}`;
debug_1.debug(errorMessage, err);
throw new Error(errorMessage);
}
finally {
try {
elixirTmpDir.removeCallback();
}
catch (err) {
// cleanup is best effort; never mask the primary result/error
debug_1.debug(`can't remove ${elixirTmpDir.name}`, err);
}
}
}
// Converts a platform-specific path into POSIX form ('/'-separated).
function normalizePath(filePath) {
return filePath.split(path.sep).join(path.posix.sep);
}
// Computes the manifest path relative to the scanned root, in POSIX form.
// Sub-projects (non-root) live in a directory named after the project.
function getRelativePathToManifest(options, targetFile, isRoot, name) {
const manifestDir = path.resolve(targetFile.dir, isRoot ? '' : name);
const absoluteManifest = path.join(manifestDir, targetFile.base);
return normalizePath(path.relative(options.path, absoluteManifest));
}
// Prefixes the manifest's directory segments (if any) with the
// user-supplied project name, joined with '/'.
function getProjectNamePath(options, relativePathToManifest) {
const dirSegments = relativePathToManifest.split('/').slice(0, -1);
return [options.projectName].concat(dirSegments).join('/');
}
//# sourceMappingURL=scan.js.map
/***/ }),
/***/ 21977:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.execute = void 0;
const childProcess = __webpack_require__(63129);
const debug_1 = __webpack_require__(39410);
/**
 * Spawns `command args...` through the shell and resolves with its output.
 *
 * Resolves with stdout (falling back to stderr when stdout is empty) on exit
 * code 0; rejects with the captured output string on a non-zero exit code.
 *
 * @param {string} command
 * @param {string[]} args
 * @param {{cwd?: string}} [options]
 * @returns {Promise<string>}
 */
function execute(command, args, options) {
debug_1.debug(`running "${command} ${args.join(' ')}"`);
// shell: true so that `mix` resolves through PATH on all platforms
const spawnOptions = { shell: true };
if (options && options.cwd) {
spawnOptions.cwd = options.cwd;
}
return new Promise((resolve, reject) => {
let stdout = '';
let stderr = '';
const proc = childProcess.spawn(command, args, spawnOptions);
proc.stdout.on('data', (data) => {
stdout = stdout + data;
});
proc.stderr.on('data', (data) => {
stderr = stderr + data;
});
proc.on('close', (code) => {
if (code !== 0) {
debug_1.debug(`Error running "${command} ${args.join(' ')}", exit code: ${code}`);
// NOTE(review): rejects with a string, not an Error — callers appear
// to rely on this, so it is kept as-is
return reject(stdout || stderr);
}
debug_1.debug(`Sub process stderr:`, stderr);
resolve(stdout || stderr);
});
});
}
exports.execute = execute;
//# sourceMappingURL=sub-process.js.map
/***/ }),
/***/ 60081:
/***/ ((module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const defer_to_connect_1 = __webpack_require__(1313);
const util_1 = __webpack_require__(31669);
// Major version of the running Node.js; used by `onAbort` below to decide
// whether an abort should always finalize `phases.total` (>= 13).
const nodejsMajorVersion = Number(process.versions.node.split('.')[0]);
/**
 * Instruments an outgoing HTTP request with `Date.now()` timestamps for each
 * phase of its lifecycle (socket assignment, DNS lookup, TCP/TLS connect,
 * upload, first byte, download). The object is stored on `request.timings`
 * (and later mirrored on `response.timings`); calling the function again on
 * the same request returns the already-attached object unchanged.
 */
const timer = (request) => {
if (request.timings) {
return request.timings;
}
const timings = {
start: Date.now(),
socket: undefined,
lookup: undefined,
connect: undefined,
secureConnect: undefined,
upload: undefined,
response: undefined,
end: undefined,
error: undefined,
abort: undefined,
phases: {
wait: undefined,
dns: undefined,
tcp: undefined,
tls: undefined,
request: undefined,
firstByte: undefined,
download: undefined,
total: undefined
}
};
request.timings = timings;
// Wraps `origin.emit` so that the first `error` event stamps `timings.error`
// and finalizes `phases.total`, then restores the original emitter.
const handleError = (origin) => {
const emit = origin.emit.bind(origin);
origin.emit = (event, ...args) => {
// Catches the `error` event
if (event === 'error') {
timings.error = Date.now();
timings.phases.total = timings.error - timings.start;
origin.emit = emit;
}
// Saves the original behavior
return emit(event, ...args);
};
};
handleError(request);
const onAbort = () => {
timings.abort = Date.now();
// Let the `end` response event be responsible for setting the total phase,
// unless the Node.js major version is >= 13.
if (!timings.response || nodejsMajorVersion >= 13) {
timings.phases.total = Date.now() - timings.start;
}
};
request.prependOnceListener('abort', onAbort);
const onSocket = (socket) => {
timings.socket = Date.now();
timings.phases.wait = timings.socket - timings.start;
// Proxy-wrapped sockets get no per-phase instrumentation below —
// NOTE(review): inferred from this guard; confirm against upstream intent.
if (util_1.types.isProxy(socket)) {
return;
}
const lookupListener = () => {
timings.lookup = Date.now();
timings.phases.dns = timings.lookup - timings.socket;
};
socket.prependOnceListener('lookup', lookupListener);
defer_to_connect_1.default(socket, {
connect: () => {
timings.connect = Date.now();
if (timings.lookup === undefined) {
// `lookup` never fired — presumably no DNS resolution was needed;
// treat the lookup as instantaneous so `phases.tcp` stays meaningful.
socket.removeListener('lookup', lookupListener);
timings.lookup = timings.connect;
timings.phases.dns = timings.lookup - timings.socket;
}
timings.phases.tcp = timings.connect - timings.lookup;
// This callback is called before flushing any data,
// so we don't need to set `timings.phases.request` here.
},
secureConnect: () => {
timings.secureConnect = Date.now();
timings.phases.tls = timings.secureConnect - timings.connect;
}
});
};
if (request.socket) {
onSocket(request.socket);
}
else {
request.prependOnceListener('socket', onSocket);
}
const onUpload = () => {
var _a;
timings.upload = Date.now();
// The request phase ends at `secureConnect` when TLS was used, otherwise
// at `connect`.
timings.phases.request = timings.upload - ((_a = timings.secureConnect) !== null && _a !== void 0 ? _a : timings.connect);
};
const writableFinished = () => {
if (typeof request.writableFinished === 'boolean') {
return request.writableFinished;
}
// Node.js doesn't have `request.writableFinished` property
return request.finished && request.outputSize === 0 && (!request.socket || request.socket.writableLength === 0);
};
// If the body already finished flushing, the `finish` event was missed —
// stamp the upload time immediately instead of waiting for it.
if (writableFinished()) {
onUpload();
}
else {
request.prependOnceListener('finish', onUpload);
}
request.prependOnceListener('response', (response) => {
timings.response = Date.now();
timings.phases.firstByte = timings.response - timings.upload;
response.timings = timings;
handleError(response);
response.prependOnceListener('end', () => {
timings.end = Date.now();
timings.phases.download = timings.end - timings.response;
timings.phases.total = timings.end - timings.start;
});
response.prependOnceListener('aborted', onAbort);
});
return timings;
};
exports.default = timer;
// For CommonJS default export support
module.exports = timer;
module.exports.default = timer;
/***/ }),
/***/ 19552:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Cache = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const fslib_2 = __webpack_require__(91794);
const libzip_1 = __webpack_require__(40364);
const fs_1 = tslib_1.__importDefault(__webpack_require__(35747));
const MessageName_1 = __webpack_require__(62755);
const Report_1 = __webpack_require__(50334);
const hashUtils = tslib_1.__importStar(__webpack_require__(73279));
const miscUtils = tslib_1.__importStar(__webpack_require__(63111));
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
// Cache format version: embedded in `cacheKey` (see the Cache constructor),
// which in turn appears in versioned archive filenames and in checksums.
const CACHE_VERSION = 7;
/**
 * Manages the on-disk package cache: computes cache entry paths/filenames,
 * validates entry checksums, and fetches packages into the cache (from the
 * mirror or through a loader) while deduplicating concurrent fetches of the
 * same locator.
 */
class Cache {
/**
 * @param cacheCwd Folder where the cache archives are stored.
 * @param configuration Active configuration (drives cache key, mirror, etc).
 * @param immutable When true, actions that would modify the cache throw.
 * @param check When true, cached entries are re-validated against a fresh
 *   fetch (cf `validateFileAgainstRemote`).
 */
constructor(cacheCwd, { configuration, immutable = configuration.get(`enableImmutableCache`), check = false }) {
// Contains the list of cache files that got accessed since the last time
// you cleared the variable. Useful to know which files aren't needed
// anymore when used in conjunction with fetchEverything.
this.markedFiles = new Set();
// In-flight fetch promises keyed by locator hash; used to make concurrent
// `fetchPackageFromCache` calls for the same package wait on each other.
this.mutexes = new Map();
this.configuration = configuration;
this.cwd = cacheCwd;
this.immutable = immutable;
this.check = check;
const cacheKeyOverride = configuration.get(`cacheKeyOverride`);
if (cacheKeyOverride !== null) {
this.cacheKey = `${cacheKeyOverride}`;
}
else {
// The cache key combines the format version with the compression level
// (when non-default), so changing either produces different filenames.
const compressionLevel = configuration.get(`compressionLevel`);
const compressionKey = compressionLevel !== fslib_2.DEFAULT_COMPRESSION_LEVEL
? `c${compressionLevel}` : ``;
this.cacheKey = [
CACHE_VERSION,
compressionKey,
].join(``);
}
}
// Factory: builds a Cache rooted at the configured `cacheFolder` and runs
// `setup()` before returning it.
static async find(configuration, { immutable, check } = {}) {
const cache = new Cache(configuration.get(`cacheFolder`), { configuration, immutable, check });
await cache.setup();
return cache;
}
// Global mirror folder, or null when mirroring is disabled or the mirror
// would be this very cache.
get mirrorCwd() {
if (!this.configuration.get(`enableMirror`))
return null;
const mirrorCwd = `${this.configuration.get(`globalFolder`)}/cache`;
return mirrorCwd !== this.cwd ? mirrorCwd : null;
}
// Archive filename embedding the cache key (used for mirror/versioned paths).
getVersionFilename(locator) {
return `${structUtils.slugifyLocator(locator)}-${this.cacheKey}.zip`;
}
// Archive filename embedding a prefix of the content checksum instead of the
// cache key.
getChecksumFilename(locator, checksum) {
// We only want the actual checksum (not the cache version, since the whole
// point is to avoid changing the filenames when the cache version changes)
const contentChecksum = getHashComponent(checksum);
// We only care about the first few characters. It doesn't matter if that
// makes the hash easier to collide with, because we check the file hashes
// during each install anyway.
const significantChecksum = contentChecksum.slice(0, 10);
return `${structUtils.slugifyLocator(locator)}-${significantChecksum}.zip`;
}
// Resolves where the archive for `locator` should live, or null when the
// path cannot be determined yet (unknown checksum, or stale cache key).
getLocatorPath(locator, expectedChecksum) {
// If there is no mirror, then the local cache *is* the mirror, in which
// case we use the versioned filename pattern.
if (this.mirrorCwd === null)
return fslib_2.ppath.resolve(this.cwd, this.getVersionFilename(locator));
// If we don't yet know the checksum, discard the path resolution for now
// until the checksum can be obtained from somewhere (mirror or network).
if (expectedChecksum === null)
return null;
// If the cache key changed then we assume that the content probably got
// altered as well and thus the existing path won't be good enough anymore.
const cacheKey = getCacheKeyComponent(expectedChecksum);
if (cacheKey !== this.cacheKey)
return null;
return fslib_2.ppath.resolve(this.cwd, this.getChecksumFilename(locator, expectedChecksum));
}
// Path of the locator's archive inside the global mirror, or null when
// there is no mirror.
getLocatorMirrorPath(locator) {
const mirrorCwd = this.mirrorCwd;
return mirrorCwd !== null ? fslib_2.ppath.resolve(mirrorCwd, this.getVersionFilename(locator)) : null;
}
// Creates the cache folder (plus a .gitignore) unless the global cache is
// used, in which case the folder is managed elsewhere.
async setup() {
if (!this.configuration.get(`enableGlobalCache`)) {
await fslib_2.xfs.mkdirPromise(this.cwd, { recursive: true });
const gitignorePath = fslib_2.ppath.resolve(this.cwd, `.gitignore`);
await fslib_2.xfs.changeFilePromise(gitignorePath, `/.gitignore\n*.flock\n`);
}
}
/**
 * Returns `[aliasFs, releaseFs, checksum]` for the given locator, fetching
 * the archive through `loader` on a cache miss. `onHit`/`onMiss` are called
 * depending on whether the entry already existed. Checksums have the shape
 * `<cacheKey>/<hash>` (cf getCacheKeyComponent / getHashComponent).
 */
async fetchPackageFromCache(locator, expectedChecksum, { onHit, onMiss, loader, skipIntegrityCheck }) {
const mirrorPath = this.getLocatorMirrorPath(locator);
const baseFs = new fslib_1.NodeFS();
// Computes (or trusts, when skipIntegrityCheck applies) the archive's
// checksum and reconciles it with `expectedChecksum` per checksumBehavior.
const validateFile = async (path, refetchPath = null) => {
const actualChecksum = (!skipIntegrityCheck || !expectedChecksum) ? `${this.cacheKey}/${await hashUtils.checksumFile(path)}` : expectedChecksum;
if (refetchPath !== null) {
const previousChecksum = (!skipIntegrityCheck || !expectedChecksum) ? `${this.cacheKey}/${await hashUtils.checksumFile(refetchPath)}` : expectedChecksum;
if (actualChecksum !== previousChecksum) {
throw new Report_1.ReportError(MessageName_1.MessageName.CACHE_CHECKSUM_MISMATCH, `The remote archive doesn't match the local checksum - has the local cache been corrupted?`);
}
}
if (expectedChecksum !== null && actualChecksum !== expectedChecksum) {
let checksumBehavior;
// Using --check-cache overrides any preconfigured checksum behavior
if (this.check)
checksumBehavior = `throw`;
// If the lockfile references an old cache format, we tolerate different checksums
else if (getCacheKeyComponent(expectedChecksum) !== getCacheKeyComponent(actualChecksum))
checksumBehavior = `update`;
else
checksumBehavior = this.configuration.get(`checksumBehavior`);
switch (checksumBehavior) {
case `ignore`:
return expectedChecksum;
case `update`:
return actualChecksum;
default:
case `throw`: {
throw new Report_1.ReportError(MessageName_1.MessageName.CACHE_CHECKSUM_MISMATCH, `The remote archive doesn't match the expected checksum`);
}
}
}
return actualChecksum;
};
// Re-downloads the archive through `loader` and compares it against the
// cached copy (used by --check-cache).
const validateFileAgainstRemote = async (cachePath) => {
if (!loader)
throw new Error(`Cache check required but no loader configured for ${structUtils.prettyLocator(this.configuration, locator)}`);
const zipFs = await loader();
const refetchPath = zipFs.getRealPath();
zipFs.saveAndClose();
await fslib_2.xfs.chmodPromise(refetchPath, 0o644);
return await validateFile(cachePath, refetchPath);
};
// Obtains the archive either as a temp copy of the mirror entry or, when
// absent from the mirror, by invoking the loader.
const loadPackageThroughMirror = async () => {
if (mirrorPath === null || !(await fslib_2.xfs.existsPromise(mirrorPath))) {
const zipFs = await loader();
const realPath = zipFs.getRealPath();
zipFs.saveAndClose();
return realPath;
}
const tempDir = await fslib_2.xfs.mktempPromise();
const tempPath = fslib_2.ppath.join(tempDir, this.getVersionFilename(locator));
await fslib_2.xfs.copyFilePromise(mirrorPath, tempPath, fs_1.default.constants.COPYFILE_FICLONE);
return tempPath;
};
// Full cache-miss path: fetch, validate, then move the archive into the
// cache (and copy it into the mirror) under file locks.
const loadPackage = async () => {
if (!loader)
throw new Error(`Cache entry required but missing for ${structUtils.prettyLocator(this.configuration, locator)}`);
if (this.immutable)
throw new Report_1.ReportError(MessageName_1.MessageName.IMMUTABLE_CACHE, `Cache entry required but missing for ${structUtils.prettyLocator(this.configuration, locator)}`);
const originalPath = await loadPackageThroughMirror();
await fslib_2.xfs.chmodPromise(originalPath, 0o644);
// Do this before moving the file so that we don't pollute the cache with corrupted archives
const checksum = await validateFile(originalPath);
const cachePath = this.getLocatorPath(locator, checksum);
if (!cachePath)
throw new Error(`Assertion failed: Expected the cache path to be available`);
return await this.writeFileWithLock(cachePath, async () => {
return await this.writeFileWithLock(mirrorPath, async () => {
// Doing a move is important to ensure atomic writes (todo: cross-drive?)
await fslib_2.xfs.movePromise(originalPath, cachePath);
if (mirrorPath !== null)
await fslib_2.xfs.copyFilePromise(cachePath, mirrorPath, fs_1.default.constants.COPYFILE_FICLONE);
return [cachePath, checksum];
});
});
};
// Runs the hit/miss logic while registering the in-flight promise in
// `this.mutexes` so concurrent calls for the same locator wait on it.
const loadPackageThroughMutex = async () => {
const mutexedLoad = async () => {
// We don't yet know whether the cache path can be computed yet, since that
// depends on whether the cache is actually the mirror or not, and whether
// the checksum is known or not.
const tentativeCachePath = this.getLocatorPath(locator, expectedChecksum);
const cacheExists = tentativeCachePath !== null
? await baseFs.existsPromise(tentativeCachePath)
: false;
const action = cacheExists
? onHit
: onMiss;
if (action)
action();
if (!cacheExists) {
return loadPackage();
}
else {
let checksum = null;
const cachePath = tentativeCachePath;
if (this.check)
checksum = await validateFileAgainstRemote(cachePath);
else
checksum = await validateFile(cachePath);
return [cachePath, checksum];
}
};
const mutex = mutexedLoad();
this.mutexes.set(locator.locatorHash, mutex);
try {
return await mutex;
}
finally {
this.mutexes.delete(locator.locatorHash);
}
};
// Wait for any in-flight fetch of the same locator before starting ours.
for (let mutex; (mutex = this.mutexes.get(locator.locatorHash));)
await mutex;
const [cachePath, checksum] = await loadPackageThroughMutex();
this.markedFiles.add(cachePath);
let zipFs = null;
const libzip = await libzip_1.getLibzipPromise();
// The ZipFS is opened lazily, on first actual filesystem access.
const lazyFs = new fslib_1.LazyFS(() => miscUtils.prettifySyncErrors(() => {
return zipFs = new fslib_1.ZipFS(cachePath, { baseFs, libzip, readOnly: true });
}, message => {
return `Failed to open the cache entry for ${structUtils.prettyLocator(this.configuration, locator)}: ${message}`;
}), fslib_2.ppath);
// We use an AliasFS to speed up getRealPath calls (e.g. VirtualFetcher.ensureVirtualLink)
// (there's no need to create the lazy baseFs instance to gather the already-known cachePath)
const aliasFs = new fslib_1.AliasFS(cachePath, { baseFs: lazyFs, pathUtils: fslib_2.ppath });
// Closes the underlying ZipFS, if it was ever opened.
const releaseFs = () => {
if (zipFs !== null) {
zipFs.discardAndClose();
}
};
return [aliasFs, releaseFs, checksum];
}
// Runs `generator` while holding a lock on `file` (no-op when `file` is
// null, e.g. when there is no mirror).
async writeFileWithLock(file, generator) {
if (file === null)
return await generator();
await fslib_2.xfs.mkdirPromise(fslib_2.ppath.dirname(file), { recursive: true });
return await fslib_2.xfs.lockPromise(file, async () => {
return await generator();
});
}
}
exports.Cache = Cache;
/**
 * Extracts the cache-key prefix from a `<cacheKey>/<hash>` checksum string.
 *
 * @param {string} checksum - Checksum, possibly prefixed with a cache key.
 * @returns {string|null} The part before the first `/`, or null when there
 *   is no separator.
 */
function getCacheKeyComponent(checksum) {
    const separator = checksum.indexOf(`/`);
    if (separator === -1)
        return null;
    return checksum.slice(0, separator);
}
/**
 * Extracts the raw hash from a `<cacheKey>/<hash>` checksum string.
 *
 * @param {string} checksum - Checksum, possibly prefixed with a cache key.
 * @returns {string} The part after the first `/`, or the whole string when
 *   there is no separator.
 */
function getHashComponent(checksum) {
    const separator = checksum.indexOf(`/`);
    return separator === -1 ? checksum : checksum.slice(separator + 1);
}
/***/ }),
/***/ 62889:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Configuration = exports.ProjectLookup = exports.coreDefinitions = exports.FormatType = exports.SettingsType = exports.SECRET = exports.DEFAULT_LOCK_FILENAME = exports.DEFAULT_RC_FILENAME = exports.ENVIRONMENT_PREFIX = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const fslib_2 = __webpack_require__(91794);
const parsers_1 = __webpack_require__(21717);
const camelcase_1 = tslib_1.__importDefault(__webpack_require__(35399));
const ci_info_1 = __webpack_require__(8288);
const clipanion_1 = __webpack_require__(87730);
const p_limit_1 = tslib_1.__importDefault(__webpack_require__(94498));
const semver_1 = tslib_1.__importDefault(__webpack_require__(74451));
const stream_1 = __webpack_require__(92413);
const CorePlugin_1 = __webpack_require__(78101);
const Manifest_1 = __webpack_require__(11658);
const MultiFetcher_1 = __webpack_require__(74441);
const MultiResolver_1 = __webpack_require__(54123);
const ProtocolResolver_1 = __webpack_require__(6729);
const VirtualFetcher_1 = __webpack_require__(7100);
const VirtualResolver_1 = __webpack_require__(55605);
const WorkspaceFetcher_1 = __webpack_require__(2925);
const WorkspaceResolver_1 = __webpack_require__(83740);
const folderUtils = tslib_1.__importStar(__webpack_require__(13061));
const formatUtils = tslib_1.__importStar(__webpack_require__(23821));
const miscUtils = tslib_1.__importStar(__webpack_require__(63111));
const nodeUtils = tslib_1.__importStar(__webpack_require__(83825));
const semverUtils = tslib_1.__importStar(__webpack_require__(51201));
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
const types_1 = __webpack_require__(79588);
// Names that must NOT be interpreted as configuration settings even though
// they may appear in the environment; each entry's comment explains who sets
// it and why.
const IGNORED_ENV_VARIABLES = new Set([
// "binFolder" is the magic location where the parent process stored the
// current binaries; not an actual configuration settings
`binFolder`,
// "version" is set by Docker:
// https://github.com/nodejs/docker-node/blob/5a6a5e91999358c5b04fddd6c22a9a4eb0bf3fbf/10/alpine/Dockerfile#L51
`version`,
// "flags" is set by Netlify; they use it to specify the flags to send to the
// CLI when running the automatic `yarn install`
`flags`,
// "gpg" and "profile" are used by the install.sh script:
// https://classic.yarnpkg.com/install.sh
`profile`,
`gpg`,
// "ignoreNode" is used to disable the Node version check
`ignoreNode`,
// "wrapOutput" was a variable used to indicate nested "yarn run" processes
// back in Yarn 1.
`wrapOutput`,
]);
// Prefix identifying environment variables that carry settings (cf the
// IGNORED_ENV_VARIABLES exceptions above).
exports.ENVIRONMENT_PREFIX = `yarn_`;
// Default file names for the rc configuration file and the lockfile.
exports.DEFAULT_RC_FILENAME = `.yarnrc.yml`;
exports.DEFAULT_LOCK_FILENAME = `yarn.lock`;
// Placeholder displayed in place of SECRET-typed values.
exports.SECRET = `********`;
// Enumeration of the value kinds a configuration setting definition can
// declare (compiled from a TypeScript enum, hence the IIFE pattern).
var SettingsType;
(function (SettingsType) {
SettingsType["ANY"] = "ANY";
SettingsType["BOOLEAN"] = "BOOLEAN";
SettingsType["ABSOLUTE_PATH"] = "ABSOLUTE_PATH";
SettingsType["LOCATOR"] = "LOCATOR";
SettingsType["LOCATOR_LOOSE"] = "LOCATOR_LOOSE";
SettingsType["NUMBER"] = "NUMBER";
SettingsType["STRING"] = "STRING";
SettingsType["SECRET"] = "SECRET";
SettingsType["SHAPE"] = "SHAPE";
SettingsType["MAP"] = "MAP";
})(SettingsType = exports.SettingsType || (exports.SettingsType = {}));
// Re-exported alias of formatUtils.Type.
exports.FormatType = formatUtils.Type;
// General rules:
//
// - filenames that don't accept actual paths must end with the "Filename" suffix
// prefer to use absolute paths instead, since they are automatically resolved
// ex: lockfileFilename
//
// - folders must end with the "Folder" suffix
// ex: cacheFolder, pnpVirtualFolder
//
// - actual paths to a file must end with the "Path" suffix
// ex: pnpPath
//
// - options that tweaks the strictness must begin with the "allow" prefix
// ex: allowInvalidChecksums
//
// - options that enable a feature must begin with the "enable" prefix
// ex: enableEmojis, enableColors
/**
 * Definitions for the core (plugin-independent) configuration settings. Each
 * entry maps a setting name to its metadata: human-readable description, its
 * `SettingsType`, its default value, and optional flags such as `isArray`,
 * `values` (allowed set), or `defaultText` (display-only default). Consumed
 * by the parse/default helpers below (`parseValue`, `getDefaultValue`, ...).
 */
exports.coreDefinitions = {
// Not implemented for now, but since it's part of all Yarn installs we want to declare it in order to improve drop-in compatibility
lastUpdateCheck: {
description: `Last timestamp we checked whether new Yarn versions were available`,
type: SettingsType.STRING,
default: null,
},
// Settings related to proxying all Yarn calls to a specific executable
yarnPath: {
description: `Path to the local executable that must be used over the global one`,
type: SettingsType.ABSOLUTE_PATH,
default: null,
},
ignorePath: {
description: `If true, the local executable will be ignored when using the global one`,
type: SettingsType.BOOLEAN,
default: false,
},
ignoreCwd: {
description: `If true, the \`--cwd\` flag will be ignored`,
type: SettingsType.BOOLEAN,
default: false,
},
// Settings related to the package manager internal names
cacheKeyOverride: {
description: `A global cache key override; used only for test purposes`,
type: SettingsType.STRING,
default: null,
},
globalFolder: {
description: `Folder where are stored the system-wide settings`,
type: SettingsType.ABSOLUTE_PATH,
default: folderUtils.getDefaultGlobalFolder(),
},
cacheFolder: {
description: `Folder where the cache files must be written`,
type: SettingsType.ABSOLUTE_PATH,
default: `./.yarn/cache`,
},
compressionLevel: {
description: `Zip files compression level, from 0 to 9 or mixed (a variant of 9, which stores some files uncompressed, when compression doesn't yield good results)`,
type: SettingsType.NUMBER,
values: [`mixed`, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
default: fslib_2.DEFAULT_COMPRESSION_LEVEL,
},
virtualFolder: {
description: `Folder where the virtual packages (cf doc) will be mapped on the disk (must be named $$virtual)`,
type: SettingsType.ABSOLUTE_PATH,
default: `./.yarn/$$virtual`,
},
bstatePath: {
description: `Path of the file where the current state of the built packages must be stored`,
type: SettingsType.ABSOLUTE_PATH,
default: `./.yarn/build-state.yml`,
},
lockfileFilename: {
description: `Name of the files where the Yarn dependency tree entries must be stored`,
type: SettingsType.STRING,
default: exports.DEFAULT_LOCK_FILENAME,
},
installStatePath: {
description: `Path of the file where the install state will be persisted`,
type: SettingsType.ABSOLUTE_PATH,
default: `./.yarn/install-state.gz`,
},
immutablePatterns: {
description: `Array of glob patterns; files matching them won't be allowed to change during immutable installs`,
type: SettingsType.STRING,
default: [],
isArray: true,
},
rcFilename: {
description: `Name of the files where the configuration can be found`,
type: SettingsType.STRING,
default: getRcFilename(),
},
enableGlobalCache: {
description: `If true, the system-wide cache folder will be used regardless of \`cache-folder\``,
type: SettingsType.BOOLEAN,
default: false,
},
enableAbsoluteVirtuals: {
description: `If true, the virtual symlinks will use absolute paths if required [non portable!!]`,
type: SettingsType.BOOLEAN,
default: false,
},
// Settings related to the output style
enableColors: {
description: `If true, the CLI is allowed to use colors in its output`,
type: SettingsType.BOOLEAN,
default: formatUtils.supportsColor,
defaultText: ``,
},
enableHyperlinks: {
description: `If true, the CLI is allowed to use hyperlinks in its output`,
type: SettingsType.BOOLEAN,
default: formatUtils.supportsHyperlinks,
defaultText: ``,
},
enableInlineBuilds: {
description: `If true, the CLI will print the build output on the command line`,
type: SettingsType.BOOLEAN,
default: ci_info_1.isCI,
defaultText: ``,
},
enableProgressBars: {
description: `If true, the CLI is allowed to show a progress bar for long-running events`,
type: SettingsType.BOOLEAN,
default: !ci_info_1.isCI && process.stdout.isTTY && process.stdout.columns > 22,
defaultText: ``,
},
enableTimers: {
description: `If true, the CLI is allowed to print the time spent executing commands`,
type: SettingsType.BOOLEAN,
default: true,
},
preferAggregateCacheInfo: {
description: `If true, the CLI will only print a one-line report of any cache changes`,
type: SettingsType.BOOLEAN,
default: ci_info_1.isCI,
},
preferInteractive: {
description: `If true, the CLI will automatically use the interactive mode when called from a TTY`,
type: SettingsType.BOOLEAN,
default: false,
},
preferTruncatedLines: {
description: `If true, the CLI will truncate lines that would go beyond the size of the terminal`,
type: SettingsType.BOOLEAN,
default: false,
},
progressBarStyle: {
description: `Which style of progress bar should be used (only when progress bars are enabled)`,
type: SettingsType.STRING,
default: undefined,
defaultText: ``,
},
// Settings related to how packages are interpreted by default
defaultLanguageName: {
description: `Default language mode that should be used when a package doesn't offer any insight`,
type: SettingsType.STRING,
default: `node`,
},
defaultProtocol: {
description: `Default resolution protocol used when resolving pure semver and tag ranges`,
type: SettingsType.STRING,
default: `npm:`,
},
enableTransparentWorkspaces: {
description: `If false, Yarn won't automatically resolve workspace dependencies unless they use the \`workspace:\` protocol`,
type: SettingsType.BOOLEAN,
default: true,
},
// Settings related to network access
enableMirror: {
description: `If true, the downloaded packages will be retrieved and stored in both the local and global folders`,
type: SettingsType.BOOLEAN,
default: true,
},
enableNetwork: {
description: `If false, the package manager will refuse to use the network if required to`,
type: SettingsType.BOOLEAN,
default: true,
},
httpProxy: {
description: `URL of the http proxy that must be used for outgoing http requests`,
type: SettingsType.STRING,
default: null,
},
httpsProxy: {
description: `URL of the http proxy that must be used for outgoing https requests`,
type: SettingsType.STRING,
default: null,
},
unsafeHttpWhitelist: {
description: `List of the hostnames for which http queries are allowed (glob patterns are supported)`,
type: SettingsType.STRING,
default: [],
isArray: true,
},
httpTimeout: {
description: `Timeout of each http request in milliseconds`,
type: SettingsType.NUMBER,
default: 60000,
},
httpRetry: {
description: `Retry times on http failure`,
type: SettingsType.NUMBER,
default: 3,
},
networkConcurrency: {
description: `Maximal number of concurrent requests`,
type: SettingsType.NUMBER,
default: Infinity,
},
// Per-hostname overrides of the network settings above (MAP of SHAPEs).
networkSettings: {
description: `Network settings per hostname (glob patterns are supported)`,
type: SettingsType.MAP,
valueDefinition: {
description: ``,
type: SettingsType.SHAPE,
properties: {
caFilePath: {
description: `Path to file containing one or multiple Certificate Authority signing certificates`,
type: SettingsType.ABSOLUTE_PATH,
default: null,
},
enableNetwork: {
description: `If false, the package manager will refuse to use the network if required to`,
type: SettingsType.BOOLEAN,
default: null,
},
httpProxy: {
description: `URL of the http proxy that must be used for outgoing http requests`,
type: SettingsType.STRING,
default: null,
},
httpsProxy: {
description: `URL of the http proxy that must be used for outgoing https requests`,
type: SettingsType.STRING,
default: null,
},
},
},
},
caFilePath: {
description: `A path to a file containing one or multiple Certificate Authority signing certificates`,
type: SettingsType.ABSOLUTE_PATH,
default: null,
},
enableStrictSsl: {
description: `If false, SSL certificate errors will be ignored`,
type: SettingsType.BOOLEAN,
default: true,
},
logFilters: {
description: `Overrides for log levels`,
type: SettingsType.SHAPE,
isArray: true,
concatenateValues: true,
properties: {
code: {
description: `Code of the messages covered by this override`,
type: SettingsType.STRING,
default: undefined,
},
text: {
description: `Code of the texts covered by this override`,
type: SettingsType.STRING,
default: undefined,
},
level: {
description: `Log level override, set to null to remove override`,
type: SettingsType.STRING,
values: Object.values(formatUtils.LogLevel),
isNullable: true,
default: undefined,
},
},
},
// Settings related to telemetry
enableTelemetry: {
description: `If true, telemetry will be periodically sent, following the rules in https://yarnpkg.com/advanced/telemetry`,
type: SettingsType.BOOLEAN,
default: true,
},
telemetryInterval: {
description: `Minimal amount of time between two telemetry uploads, in days`,
type: SettingsType.NUMBER,
default: 7,
},
telemetryUserId: {
description: `If you desire to tell us which project you are, you can set this field. Completely optional and opt-in.`,
type: SettingsType.STRING,
default: null,
},
// Settings related to security
enableScripts: {
description: `If true, packages are allowed to have install scripts by default`,
type: SettingsType.BOOLEAN,
default: true,
},
enableImmutableCache: {
description: `If true, the cache is reputed immutable and actions that would modify it will throw`,
type: SettingsType.BOOLEAN,
default: false,
},
checksumBehavior: {
description: `Enumeration defining what to do when a checksum doesn't match expectations`,
type: SettingsType.STRING,
default: `throw`,
},
// Package patching - to fix incorrect definitions
packageExtensions: {
description: `Map of package corrections to apply on the dependency tree`,
type: SettingsType.MAP,
valueDefinition: {
description: `The extension that will be applied to any package whose version matches the specified range`,
type: SettingsType.SHAPE,
properties: {
dependencies: {
description: `The set of dependencies that must be made available to the current package in order for it to work properly`,
type: SettingsType.MAP,
valueDefinition: {
description: `A range`,
type: SettingsType.STRING,
},
},
peerDependencies: {
description: `Inherited dependencies - the consumer of the package will be tasked to provide them`,
type: SettingsType.MAP,
valueDefinition: {
description: `A semver range`,
type: SettingsType.STRING,
},
},
peerDependenciesMeta: {
description: `Extra information related to the dependencies listed in the peerDependencies field`,
type: SettingsType.MAP,
valueDefinition: {
description: `The peerDependency meta`,
type: SettingsType.SHAPE,
properties: {
optional: {
description: `If true, the selected peer dependency will be marked as optional by the package manager and the consumer omitting it won't be reported as an error`,
type: SettingsType.BOOLEAN,
default: false,
},
},
},
},
},
},
},
};
/**
 * Parses a raw configuration value according to `definition`, handling the
 * array/scalar distinction before delegating to `parseSingleValue`.
 */
function parseValue(configuration, path, value, definition, folder) {
    if (definition.isArray) {
        if (Array.isArray(value))
            return value.map((sub, i) => parseSingleValue(configuration, `${path}[${i}]`, sub, definition, folder));
        // Non-array inputs for array settings are treated as comma-separated
        // lists.
        return String(value).split(/,/).map((segment) => {
            return parseSingleValue(configuration, path, segment, definition, folder);
        });
    }
    if (Array.isArray(value))
        throw new Error(`Non-array configuration settings "${path}" cannot be an array`);
    return parseSingleValue(configuration, path, value, definition, folder);
}
/**
 * Parses one scalar configuration value according to its definition: shapes
 * and maps are delegated, `values`-listed literals pass through unchanged,
 * and everything else is interpreted (booleans, paths, locators, numbers)
 * after environment-variable substitution.
 */
function parseSingleValue(configuration, path, value, definition, folder) {
    switch (definition.type) {
        case SettingsType.ANY:
            return value;
        case SettingsType.SHAPE:
            return parseShape(configuration, path, value, definition, folder);
        case SettingsType.MAP:
            return parseMap(configuration, path, value, definition, folder);
    }
    if (value === null && !definition.isNullable && definition.default !== null)
        throw new Error(`Non-nullable configuration settings "${path}" cannot be set to null`);
    // Values listed verbatim in the allowed set need no interpretation.
    if (definition.values && definition.values.includes(value))
        return value;
    let interpreted;
    if (definition.type === SettingsType.BOOLEAN) {
        interpreted = miscUtils.parseBoolean(value);
    }
    else {
        if (typeof value !== `string`)
            throw new Error(`Expected value (${value}) to be a string`);
        const expanded = miscUtils.replaceEnvVariables(value, {
            env: process.env,
        });
        switch (definition.type) {
            case SettingsType.ABSOLUTE_PATH:
                interpreted = fslib_1.ppath.resolve(folder, fslib_1.npath.toPortablePath(expanded));
                break;
            case SettingsType.LOCATOR_LOOSE:
                interpreted = structUtils.parseLocator(expanded, false);
                break;
            case SettingsType.LOCATOR:
                interpreted = structUtils.parseLocator(expanded);
                break;
            case SettingsType.NUMBER:
                interpreted = parseInt(expanded);
                break;
            default:
                interpreted = expanded;
        }
    }
    if (definition.values && !definition.values.includes(interpreted))
        throw new Error(`Invalid value, expected one of ${definition.values.join(`, `)}`);
    return interpreted;
}
/**
 * Parses an object-shaped setting: each provided property is validated
 * against the matching entry in `definition.properties`, layered on top of
 * the shape's default values.
 */
function parseShape(configuration, path, value, definition, folder) {
    if (typeof value !== `object` || Array.isArray(value))
        throw new clipanion_1.UsageError(`Object configuration settings "${path}" must be an object`);
    const result = getDefaultValue(configuration, definition, {
        ignoreArrays: true,
    });
    // `typeof null === "object"`, so null passes the guard above and simply
    // yields the defaults.
    if (value === null)
        return result;
    for (const [propKey, propValue] of Object.entries(value)) {
        const subDefinition = definition.properties[propKey];
        if (!subDefinition)
            throw new clipanion_1.UsageError(`Unrecognized configuration settings found: ${path}.${propKey} - run "yarn config -v" to see the list of settings supported in Yarn`);
        result.set(propKey, parseValue(configuration, `${path}.${propKey}`, propValue, subDefinition, folder));
    }
    return result;
}
/**
 * Parses a map-shaped setting: keys are the (optionally normalized) user
 * keys, and every value is parsed against `definition.valueDefinition`.
 */
function parseMap(configuration, path, value, definition, folder) {
    if (typeof value !== `object` || Array.isArray(value))
        throw new clipanion_1.UsageError(`Map configuration settings "${path}" must be an object`);
    const parsed = new Map();
    // As in parseShape, `null` passes the object check and maps to an empty map.
    if (value === null)
        return parsed;
    // SettingsDefinitionNoDefault carries no `valueDefinition` in its type,
    // but a MAP definition is guaranteed to have one at runtime.
    const valueDefinition = definition.valueDefinition;
    for (const [propKey, propValue] of Object.entries(value)) {
        const normalizedKey = definition.normalizeKeys ? definition.normalizeKeys(propKey) : propKey;
        parsed.set(normalizedKey, parseValue(configuration, `${path}['${normalizedKey}']`, propValue, valueDefinition, folder));
    }
    return parsed;
}
/**
 * Computes the default value for a settings definition.
 *
 * - SHAPE: a Map of each property's own default (or `[]` for array shapes,
 *   unless `ignoreArrays` is set — used by parseShape, which fills the map
 *   itself).
 * - MAP: an empty Map (or `[]` for array maps, same caveat).
 * - ABSOLUTE_PATH: the default resolved against the project cwd; when no
 *   project cwd is known, absolute defaults are normalized, nullable ones
 *   become `null`, and relative ones yield `undefined` (no usable default).
 * - anything else: `definition.default` verbatim.
 *
 * Fix over the previous revision: the `break` statements that followed each
 * case block were unreachable (every path inside the blocks returns) and have
 * been removed.
 */
function getDefaultValue(configuration, definition, { ignoreArrays = false } = {}) {
    switch (definition.type) {
        case SettingsType.SHAPE: {
            if (definition.isArray && !ignoreArrays)
                return [];
            const result = new Map();
            for (const [propKey, propDefinition] of Object.entries(definition.properties))
                result.set(propKey, getDefaultValue(configuration, propDefinition));
            return result;
        }
        case SettingsType.MAP: {
            if (definition.isArray && !ignoreArrays)
                return [];
            return new Map();
        }
        case SettingsType.ABSOLUTE_PATH: {
            if (definition.default === null)
                return null;
            if (configuration.projectCwd === null) {
                if (fslib_1.ppath.isAbsolute(definition.default))
                    return fslib_1.ppath.normalize(definition.default);
                if (definition.isNullable)
                    return null;
                // Reached when a relative path is the default but the current
                // context is evaluated outside of a Yarn project
                return undefined;
            }
            if (Array.isArray(definition.default))
                return definition.default.map((entry) => fslib_1.ppath.resolve(configuration.projectCwd, entry));
            return fslib_1.ppath.resolve(configuration.projectCwd, definition.default);
        }
        default: {
            return definition.default;
        }
    }
}
/**
 * Recursively applies display transforms to a resolved configuration value,
 * mirroring the structure described by its definition:
 *
 * - `hideSecrets` replaces SECRET strings with the redaction marker.
 * - `getNativePaths` converts ABSOLUTE_PATH strings to the native platform
 *   path format.
 *
 * Arrays, MAPs, and SHAPEs are rebuilt with each element transformed through
 * the matching sub-definition; anything else is returned untouched.
 */
function transformConfiguration(rawValue, definition, transforms) {
    const { hideSecrets, getNativePaths } = transforms;
    if (hideSecrets && definition.type === SettingsType.SECRET && typeof rawValue === `string`)
        return exports.SECRET;
    if (getNativePaths && definition.type === SettingsType.ABSOLUTE_PATH && typeof rawValue === `string`)
        return fslib_1.npath.fromPortablePath(rawValue);
    if (definition.isArray && Array.isArray(rawValue))
        return rawValue.map(entry => transformConfiguration(entry, definition, transforms));
    if (definition.type === SettingsType.MAP && rawValue instanceof Map)
        return new Map([...rawValue.entries()].map(([key, entry]) => [key, transformConfiguration(entry, definition.valueDefinition, transforms)]));
    if (definition.type === SettingsType.SHAPE && rawValue instanceof Map)
        return new Map([...rawValue.entries()].map(([key, entry]) => [key, transformConfiguration(entry, definition.properties[key], transforms)]));
    return rawValue;
}
/**
 * Collects the Yarn-relevant environment variables: every `process.env` entry
 * whose lowercased name starts with the Yarn prefix is kept, with the prefix
 * stripped and the remainder camel-cased (e.g. `YARN_CACHE_FOLDER` ->
 * `cacheFolder`).
 */
function getEnvironmentSettings() {
    const environmentSettings = {};
    for (const [rawKey, value] of Object.entries(process.env)) {
        const lowercaseKey = rawKey.toLowerCase();
        if (!lowercaseKey.startsWith(exports.ENVIRONMENT_PREFIX))
            continue;
        const settingName = camelcase_1.default(lowercaseKey.slice(exports.ENVIRONMENT_PREFIX.length));
        environmentSettings[settingName] = value;
    }
    return environmentSettings;
}
/**
 * Returns the rc filename to look for, honoring the `YARN_RC_FILENAME`
 * environment variable (matched case-insensitively) and falling back to the
 * default filename otherwise.
 */
function getRcFilename() {
    const rcKey = `${exports.ENVIRONMENT_PREFIX}rc_filename`;
    const override = Object.entries(process.env).find(([key, value]) => {
        return key.toLowerCase() === rcKey && typeof value === `string`;
    });
    return override ? override[1] : exports.DEFAULT_RC_FILENAME;
}
// Strategies for locating the project root; numeric reverse-mapped enum in
// the style emitted by the TypeScript compiler.
var ProjectLookup;
(function (ProjectLookup) {
    const members = [`LOCKFILE`, `MANIFEST`, `NONE`];
    members.forEach((name, value) => {
        ProjectLookup[ProjectLookup[name] = value] = name;
    });
})(ProjectLookup = exports.ProjectLookup || (exports.ProjectLookup = {}));
// Aggregates Yarn's configuration from the built-in (core) definitions, the
// active plugins, the environment variables, and the .yarnrc.yml files found
// on disk, and exposes typed accessors plus plugin hook dispatch helpers.
class Configuration {
// `startingCwd` is the directory Yarn was invoked from; `projectCwd` is only
// filled in later (by `Configuration.find`) once the project root is known.
constructor(startingCwd) {
this.projectCwd = null;
this.plugins = new Map();
this.settings = new Map();
this.values = new Map();
this.sources = new Map();
this.invalid = new Map();
this.packageExtensions = new Map();
this.limits = new Map();
this.startingCwd = startingCwd;
}
// Creates a configuration seeded with the core definitions and the given
// plugins. The second parameter is overloaded: it may be the project cwd, or
// (when it's a Map and no third argument is given) the plugin map itself.
static create(startingCwd, projectCwdOrPlugins, maybePlugins) {
const configuration = new Configuration(startingCwd);
if (typeof projectCwdOrPlugins !== `undefined` && !(projectCwdOrPlugins instanceof Map))
configuration.projectCwd = projectCwdOrPlugins;
configuration.importSettings(exports.coreDefinitions);
const plugins = typeof maybePlugins !== `undefined`
? maybePlugins
: projectCwdOrPlugins instanceof Map
? projectCwdOrPlugins
: new Map();
for (const [name, plugin] of plugins)
configuration.activatePlugin(name, plugin);
return configuration;
}
/**
* Instantiate a new configuration object exposing the configuration obtained
* from reading the various rc files and the environment settings.
*
* The `pluginConfiguration` parameter is expected to indicate:
*
* 1. which modules should be made available to plugins when they require a
* package (this is the dynamic linking part - for example we want all the
* plugins to use the exact same version of @yarnpkg/core, which also is the
* version used by the running Yarn instance).
*
* 2. which of those modules are actually plugins that need to be injected
* within the configuration.
*
* Note that some extra plugins will be automatically added based on the
* content of the rc files - with the rc plugins taking precedence over
* the other ones.
*
* One particularity: the plugin initialization order is quite strict, with
* plugins listed in /foo/bar/.yarnrc.yml taking precedence over plugins
* listed in /foo/.yarnrc.yml and /.yarnrc.yml. Additionally, while plugins
* can depend on one another, they can only depend on plugins that have been
* instantiated before them (so a plugin listed in /foo/.yarnrc.yml can
* depend on another one listed on /foo/bar/.yarnrc.yml, but not the other
* way around).
*/
static async find(startingCwd, pluginConfiguration, { lookup = ProjectLookup.LOCKFILE, strict = true, usePath = false, useRc = true } = {}) {
const environmentSettings = getEnvironmentSettings();
// `rcFilename` was already consumed by getRcFilename(); it must not be
// applied as a regular setting (see the explicit guard in `use`).
delete environmentSettings.rcFilename;
const rcFiles = await Configuration.findRcFiles(startingCwd);
const homeRcFile = await Configuration.findHomeRcFile();
// The "core fields" below are the settings needed to bootstrap the rest of
// the configuration (which binary to run, which lockfile to look for, ...);
// they are imported first, on their own, from all sources.
const pickCoreFields = ({ ignoreCwd, yarnPath, ignorePath, lockfileFilename }) => ({ ignoreCwd, yarnPath, ignorePath, lockfileFilename });
const excludeCoreFields = ({ ignoreCwd, yarnPath, ignorePath, lockfileFilename, ...rest }) => rest;
const configuration = new Configuration(startingCwd);
configuration.importSettings(pickCoreFields(exports.coreDefinitions));
configuration.useWithSource(``, pickCoreFields(environmentSettings), startingCwd, { strict: false });
for (const { path, cwd, data } of rcFiles)
configuration.useWithSource(path, pickCoreFields(data), cwd, { strict: false });
if (homeRcFile)
configuration.useWithSource(homeRcFile.path, pickCoreFields(homeRcFile.data), homeRcFile.cwd, { strict: false });
// When a `yarnPath` is configured (and not ignored), this partial
// configuration is enough: the caller will delegate to that binary.
if (usePath) {
const yarnPath = configuration.get(`yarnPath`);
const ignorePath = configuration.get(`ignorePath`);
if (yarnPath !== null && !ignorePath) {
return configuration;
}
}
// We need to know the project root before being able to truly instantiate
// our configuration, and to know that we need to know the lockfile name
const lockfileFilename = configuration.get(`lockfileFilename`);
let projectCwd;
switch (lookup) {
case ProjectLookup.LOCKFILE:
{
projectCwd = await Configuration.findProjectCwd(startingCwd, lockfileFilename);
}
break;
case ProjectLookup.MANIFEST:
{
projectCwd = await Configuration.findProjectCwd(startingCwd, null);
}
break;
case ProjectLookup.NONE:
{
// No traversal: only the starting directory itself may be the project.
if (fslib_1.xfs.existsSync(fslib_1.ppath.join(startingCwd, `package.json`))) {
projectCwd = fslib_1.ppath.resolve(startingCwd);
}
else {
projectCwd = null;
}
}
break;
}
// Great! We now have enough information to really start to setup the
// core configuration object.
configuration.startingCwd = startingCwd;
configuration.projectCwd = projectCwd;
configuration.importSettings(excludeCoreFields(exports.coreDefinitions));
// Now that the configuration object is almost ready, we need to load all
// the configured plugins
const plugins = new Map([
[`@@core`, CorePlugin_1.CorePlugin],
]);
// Unwraps transpiled ES modules to their default export.
const interop = (obj) => obj.__esModule
? obj.default
: obj;
if (pluginConfiguration !== null) {
for (const request of pluginConfiguration.plugins.keys())
plugins.set(request, interop(pluginConfiguration.modules.get(request)));
// Build the require table exposed to dynamically-loaded plugins: Node
// builtins plus the modules embedded in the plugin configuration.
const requireEntries = new Map();
for (const request of nodeUtils.builtinModules())
requireEntries.set(request, () => nodeUtils.dynamicRequire(request));
for (const [request, embedModule] of pluginConfiguration.modules)
requireEntries.set(request, () => embedModule);
const dynamicPlugins = new Set();
const getDefault = (object) => {
return object.default || object;
};
const importPlugin = (pluginPath, source) => {
const { factory, name } = nodeUtils.dynamicRequire(fslib_1.npath.fromPortablePath(pluginPath));
// Prevent plugin redefinition so that the ones declared deeper in the
// filesystem always have precedence over the ones below.
if (dynamicPlugins.has(name))
return;
const pluginRequireEntries = new Map(requireEntries);
const pluginRequire = (request) => {
if (pluginRequireEntries.has(request)) {
return pluginRequireEntries.get(request)();
}
else {
throw new clipanion_1.UsageError(`This plugin cannot access the package referenced via ${request} which is neither a builtin, nor an exposed entry`);
}
};
const plugin = miscUtils.prettifySyncErrors(() => {
return getDefault(factory(pluginRequire));
}, message => {
return `${message} (when initializing ${name}, defined in ${source})`;
});
// Later plugins may require earlier ones by name.
requireEntries.set(name, () => plugin);
dynamicPlugins.add(name);
plugins.set(name, plugin);
};
// Plugins from the environment are loaded before the rc-declared ones.
if (environmentSettings.plugins) {
for (const userProvidedPath of environmentSettings.plugins.split(`;`)) {
const pluginPath = fslib_1.ppath.resolve(startingCwd, fslib_1.npath.toPortablePath(userProvidedPath));
importPlugin(pluginPath, ``);
}
}
for (const { path, cwd, data } of rcFiles) {
if (!useRc)
continue;
if (!Array.isArray(data.plugins))
continue;
for (const userPluginEntry of data.plugins) {
// Entries are either plain path strings or `{path, ...}` objects.
const userProvidedPath = typeof userPluginEntry !== `string`
? userPluginEntry.path
: userPluginEntry;
const pluginPath = fslib_1.ppath.resolve(cwd, fslib_1.npath.toPortablePath(userProvidedPath));
importPlugin(pluginPath, path);
}
}
}
for (const [name, plugin] of plugins)
configuration.activatePlugin(name, plugin);
// Apply the non-core settings, in precedence order: environment first,
// then rc files from the innermost directory outwards.
configuration.useWithSource(``, excludeCoreFields(environmentSettings), startingCwd, { strict });
for (const { path, cwd, data } of rcFiles)
configuration.useWithSource(path, excludeCoreFields(data), cwd, { strict });
// The home configuration is never strict because it improves support for
// multiple projects using different Yarn versions on the same machine
if (homeRcFile)
configuration.useWithSource(homeRcFile.path, excludeCoreFields(homeRcFile.data), homeRcFile.cwd, { strict: false });
// `enableGlobalCache` overrides whatever cacheFolder was configured.
if (configuration.get(`enableGlobalCache`)) {
configuration.values.set(`cacheFolder`, `${configuration.get(`globalFolder`)}/cache`);
configuration.sources.set(`cacheFolder`, ``);
}
await configuration.refreshPackageExtensions();
return configuration;
}
// Walks from `startingCwd` up to the filesystem root, collecting every rc
// file found along the way (innermost first). Parse failures raise a
// UsageError with a hint about the most common Yaml mistake.
static async findRcFiles(startingCwd) {
const rcFilename = getRcFilename();
const rcFiles = [];
let nextCwd = startingCwd;
let currentCwd = null;
// dirname(root) === root, which terminates the loop.
while (nextCwd !== currentCwd) {
currentCwd = nextCwd;
const rcPath = fslib_1.ppath.join(currentCwd, rcFilename);
if (fslib_1.xfs.existsSync(rcPath)) {
const content = await fslib_1.xfs.readFilePromise(rcPath, `utf8`);
let data;
try {
data = parsers_1.parseSyml(content);
}
catch (error) {
let tip = ``;
if (content.match(/^\s+(?!-)[^:]+\s+\S+/m))
tip = ` (in particular, make sure you list the colons after each key name)`;
throw new clipanion_1.UsageError(`Parse error when loading ${rcPath}; please check it's proper Yaml${tip}`);
}
rcFiles.push({ path: rcPath, cwd: currentCwd, data });
}
nextCwd = fslib_1.ppath.dirname(currentCwd);
}
return rcFiles;
}
// Reads the rc file from the user's home folder, if any. Note that, unlike
// findRcFiles, parse errors here are not wrapped in a UsageError.
static async findHomeRcFile() {
const rcFilename = getRcFilename();
const homeFolder = folderUtils.getHomeFolder();
const homeRcFilePath = fslib_1.ppath.join(homeFolder, rcFilename);
if (fslib_1.xfs.existsSync(homeRcFilePath)) {
const content = await fslib_1.xfs.readFilePromise(homeRcFilePath, `utf8`);
const data = parsers_1.parseSyml(content);
return { path: homeRcFilePath, cwd: homeFolder, data };
}
return null;
}
// Walks up from `startingCwd` looking for the project root. A directory
// containing the lockfile wins immediately; otherwise the highest directory
// containing a package.json seen before the walk ends is used (when
// `lockfileFilename` is null, the first package.json found wins).
static async findProjectCwd(startingCwd, lockfileFilename) {
let projectCwd = null;
let nextCwd = startingCwd;
let currentCwd = null;
while (nextCwd !== currentCwd) {
currentCwd = nextCwd;
if (fslib_1.xfs.existsSync(fslib_1.ppath.join(currentCwd, `package.json`)))
projectCwd = currentCwd;
if (lockfileFilename !== null) {
if (fslib_1.xfs.existsSync(fslib_1.ppath.join(currentCwd, lockfileFilename))) {
projectCwd = currentCwd;
break;
}
}
else {
if (projectCwd !== null) {
break;
}
}
nextCwd = fslib_1.ppath.dirname(currentCwd);
}
return projectCwd;
}
// Applies `patch` to the rc file in `cwd` and writes it back, skipping the
// write when nothing changed. `patch` is either a whole-file function
// (returning the replacement object, or the same reference to signal
// "no change") or an object whose values may themselves be per-key
// functions of the current value.
static async updateConfiguration(cwd, patch) {
const rcFilename = getRcFilename();
const configurationPath = fslib_1.ppath.join(cwd, rcFilename);
const current = fslib_1.xfs.existsSync(configurationPath)
? parsers_1.parseSyml(await fslib_1.xfs.readFilePromise(configurationPath, `utf8`))
: {};
let patched = false;
let replacement;
if (typeof patch === `function`) {
try {
replacement = patch(current);
}
catch (_a) {
// NOTE(review): a throwing patch is retried against an empty object —
// presumably to recover from malformed existing content; confirm.
replacement = patch({});
}
if (replacement === current) {
return;
}
}
else {
replacement = current;
for (const key of Object.keys(patch)) {
const currentValue = current[key];
const patchField = patch[key];
let nextValue;
if (typeof patchField === `function`) {
try {
nextValue = patchField(currentValue);
}
catch (_b) {
nextValue = patchField(undefined);
}
}
else {
nextValue = patchField;
}
if (currentValue === nextValue)
continue;
replacement[key] = nextValue;
patched = true;
}
if (!patched) {
return;
}
}
await fslib_1.xfs.changeFilePromise(configurationPath, parsers_1.stringifySyml(replacement), {
automaticNewlines: true,
});
}
// Convenience wrapper: updateConfiguration targeting the home folder rc.
static async updateHomeConfiguration(patch) {
const homeFolder = folderUtils.getHomeFolder();
return await Configuration.updateConfiguration(homeFolder, patch);
}
// Registers a plugin and imports its settings definitions, if any.
activatePlugin(name, plugin) {
this.plugins.set(name, plugin);
if (typeof plugin.configuration !== `undefined`) {
this.importSettings(plugin.configuration);
}
}
// Registers settings definitions and seeds their default values. A name can
// only be defined once across core + all plugins.
importSettings(definitions) {
for (const [name, definition] of Object.entries(definitions)) {
if (definition == null)
continue;
if (this.settings.has(name))
throw new Error(`Cannot redefine settings "${name}"`);
this.settings.set(name, definition);
this.values.set(name, getDefaultValue(this, definition));
}
}
// Same as `use`, but decorates any error with the offending source path.
useWithSource(source, data, folder, opts) {
try {
this.use(source, data, folder, opts);
}
catch (error) {
error.message += ` (in ${formatUtils.pretty(this, source, formatUtils.Type.PATH)})`;
throw error;
}
}
// Applies a set of raw settings coming from one source (`` = environment).
// Earlier sources win: a key already set is skipped unless `overwrite` is
// on, except MAP and concatenating-array settings which merge across
// sources.
use(source, data, folder, { strict = true, overwrite = false } = {}) {
for (const key of Object.keys(data)) {
const value = data[key];
if (typeof value === `undefined`)
continue;
// The plugins have already been loaded at this point
if (key === `plugins`)
continue;
// Some environment variables should be ignored when applying the configuration
if (source === `` && IGNORED_ENV_VARIABLES.has(key))
continue;
// It wouldn't make much sense, would it?
if (key === `rcFilename`)
throw new clipanion_1.UsageError(`The rcFilename settings can only be set via ${`${exports.ENVIRONMENT_PREFIX}RC_FILENAME`.toUpperCase()}, not via a rc file`);
const definition = this.settings.get(key);
if (!definition) {
if (strict) {
throw new clipanion_1.UsageError(`Unrecognized or legacy configuration settings found: ${key} - run "yarn config -v" to see the list of settings supported in Yarn`);
}
else {
this.invalid.set(key, source);
continue;
}
}
if (this.sources.has(key) && !(overwrite || definition.type === SettingsType.MAP || definition.isArray && definition.concatenateValues))
continue;
let parsed;
try {
parsed = parseValue(this, key, data[key], definition, folder);
}
catch (error) {
error.message += ` in ${formatUtils.pretty(this, source, formatUtils.Type.PATH)}`;
throw error;
}
if (definition.type === SettingsType.MAP) {
// Merge maps; without `overwrite`, existing entries take precedence
// (they're spread last).
const previousValue = this.values.get(key);
this.values.set(key, new Map(overwrite
? [...previousValue, ...parsed]
: [...parsed, ...previousValue]));
this.sources.set(key, `${this.sources.get(key)}, ${source}`);
}
else if (definition.isArray && definition.concatenateValues) {
const previousValue = this.values.get(key);
this.values.set(key, overwrite
? [...previousValue, ...parsed]
: [...parsed, ...previousValue]);
this.sources.set(key, `${this.sources.get(key)}, ${source}`);
}
else {
this.values.set(key, parsed);
this.sources.set(key, source);
}
}
}
// Returns the resolved value for a known settings key; throws otherwise.
get(key) {
if (!this.values.has(key))
throw new Error(`Invalid configuration key "${key}"`);
return this.values.get(key);
}
// Like `get`, but with display transforms applied (secret redaction and/or
// native path conversion).
getSpecial(key, { hideSecrets = false, getNativePaths = false }) {
const rawValue = this.get(key);
const definition = this.settings.get(key);
if (typeof definition === `undefined`)
throw new clipanion_1.UsageError(`Couldn't find a configuration settings named "${key}"`);
return transformConfiguration(rawValue, definition, {
hideSecrets,
getNativePaths,
});
}
// Returns the stdout/stderr streams a build subprocess should write to.
// With `enableInlineBuilds`, output is teed to the report (line by line)
// and the log file; otherwise it goes to the log file only, prefixed with
// `header` (note: the header is only written in the non-inline path).
getSubprocessStreams(logFile, { header, prefix, report }) {
let stdout;
let stderr;
const logStream = fslib_1.xfs.createWriteStream(logFile);
if (this.get(`enableInlineBuilds`)) {
const stdoutLineReporter = report.createStreamReporter(`${prefix} ${formatUtils.pretty(this, `STDOUT`, `green`)}`);
const stderrLineReporter = report.createStreamReporter(`${prefix} ${formatUtils.pretty(this, `STDERR`, `red`)}`);
stdout = new stream_1.PassThrough();
stdout.pipe(stdoutLineReporter);
stdout.pipe(logStream);
stderr = new stream_1.PassThrough();
stderr.pipe(stderrLineReporter);
stderr.pipe(logStream);
}
else {
stdout = logStream;
stderr = logStream;
if (typeof header !== `undefined`) {
stdout.write(`${header}\n`);
}
}
return { stdout, stderr };
}
// Builds the resolver chain: built-in resolvers first, then one instance of
// every resolver contributed by the plugins.
makeResolver() {
const pluginResolvers = [];
for (const plugin of this.plugins.values())
for (const resolver of plugin.resolvers || [])
pluginResolvers.push(new resolver());
return new MultiResolver_1.MultiResolver([
new VirtualResolver_1.VirtualResolver(),
new WorkspaceResolver_1.WorkspaceResolver(),
new ProtocolResolver_1.ProtocolResolver(),
...pluginResolvers,
]);
}
// Builds the fetcher chain, mirroring makeResolver.
makeFetcher() {
const pluginFetchers = [];
for (const plugin of this.plugins.values())
for (const fetcher of plugin.fetchers || [])
pluginFetchers.push(new fetcher());
return new MultiFetcher_1.MultiFetcher([
new VirtualFetcher_1.VirtualFetcher(),
new WorkspaceFetcher_1.WorkspaceFetcher(),
...pluginFetchers,
]);
}
// Instantiates every linker contributed by the plugins.
getLinkers() {
const linkers = [];
for (const plugin of this.plugins.values())
for (const linker of plugin.linkers || [])
linkers.push(new linker());
return linkers;
}
// Rebuilds the package extension index from the plugin hooks and the
// `packageExtensions` setting; must run before normalizePackage is used.
async refreshPackageExtensions() {
this.packageExtensions = new Map();
const packageExtensions = this.packageExtensions;
const registerPackageExtension = (descriptor, extensionData, { userProvided = false } = {}) => {
if (!semver_1.default.validRange(descriptor.range))
throw new Error(`Only semver ranges are allowed as keys for the lockfileExtensions setting`);
// The extension data is parsed through a Manifest so it supports the
// same dependency fields as a package.json.
const extension = new Manifest_1.Manifest();
extension.load(extensionData, { yamlCompatibilityMode: true });
const extensionsPerIdent = miscUtils.getArrayWithDefault(packageExtensions, descriptor.identHash);
const extensionsPerRange = [];
extensionsPerIdent.push([descriptor.range, extensionsPerRange]);
// Extensions start Inactive; their status is updated when applied.
const baseExtension = {
status: types_1.PackageExtensionStatus.Inactive,
userProvided,
parentDescriptor: descriptor,
};
for (const dependency of extension.dependencies.values())
extensionsPerRange.push({ ...baseExtension, type: types_1.PackageExtensionType.Dependency, descriptor: dependency, description: `${structUtils.stringifyIdent(descriptor)} > ${structUtils.stringifyIdent(dependency)}` });
for (const peerDependency of extension.peerDependencies.values())
extensionsPerRange.push({ ...baseExtension, type: types_1.PackageExtensionType.PeerDependency, descriptor: peerDependency, description: `${structUtils.stringifyIdent(descriptor)} >> ${structUtils.stringifyIdent(peerDependency)}` });
for (const [selector, meta] of extension.peerDependenciesMeta) {
for (const [key, value] of Object.entries(meta)) {
extensionsPerRange.push({ ...baseExtension, type: types_1.PackageExtensionType.PeerDependencyMeta, selector, key: key, value, description: `${structUtils.stringifyIdent(descriptor)} >> ${selector} / ${key}` });
}
}
};
await this.triggerHook(hooks => {
return hooks.registerPackageExtensions;
}, this, registerPackageExtension);
for (const [descriptorString, extensionData] of this.get(`packageExtensions`)) {
registerPackageExtension(structUtils.parseDescriptor(descriptorString, true), miscUtils.convertMapsToIndexableObjects(extensionData), { userProvided: true });
}
}
// Returns a copy of `original` with package extensions applied, implicit
// @types peer dependencies added, and dependency maps sorted for stable
// iteration order.
normalizePackage(original) {
const pkg = structUtils.copyPackage(original);
// We use the extensions to define additional dependencies that weren't
// properly listed in the original package definition
if (this.packageExtensions == null)
throw new Error(`refreshPackageExtensions has to be called before normalizing packages`);
const extensionsPerIdent = this.packageExtensions.get(original.identHash);
if (typeof extensionsPerIdent !== `undefined`) {
const version = original.version;
if (version !== null) {
for (const [range, extensionsPerRange] of extensionsPerIdent) {
if (!semverUtils.satisfiesWithPrereleases(version, range))
continue;
for (const extension of extensionsPerRange) {
// If an extension is active for a package but redundant
// for another one, it should be considered active
if (extension.status === types_1.PackageExtensionStatus.Inactive)
extension.status = types_1.PackageExtensionStatus.Redundant;
switch (extension.type) {
case types_1.PackageExtensionType.Dependency:
{
// Extensions never override dependencies already declared.
const currentDependency = pkg.dependencies.get(extension.descriptor.identHash);
if (typeof currentDependency === `undefined`) {
extension.status = types_1.PackageExtensionStatus.Active;
pkg.dependencies.set(extension.descriptor.identHash, extension.descriptor);
}
}
break;
case types_1.PackageExtensionType.PeerDependency:
{
const currentPeerDependency = pkg.peerDependencies.get(extension.descriptor.identHash);
if (typeof currentPeerDependency === `undefined`) {
extension.status = types_1.PackageExtensionStatus.Active;
pkg.peerDependencies.set(extension.descriptor.identHash, extension.descriptor);
}
}
break;
case types_1.PackageExtensionType.PeerDependencyMeta:
{
// Meta extensions apply when the key is missing or differs.
const currentPeerDependencyMeta = pkg.peerDependenciesMeta.get(extension.selector);
if (typeof currentPeerDependencyMeta === `undefined` || !Object.prototype.hasOwnProperty.call(currentPeerDependencyMeta, extension.key) || currentPeerDependencyMeta[extension.key] !== extension.value) {
extension.status = types_1.PackageExtensionStatus.Active;
miscUtils.getFactoryWithDefault(pkg.peerDependenciesMeta, extension.selector, () => ({}))[extension.key] = extension.value;
}
}
break;
default:
{
miscUtils.assertNever(extension);
}
break;
}
}
}
}
}
// We also add implicit optional @types peer dependencies for each peer
// dependency. This is for compatibility reason, as many existing packages
// forget to define their @types/react optional peer dependency when they
// peer-depend on react.
const getTypesName = (descriptor) => {
return descriptor.scope
? `${descriptor.scope}__${descriptor.name}`
: `${descriptor.name}`;
};
for (const descriptor of pkg.peerDependencies.values()) {
if (descriptor.scope === `@types`)
continue;
const typesName = getTypesName(descriptor);
const typesIdent = structUtils.makeIdent(`types`, typesName);
if (pkg.peerDependencies.has(typesIdent.identHash) || pkg.peerDependenciesMeta.has(typesIdent.identHash))
continue;
pkg.peerDependenciesMeta.set(structUtils.stringifyIdent(typesIdent), {
optional: true,
});
}
// I don't like implicit dependencies, but package authors are reluctant to
// use optional peer dependencies because they would print warnings in older
// npm releases.
for (const identString of pkg.peerDependenciesMeta.keys()) {
const ident = structUtils.parseIdent(identString);
if (!pkg.peerDependencies.has(ident.identHash)) {
pkg.peerDependencies.set(ident.identHash, structUtils.makeDescriptor(ident, `*`));
}
}
// We sort the dependencies so that further iterations always occur in the
// same order, regardless how the various registries formatted their output
pkg.dependencies = new Map(miscUtils.sortMap(pkg.dependencies, ([, descriptor]) => structUtils.stringifyDescriptor(descriptor)));
pkg.peerDependencies = new Map(miscUtils.sortMap(pkg.peerDependencies, ([, descriptor]) => structUtils.stringifyDescriptor(descriptor)));
return pkg;
}
// Returns (and memoizes) a p-limit concurrency limiter sized by the numeric
// setting stored under `key`.
getLimit(key) {
return miscUtils.getFactoryWithDefault(this.limits, key, () => {
return p_limit_1.default(this.get(key));
});
}
// Invokes the selected hook on every plugin that defines it, sequentially.
async triggerHook(get, ...args) {
for (const plugin of this.plugins.values()) {
const hooks = plugin.hooks;
if (!hooks)
continue;
const hook = get(hooks);
if (!hook)
continue;
await hook(...args);
}
}
// triggerHook, once per argument tuple.
async triggerMultipleHooks(get, argsList) {
for (const args of argsList) {
await this.triggerHook(get, ...args);
}
}
// Threads a value through every plugin's hook (each receives the previous
// result) and returns the final value.
async reduceHook(get, initialValue, ...args) {
let value = initialValue;
for (const plugin of this.plugins.values()) {
const hooks = plugin.hooks;
if (!hooks)
continue;
const hook = get(hooks);
if (!hook)
continue;
value = await hook(value, ...args);
}
return value;
}
// Returns the first non-undefined result produced by a plugin's hook, or
// null when no plugin answers.
async firstHook(get, ...args) {
for (const plugin of this.plugins.values()) {
const hooks = plugin.hooks;
if (!hooks)
continue;
const hook = get(hooks);
if (!hook)
continue;
const ret = await hook(...args);
if (typeof ret !== `undefined`) {
// @ts-expect-error
return ret;
}
}
return null;
}
/**
* @deprecated Prefer using formatUtils.pretty instead, which is type-safe
*/
format(value, formatType) {
return formatUtils.pretty(this, value, formatType);
}
}
exports.Configuration = Configuration;
// Static telemetry handle; left null until assigned elsewhere.
Configuration.telemetry = null;
/***/ }),
/***/ 78101:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.CorePlugin = void 0;
const tslib_1 = __webpack_require__(70655);
const MessageName_1 = __webpack_require__(62755);
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
// Built-in plugin registered under the `@@core` name; implements the hooks
// that are part of Yarn's core behavior (resolutions overrides and project /
// workspace validation).
exports.CorePlugin = {
hooks: {
// Applies the top-level `resolutions` field: when a dependency matches one
// of the configured patterns (optionally constrained by the dependent
// locator and by the descriptor's range), it's replaced by a descriptor
// bound to the configured reference. Returns the dependency unchanged when
// no pattern matches.
reduceDependency: (dependency, project, locator, initialDependency, { resolver, resolveOptions }) => {
for (const { pattern, reference } of project.topLevelWorkspace.manifest.resolutions) {
if (pattern.from && pattern.from.fullName !== structUtils.requirableIdent(locator))
continue;
if (pattern.from && pattern.from.description && pattern.from.description !== locator.reference)
continue;
if (pattern.descriptor.fullName !== structUtils.requirableIdent(dependency))
continue;
if (pattern.descriptor.description && pattern.descriptor.description !== dependency.range)
continue;
const alias = resolver.bindDescriptor(structUtils.makeDescriptor(dependency, reference), project.topLevelWorkspace.anchoredLocator, resolveOptions);
return alias;
}
return dependency;
},
// Fans out to the `validateWorkspace` hooks for every workspace, prefixing
// warnings/errors with the pretty workspace name.
validateProject: async (project, report) => {
for (const workspace of project.workspaces) {
const workspaceName = structUtils.prettyWorkspace(project.configuration, workspace);
await project.configuration.triggerHook(hooks => {
return hooks.validateWorkspace;
}, workspace, {
reportWarning: (name, text) => report.reportWarning(name, `${workspaceName}: ${text}`),
reportError: (name, text) => report.reportError(name, `${workspaceName}: ${text}`),
});
}
},
// Surfaces manifest problems as warnings; also flags a `resolutions` field
// declared outside the top-level workspace (it only takes effect there).
validateWorkspace: async (workspace, report) => {
// Validate manifest
const { manifest } = workspace;
if (manifest.resolutions.length && workspace.cwd !== workspace.project.cwd)
manifest.errors.push(new Error(`Resolutions field will be ignored`));
for (const manifestError of manifest.errors) {
report.reportWarning(MessageName_1.MessageName.INVALID_MANIFEST, manifestError.message);
}
},
},
};
/***/ }),
/***/ 45006:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.BuildType = void 0;
// Kinds of build steps a package may declare; numeric reverse-mapped enum in
// the style emitted by the TypeScript compiler.
var BuildType;
(function (BuildType) {
    [`SCRIPT`, `SHELLCODE`].forEach((name, value) => {
        BuildType[BuildType[name] = value] = name;
    });
})(BuildType = exports.BuildType || (exports.BuildType = {}));
/***/ }),
/***/ 48900:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.LegacyMigrationResolver = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const parsers_1 = __webpack_require__(21717);
const semver_1 = tslib_1.__importDefault(__webpack_require__(74451));
const MessageName_1 = __webpack_require__(62755);
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
// Table of [regex, matcher] pairs used to translate the `resolved` URLs of a
// Yarn v1 lockfile into modern Yarn references. Matchers are invoked as
// matcher(version, ...match), i.e. the second argument is the full match and
// the following ones are the capture groups.
const IMPORTED_PATTERNS = [
// These ones come from Git urls
[/^(git(?:\+(?:https|ssh))?:\/\/.*(?:\.git)?)#(.*)$/, (version, $0, $1, $2) => `${$1}#commit=${$2}`],
// These ones come from the GitHub HTTP endpoints
[/^https:\/\/((?:[^/]+?)@)?codeload\.github\.com\/([^/]+\/[^/]+)\/tar\.gz\/([0-9a-f]+)$/, (version, $0, $1 = ``, $2, $3) => `https://${$1}github.com/${$2}.git#commit=${$3}`],
[/^https:\/\/((?:[^/]+?)@)?github\.com\/([^/]+\/[^/]+?)(?:\.git)?#([0-9a-f]+)$/, (version, $0, $1 = ``, $2, $3) => `https://${$1}github.com/${$2}.git#commit=${$3}`],
// These ones come from the npm registry
// Note: /download/ is used by custom registries like Taobao
[/^https?:\/\/[^/]+\/(?:[^/]+\/)*(?:@[^/]+\/)?([^/]+)\/(?:-|download)\/\1-[^/]+\.tgz(?:#|$)/, version => `npm:${version}`],
// The GitHub package registry uses a different style of URLs
[/^https:\/\/npm\.pkg\.github\.com\/download\/(?:@[^/]+)\/(?:[^/]+)\/(?:[^/]+)\/(?:[0-9a-f]+)$/, version => `npm:${version}`],
// FontAwesome too; what is it with these registries that made them think using a different url pattern was a good idea?
[/^https:\/\/npm\.fontawesome\.com\/(?:@[^/]+)\/([^/]+)\/-\/([^/]+)\/\1-\2.tgz(?:#|$)/, version => `npm:${version}`],
// These ones come from the old Yarn offline mirror - we assume they came from npm
[/^[^/]+\.tgz#[0-9a-f]+$/, version => `npm:${version}`],
];
// Resolver used when migrating from a Yarn v1 (legacy) lockfile: it maps the
// descriptors found in the old lockfile straight to locators rebuilt from
// the `resolved` URLs, so the migration keeps the previously-installed
// versions. It only handles the descriptor -> locator step; resolving a
// locator to a package is delegated to the other resolvers.
class LegacyMigrationResolver {
constructor() {
// descriptorHash -> locator; stays null until `setup` finds a legacy
// lockfile, which also disables this resolver entirely.
this.resolutions = null;
}
// Reads the project lockfile and, when it's a legacy (pre-`__metadata`)
// one, precomputes the resolution map. Unparseable descriptors and
// unrecognized `resolved` URLs are reported as warnings and skipped.
async setup(project, { report }) {
const lockfilePath = fslib_1.ppath.join(project.cwd, project.configuration.get(`lockfileFilename`));
// No need to enable it if the lockfile doesn't exist
if (!fslib_1.xfs.existsSync(lockfilePath))
return;
const content = await fslib_1.xfs.readFilePromise(lockfilePath, `utf8`);
const parsed = parsers_1.parseSyml(content);
// No need to enable it either if the lockfile is modern
if (Object.prototype.hasOwnProperty.call(parsed, `__metadata`))
return;
const resolutions = this.resolutions = new Map();
for (const key of Object.keys(parsed)) {
let descriptor = structUtils.tryParseDescriptor(key);
if (!descriptor) {
report.reportWarning(MessageName_1.MessageName.YARN_IMPORT_FAILED, `Failed to parse the string "${key}" into a proper descriptor`);
continue;
}
// Bare semver ranges in v1 lockfiles are implicitly npm ranges.
if (semver_1.default.validRange(descriptor.range))
descriptor = structUtils.makeDescriptor(descriptor, `npm:${descriptor.range}`);
const { version, resolved } = parsed[key];
// Workspaces don't have the "resolved" key; we can skip them, as their
// resolution will be recomputed when needed anyway
if (!resolved)
continue;
let reference;
for (const [pattern, matcher] of IMPORTED_PATTERNS) {
const match = resolved.match(pattern);
if (match) {
reference = matcher(version, ...match);
break;
}
}
if (!reference) {
report.reportWarning(MessageName_1.MessageName.YARN_IMPORT_FAILED, `${structUtils.prettyDescriptor(project.configuration, descriptor)}: Only some patterns can be imported from legacy lockfiles (not "${resolved}")`);
continue;
}
const resolution = structUtils.makeLocator(descriptor, reference);
resolutions.set(descriptor.descriptorHash, resolution);
}
}
// Only supports descriptors that were precomputed by `setup`.
supportsDescriptor(descriptor, opts) {
if (!this.resolutions)
return false;
return this.resolutions.has(descriptor.descriptorHash);
}
supportsLocator(locator, opts) {
// This resolver only supports the descriptor -> locator part of the
// resolution, not the locator -> package one.
return false;
}
shouldPersistResolution(locator, opts) {
throw new Error(`Assertion failed: This resolver doesn't support resolving locators to packages`);
}
// No binding needed: resolutions were computed from the lockfile.
bindDescriptor(descriptor, fromLocator, opts) {
return descriptor;
}
getResolutionDependencies(descriptor, opts) {
return [];
}
// Returns the single precomputed locator for a supported descriptor.
async getCandidates(descriptor, dependencies, opts) {
if (!this.resolutions)
throw new Error(`Assertion failed: The resolution store should have been setup`);
const resolution = this.resolutions.get(descriptor.descriptorHash);
if (!resolution)
throw new Error(`Assertion failed: The resolution should have been registered`);
return [resolution];
}
async getSatisfying(descriptor, references, opts) {
return null;
}
async resolve(locator, opts) {
throw new Error(`Assertion failed: This resolver doesn't support resolving locators to packages`);
}
}
exports.LegacyMigrationResolver = LegacyMigrationResolver;
/***/ }),
/***/ 60613:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.LightReport = void 0;
const tslib_1 = __webpack_require__(70655);
const Report_1 = __webpack_require__(50334);
const StreamReport_1 = __webpack_require__(73759);
const formatUtils = tslib_1.__importStar(__webpack_require__(23821));
/**
 * Minimal Report implementation used when the full StreamReport machinery
 * isn't wanted: only errors (and a final error summary) are written to
 * stdout, while infos, warnings, progress, timers and json events are
 * silently discarded.
 */
class LightReport extends Report_1.Report {
    constructor({ configuration, stdout, suggestInstall = true }) {
        super();
        this.errorCount = 0;
        formatUtils.addLogFilterSupport(this, { configuration });
        this.configuration = configuration;
        this.stdout = stdout;
        this.suggestInstall = suggestInstall;
    }
    /**
     * Runs `cb` against a fresh report, converting exceptions into reported
     * errors and always finalizing before returning the report instance.
     */
    static async start(opts, cb) {
        const instance = new this(opts);
        try {
            await cb(instance);
        }
        catch (error) {
            instance.reportExceptionOnce(error);
        }
        finally {
            await instance.finalize();
        }
        return instance;
    }
    hasErrors() {
        return this.errorCount > 0;
    }
    exitCode() {
        if (this.hasErrors())
            return 1;
        return 0;
    }
    reportCacheHit(locator) {
    }
    reportCacheMiss(locator) {
    }
    startTimerSync(what, opts, cb) {
        // The options argument is optional; detect the 2-arg call form
        const callback = typeof opts === `function` ? opts : cb;
        return callback();
    }
    async startTimerPromise(what, opts, cb) {
        // The options argument is optional; detect the 2-arg call form
        const callback = typeof opts === `function` ? opts : cb;
        return await callback();
    }
    async startCacheReport(cb) {
        return await cb();
    }
    reportSeparator() {
    }
    reportInfo(name, text) {
    }
    reportWarning(name, text) {
    }
    reportError(name, text) {
        this.errorCount += 1;
        this.stdout.write(`${formatUtils.pretty(this.configuration, `➤`, `redBright`)} ${this.formatNameWithHyperlink(name)}: ${text}\n`);
    }
    reportProgress(progress) {
        // Drain the progress iterable so producers aren't blocked, discarding
        // every event along the way
        const consumption = Promise.resolve().then(async () => {
            // eslint-disable-next-line no-empty-pattern
            for await (const {} of progress) {
                // No need to do anything; we just want to consume the progress events
            }
        });
        const stop = () => {
            // Nothing to stop
        };
        // NOTE(review): spreading a Promise copies no own enumerable
        // properties, so the returned object only exposes `stop` and is not
        // awaitable — confirm callers don't rely on awaiting this value.
        return { ...consumption, stop };
    }
    reportJson(data) {
        // Just ignore the json output
    }
    async finalize() {
        if (this.errorCount > 0) {
            this.stdout.write(`${formatUtils.pretty(this.configuration, `➤`, `redBright`)} Errors happened when preparing the environment required to run this command.\n`);
            if (this.suggestInstall) {
                this.stdout.write(`${formatUtils.pretty(this.configuration, `➤`, `redBright`)} This might be caused by packages being missing from the lockfile, in which case running "yarn install" might help.\n`);
            }
        }
    }
    formatNameWithHyperlink(name) {
        return StreamReport_1.formatNameWithHyperlink(name, {
            configuration: this.configuration,
            json: false,
        });
    }
}
exports.LightReport = LightReport;
/***/ }),
/***/ 59909:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.LockfileResolver = void 0;
const tslib_1 = __webpack_require__(70655);
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
/**
 * Resolver answering resolution queries straight from the install state:
 * descriptors already recorded in `storedResolutions` (or literally matching
 * a previously-used package) are resolved to the packages stored in
 * `originalPackages`, without any remote work.
 */
class LockfileResolver {
    supportsDescriptor(descriptor, opts) {
        if (opts.project.storedResolutions.get(descriptor.descriptorHash))
            return true;
        // A descriptor whose range matches an already-used package can be
        // satisfied even if that exact range was never resolved before
        // (e.g. bar@^1.0.0 got resolved to bar@1.1.0, then a new package
        // depends on bar@1.1.0 directly, without the caret)
        if (opts.project.originalPackages.has(structUtils.convertDescriptorToLocator(descriptor).locatorHash))
            return true;
        return false;
    }
    supportsLocator(locator, opts) {
        return opts.project.originalPackages.has(locator.locatorHash);
    }
    shouldPersistResolution(locator, opts) {
        throw new Error(`The shouldPersistResolution method shouldn't be called on the lockfile resolver, which would always answer yes`);
    }
    bindDescriptor(descriptor, fromLocator, opts) {
        return descriptor;
    }
    getResolutionDependencies(descriptor, opts) {
        return [];
    }
    async getCandidates(descriptor, dependencies, opts) {
        // Fast path: the descriptor itself names a known package
        const direct = opts.project.originalPackages.get(structUtils.convertDescriptorToLocator(descriptor).locatorHash);
        if (direct)
            return [direct];
        const resolution = opts.project.storedResolutions.get(descriptor.descriptorHash);
        if (!resolution)
            throw new Error(`Expected the resolution to have been successful - resolution not found`);
        const resolved = opts.project.originalPackages.get(resolution);
        if (!resolved)
            throw new Error(`Expected the resolution to have been successful - package not found`);
        return [resolved];
    }
    async getSatisfying(descriptor, references, opts) {
        return null;
    }
    async resolve(locator, opts) {
        const pkg = opts.project.originalPackages.get(locator.locatorHash);
        if (!pkg)
            throw new Error(`The lockfile resolver isn't meant to resolve packages - they should already have been stored into a cache`);
        return pkg;
    }
}
exports.LockfileResolver = LockfileResolver;
/***/ }),
/***/ 11658:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Manifest = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const parsers_1 = __webpack_require__(21717);
const semver_1 = tslib_1.__importDefault(__webpack_require__(74451));
const miscUtils = tslib_1.__importStar(__webpack_require__(63111));
const semverUtils = tslib_1.__importStar(__webpack_require__(51201));
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
/**
 * In-memory representation of a `package.json` manifest. `load` parses raw
 * JSON data into normalized structures (parsed idents/descriptors, Maps and
 * Sets); `exportTo` serializes them back into a plain object while trying to
 * preserve the original key ordering. Parsing problems are accumulated into
 * `this.errors` instead of being thrown, so callers can keep working with a
 * partially-invalid manifest.
 */
class Manifest {
    constructor() {
        this.indent = `  `;
        this.name = null;
        this.version = null;
        this.os = null;
        this.cpu = null;
        this.type = null;
        this["private"] = false;
        this.license = null;
        this.main = null;
        this.module = null;
        this.browser = null;
        this.languageName = null;
        this.bin = new Map();
        this.scripts = new Map();
        this.dependencies = new Map();
        this.devDependencies = new Map();
        this.peerDependencies = new Map();
        this.workspaceDefinitions = [];
        this.dependenciesMeta = new Map();
        this.peerDependenciesMeta = new Map();
        this.resolutions = [];
        this.files = null;
        this.publishConfig = null;
        this.installConfig = null;
        this.preferUnplugged = null;
        this.raw = {};
        /**
         * errors found in the raw manifest while loading
         */
        this.errors = [];
    }
    /**
     * Loads the manifest stored at `path/package.json`, or returns null when
     * no such file exists.
     */
    static async tryFind(path, { baseFs = new fslib_1.NodeFS() } = {}) {
        const manifestPath = fslib_1.ppath.join(path, `package.json`);
        if (!await baseFs.existsPromise(manifestPath))
            return null;
        return await Manifest.fromFile(manifestPath, { baseFs });
    }
    /**
     * Same as `tryFind`, but throws when the manifest doesn't exist.
     */
    static async find(path, { baseFs } = {}) {
        const manifest = await Manifest.tryFind(path, { baseFs });
        if (manifest === null)
            throw new Error(`Manifest not found`);
        return manifest;
    }
    static async fromFile(path, { baseFs = new fslib_1.NodeFS() } = {}) {
        const manifest = new Manifest();
        await manifest.loadFile(path, { baseFs });
        return manifest;
    }
    static fromText(text) {
        const manifest = new Manifest();
        manifest.loadFromText(text);
        return manifest;
    }
    /**
     * Implements the `os`/`cpu` matching rules: `rules` may mix plain values
     * (allowlist entries) and `!`-prefixed values (denylist entries). A null
     * rule list matches everything; a list containing only negations acts as
     * a denylist, any plain entry turns it into an allowlist.
     */
    static isManifestFieldCompatible(rules, actual) {
        if (rules === null)
            return true;
        let isNotOnAllowlist = true;
        let isOnDenylist = false;
        for (const rule of rules) {
            if (rule[0] === `!`) {
                isOnDenylist = true;
                if (actual === rule.slice(1)) {
                    return false;
                }
            }
            else {
                isNotOnAllowlist = false;
                if (rule === actual) {
                    return true;
                }
            }
        }
        // Denylists with allowlisted items should be treated as allowlists for `os` and `cpu` in `package.json`
        return isOnDenylist && isNotOnAllowlist;
    }
    loadFromText(text) {
        let data;
        try {
            data = JSON.parse(stripBOM(text) || `{}`);
        }
        catch (error) {
            error.message += ` (when parsing ${text})`;
            throw error;
        }
        this.load(data);
        this.indent = getIndent(text);
    }
    async loadFile(path, { baseFs = new fslib_1.NodeFS() }) {
        const content = await baseFs.readFilePromise(path, `utf8`);
        let data;
        try {
            data = JSON.parse(stripBOM(content) || `{}`);
        }
        catch (error) {
            error.message += ` (when parsing ${path})`;
            throw error;
        }
        this.load(data);
        this.indent = getIndent(content);
    }
    /**
     * Populates the manifest fields from a raw (JSON.parse'd) object. Invalid
     * entries are skipped and recorded into `this.errors`; only a non-object
     * `data` is treated as a hard failure.
     */
    load(data, { yamlCompatibilityMode = false } = {}) {
        if (typeof data !== `object` || data === null)
            throw new Error(`Utterly invalid manifest data (${data})`);
        this.raw = data;
        const errors = [];
        if (typeof data.name === `string`) {
            try {
                this.name = structUtils.parseIdent(data.name);
            }
            catch (error) {
                errors.push(new Error(`Parsing failed for the 'name' field`));
            }
        }
        if (typeof data.version === `string`)
            this.version = data.version;
        if (Array.isArray(data.os)) {
            const os = [];
            this.os = os;
            for (const item of data.os) {
                if (typeof item !== `string`) {
                    errors.push(new Error(`Parsing failed for the 'os' field`));
                }
                else {
                    os.push(item);
                }
            }
        }
        if (Array.isArray(data.cpu)) {
            const cpu = [];
            this.cpu = cpu;
            for (const item of data.cpu) {
                if (typeof item !== `string`) {
                    errors.push(new Error(`Parsing failed for the 'cpu' field`));
                }
                else {
                    cpu.push(item);
                }
            }
        }
        if (typeof data.type === `string`)
            this.type = data.type;
        if (typeof data.private === `boolean`)
            this.private = data.private;
        if (typeof data.license === `string`)
            this.license = data.license;
        if (typeof data.languageName === `string`)
            this.languageName = data.languageName;
        if (typeof data.main === `string`)
            this.main = normalizeSlashes(data.main);
        if (typeof data.module === `string`)
            this.module = normalizeSlashes(data.module);
        if (data.browser != null) {
            // `browser` may be either a single entry point or a replacement map
            if (typeof data.browser === `string`) {
                this.browser = normalizeSlashes(data.browser);
            }
            else {
                this.browser = new Map();
                for (const [key, value] of Object.entries(data.browser)) {
                    this.browser.set(normalizeSlashes(key), typeof value === `string` ? normalizeSlashes(value) : value);
                }
            }
        }
        if (typeof data.bin === `string`) {
            // A string `bin` names a single binary after the package itself
            if (this.name !== null) {
                this.bin = new Map([[this.name.name, normalizeSlashes(data.bin)]]);
            }
            else {
                errors.push(new Error(`String bin field, but no attached package name`));
            }
        }
        else if (typeof data.bin === `object` && data.bin !== null) {
            for (const [key, value] of Object.entries(data.bin)) {
                if (typeof value !== `string`) {
                    errors.push(new Error(`Invalid bin definition for '${key}'`));
                    continue;
                }
                this.bin.set(key, normalizeSlashes(value));
            }
        }
        if (typeof data.scripts === `object` && data.scripts !== null) {
            for (const [key, value] of Object.entries(data.scripts)) {
                if (typeof value !== `string`) {
                    errors.push(new Error(`Invalid script definition for '${key}'`));
                    continue;
                }
                this.scripts.set(key, value);
            }
        }
        if (typeof data.dependencies === `object` && data.dependencies !== null) {
            for (const [name, range] of Object.entries(data.dependencies)) {
                if (typeof range !== `string`) {
                    errors.push(new Error(`Invalid dependency range for '${name}'`));
                    continue;
                }
                let ident;
                try {
                    ident = structUtils.parseIdent(name);
                }
                catch (error) {
                    errors.push(new Error(`Parsing failed for the dependency name '${name}'`));
                    continue;
                }
                const descriptor = structUtils.makeDescriptor(ident, range);
                this.dependencies.set(descriptor.identHash, descriptor);
            }
        }
        if (typeof data.devDependencies === `object` && data.devDependencies !== null) {
            for (const [name, range] of Object.entries(data.devDependencies)) {
                if (typeof range !== `string`) {
                    errors.push(new Error(`Invalid dependency range for '${name}'`));
                    continue;
                }
                let ident;
                try {
                    ident = structUtils.parseIdent(name);
                }
                catch (error) {
                    errors.push(new Error(`Parsing failed for the dependency name '${name}'`));
                    continue;
                }
                const descriptor = structUtils.makeDescriptor(ident, range);
                this.devDependencies.set(descriptor.identHash, descriptor);
            }
        }
        if (typeof data.peerDependencies === `object` && data.peerDependencies !== null) {
            for (let [name, range] of Object.entries(data.peerDependencies)) {
                let ident;
                try {
                    ident = structUtils.parseIdent(name);
                }
                catch (error) {
                    errors.push(new Error(`Parsing failed for the dependency name '${name}'`));
                    continue;
                }
                // Unlike regular dependencies, an invalid peer range degrades to
                // `*` instead of dropping the entry, so the peer link survives
                if (typeof range !== `string` || !semverUtils.validRange(range)) {
                    errors.push(new Error(`Invalid dependency range for '${name}'`));
                    range = `*`;
                }
                const descriptor = structUtils.makeDescriptor(ident, range);
                this.peerDependencies.set(descriptor.identHash, descriptor);
            }
        }
        // Guard against `workspaces: null` — `typeof null` is also `object`
        if (typeof data.workspaces === `object` && data.workspaces !== null && data.workspaces.nohoist)
            errors.push(new Error(`'nohoist' is deprecated, please use 'installConfig.hoistingLimits' instead`));
        // `workspaces` may be either a plain array or a {packages: [...]} object
        const workspaces = Array.isArray(data.workspaces)
            ? data.workspaces
            : typeof data.workspaces === `object` && data.workspaces !== null && Array.isArray(data.workspaces.packages)
                ? data.workspaces.packages
                : [];
        for (const entry of workspaces) {
            if (typeof entry !== `string`) {
                errors.push(new Error(`Invalid workspace definition for '${entry}'`));
                continue;
            }
            this.workspaceDefinitions.push({
                pattern: entry,
            });
        }
        if (typeof data.dependenciesMeta === `object` && data.dependenciesMeta !== null) {
            for (const [pattern, meta] of Object.entries(data.dependenciesMeta)) {
                if (typeof meta !== `object` || meta === null) {
                    errors.push(new Error(`Invalid meta field for '${pattern}'`));
                    continue;
                }
                const descriptor = structUtils.parseDescriptor(pattern);
                const dependencyMeta = this.ensureDependencyMeta(descriptor);
                const built = tryParseOptionalBoolean(meta.built, { yamlCompatibilityMode });
                if (built === null) {
                    errors.push(new Error(`Invalid built meta field for '${pattern}'`));
                    continue;
                }
                const optional = tryParseOptionalBoolean(meta.optional, { yamlCompatibilityMode });
                if (optional === null) {
                    errors.push(new Error(`Invalid optional meta field for '${pattern}'`));
                    continue;
                }
                const unplugged = tryParseOptionalBoolean(meta.unplugged, { yamlCompatibilityMode });
                if (unplugged === null) {
                    errors.push(new Error(`Invalid unplugged meta field for '${pattern}'`));
                    continue;
                }
                Object.assign(dependencyMeta, { built, optional, unplugged });
            }
        }
        if (typeof data.peerDependenciesMeta === `object` && data.peerDependenciesMeta !== null) {
            for (const [pattern, meta] of Object.entries(data.peerDependenciesMeta)) {
                if (typeof meta !== `object` || meta === null) {
                    errors.push(new Error(`Invalid meta field for '${pattern}'`));
                    continue;
                }
                const descriptor = structUtils.parseDescriptor(pattern);
                const peerDependencyMeta = this.ensurePeerDependencyMeta(descriptor);
                const optional = tryParseOptionalBoolean(meta.optional, { yamlCompatibilityMode });
                if (optional === null) {
                    errors.push(new Error(`Invalid optional meta field for '${pattern}'`));
                    continue;
                }
                Object.assign(peerDependencyMeta, { optional });
            }
        }
        if (typeof data.resolutions === `object` && data.resolutions !== null) {
            for (const [pattern, reference] of Object.entries(data.resolutions)) {
                if (typeof reference !== `string`) {
                    errors.push(new Error(`Invalid resolution entry for '${pattern}'`));
                    continue;
                }
                try {
                    this.resolutions.push({ pattern: parsers_1.parseResolution(pattern), reference });
                }
                catch (error) {
                    errors.push(error);
                    continue;
                }
            }
        }
        if (Array.isArray(data.files)) {
            this.files = new Set();
            for (const filename of data.files) {
                if (typeof filename !== `string`) {
                    errors.push(new Error(`Invalid files entry for '${filename}'`));
                    continue;
                }
                this.files.add(filename);
            }
        }
        if (typeof data.publishConfig === `object` && data.publishConfig !== null) {
            this.publishConfig = {};
            if (typeof data.publishConfig.access === `string`)
                this.publishConfig.access = data.publishConfig.access;
            if (typeof data.publishConfig.main === `string`)
                this.publishConfig.main = normalizeSlashes(data.publishConfig.main);
            if (typeof data.publishConfig.module === `string`)
                this.publishConfig.module = normalizeSlashes(data.publishConfig.module);
            if (data.publishConfig.browser != null) {
                if (typeof data.publishConfig.browser === `string`) {
                    this.publishConfig.browser = normalizeSlashes(data.publishConfig.browser);
                }
                else {
                    this.publishConfig.browser = new Map();
                    for (const [key, value] of Object.entries(data.publishConfig.browser)) {
                        this.publishConfig.browser.set(normalizeSlashes(key), typeof value === `string` ? normalizeSlashes(value) : value);
                    }
                }
            }
            if (typeof data.publishConfig.registry === `string`)
                this.publishConfig.registry = data.publishConfig.registry;
            if (typeof data.publishConfig.bin === `string`) {
                if (this.name !== null) {
                    this.publishConfig.bin = new Map([[this.name.name, normalizeSlashes(data.publishConfig.bin)]]);
                }
                else {
                    errors.push(new Error(`String bin field, but no attached package name`));
                }
            }
            else if (typeof data.publishConfig.bin === `object` && data.publishConfig.bin !== null) {
                this.publishConfig.bin = new Map();
                for (const [key, value] of Object.entries(data.publishConfig.bin)) {
                    if (typeof value !== `string`) {
                        errors.push(new Error(`Invalid bin definition for '${key}'`));
                        continue;
                    }
                    this.publishConfig.bin.set(key, normalizeSlashes(value));
                }
            }
            if (Array.isArray(data.publishConfig.executableFiles)) {
                this.publishConfig.executableFiles = new Set();
                for (const value of data.publishConfig.executableFiles) {
                    if (typeof value !== `string`) {
                        errors.push(new Error(`Invalid executable file definition`));
                        continue;
                    }
                    this.publishConfig.executableFiles.add(normalizeSlashes(value));
                }
            }
        }
        if (typeof data.installConfig === `object` && data.installConfig !== null) {
            this.installConfig = {};
            for (const key of Object.keys(data.installConfig)) {
                if (key === `hoistingLimits`) {
                    if (typeof data.installConfig.hoistingLimits === `string`) {
                        this.installConfig.hoistingLimits = data.installConfig.hoistingLimits;
                    }
                    else {
                        errors.push(new Error(`Invalid hoisting limits definition`));
                    }
                }
                else {
                    errors.push(new Error(`Unrecognized installConfig key: ${key}`));
                }
            }
        }
        // We treat optional dependencies after both the regular dependency field
        // and the dependenciesMeta field have been generated (because we will
        // override them)
        if (typeof data.optionalDependencies === `object` && data.optionalDependencies !== null) {
            for (const [name, range] of Object.entries(data.optionalDependencies)) {
                if (typeof range !== `string`) {
                    errors.push(new Error(`Invalid dependency range for '${name}'`));
                    continue;
                }
                let ident;
                try {
                    ident = structUtils.parseIdent(name);
                }
                catch (error) {
                    errors.push(new Error(`Parsing failed for the dependency name '${name}'`));
                    continue;
                }
                // Note that we store the optional dependencies in the same store as
                // the one that keep the regular dependencies, because they're
                // effectively the same (the only difference is that optional
                // dependencies have an extra field set in dependenciesMeta).
                const realDescriptor = structUtils.makeDescriptor(ident, range);
                this.dependencies.set(realDescriptor.identHash, realDescriptor);
                const identDescriptor = structUtils.makeDescriptor(ident, `unknown`);
                const dependencyMeta = this.ensureDependencyMeta(identDescriptor);
                Object.assign(dependencyMeta, { optional: true });
            }
        }
        if (typeof data.preferUnplugged === `boolean`)
            this.preferUnplugged = data.preferUnplugged;
        this.errors = errors;
    }
    /**
     * Returns the dependency Map matching one of the three dependency scopes;
     * throws on any other value.
     */
    getForScope(type) {
        switch (type) {
            case `dependencies`:
                return this.dependencies;
            case `devDependencies`:
                return this.devDependencies;
            case `peerDependencies`:
                return this.peerDependencies;
            default: {
                throw new Error(`Unsupported value ("${type}")`);
            }
        }
    }
    hasConsumerDependency(ident) {
        if (this.dependencies.has(ident.identHash))
            return true;
        if (this.peerDependencies.has(ident.identHash))
            return true;
        return false;
    }
    hasHardDependency(ident) {
        if (this.dependencies.has(ident.identHash))
            return true;
        if (this.devDependencies.has(ident.identHash))
            return true;
        return false;
    }
    hasSoftDependency(ident) {
        if (this.peerDependencies.has(ident.identHash))
            return true;
        return false;
    }
    hasDependency(ident) {
        if (this.hasHardDependency(ident))
            return true;
        if (this.hasSoftDependency(ident))
            return true;
        return false;
    }
    isCompatibleWithOS(os) {
        return Manifest.isManifestFieldCompatible(this.os, os);
    }
    isCompatibleWithCPU(cpu) {
        return Manifest.isManifestFieldCompatible(this.cpu, cpu);
    }
    /**
     * Returns (creating it if needed) the meta record attached to the given
     * descriptor; the descriptor range must be `unknown` or a valid exact
     * semver version.
     */
    ensureDependencyMeta(descriptor) {
        if (descriptor.range !== `unknown` && !semver_1.default.valid(descriptor.range))
            throw new Error(`Invalid meta field range for '${structUtils.stringifyDescriptor(descriptor)}'`);
        const identString = structUtils.stringifyIdent(descriptor);
        const range = descriptor.range !== `unknown` ? descriptor.range : null;
        let dependencyMetaSet = this.dependenciesMeta.get(identString);
        if (!dependencyMetaSet)
            this.dependenciesMeta.set(identString, dependencyMetaSet = new Map());
        let dependencyMeta = dependencyMetaSet.get(range);
        if (!dependencyMeta)
            dependencyMetaSet.set(range, dependencyMeta = {});
        return dependencyMeta;
    }
    /**
     * Returns (creating it if needed) the peer-dependency meta record for the
     * given descriptor; unlike `ensureDependencyMeta`, no version-specific
     * records are allowed.
     */
    ensurePeerDependencyMeta(descriptor) {
        if (descriptor.range !== `unknown`)
            throw new Error(`Invalid meta field range for '${structUtils.stringifyDescriptor(descriptor)}'`);
        const identString = structUtils.stringifyIdent(descriptor);
        let peerDependencyMeta = this.peerDependenciesMeta.get(identString);
        if (!peerDependencyMeta)
            this.peerDependenciesMeta.set(identString, peerDependencyMeta = {});
        return peerDependencyMeta;
    }
    /**
     * Sets `raw[name] = value`, inserting the key right after the last
     * existing key listed in `after` so the serialized manifest keeps a
     * natural field ordering. Existing keys are updated in place.
     */
    setRawField(name, value, { after = [] } = {}) {
        const afterSet = new Set(after.filter(key => {
            return Object.prototype.hasOwnProperty.call(this.raw, key);
        }));
        if (afterSet.size === 0 || Object.prototype.hasOwnProperty.call(this.raw, name)) {
            this.raw[name] = value;
        }
        else {
            // Rebuild the raw object so the new key lands right after the last
            // anchor key present in the manifest
            const oldRaw = this.raw;
            const newRaw = this.raw = {};
            let inserted = false;
            for (const key of Object.keys(oldRaw)) {
                newRaw[key] = oldRaw[key];
                if (!inserted) {
                    afterSet.delete(key);
                    if (afterSet.size === 0) {
                        newRaw[name] = value;
                        inserted = true;
                    }
                }
            }
        }
    }
    /**
     * Serializes the manifest back into `data`. With `compatibilityMode`
     * enabled (the default), dependencies flagged optional are emitted under
     * `optionalDependencies` the way the npm toolchain expects.
     */
    exportTo(data, { compatibilityMode = true } = {}) {
        // Note that we even set the fields that we re-set later; it
        // allows us to preserve the key ordering
        Object.assign(data, this.raw);
        if (this.name !== null)
            data.name = structUtils.stringifyIdent(this.name);
        else
            delete data.name;
        if (this.version !== null)
            data.version = this.version;
        else
            delete data.version;
        if (this.os !== null)
            data.os = this.os;
        else
            delete data.os;
        if (this.cpu !== null)
            data.cpu = this.cpu;
        else
            delete data.cpu;
        if (this.type !== null)
            data.type = this.type;
        else
            delete data.type;
        if (this.private)
            data.private = true;
        else
            delete data.private;
        if (this.license !== null)
            data.license = this.license;
        else
            delete data.license;
        if (this.languageName !== null)
            data.languageName = this.languageName;
        else
            delete data.languageName;
        if (this.main !== null)
            data.main = this.main;
        else
            delete data.main;
        if (this.module !== null)
            data.module = this.module;
        else
            delete data.module;
        if (this.browser !== null) {
            const browser = this.browser;
            if (typeof browser === `string`) {
                data.browser = browser;
            }
            else if (browser instanceof Map) {
                data.browser = Object.assign({}, ...Array.from(browser.keys()).sort().map(name => {
                    return { [name]: browser.get(name) };
                }));
            }
        }
        else {
            delete data.browser;
        }
        if (this.bin.size === 1 && this.name !== null && this.bin.has(this.name.name)) {
            // A single binary named after the package collapses to the string form
            data.bin = this.bin.get(this.name.name);
        }
        else if (this.bin.size > 0) {
            data.bin = Object.assign({}, ...Array.from(this.bin.keys()).sort().map(name => {
                return { [name]: this.bin.get(name) };
            }));
        }
        else {
            delete data.bin;
        }
        if (this.workspaceDefinitions.length > 0) {
            if (this.raw.workspaces && !Array.isArray(this.raw.workspaces)) {
                data.workspaces = { ...this.raw.workspaces, packages: this.workspaceDefinitions.map(({ pattern }) => pattern) };
            }
            else {
                data.workspaces = this.workspaceDefinitions.map(({ pattern }) => pattern);
            }
        }
        else if (this.raw.workspaces && !Array.isArray(this.raw.workspaces) && Object.keys(this.raw.workspaces).length > 0) {
            data.workspaces = this.raw.workspaces;
        }
        else {
            delete data.workspaces;
        }
        // Split the single dependency store back into `dependencies` and
        // `optionalDependencies` based on the stored meta flags
        const regularDependencies = [];
        const optionalDependencies = [];
        for (const dependency of this.dependencies.values()) {
            const dependencyMetaSet = this.dependenciesMeta.get(structUtils.stringifyIdent(dependency));
            let isOptionallyBuilt = false;
            if (compatibilityMode) {
                if (dependencyMetaSet) {
                    const meta = dependencyMetaSet.get(null);
                    if (meta && meta.optional) {
                        isOptionallyBuilt = true;
                    }
                }
            }
            if (isOptionallyBuilt) {
                optionalDependencies.push(dependency);
            }
            else {
                regularDependencies.push(dependency);
            }
        }
        if (regularDependencies.length > 0) {
            data.dependencies = Object.assign({}, ...structUtils.sortDescriptors(regularDependencies).map(dependency => {
                return { [structUtils.stringifyIdent(dependency)]: dependency.range };
            }));
        }
        else {
            delete data.dependencies;
        }
        if (optionalDependencies.length > 0) {
            data.optionalDependencies = Object.assign({}, ...structUtils.sortDescriptors(optionalDependencies).map(dependency => {
                return { [structUtils.stringifyIdent(dependency)]: dependency.range };
            }));
        }
        else {
            delete data.optionalDependencies;
        }
        if (this.devDependencies.size > 0) {
            data.devDependencies = Object.assign({}, ...structUtils.sortDescriptors(this.devDependencies.values()).map(dependency => {
                return { [structUtils.stringifyIdent(dependency)]: dependency.range };
            }));
        }
        else {
            delete data.devDependencies;
        }
        if (this.peerDependencies.size > 0) {
            data.peerDependencies = Object.assign({}, ...structUtils.sortDescriptors(this.peerDependencies.values()).map(dependency => {
                return { [structUtils.stringifyIdent(dependency)]: dependency.range };
            }));
        }
        else {
            delete data.peerDependencies;
        }
        data.dependenciesMeta = {};
        for (const [identString, dependencyMetaSet] of miscUtils.sortMap(this.dependenciesMeta.entries(), ([identString, dependencyMetaSet]) => identString)) {
            for (const [range, meta] of miscUtils.sortMap(dependencyMetaSet.entries(), ([range, meta]) => range !== null ? `0${range}` : `1`)) {
                const key = range !== null
                    ? structUtils.stringifyDescriptor(structUtils.makeDescriptor(structUtils.parseIdent(identString), range))
                    : identString;
                const metaCopy = { ...meta };
                // The `optional` flag is already carried by the
                // `optionalDependencies` field in compatibility mode
                if (compatibilityMode && range === null)
                    delete metaCopy.optional;
                if (Object.keys(metaCopy).length === 0)
                    continue;
                data.dependenciesMeta[key] = metaCopy;
            }
        }
        if (Object.keys(data.dependenciesMeta).length === 0)
            delete data.dependenciesMeta;
        if (this.peerDependenciesMeta.size > 0) {
            data.peerDependenciesMeta = Object.assign({}, ...miscUtils.sortMap(this.peerDependenciesMeta.entries(), ([identString, meta]) => identString).map(([identString, meta]) => {
                return { [identString]: meta };
            }));
        }
        else {
            delete data.peerDependenciesMeta;
        }
        if (this.resolutions.length > 0) {
            data.resolutions = Object.assign({}, ...this.resolutions.map(({ pattern, reference }) => {
                return { [parsers_1.stringifyResolution(pattern)]: reference };
            }));
        }
        else {
            delete data.resolutions;
        }
        if (this.files !== null)
            data.files = Array.from(this.files);
        else
            delete data.files;
        if (this.preferUnplugged !== null)
            data.preferUnplugged = this.preferUnplugged;
        else
            delete data.preferUnplugged;
        return data;
    }
}
exports.Manifest = Manifest;
// Static metadata: the canonical manifest filename plus the dependency
// scopes recognized by `getForScope` / `hasHardDependency` / `hasSoftDependency`.
Manifest.fileName = `package.json`;
Manifest.allDependencies = [`dependencies`, `devDependencies`, `peerDependencies`];
Manifest.hardDependencies = [`dependencies`, `devDependencies`];
/**
 * Detects the indentation unit used by a JSON document by looking at the
 * first indented line; defaults to two spaces when nothing is indented.
 */
function getIndent(content) {
    const firstIndent = content.match(/^[ \t]+/m);
    return firstIndent ? firstIndent[0] : `  `;
}
/**
 * Removes a leading byte-order mark (U+FEFF) from `content`, if present,
 * so the text can safely be fed to JSON.parse.
 */
function stripBOM(content) {
    const hasBOM = content.charCodeAt(0) === 0xFEFF;
    return hasBOM ? content.slice(1) : content;
}
/**
 * Converts Windows-style backslashes into forward slashes so paths stored
 * in the manifest stay portable.
 */
function normalizeSlashes(str) {
    return str.split(`\\`).join(`/`);
}
/**
 * Parses an optional boolean manifest field. In YAML-compatibility mode the
 * value may also be a stringified boolean (delegated to miscUtils); otherwise
 * only `true`, `false` and `undefined` are accepted, with anything else
 * yielding null to signal a validation error.
 */
function tryParseOptionalBoolean(value, { yamlCompatibilityMode }) {
    if (yamlCompatibilityMode)
        return miscUtils.tryParseOptionalBoolean(value);
    switch (typeof value) {
        case `undefined`:
        case `boolean`:
            return value;
        default:
            return null;
    }
}
/***/ }),
/***/ 62755:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.stringifyMessageName = exports.MessageName = void 0;
// The values in this enum should never be reassigned, even if some are removed
// over time (it would mess up the search results, which are the whole point of
// having this system)
// TypeScript-style enum compiled to a reverse-mapped object: after the IIFE
// below runs, `MessageName.UNNAMED === 0` and `MessageName[0] === "UNNAMED"`.
// `stringifyMessageName` turns these codes into the `YNxxxx` labels shown in
// Yarn's output.
var MessageName;
(function (MessageName) {
    MessageName[MessageName["UNNAMED"] = 0] = "UNNAMED";
    MessageName[MessageName["EXCEPTION"] = 1] = "EXCEPTION";
    MessageName[MessageName["MISSING_PEER_DEPENDENCY"] = 2] = "MISSING_PEER_DEPENDENCY";
    MessageName[MessageName["CYCLIC_DEPENDENCIES"] = 3] = "CYCLIC_DEPENDENCIES";
    MessageName[MessageName["DISABLED_BUILD_SCRIPTS"] = 4] = "DISABLED_BUILD_SCRIPTS";
    MessageName[MessageName["BUILD_DISABLED"] = 5] = "BUILD_DISABLED";
    MessageName[MessageName["SOFT_LINK_BUILD"] = 6] = "SOFT_LINK_BUILD";
    MessageName[MessageName["MUST_BUILD"] = 7] = "MUST_BUILD";
    MessageName[MessageName["MUST_REBUILD"] = 8] = "MUST_REBUILD";
    MessageName[MessageName["BUILD_FAILED"] = 9] = "BUILD_FAILED";
    MessageName[MessageName["RESOLVER_NOT_FOUND"] = 10] = "RESOLVER_NOT_FOUND";
    MessageName[MessageName["FETCHER_NOT_FOUND"] = 11] = "FETCHER_NOT_FOUND";
    MessageName[MessageName["LINKER_NOT_FOUND"] = 12] = "LINKER_NOT_FOUND";
    MessageName[MessageName["FETCH_NOT_CACHED"] = 13] = "FETCH_NOT_CACHED";
    MessageName[MessageName["YARN_IMPORT_FAILED"] = 14] = "YARN_IMPORT_FAILED";
    MessageName[MessageName["REMOTE_INVALID"] = 15] = "REMOTE_INVALID";
    MessageName[MessageName["REMOTE_NOT_FOUND"] = 16] = "REMOTE_NOT_FOUND";
    MessageName[MessageName["RESOLUTION_PACK"] = 17] = "RESOLUTION_PACK";
    MessageName[MessageName["CACHE_CHECKSUM_MISMATCH"] = 18] = "CACHE_CHECKSUM_MISMATCH";
    MessageName[MessageName["UNUSED_CACHE_ENTRY"] = 19] = "UNUSED_CACHE_ENTRY";
    MessageName[MessageName["MISSING_LOCKFILE_ENTRY"] = 20] = "MISSING_LOCKFILE_ENTRY";
    MessageName[MessageName["WORKSPACE_NOT_FOUND"] = 21] = "WORKSPACE_NOT_FOUND";
    MessageName[MessageName["TOO_MANY_MATCHING_WORKSPACES"] = 22] = "TOO_MANY_MATCHING_WORKSPACES";
    MessageName[MessageName["CONSTRAINTS_MISSING_DEPENDENCY"] = 23] = "CONSTRAINTS_MISSING_DEPENDENCY";
    MessageName[MessageName["CONSTRAINTS_INCOMPATIBLE_DEPENDENCY"] = 24] = "CONSTRAINTS_INCOMPATIBLE_DEPENDENCY";
    MessageName[MessageName["CONSTRAINTS_EXTRANEOUS_DEPENDENCY"] = 25] = "CONSTRAINTS_EXTRANEOUS_DEPENDENCY";
    MessageName[MessageName["CONSTRAINTS_INVALID_DEPENDENCY"] = 26] = "CONSTRAINTS_INVALID_DEPENDENCY";
    MessageName[MessageName["CANT_SUGGEST_RESOLUTIONS"] = 27] = "CANT_SUGGEST_RESOLUTIONS";
    MessageName[MessageName["FROZEN_LOCKFILE_EXCEPTION"] = 28] = "FROZEN_LOCKFILE_EXCEPTION";
    MessageName[MessageName["CROSS_DRIVE_VIRTUAL_LOCAL"] = 29] = "CROSS_DRIVE_VIRTUAL_LOCAL";
    MessageName[MessageName["FETCH_FAILED"] = 30] = "FETCH_FAILED";
    MessageName[MessageName["DANGEROUS_NODE_MODULES"] = 31] = "DANGEROUS_NODE_MODULES";
    MessageName[MessageName["NODE_GYP_INJECTED"] = 32] = "NODE_GYP_INJECTED";
    MessageName[MessageName["AUTHENTICATION_NOT_FOUND"] = 33] = "AUTHENTICATION_NOT_FOUND";
    MessageName[MessageName["INVALID_CONFIGURATION_KEY"] = 34] = "INVALID_CONFIGURATION_KEY";
    MessageName[MessageName["NETWORK_ERROR"] = 35] = "NETWORK_ERROR";
    MessageName[MessageName["LIFECYCLE_SCRIPT"] = 36] = "LIFECYCLE_SCRIPT";
    MessageName[MessageName["CONSTRAINTS_MISSING_FIELD"] = 37] = "CONSTRAINTS_MISSING_FIELD";
    MessageName[MessageName["CONSTRAINTS_INCOMPATIBLE_FIELD"] = 38] = "CONSTRAINTS_INCOMPATIBLE_FIELD";
    MessageName[MessageName["CONSTRAINTS_EXTRANEOUS_FIELD"] = 39] = "CONSTRAINTS_EXTRANEOUS_FIELD";
    MessageName[MessageName["CONSTRAINTS_INVALID_FIELD"] = 40] = "CONSTRAINTS_INVALID_FIELD";
    MessageName[MessageName["AUTHENTICATION_INVALID"] = 41] = "AUTHENTICATION_INVALID";
    MessageName[MessageName["PROLOG_UNKNOWN_ERROR"] = 42] = "PROLOG_UNKNOWN_ERROR";
    MessageName[MessageName["PROLOG_SYNTAX_ERROR"] = 43] = "PROLOG_SYNTAX_ERROR";
    MessageName[MessageName["PROLOG_EXISTENCE_ERROR"] = 44] = "PROLOG_EXISTENCE_ERROR";
    MessageName[MessageName["STACK_OVERFLOW_RESOLUTION"] = 45] = "STACK_OVERFLOW_RESOLUTION";
    MessageName[MessageName["AUTOMERGE_FAILED_TO_PARSE"] = 46] = "AUTOMERGE_FAILED_TO_PARSE";
    MessageName[MessageName["AUTOMERGE_IMMUTABLE"] = 47] = "AUTOMERGE_IMMUTABLE";
    MessageName[MessageName["AUTOMERGE_SUCCESS"] = 48] = "AUTOMERGE_SUCCESS";
    MessageName[MessageName["AUTOMERGE_REQUIRED"] = 49] = "AUTOMERGE_REQUIRED";
    MessageName[MessageName["DEPRECATED_CLI_SETTINGS"] = 50] = "DEPRECATED_CLI_SETTINGS";
    MessageName[MessageName["PLUGIN_NAME_NOT_FOUND"] = 51] = "PLUGIN_NAME_NOT_FOUND";
    MessageName[MessageName["INVALID_PLUGIN_REFERENCE"] = 52] = "INVALID_PLUGIN_REFERENCE";
    MessageName[MessageName["CONSTRAINTS_AMBIGUITY"] = 53] = "CONSTRAINTS_AMBIGUITY";
    MessageName[MessageName["CACHE_OUTSIDE_PROJECT"] = 54] = "CACHE_OUTSIDE_PROJECT";
    MessageName[MessageName["IMMUTABLE_INSTALL"] = 55] = "IMMUTABLE_INSTALL";
    MessageName[MessageName["IMMUTABLE_CACHE"] = 56] = "IMMUTABLE_CACHE";
    MessageName[MessageName["INVALID_MANIFEST"] = 57] = "INVALID_MANIFEST";
    MessageName[MessageName["PACKAGE_PREPARATION_FAILED"] = 58] = "PACKAGE_PREPARATION_FAILED";
    MessageName[MessageName["INVALID_RANGE_PEER_DEPENDENCY"] = 59] = "INVALID_RANGE_PEER_DEPENDENCY";
    MessageName[MessageName["INCOMPATIBLE_PEER_DEPENDENCY"] = 60] = "INCOMPATIBLE_PEER_DEPENDENCY";
    MessageName[MessageName["DEPRECATED_PACKAGE"] = 61] = "DEPRECATED_PACKAGE";
    MessageName[MessageName["INCOMPATIBLE_OS"] = 62] = "INCOMPATIBLE_OS";
    MessageName[MessageName["INCOMPATIBLE_CPU"] = 63] = "INCOMPATIBLE_CPU";
    MessageName[MessageName["FROZEN_ARTIFACT_EXCEPTION"] = 64] = "FROZEN_ARTIFACT_EXCEPTION";
    MessageName[MessageName["TELEMETRY_NOTICE"] = 65] = "TELEMETRY_NOTICE";
    MessageName[MessageName["PATCH_HUNK_FAILED"] = 66] = "PATCH_HUNK_FAILED";
    MessageName[MessageName["INVALID_CONFIGURATION_VALUE"] = 67] = "INVALID_CONFIGURATION_VALUE";
    MessageName[MessageName["UNUSED_PACKAGE_EXTENSION"] = 68] = "UNUSED_PACKAGE_EXTENSION";
    MessageName[MessageName["REDUNDANT_PACKAGE_EXTENSION"] = 69] = "REDUNDANT_PACKAGE_EXTENSION";
})(MessageName = exports.MessageName || (exports.MessageName = {}));
/**
 * Formats a MessageName enum value as a Yarn diagnostic code (e.g. `YN0046`).
 *
 * @param {number} name - Numeric message identifier.
 * @returns {string} `YN` followed by the decimal value, zero-padded to 4 digits.
 */
function stringifyMessageName(name) {
    const digits = name.toString(10);
    return `YN${digits.padStart(4, `0`)}`;
}
exports.stringifyMessageName = stringifyMessageName;
/***/ }),
/***/ 74441:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.MultiFetcher = void 0;
const tslib_1 = __webpack_require__(70655);
const MessageName_1 = __webpack_require__(62755);
const Report_1 = __webpack_require__(50334);
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
/**
 * Fetcher that delegates every operation to the first of its child fetchers
 * able to handle the given locator.
 */
class MultiFetcher {
    constructor(fetchers) {
        this.fetchers = fetchers;
    }
    supports(locator, opts) {
        return this.tryFetcher(locator, opts) !== null;
    }
    getLocalPath(locator, opts) {
        return this.getFetcher(locator, opts).getLocalPath(locator, opts);
    }
    async fetch(locator, opts) {
        return await this.getFetcher(locator, opts).fetch(locator, opts);
    }
    /** Returns the first supporting fetcher, or `null` when none matches. */
    tryFetcher(locator, opts) {
        const match = this.fetchers.find(candidate => candidate.supports(locator, opts));
        return match ? match : null;
    }
    /** Like `tryFetcher`, but reports an error when no fetcher matches. */
    getFetcher(locator, opts) {
        const match = this.fetchers.find(candidate => candidate.supports(locator, opts));
        if (!match)
            throw new Report_1.ReportError(MessageName_1.MessageName.FETCHER_NOT_FOUND, `${structUtils.prettyLocator(opts.project.configuration, locator)} isn't supported by any available fetcher`);
        return match;
    }
}
exports.MultiFetcher = MultiFetcher;
/***/ }),
/***/ 54123:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.MultiResolver = void 0;
const tslib_1 = __webpack_require__(70655);
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
/**
 * Resolver that forwards each operation to the first of its child resolvers
 * that supports the given descriptor or locator.
 */
class MultiResolver {
    constructor(resolvers) {
        // Drop null/undefined entries so optional resolvers can be passed as-is
        this.resolvers = resolvers.filter(resolver => resolver);
    }
    supportsDescriptor(descriptor, opts) {
        return !!this.tryResolverByDescriptor(descriptor, opts);
    }
    supportsLocator(locator, opts) {
        return !!this.tryResolverByLocator(locator, opts);
    }
    shouldPersistResolution(locator, opts) {
        return this.getResolverByLocator(locator, opts).shouldPersistResolution(locator, opts);
    }
    bindDescriptor(descriptor, fromLocator, opts) {
        return this.getResolverByDescriptor(descriptor, opts).bindDescriptor(descriptor, fromLocator, opts);
    }
    getResolutionDependencies(descriptor, opts) {
        return this.getResolverByDescriptor(descriptor, opts).getResolutionDependencies(descriptor, opts);
    }
    async getCandidates(descriptor, dependencies, opts) {
        return await this.getResolverByDescriptor(descriptor, opts).getCandidates(descriptor, dependencies, opts);
    }
    async getSatisfying(descriptor, references, opts) {
        return this.getResolverByDescriptor(descriptor, opts).getSatisfying(descriptor, references, opts);
    }
    async resolve(locator, opts) {
        return await this.getResolverByLocator(locator, opts).resolve(locator, opts);
    }
    /** First resolver supporting the descriptor, or `null`. */
    tryResolverByDescriptor(descriptor, opts) {
        const match = this.resolvers.find(resolver => resolver.supportsDescriptor(descriptor, opts));
        return match ? match : null;
    }
    /** Like `tryResolverByDescriptor`, but throws when nothing matches. */
    getResolverByDescriptor(descriptor, opts) {
        const match = this.tryResolverByDescriptor(descriptor, opts);
        if (!match)
            throw new Error(`${structUtils.prettyDescriptor(opts.project.configuration, descriptor)} isn't supported by any available resolver`);
        return match;
    }
    /** First resolver supporting the locator, or `null`. */
    tryResolverByLocator(locator, opts) {
        const match = this.resolvers.find(resolver => resolver.supportsLocator(locator, opts));
        return match ? match : null;
    }
    /** Like `tryResolverByLocator`, but throws when nothing matches. */
    getResolverByLocator(locator, opts) {
        const match = this.tryResolverByLocator(locator, opts);
        if (!match)
            throw new Error(`${structUtils.prettyLocator(opts.project.configuration, locator)} isn't supported by any available resolver`);
        return match;
    }
}
exports.MultiResolver = MultiResolver;
/***/ }),
/***/ 49015:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Project = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const parsers_1 = __webpack_require__(21717);
const clipanion_1 = __webpack_require__(87730);
const crypto_1 = __webpack_require__(76417);
const diff_1 = __webpack_require__(88507);
const pick_1 = tslib_1.__importDefault(__webpack_require__(78718));
const p_limit_1 = tslib_1.__importDefault(__webpack_require__(94498));
const semver_1 = tslib_1.__importDefault(__webpack_require__(74451));
const util_1 = __webpack_require__(31669);
const v8_1 = tslib_1.__importDefault(__webpack_require__(68987));
const zlib_1 = tslib_1.__importDefault(__webpack_require__(78761));
const Configuration_1 = __webpack_require__(62889);
const Installer_1 = __webpack_require__(45006);
const LegacyMigrationResolver_1 = __webpack_require__(48900);
const LockfileResolver_1 = __webpack_require__(59909);
const Manifest_1 = __webpack_require__(11658);
const MessageName_1 = __webpack_require__(62755);
const MultiResolver_1 = __webpack_require__(54123);
const Report_1 = __webpack_require__(50334);
const RunInstallPleaseResolver_1 = __webpack_require__(30460);
const ThrowReport_1 = __webpack_require__(6059);
const Workspace_1 = __webpack_require__(2234);
const folderUtils_1 = __webpack_require__(13061);
const formatUtils = tslib_1.__importStar(__webpack_require__(23821));
const hashUtils = tslib_1.__importStar(__webpack_require__(73279));
const miscUtils = tslib_1.__importStar(__webpack_require__(63111));
const scriptUtils = tslib_1.__importStar(__webpack_require__(50888));
const semverUtils = tslib_1.__importStar(__webpack_require__(51201));
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
const types_1 = __webpack_require__(79588);
const types_2 = __webpack_require__(79588);
// When upgraded, the lockfile entries have to be resolved again (but the specific
// versions are still pinned, no worry). Bump it when you change the fields within
// the Package type; no more no less.
const LOCKFILE_VERSION = 4;
// Same thing but must be bumped when the members of the Project class changes (we
// don't recommend our users to check-in this file, so it's fine to bump it even
// between patch or minor releases).
const INSTALL_STATE_VERSION = 1;
// Splits composite lockfile keys such as `foo@^1.0.0, foo@^1.1.0` into the
// individual descriptor strings (see `setupResolutions`)
const MULTIPLE_KEYS_REGEXP = / *, */g;
// Strips a trailing `/` so paths can be compared regardless of it
// (see `findLocatorForLocation`)
const TRAILING_SLASH_REGEXP = /\/$/;
// Upper bound on concurrent package fetches in `fetchEverything`
const FETCHER_CONCURRENCY = 32;
// Promise-returning wrappers around the callback-style zlib APIs
const gzip = util_1.promisify(zlib_1.default.gzip);
const gunzip = util_1.promisify(zlib_1.default.gunzip);
// Maps each install-state restoration routine to the Project fields it covers
// (presumably consumed by the install-state persistence code, which lies
// outside this chunk — confirm against the rest of the file)
const INSTALL_STATE_FIELDS = {
restoreInstallersCustomData: [
`installersCustomData`,
],
restoreResolutions: [
`accessibleLocators`,
`optionalBuilds`,
`storedDescriptors`,
`storedResolutions`,
`storedPackages`,
`lockFileChecksum`,
],
};
class Project {
// Initializes an empty project rooted at `projectCwd`; the resolution and
// workspace tables are populated later by `setupResolutions` / `setupWorkspaces`.
constructor(projectCwd, { configuration }) {
/**
 * Is meant to be populated by the consumer. Should the descriptor referenced
 * by the key be requested, the descriptor referenced in the value will be
 * resolved instead. The resolved data will then be used as final resolution
 * for the initial descriptor.
 *
 * Note that the lockfile will contain the second descriptor but not the
 * first one (meaning that if you remove the alias during a subsequent
 * install, it'll be lost and the real package will be resolved / installed).
 */
this.resolutionAliases = new Map();
// Workspaces attached to the project, plus the lookup indexes kept in sync
// by `addWorkspace` (by normalized cwd and by ident hash)
this.workspaces = [];
this.workspacesByCwd = new Map();
this.workspacesByIdent = new Map();
// Resolution graph: descriptorHash -> locatorHash, descriptorHash ->
// descriptor, locatorHash -> package (filled from the lockfile and by
// `resolveEverything`)
this.storedResolutions = new Map();
this.storedDescriptors = new Map();
this.storedPackages = new Map();
// locatorHash -> cache checksum, seeded from the lockfile and refreshed by
// `fetchEverything`
this.storedChecksums = new Map();
// Locators reachable from the workspace roots (set by `resolveEverything`)
this.accessibleLocators = new Set();
// Packages exactly as returned by the resolvers, before normalization
this.originalPackages = new Map();
// Locator hashes whose build may be skipped — populated by the virtual
// resolution pass in `resolveEverything`
this.optionalBuilds = new Set();
/**
 * Populated by the `resolveEverything` method.
 * *Not* stored inside the install state.
 *
 * The map keys are 6 hexadecimal characters except the first one, always `p`.
 */
this.peerRequirements = new Map();
// Opaque per-installer data, forwarded to installers in `linkEverything`
this.installersCustomData = new Map();
// Salted hash of the lockfile content; `null` until `setupResolutions` runs
this.lockFileChecksum = null;
this.configuration = configuration;
this.cwd = projectCwd;
}
// Locates the project containing `startingCwd` and figures out which package
// the command is being run from. Returns `{ project, workspace, locator }`
// (workspace is `null` when the cwd maps to an installed package instead).
// Throws a UsageError when no project or owning package can be found.
static async find(configuration, startingCwd) {
var _a, _b, _c;
if (!configuration.projectCwd)
throw new clipanion_1.UsageError(`No project found in ${startingCwd}`);
// Walk upward from startingCwd (stopping at the project root) and keep the
// first directory that directly contains a manifest — that's the "current
// package" directory for this invocation
let packageCwd = configuration.projectCwd;
let nextCwd = startingCwd;
let currentCwd = null;
while (currentCwd !== configuration.projectCwd) {
currentCwd = nextCwd;
if (fslib_1.xfs.existsSync(fslib_1.ppath.join(currentCwd, fslib_1.Filename.manifest))) {
packageCwd = currentCwd;
break;
}
nextCwd = fslib_1.ppath.dirname(currentCwd);
}
const project = new Project(configuration.projectCwd, { configuration });
// Telemetry may be absent/disabled, hence the guarded optional calls
(_a = Configuration_1.Configuration.telemetry) === null || _a === void 0 ? void 0 : _a.reportProject(project.cwd);
await project.setupResolutions();
await project.setupWorkspaces();
(_b = Configuration_1.Configuration.telemetry) === null || _b === void 0 ? void 0 : _b.reportWorkspaceCount(project.workspaces.length);
(_c = Configuration_1.Configuration.telemetry) === null || _c === void 0 ? void 0 : _c.reportDependencyCount(project.workspaces.reduce((sum, workspace) => sum + workspace.manifest.dependencies.size + workspace.manifest.devDependencies.size, 0));
// If we're in a workspace, no need to go any further to find which package we're in
const workspace = project.tryWorkspaceByCwd(packageCwd);
if (workspace)
return { project, workspace, locator: workspace.anchoredLocator };
// Otherwise, we need to ask the project (which will in turn ask the linkers for help)
// Note: the trailing slash is caused by a quirk in the PnP implementation that requires folders to end with a trailing slash to disambiguate them from regular files
const locator = await project.findLocatorForLocation(`${packageCwd}/`, { strict: true });
if (locator)
return { project, locator, workspace: null };
throw new clipanion_1.UsageError(`The nearest package directory (${formatUtils.pretty(configuration, packageCwd, formatUtils.Type.PATH)}) doesn't seem to be part of the project declared in ${formatUtils.pretty(configuration, project.cwd, formatUtils.Type.PATH)}.\n\n- If the project directory is right, it might be that you forgot to list ${formatUtils.pretty(configuration, fslib_1.ppath.relative(project.cwd, packageCwd), formatUtils.Type.PATH)} as a workspace.\n- If it isn't, it's likely because you have a yarn.lock or package.json file there, confusing the project root detection.`);
}
static generateBuildStateFile(buildState, locatorStore) {
let bstateFile = `# Warning: This file is automatically generated. Removing it is fine, but will\n# cause all your builds to become invalidated.\n`;
const bstateData = [...buildState].map(([locatorHash, hash]) => {
const locator = locatorStore.get(locatorHash);
if (typeof locator === `undefined`)
throw new Error(`Assertion failed: The locator should have been registered`);
return [structUtils.stringifyLocator(locator), locator.locatorHash, hash];
});
for (const [locatorString, locatorHash, buildHash] of miscUtils.sortMap(bstateData, [d => d[0], d => d[1]])) {
bstateFile += `\n`;
bstateFile += `# ${locatorString}\n`;
bstateFile += `${JSON.stringify(locatorHash)}:\n`;
bstateFile += ` ${buildHash}\n`;
}
return bstateFile;
}
// Resets the resolution tables, then seeds them from the lockfile (when one
// exists). Up-to-date lockfile entries are registered as final resolutions;
// entries from older lockfile versions are turned into resolution aliases so
// they get re-resolved while still pinning the same locators.
async setupResolutions() {
this.storedResolutions = new Map();
this.storedDescriptors = new Map();
this.storedPackages = new Map();
this.lockFileChecksum = null;
const lockfilePath = fslib_1.ppath.join(this.cwd, this.configuration.get(`lockfileFilename`));
const defaultLanguageName = this.configuration.get(`defaultLanguageName`);
if (fslib_1.xfs.existsSync(lockfilePath)) {
const content = await fslib_1.xfs.readFilePromise(lockfilePath, `utf8`);
// We store the salted checksum of the lockfile in order to invalidate the install state when needed
this.lockFileChecksum = hashUtils.makeHash(`${INSTALL_STATE_VERSION}`, content);
const parsed = parsers_1.parseSyml(content);
// Protects against v1 lockfiles
if (parsed.__metadata) {
const lockfileVersion = parsed.__metadata.version;
const cacheKey = parsed.__metadata.cacheKey;
for (const key of Object.keys(parsed)) {
if (key === `__metadata`)
continue;
const data = parsed[key];
if (typeof data.resolution === `undefined`)
throw new Error(`Assertion failed: Expected the lockfile entry to have a resolution field (${key})`);
const locator = structUtils.parseLocator(data.resolution, true);
// Rebuild the package fields from the manifest data stored in the entry
const manifest = new Manifest_1.Manifest();
manifest.load(data, { yamlCompatibilityMode: true });
const version = manifest.version;
const languageName = manifest.languageName || defaultLanguageName;
const linkType = data.linkType.toUpperCase();
const dependencies = manifest.dependencies;
const peerDependencies = manifest.peerDependencies;
const dependenciesMeta = manifest.dependenciesMeta;
const peerDependenciesMeta = manifest.peerDependenciesMeta;
const bin = manifest.bin;
if (data.checksum != null) {
// Checksums lacking a `/` are assumed to predate the cache-key prefix
// format, so the current cache key is prepended for consistency
const checksum = typeof cacheKey !== `undefined` && !data.checksum.includes(`/`)
? `${cacheKey}/${data.checksum}`
: data.checksum;
this.storedChecksums.set(locator.locatorHash, checksum);
}
if (lockfileVersion >= LOCKFILE_VERSION) {
const pkg = { ...locator, version, languageName, linkType, dependencies, peerDependencies, dependenciesMeta, peerDependenciesMeta, bin };
this.originalPackages.set(pkg.locatorHash, pkg);
}
// A single lockfile key may cover several descriptors (`foo@^1.0.0, foo@^1.1.0`)
for (const entry of key.split(MULTIPLE_KEYS_REGEXP)) {
const descriptor = structUtils.parseDescriptor(entry);
this.storedDescriptors.set(descriptor.descriptorHash, descriptor);
if (lockfileVersion >= LOCKFILE_VERSION) {
// If the lockfile is up-to-date, we can simply register the
// resolution as a done deal.
this.storedResolutions.set(descriptor.descriptorHash, locator.locatorHash);
}
else {
// But if it isn't, then we instead setup an alias so that the
// descriptor will be re-resolved (so that we get to retrieve the
// new fields) while still resolving to the same locators.
const resolutionDescriptor = structUtils.convertLocatorToDescriptor(locator);
if (resolutionDescriptor.descriptorHash !== descriptor.descriptorHash) {
this.storedDescriptors.set(resolutionDescriptor.descriptorHash, resolutionDescriptor);
this.resolutionAliases.set(descriptor.descriptorHash, resolutionDescriptor.descriptorHash);
}
}
}
}
}
}
}
async setupWorkspaces() {
this.workspaces = [];
this.workspacesByCwd = new Map();
this.workspacesByIdent = new Map();
let workspaceCwds = [this.cwd];
while (workspaceCwds.length > 0) {
const passCwds = workspaceCwds;
workspaceCwds = [];
for (const workspaceCwd of passCwds) {
if (this.workspacesByCwd.has(workspaceCwd))
continue;
const workspace = await this.addWorkspace(workspaceCwd);
const workspacePkg = this.storedPackages.get(workspace.anchoredLocator.locatorHash);
if (workspacePkg)
workspace.dependencies = workspacePkg.dependencies;
for (const workspaceCwd of workspace.workspacesCwds) {
workspaceCwds.push(workspaceCwd);
}
}
}
}
async addWorkspace(workspaceCwd) {
const workspace = new Workspace_1.Workspace(workspaceCwd, { project: this });
await workspace.setup();
const dup = this.workspacesByIdent.get(workspace.locator.identHash);
if (typeof dup !== `undefined`)
throw new Error(`Duplicate workspace name ${structUtils.prettyIdent(this.configuration, workspace.locator)}: ${workspaceCwd} conflicts with ${dup.cwd}`);
this.workspaces.push(workspace);
this.workspacesByCwd.set(workspaceCwd, workspace);
this.workspacesByIdent.set(workspace.locator.identHash, workspace);
return workspace;
}
get topLevelWorkspace() {
return this.getWorkspaceByCwd(this.cwd);
}
tryWorkspaceByCwd(workspaceCwd) {
if (!fslib_1.ppath.isAbsolute(workspaceCwd))
workspaceCwd = fslib_1.ppath.resolve(this.cwd, workspaceCwd);
workspaceCwd = fslib_1.ppath.normalize(workspaceCwd)
.replace(/\/+$/, ``);
const workspace = this.workspacesByCwd.get(workspaceCwd);
if (!workspace)
return null;
return workspace;
}
getWorkspaceByCwd(workspaceCwd) {
const workspace = this.tryWorkspaceByCwd(workspaceCwd);
if (!workspace)
throw new Error(`Workspace not found (${workspaceCwd})`);
return workspace;
}
tryWorkspaceByFilePath(filePath) {
let bestWorkspace = null;
for (const workspace of this.workspaces) {
const rel = fslib_1.ppath.relative(workspace.cwd, filePath);
if (rel.startsWith(`../`))
continue;
if (bestWorkspace && bestWorkspace.cwd.length >= workspace.cwd.length)
continue;
bestWorkspace = workspace;
}
if (!bestWorkspace)
return null;
return bestWorkspace;
}
getWorkspaceByFilePath(filePath) {
const workspace = this.tryWorkspaceByFilePath(filePath);
if (!workspace)
throw new Error(`Workspace not found (${filePath})`);
return workspace;
}
tryWorkspaceByIdent(ident) {
const workspace = this.workspacesByIdent.get(ident.identHash);
if (typeof workspace === `undefined`)
return null;
return workspace;
}
getWorkspaceByIdent(ident) {
const workspace = this.tryWorkspaceByIdent(ident);
if (!workspace)
throw new Error(`Workspace not found (${structUtils.prettyIdent(this.configuration, ident)})`);
return workspace;
}
tryWorkspaceByDescriptor(descriptor) {
const workspace = this.tryWorkspaceByIdent(descriptor);
if (workspace === null || !workspace.accepts(descriptor.range))
return null;
return workspace;
}
getWorkspaceByDescriptor(descriptor) {
const workspace = this.tryWorkspaceByDescriptor(descriptor);
if (workspace === null)
throw new Error(`Workspace not found (${structUtils.prettyDescriptor(this.configuration, descriptor)})`);
return workspace;
}
tryWorkspaceByLocator(locator) {
if (structUtils.isVirtualLocator(locator))
locator = structUtils.devirtualizeLocator(locator);
const workspace = this.tryWorkspaceByIdent(locator);
if (workspace === null || (workspace.locator.locatorHash !== locator.locatorHash && workspace.anchoredLocator.locatorHash !== locator.locatorHash))
return null;
return workspace;
}
getWorkspaceByLocator(locator) {
const workspace = this.tryWorkspaceByLocator(locator);
if (!workspace)
throw new Error(`Workspace not found (${structUtils.prettyLocator(this.configuration, locator)})`);
return workspace;
}
/**
* Import the dependencies of each resolved workspace into their own
* `Workspace` instance.
*/
refreshWorkspaceDependencies() {
for (const workspace of this.workspaces) {
const pkg = this.storedPackages.get(workspace.anchoredLocator.locatorHash);
if (!pkg)
throw new Error(`Assertion failed: Expected workspace to have been resolved`);
workspace.dependencies = new Map(pkg.dependencies);
}
}
forgetResolution(dataStructure) {
const deleteDescriptor = (descriptorHash) => {
this.storedResolutions.delete(descriptorHash);
this.storedDescriptors.delete(descriptorHash);
};
const deleteLocator = (locatorHash) => {
this.originalPackages.delete(locatorHash);
this.storedPackages.delete(locatorHash);
this.accessibleLocators.delete(locatorHash);
};
if (`descriptorHash` in dataStructure) {
const locatorHash = this.storedResolutions.get(dataStructure.descriptorHash);
deleteDescriptor(dataStructure.descriptorHash);
// We delete unused locators
const remainingResolutions = new Set(this.storedResolutions.values());
if (typeof locatorHash !== `undefined` && !remainingResolutions.has(locatorHash)) {
deleteLocator(locatorHash);
}
}
if (`locatorHash` in dataStructure) {
deleteLocator(dataStructure.locatorHash);
// We delete all of the descriptors that have been resolved to the locator
for (const [descriptorHash, locatorHash] of this.storedResolutions) {
if (locatorHash === dataStructure.locatorHash) {
deleteDescriptor(descriptorHash);
}
}
}
}
forgetTransientResolutions() {
const resolver = this.configuration.makeResolver();
for (const pkg of this.originalPackages.values()) {
let shouldPersistResolution;
try {
shouldPersistResolution = resolver.shouldPersistResolution(pkg, { project: this, resolver });
}
catch (_a) {
shouldPersistResolution = false;
}
if (!shouldPersistResolution) {
this.forgetResolution(pkg);
}
}
}
forgetVirtualResolutions() {
for (const pkg of this.storedPackages.values()) {
for (const [dependencyHash, dependency] of pkg.dependencies) {
if (structUtils.isVirtualDescriptor(dependency)) {
pkg.dependencies.set(dependencyHash, structUtils.devirtualizeDescriptor(dependency));
}
}
}
}
getDependencyMeta(ident, version) {
const dependencyMeta = {};
const dependenciesMeta = this.topLevelWorkspace.manifest.dependenciesMeta;
const dependencyMetaSet = dependenciesMeta.get(structUtils.stringifyIdent(ident));
if (!dependencyMetaSet)
return dependencyMeta;
const defaultMeta = dependencyMetaSet.get(null);
if (defaultMeta)
Object.assign(dependencyMeta, defaultMeta);
if (version === null || !semver_1.default.valid(version))
return dependencyMeta;
for (const [range, meta] of dependencyMetaSet)
if (range !== null && range === version)
Object.assign(dependencyMeta, meta);
return dependencyMeta;
}
async findLocatorForLocation(cwd, { strict = false } = {}) {
const report = new ThrowReport_1.ThrowReport();
const linkers = this.configuration.getLinkers();
const linkerOptions = { project: this, report };
for (const linker of linkers) {
const locator = await linker.findPackageLocator(cwd, linkerOptions);
if (locator) {
// If strict mode, the specified cwd must be a package,
// not merely contained in a package.
if (strict) {
const location = await linker.findPackageLocation(locator, linkerOptions);
if (location.replace(TRAILING_SLASH_REGEXP, ``) !== cwd.replace(TRAILING_SLASH_REGEXP, ``)) {
continue;
}
}
return locator;
}
}
return null;
}
// Resolves the full dependency tree starting from the workspaces, then runs
// the virtual resolution pass and commits the result onto the project in a
// single step. All intermediate state lives in local maps so a failed pass
// leaves the project untouched.
async resolveEverything(opts) {
if (!this.workspacesByCwd || !this.workspacesByIdent)
throw new Error(`Workspaces must have been setup before calling this function`);
// Reverts the changes that have been applied to the tree because of any previous virtual resolution pass
this.forgetVirtualResolutions();
// Ensures that we notice it when dependencies are added / removed from all sources coming from the filesystem
if (!opts.lockfileOnly)
this.forgetTransientResolutions();
// Note that the resolution process is "offline" until everything has been
// successfully resolved; all the processing is expected to have zero side
// effects until we're ready to set all the variables at once (the one
// exception being when a resolver needs to fetch a package, in which case
// we might need to populate the cache).
//
// This makes it possible to use the same Project instance for multiple
// purposes at the same time (since `resolveEverything` is async, it might
// happen that we want to do something while waiting for it to end; if we
// were to mutate the project then it would end up in a partial state that
// could lead to hard-to-debug issues).
const realResolver = opts.resolver || this.configuration.makeResolver();
const legacyMigrationResolver = new LegacyMigrationResolver_1.LegacyMigrationResolver();
await legacyMigrationResolver.setup(this, { report: opts.report });
// Lockfile-only mode never reaches the real resolver: anything missing from
// the lockfile goes through RunInstallPleaseResolver instead
const resolver = opts.lockfileOnly
? new MultiResolver_1.MultiResolver([new LockfileResolver_1.LockfileResolver(), new RunInstallPleaseResolver_1.RunInstallPleaseResolver(realResolver)])
: new MultiResolver_1.MultiResolver([new LockfileResolver_1.LockfileResolver(), legacyMigrationResolver, realResolver]);
const fetcher = this.configuration.makeFetcher();
const resolveOptions = opts.lockfileOnly
? { project: this, report: opts.report, resolver }
: { project: this, report: opts.report, resolver, fetchOptions: { project: this, cache: opts.cache, checksums: this.storedChecksums, report: opts.report, fetcher } };
// Staging areas, only committed onto `this` once the whole pass succeeds
const allDescriptors = new Map();
const allPackages = new Map();
const allResolutions = new Map();
const originalPackages = new Map();
// Memoization tables so each locator / descriptor is resolved at most once
const packageResolutionPromises = new Map();
const descriptorResolutionPromises = new Map();
const resolutionQueue = [];
// Resolves a locator into a package, runs the reduceDependency hook on each
// dependency, and schedules the dependencies for resolution
const startPackageResolution = async (locator) => {
const originalPkg = await miscUtils.prettifyAsyncErrors(async () => {
return await resolver.resolve(locator, resolveOptions);
}, message => {
return `${structUtils.prettyLocator(this.configuration, locator)}: ${message}`;
});
if (!structUtils.areLocatorsEqual(locator, originalPkg))
throw new Error(`Assertion failed: The locator cannot be changed by the resolver (went from ${structUtils.prettyLocator(this.configuration, locator)} to ${structUtils.prettyLocator(this.configuration, originalPkg)})`);
originalPackages.set(originalPkg.locatorHash, originalPkg);
const pkg = this.configuration.normalizePackage(originalPkg);
for (const [identHash, descriptor] of pkg.dependencies) {
// The hook may substitute the descriptor, but its ident must not change
// (asserted right below)
const dependency = await this.configuration.reduceHook(hooks => {
return hooks.reduceDependency;
}, descriptor, this, pkg, descriptor, {
resolver,
resolveOptions,
});
if (!structUtils.areIdentsEqual(descriptor, dependency))
throw new Error(`Assertion failed: The descriptor ident cannot be changed through aliases`);
const bound = resolver.bindDescriptor(dependency, locator, resolveOptions);
pkg.dependencies.set(identHash, bound);
}
// Enqueue the dependencies; the wave loop below drains this queue
resolutionQueue.push(Promise.all([...pkg.dependencies.values()].map(descriptor => {
return scheduleDescriptorResolution(descriptor);
})));
allPackages.set(pkg.locatorHash, pkg);
return pkg;
};
const schedulePackageResolution = async (locator) => {
const promise = packageResolutionPromises.get(locator.locatorHash);
if (typeof promise !== `undefined`)
return promise;
const newPromise = Promise.resolve().then(() => startPackageResolution(locator));
packageResolutionPromises.set(locator.locatorHash, newPromise);
return newPromise;
};
// Resolves the aliased descriptor instead, recording its resolution as the
// original descriptor's own
const startDescriptorAliasing = async (descriptor, alias) => {
const resolution = await scheduleDescriptorResolution(alias);
allDescriptors.set(descriptor.descriptorHash, descriptor);
allResolutions.set(descriptor.descriptorHash, resolution.locatorHash);
return resolution;
};
const startDescriptorResolution = async (descriptor) => {
const alias = this.resolutionAliases.get(descriptor.descriptorHash);
if (typeof alias !== `undefined`)
return startDescriptorAliasing(descriptor, this.storedDescriptors.get(alias));
// Some resolvers require other descriptors to be resolved before this one
const resolutionDependencies = resolver.getResolutionDependencies(descriptor, resolveOptions);
const resolvedDependencies = new Map(await Promise.all(resolutionDependencies.map(async (dependency) => {
return [dependency.descriptorHash, await scheduleDescriptorResolution(dependency)];
})));
const candidateResolutions = await miscUtils.prettifyAsyncErrors(async () => {
return await resolver.getCandidates(descriptor, resolvedDependencies, resolveOptions);
}, message => {
return `${structUtils.prettyDescriptor(this.configuration, descriptor)}: ${message}`;
});
// Only the first candidate is kept — presumably the resolver returns them
// best-first; confirm against the resolver contract
const finalResolution = candidateResolutions[0];
if (typeof finalResolution === `undefined`)
throw new Error(`${structUtils.prettyDescriptor(this.configuration, descriptor)}: No candidates found`);
allDescriptors.set(descriptor.descriptorHash, descriptor);
allResolutions.set(descriptor.descriptorHash, finalResolution.locatorHash);
return schedulePackageResolution(finalResolution);
};
const scheduleDescriptorResolution = (descriptor) => {
const promise = descriptorResolutionPromises.get(descriptor.descriptorHash);
if (typeof promise !== `undefined`)
return promise;
allDescriptors.set(descriptor.descriptorHash, descriptor);
const newPromise = Promise.resolve().then(() => startDescriptorResolution(descriptor));
descriptorResolutionPromises.set(descriptor.descriptorHash, newPromise);
return newPromise;
};
// Seed the traversal with every workspace's anchored descriptor
for (const workspace of this.workspaces) {
const workspaceDescriptor = workspace.anchoredDescriptor;
resolutionQueue.push(scheduleDescriptorResolution(workspaceDescriptor));
}
// Drain the queue in waves: awaiting one wave may enqueue new work
while (resolutionQueue.length > 0) {
const copy = [...resolutionQueue];
resolutionQueue.length = 0;
await Promise.all(copy);
}
// In this step we now create virtual packages for each package with at
// least one peer dependency. We also use it to search for the alias
// descriptors that aren't depended upon by anything and can be safely
// pruned.
const volatileDescriptors = new Set(this.resolutionAliases.values());
const optionalBuilds = new Set(allPackages.keys());
const accessibleLocators = new Set();
const peerRequirements = new Map();
applyVirtualResolutionMutations({
project: this,
report: opts.report,
accessibleLocators,
volatileDescriptors,
optionalBuilds,
peerRequirements,
allDescriptors,
allResolutions,
allPackages,
});
// All descriptors still referenced within the volatileDescriptors set are
// descriptors that aren't depended upon by anything in the dependency tree.
for (const descriptorHash of volatileDescriptors) {
allDescriptors.delete(descriptorHash);
allResolutions.delete(descriptorHash);
}
// Everything is done, we can now update our internal resolutions to
// reference the new ones
this.storedResolutions = allResolutions;
this.storedDescriptors = allDescriptors;
this.storedPackages = allPackages;
this.accessibleLocators = accessibleLocators;
this.originalPackages = originalPackages;
this.optionalBuilds = optionalBuilds;
this.peerRequirements = peerRequirements;
// Now that the internal resolutions have been updated, we can refresh the
// dependencies of each resolved workspace's `Workspace` instance.
this.refreshWorkspaceDependencies();
}
// Fetches every resolved package (bounded concurrency), updating
// `storedChecksums` with the checksums reported by the fetchers. Fetch
// failures are reported as they happen and the last one is rethrown once
// every fetch has settled.
async fetchEverything({ cache, report, fetcher: userFetcher }) {
const fetcher = userFetcher || this.configuration.makeFetcher();
const fetcherOptions = { checksums: this.storedChecksums, project: this, cache, fetcher, report };
// Sort by stringified locator so the fetch order is deterministic
const locatorHashes = Array.from(new Set(miscUtils.sortMap(this.storedResolutions.values(), [
(locatorHash) => {
const pkg = this.storedPackages.get(locatorHash);
if (!pkg)
throw new Error(`Assertion failed: The locator should have been registered`);
return structUtils.stringifyLocator(pkg);
},
])));
// NOTE(review): despite the name, each failure overwrites this variable, so
// the *last* error is what gets thrown — confirm whether first-error
// semantics were intended
let firstError = false;
const progress = Report_1.Report.progressViaCounter(locatorHashes.length);
report.reportProgress(progress);
// Cap the number of concurrent fetches
const limit = p_limit_1.default(FETCHER_CONCURRENCY);
await report.startCacheReport(async () => {
await Promise.all(locatorHashes.map(locatorHash => limit(async () => {
const pkg = this.storedPackages.get(locatorHash);
if (!pkg)
throw new Error(`Assertion failed: The locator should have been registered`);
// Virtual packages have no archive of their own; nothing to fetch
if (structUtils.isVirtualLocator(pkg))
return;
let fetchResult;
try {
fetchResult = await fetcher.fetch(pkg, fetcherOptions);
}
catch (error) {
// Record and report the failure, but keep fetching the other packages
error.message = `${structUtils.prettyLocator(this.configuration, pkg)}: ${error.message}`;
report.reportExceptionOnce(error);
firstError = error;
return;
}
if (fetchResult.checksum)
this.storedChecksums.set(pkg.locatorHash, fetchResult.checksum);
else
this.storedChecksums.delete(pkg.locatorHash);
if (fetchResult.releaseFs) {
fetchResult.releaseFs();
}
}).finally(() => {
progress.tick();
})));
});
if (firstError) {
throw firstError;
}
}
async linkEverything({ cache, report, fetcher: optFetcher, skipBuild }) {
var _a;
const fetcher = optFetcher || this.configuration.makeFetcher();
const fetcherOptions = { checksums: this.storedChecksums, project: this, cache, fetcher, report, skipIntegrityCheck: true };
const linkers = this.configuration.getLinkers();
const linkerOptions = { project: this, report };
const installers = new Map(linkers.map(linker => {
const installer = linker.makeInstaller(linkerOptions);
const customDataKey = installer.getCustomDataKey();
const customData = this.installersCustomData.get(customDataKey);
if (typeof customData !== `undefined`)
installer.attachCustomData(customData);
return [linker, installer];
}));
const packageLinkers = new Map();
const packageLocations = new Map();
const packageBuildDirectives = new Map();
const fetchResultsPerPackage = new Map(await Promise.all([...this.accessibleLocators].map(async (locatorHash) => {
const pkg = this.storedPackages.get(locatorHash);
if (!pkg)
throw new Error(`Assertion failed: The locator should have been registered`);
return [locatorHash, await fetcher.fetch(pkg, fetcherOptions)];
})));
// Step 1: Installing the packages on the disk
for (const locatorHash of this.accessibleLocators) {
const pkg = this.storedPackages.get(locatorHash);
if (typeof pkg === `undefined`)
throw new Error(`Assertion failed: The locator should have been registered`);
const fetchResult = fetchResultsPerPackage.get(pkg.locatorHash);
if (typeof fetchResult === `undefined`)
throw new Error(`Assertion failed: The fetch result should have been registered`);
const workspace = this.tryWorkspaceByLocator(pkg);
if (workspace !== null) {
const buildScripts = [];
const { scripts } = workspace.manifest;
for (const scriptName of [`preinstall`, `install`, `postinstall`])
if (scripts.has(scriptName))
buildScripts.push([Installer_1.BuildType.SCRIPT, scriptName]);
try {
for (const installer of installers.values()) {
const result = await installer.installPackage(pkg, fetchResult);
if (result.buildDirective !== null) {
throw new Error(`Assertion failed: Linkers can't return build directives for workspaces; this responsibility befalls to the Yarn core`);
}
}
}
finally {
if (fetchResult.releaseFs) {
fetchResult.releaseFs();
}
}
const location = fslib_1.ppath.join(fetchResult.packageFs.getRealPath(), fetchResult.prefixPath);
packageLocations.set(pkg.locatorHash, location);
if (buildScripts.length > 0) {
packageBuildDirectives.set(pkg.locatorHash, {
directives: buildScripts,
buildLocations: [location],
});
}
}
else {
const linker = linkers.find(linker => linker.supportsPackage(pkg, linkerOptions));
if (!linker)
throw new Report_1.ReportError(MessageName_1.MessageName.LINKER_NOT_FOUND, `${structUtils.prettyLocator(this.configuration, pkg)} isn't supported by any available linker`);
const installer = installers.get(linker);
if (!installer)
throw new Error(`Assertion failed: The installer should have been registered`);
let installStatus;
try {
installStatus = await installer.installPackage(pkg, fetchResult);
}
finally {
if (fetchResult.releaseFs) {
fetchResult.releaseFs();
}
}
packageLinkers.set(pkg.locatorHash, linker);
packageLocations.set(pkg.locatorHash, installStatus.packageLocation);
if (installStatus.buildDirective && installStatus.packageLocation) {
packageBuildDirectives.set(pkg.locatorHash, {
directives: installStatus.buildDirective,
buildLocations: [installStatus.packageLocation],
});
}
}
}
// Step 2: Link packages together
const externalDependents = new Map();
for (const locatorHash of this.accessibleLocators) {
const pkg = this.storedPackages.get(locatorHash);
if (!pkg)
throw new Error(`Assertion failed: The locator should have been registered`);
const isWorkspace = this.tryWorkspaceByLocator(pkg) !== null;
const linkPackage = async (packageLinker, installer) => {
const packageLocation = packageLocations.get(pkg.locatorHash);
if (typeof packageLocation === `undefined`)
throw new Error(`Assertion failed: The package (${structUtils.prettyLocator(this.configuration, pkg)}) should have been registered`);
const internalDependencies = [];
for (const descriptor of pkg.dependencies.values()) {
const resolution = this.storedResolutions.get(descriptor.descriptorHash);
if (typeof resolution === `undefined`)
throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(this.configuration, descriptor)}, from ${structUtils.prettyLocator(this.configuration, pkg)})should have been registered`);
const dependency = this.storedPackages.get(resolution);
if (typeof dependency === `undefined`)
throw new Error(`Assertion failed: The package (${resolution}, resolved from ${structUtils.prettyDescriptor(this.configuration, descriptor)}) should have been registered`);
const dependencyLinker = this.tryWorkspaceByLocator(dependency) === null
? packageLinkers.get(resolution)
: null;
if (typeof dependencyLinker === `undefined`)
throw new Error(`Assertion failed: The package (${resolution}, resolved from ${structUtils.prettyDescriptor(this.configuration, descriptor)}) should have been registered`);
const isWorkspaceDependency = dependencyLinker === null;
if (dependencyLinker === packageLinker || isWorkspace || isWorkspaceDependency) {
if (packageLocations.get(dependency.locatorHash) !== null) {
internalDependencies.push([descriptor, dependency]);
}
}
else if (packageLocation !== null) {
const externalEntry = miscUtils.getArrayWithDefault(externalDependents, resolution);
externalEntry.push(packageLocation);
}
}
if (packageLocation !== null) {
await installer.attachInternalDependencies(pkg, internalDependencies);
}
};
if (isWorkspace) {
for (const [packageLinker, installer] of installers) {
await linkPackage(packageLinker, installer);
}
}
else {
const packageLinker = packageLinkers.get(pkg.locatorHash);
if (!packageLinker)
throw new Error(`Assertion failed: The linker should have been found`);
const installer = installers.get(packageLinker);
if (!installer)
throw new Error(`Assertion failed: The installer should have been registered`);
await linkPackage(packageLinker, installer);
}
}
for (const [locatorHash, dependentPaths] of externalDependents) {
const pkg = this.storedPackages.get(locatorHash);
if (!pkg)
throw new Error(`Assertion failed: The package should have been registered`);
const packageLinker = packageLinkers.get(pkg.locatorHash);
if (!packageLinker)
throw new Error(`Assertion failed: The linker should have been found`);
const installer = installers.get(packageLinker);
if (!installer)
throw new Error(`Assertion failed: The installer should have been registered`);
await installer.attachExternalDependents(pkg, dependentPaths);
}
// Step 3: Inform our linkers that they should have all the info needed
const installersCustomData = new Map();
for (const installer of installers.values()) {
const finalizeInstallData = await installer.finalizeInstall();
for (const installStatus of (_a = finalizeInstallData === null || finalizeInstallData === void 0 ? void 0 : finalizeInstallData.records) !== null && _a !== void 0 ? _a : []) {
packageBuildDirectives.set(installStatus.locatorHash, {
directives: installStatus.buildDirective,
buildLocations: installStatus.buildLocations,
});
}
if (typeof (finalizeInstallData === null || finalizeInstallData === void 0 ? void 0 : finalizeInstallData.customData) !== `undefined`) {
installersCustomData.set(installer.getCustomDataKey(), finalizeInstallData.customData);
}
}
this.installersCustomData = installersCustomData;
await this.persistInstallStateFile();
// Step 4: Build the packages in multiple steps
if (skipBuild)
return;
const readyPackages = new Set(this.storedPackages.keys());
const buildablePackages = new Set(packageBuildDirectives.keys());
for (const locatorHash of buildablePackages)
readyPackages.delete(locatorHash);
const globalHashGenerator = crypto_1.createHash(`sha512`);
globalHashGenerator.update(process.versions.node);
this.configuration.triggerHook(hooks => {
return hooks.globalHashGeneration;
}, this, (data) => {
globalHashGenerator.update(`\0`);
globalHashGenerator.update(data);
});
const globalHash = globalHashGenerator.digest(`hex`);
const packageHashMap = new Map();
// We'll use this function is order to compute a hash for each package
// that exposes a build directive. If the hash changes compared to the
// previous run, the package is rebuilt. This has the advantage of making
// the rebuilds much more predictable than before, and to give us the tools
// later to improve this further by explaining *why* a rebuild happened.
const getBaseHash = (locator) => {
let hash = packageHashMap.get(locator.locatorHash);
if (typeof hash !== `undefined`)
return hash;
const pkg = this.storedPackages.get(locator.locatorHash);
if (typeof pkg === `undefined`)
throw new Error(`Assertion failed: The package should have been registered`);
const builder = crypto_1.createHash(`sha512`);
builder.update(locator.locatorHash);
// To avoid the case where one dependency depends on itself somehow
packageHashMap.set(locator.locatorHash, ``);
for (const descriptor of pkg.dependencies.values()) {
const resolution = this.storedResolutions.get(descriptor.descriptorHash);
if (typeof resolution === `undefined`)
throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(this.configuration, descriptor)}) should have been registered`);
const dependency = this.storedPackages.get(resolution);
if (typeof dependency === `undefined`)
throw new Error(`Assertion failed: The package should have been registered`);
builder.update(getBaseHash(dependency));
}
hash = builder.digest(`hex`);
packageHashMap.set(locator.locatorHash, hash);
return hash;
};
const getBuildHash = (locator, buildLocations) => {
const builder = crypto_1.createHash(`sha512`);
builder.update(globalHash);
builder.update(getBaseHash(locator));
for (const location of buildLocations)
builder.update(location);
return builder.digest(`hex`);
};
const bstatePath = this.configuration.get(`bstatePath`);
const bstate = fslib_1.xfs.existsSync(bstatePath)
? parsers_1.parseSyml(await fslib_1.xfs.readFilePromise(bstatePath, `utf8`))
: {};
// We reconstruct the build state from an empty object because we want to
// remove the state from packages that got removed
const nextBState = new Map();
while (buildablePackages.size > 0) {
const savedSize = buildablePackages.size;
const buildPromises = [];
for (const locatorHash of buildablePackages) {
const pkg = this.storedPackages.get(locatorHash);
if (!pkg)
throw new Error(`Assertion failed: The package should have been registered`);
let isBuildable = true;
for (const dependency of pkg.dependencies.values()) {
const resolution = this.storedResolutions.get(dependency.descriptorHash);
if (!resolution)
throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(this.configuration, dependency)}) should have been registered`);
if (buildablePackages.has(resolution)) {
isBuildable = false;
break;
}
}
// Wait until all dependencies of the current package have been built
// before trying to build it (since it might need them to build itself)
if (!isBuildable)
continue;
buildablePackages.delete(locatorHash);
const buildInfo = packageBuildDirectives.get(pkg.locatorHash);
if (!buildInfo)
throw new Error(`Assertion failed: The build directive should have been registered`);
const buildHash = getBuildHash(pkg, buildInfo.buildLocations);
// No need to rebuild the package if its hash didn't change
if (Object.prototype.hasOwnProperty.call(bstate, pkg.locatorHash) && bstate[pkg.locatorHash] === buildHash) {
nextBState.set(pkg.locatorHash, buildHash);
continue;
}
if (Object.prototype.hasOwnProperty.call(bstate, pkg.locatorHash))
report.reportInfo(MessageName_1.MessageName.MUST_REBUILD, `${structUtils.prettyLocator(this.configuration, pkg)} must be rebuilt because its dependency tree changed`);
else
report.reportInfo(MessageName_1.MessageName.MUST_BUILD, `${structUtils.prettyLocator(this.configuration, pkg)} must be built because it never did before or the last one failed`);
for (const location of buildInfo.buildLocations) {
if (!fslib_1.ppath.isAbsolute(location))
throw new Error(`Assertion failed: Expected the build location to be absolute (not ${location})`);
buildPromises.push((async () => {
for (const [buildType, scriptName] of buildInfo.directives) {
let header = `# This file contains the result of Yarn building a package (${structUtils.stringifyLocator(pkg)})\n`;
switch (buildType) {
case Installer_1.BuildType.SCRIPT:
{
header += `# Script name: ${scriptName}\n`;
}
break;
case Installer_1.BuildType.SHELLCODE:
{
header += `# Script code: ${scriptName}\n`;
}
break;
}
const stdin = null;
await fslib_1.xfs.mktempPromise(async (logDir) => {
const logFile = fslib_1.ppath.join(logDir, `build.log`);
const { stdout, stderr } = this.configuration.getSubprocessStreams(logFile, {
header,
prefix: structUtils.prettyLocator(this.configuration, pkg),
report,
});
let exitCode;
try {
switch (buildType) {
case Installer_1.BuildType.SCRIPT:
{
exitCode = await scriptUtils.executePackageScript(pkg, scriptName, [], { cwd: location, project: this, stdin, stdout, stderr });
}
break;
case Installer_1.BuildType.SHELLCODE:
{
exitCode = await scriptUtils.executePackageShellcode(pkg, scriptName, [], { cwd: location, project: this, stdin, stdout, stderr });
}
break;
}
}
catch (error) {
stderr.write(error.stack);
exitCode = 1;
}
stdout.end();
stderr.end();
if (exitCode === 0) {
nextBState.set(pkg.locatorHash, buildHash);
return true;
}
fslib_1.xfs.detachTemp(logDir);
const buildMessage = `${structUtils.prettyLocator(this.configuration, pkg)} couldn't be built successfully (exit code ${formatUtils.pretty(this.configuration, exitCode, formatUtils.Type.NUMBER)}, logs can be found here: ${formatUtils.pretty(this.configuration, logFile, formatUtils.Type.PATH)})`;
report.reportInfo(MessageName_1.MessageName.BUILD_FAILED, buildMessage);
if (this.optionalBuilds.has(pkg.locatorHash)) {
nextBState.set(pkg.locatorHash, buildHash);
return true;
}
report.reportError(MessageName_1.MessageName.BUILD_FAILED, buildMessage);
return false;
});
}
})());
}
}
await Promise.all(buildPromises);
// If we reach this code, it means that we have circular dependencies
// somewhere. Worst, it means that the circular dependencies both have
// build scripts, making them unsatisfiable.
if (savedSize === buildablePackages.size) {
const prettyLocators = Array.from(buildablePackages).map(locatorHash => {
const pkg = this.storedPackages.get(locatorHash);
if (!pkg)
throw new Error(`Assertion failed: The package should have been registered`);
return structUtils.prettyLocator(this.configuration, pkg);
}).join(`, `);
report.reportError(MessageName_1.MessageName.CYCLIC_DEPENDENCIES, `Some packages have circular dependencies that make their build order unsatisfiable - as a result they won't be built (affected packages are: ${prettyLocators})`);
break;
}
}
// We can now generate the bstate file, which will allow us to "remember"
// what's the dependency tree subset that we used to build a specific
// package (and avoid rebuilding it later if it didn't change).
if (nextBState.size > 0) {
const bstatePath = this.configuration.get(`bstatePath`);
const bstateFile = Project.generateBuildStateFile(nextBState, this.storedPackages);
await fslib_1.xfs.mkdirPromise(fslib_1.ppath.dirname(bstatePath), { recursive: true });
await fslib_1.xfs.changeFilePromise(bstatePath, bstateFile, {
automaticNewlines: true,
});
}
else {
await fslib_1.xfs.removePromise(bstatePath);
}
}
/**
 * Runs a full install: project validation, resolution, fetch, link, and the
 * final hooks. When `opts.immutable` is set, a missing or modified lockfile
 * aborts the install with a FROZEN_LOCKFILE_EXCEPTION (the diff that would
 * have been applied is reported line by line).
 */
async install(opts) {
var _a, _b;
const nodeLinker = this.configuration.get(`nodeLinker`);
// Telemetry is optional; `_a` guards against it being disabled
(_a = Configuration_1.Configuration.telemetry) === null || _a === void 0 ? void 0 : _a.reportInstall(nodeLinker);
await opts.report.startTimerPromise(`Project validation`, {
skipIfEmpty: true,
}, async () => {
await this.configuration.triggerHook(hooks => {
return hooks.validateProject;
}, this, {
reportWarning: opts.report.reportWarning.bind(opts.report),
reportError: opts.report.reportError.bind(opts.report),
});
});
// Reset every package extension to Inactive so this install can recompute
// which ones are actually used (reported during post-resolution validation)
for (const extensionsByIdent of this.configuration.packageExtensions.values())
for (const [, extensionsByRange] of extensionsByIdent)
for (const extension of extensionsByRange)
extension.status = types_1.PackageExtensionStatus.Inactive;
const lockfilePath = fslib_1.ppath.join(this.cwd, this.configuration.get(`lockfileFilename`));
// If we operate with a frozen lockfile, we take a snapshot of it to later make sure it didn't change
let initialLockfile = null;
if (opts.immutable) {
try {
initialLockfile = await fslib_1.xfs.readFilePromise(lockfilePath, `utf8`);
}
catch (error) {
if (error.code === `ENOENT`) {
throw new Report_1.ReportError(MessageName_1.MessageName.FROZEN_LOCKFILE_EXCEPTION, `The lockfile would have been created by this install, which is explicitly forbidden.`);
}
else {
throw error;
}
}
}
await opts.report.startTimerPromise(`Resolution step`, async () => {
await this.resolveEverything(opts);
});
await opts.report.startTimerPromise(`Post-resolution validation`, {
skipIfEmpty: true,
}, async () => {
// Warn about user-provided package extensions that ended up unused or
// redundant after the resolution
for (const [, extensionsPerRange] of this.configuration.packageExtensions) {
for (const [, extensions] of extensionsPerRange) {
for (const extension of extensions) {
if (extension.userProvided) {
const prettyPackageExtension = formatUtils.pretty(this.configuration, extension, formatUtils.Type.PACKAGE_EXTENSION);
switch (extension.status) {
case types_1.PackageExtensionStatus.Inactive:
{
opts.report.reportWarning(MessageName_1.MessageName.UNUSED_PACKAGE_EXTENSION, `${prettyPackageExtension}: No matching package in the dependency tree; you may not need this rule anymore.`);
}
break;
case types_1.PackageExtensionStatus.Redundant:
{
opts.report.reportWarning(MessageName_1.MessageName.REDUNDANT_PACKAGE_EXTENSION, `${prettyPackageExtension}: This rule seems redundant when applied on the original package; the extension may have been applied upstream.`);
}
break;
}
}
}
}
}
// Compare the snapshot taken above against the lockfile the resolution
// would now generate; any difference is forbidden under --immutable
if (initialLockfile !== null) {
const newLockfile = fslib_1.normalizeLineEndings(initialLockfile, this.generateLockfile());
if (newLockfile !== initialLockfile) {
const diff = diff_1.structuredPatch(lockfilePath, lockfilePath, initialLockfile, newLockfile);
opts.report.reportSeparator();
for (const hunk of diff.hunks) {
opts.report.reportInfo(null, `@@ -${hunk.oldStart},${hunk.oldLines} +${hunk.newStart},${hunk.newLines} @@`);
for (const line of hunk.lines) {
if (line.startsWith(`+`)) {
opts.report.reportError(MessageName_1.MessageName.FROZEN_LOCKFILE_EXCEPTION, formatUtils.pretty(this.configuration, line, formatUtils.Type.ADDED));
}
else if (line.startsWith(`-`)) {
opts.report.reportError(MessageName_1.MessageName.FROZEN_LOCKFILE_EXCEPTION, formatUtils.pretty(this.configuration, line, formatUtils.Type.REMOVED));
}
else {
opts.report.reportInfo(null, formatUtils.pretty(this.configuration, line, `grey`));
}
}
}
opts.report.reportSeparator();
throw new Report_1.ReportError(MessageName_1.MessageName.FROZEN_LOCKFILE_EXCEPTION, `The lockfile would have been modified by this install, which is explicitly forbidden.`);
}
}
});
// Report the user-provided extensions that ended up active (telemetry only)
for (const extensionsByIdent of this.configuration.packageExtensions.values())
for (const [, extensionsByRange] of extensionsByIdent)
for (const extension of extensionsByRange)
if (extension.userProvided && extension.status === types_1.PackageExtensionStatus.Active)
(_b = Configuration_1.Configuration.telemetry) === null || _b === void 0 ? void 0 : _b.reportPackageExtension(formatUtils.json(extension, formatUtils.Type.PACKAGE_EXTENSION));
await opts.report.startTimerPromise(`Fetch step`, async () => {
await this.fetchEverything(opts);
if (typeof opts.persistProject === `undefined` || opts.persistProject) {
await this.cacheCleanup(opts);
}
});
if (typeof opts.persistProject === `undefined` || opts.persistProject)
await this.persist();
await opts.report.startTimerPromise(`Link step`, async () => {
// Checksum the immutable patterns before and after linking so we can
// detect (and forbid) any modification under --immutable
const immutablePatterns = opts.immutable
? [...new Set(this.configuration.get(`immutablePatterns`))].sort()
: [];
const before = await Promise.all(immutablePatterns.map(async (pattern) => {
return hashUtils.checksumPattern(pattern, { cwd: this.cwd });
}));
await this.linkEverything(opts);
const after = await Promise.all(immutablePatterns.map(async (pattern) => {
return hashUtils.checksumPattern(pattern, { cwd: this.cwd });
}));
for (let t = 0; t < immutablePatterns.length; ++t) {
if (before[t] !== after[t]) {
opts.report.reportError(MessageName_1.MessageName.FROZEN_ARTIFACT_EXCEPTION, `The checksum for ${immutablePatterns[t]} has been modified by this install, which is explicitly forbidden.`);
}
}
});
await this.persistInstallStateFile();
await this.configuration.triggerHook(hooks => {
return hooks.afterAllInstalled;
}, this, opts);
}
generateLockfile() {
// We generate the data structure that will represent our lockfile. To do this, we create a
// reverse lookup table, where the key will be the resolved locator and the value will be a set
// of all the descriptors that resolved to it. Then we use it to construct an optimized version
// if the final object.
const reverseLookup = new Map();
for (const [descriptorHash, locatorHash] of this.storedResolutions.entries()) {
let descriptorHashes = reverseLookup.get(locatorHash);
if (!descriptorHashes)
reverseLookup.set(locatorHash, descriptorHashes = new Set());
descriptorHashes.add(descriptorHash);
}
const optimizedLockfile = {};
optimizedLockfile.__metadata = {
version: LOCKFILE_VERSION,
};
for (const [locatorHash, descriptorHashes] of reverseLookup.entries()) {
const pkg = this.originalPackages.get(locatorHash);
// A resolution that isn't in `originalPackages` is a virtual packages.
// Since virtual packages can be derived from the information stored in
// the rest of the lockfile we don't want to bother storing them.
if (!pkg)
continue;
const descriptors = [];
for (const descriptorHash of descriptorHashes) {
const descriptor = this.storedDescriptors.get(descriptorHash);
if (!descriptor)
throw new Error(`Assertion failed: The descriptor should have been registered`);
descriptors.push(descriptor);
}
const key = descriptors.map(descriptor => {
return structUtils.stringifyDescriptor(descriptor);
}).sort().join(`, `);
const manifest = new Manifest_1.Manifest();
manifest.version = pkg.linkType === types_2.LinkType.HARD
? pkg.version
: `0.0.0-use.local`;
manifest.languageName = pkg.languageName;
manifest.dependencies = new Map(pkg.dependencies);
manifest.peerDependencies = new Map(pkg.peerDependencies);
manifest.dependenciesMeta = new Map(pkg.dependenciesMeta);
manifest.peerDependenciesMeta = new Map(pkg.peerDependenciesMeta);
manifest.bin = new Map(pkg.bin);
let entryChecksum;
const checksum = this.storedChecksums.get(pkg.locatorHash);
if (typeof checksum !== `undefined`) {
const cacheKeyIndex = checksum.indexOf(`/`);
if (cacheKeyIndex === -1)
throw new Error(`Assertion failed: Expecte the checksum to reference its cache key`);
const cacheKey = checksum.slice(0, cacheKeyIndex);
const hash = checksum.slice(cacheKeyIndex + 1);
if (typeof optimizedLockfile.__metadata.cacheKey === `undefined`)
optimizedLockfile.__metadata.cacheKey = cacheKey;
if (cacheKey === optimizedLockfile.__metadata.cacheKey) {
entryChecksum = hash;
}
else {
entryChecksum = checksum;
}
}
optimizedLockfile[key] = {
...manifest.exportTo({}, {
compatibilityMode: false,
}),
linkType: pkg.linkType.toLowerCase(),
resolution: structUtils.stringifyLocator(pkg),
checksum: entryChecksum,
};
}
const header = `${[
`# This file is generated by running "yarn install" inside your project.\n`,
`# Manual changes might be lost - proceed with caution!\n`,
].join(``)}\n`;
return header + parsers_1.stringifySyml(optimizedLockfile);
}
async persistLockfile() {
const lockfilePath = fslib_1.ppath.join(this.cwd, this.configuration.get(`lockfileFilename`));
const lockfileContent = this.generateLockfile();
await fslib_1.xfs.changeFilePromise(lockfilePath, lockfileContent, {
automaticNewlines: true,
});
}
async persistInstallStateFile() {
const fields = [];
for (const category of Object.values(INSTALL_STATE_FIELDS))
fields.push(...category);
const installState = pick_1.default(this, fields);
const serializedState = await gzip(v8_1.default.serialize(installState));
const installStatePath = this.configuration.get(`installStatePath`);
await fslib_1.xfs.mkdirPromise(fslib_1.ppath.dirname(installStatePath), { recursive: true });
await fslib_1.xfs.changeFilePromise(installStatePath, serializedState);
}
async restoreInstallState({ restoreInstallersCustomData = true, restoreResolutions = true } = {}) {
const installStatePath = this.configuration.get(`installStatePath`);
if (!fslib_1.xfs.existsSync(installStatePath)) {
if (restoreResolutions)
await this.applyLightResolution();
return;
}
const serializedState = await fslib_1.xfs.readFilePromise(installStatePath);
const installState = v8_1.default.deserialize(await gunzip(serializedState));
if (restoreInstallersCustomData)
if (typeof installState.installersCustomData !== `undefined`)
this.installersCustomData = installState.installersCustomData;
if (restoreResolutions) {
if (installState.lockFileChecksum === this.lockFileChecksum) {
Object.assign(this, pick_1.default(installState, INSTALL_STATE_FIELDS.restoreResolutions));
this.refreshWorkspaceDependencies();
}
else {
await this.applyLightResolution();
}
}
}
async applyLightResolution() {
await this.resolveEverything({
lockfileOnly: true,
report: new ThrowReport_1.ThrowReport(),
});
await this.persistInstallStateFile();
}
async persist() {
await this.persistLockfile();
for (const workspace of this.workspacesByCwd.values()) {
await workspace.persistManifest();
}
}
async cacheCleanup({ cache, report }) {
const PRESERVED_FILES = new Set([
`.gitignore`,
]);
if (!fslib_1.xfs.existsSync(cache.cwd))
return;
if (!folderUtils_1.isFolderInside(cache.cwd, this.cwd))
return;
for (const entry of await fslib_1.xfs.readdirPromise(cache.cwd)) {
if (PRESERVED_FILES.has(entry))
continue;
const entryPath = fslib_1.ppath.resolve(cache.cwd, entry);
if (cache.markedFiles.has(entryPath))
continue;
if (cache.immutable) {
report.reportError(MessageName_1.MessageName.IMMUTABLE_CACHE, `${formatUtils.pretty(this.configuration, fslib_1.ppath.basename(entryPath), `magenta`)} appears to be unused and would marked for deletion, but the cache is immutable`);
}
else {
report.reportInfo(MessageName_1.MessageName.UNUSED_CACHE_ENTRY, `${formatUtils.pretty(this.configuration, fslib_1.ppath.basename(entryPath), `magenta`)} appears to be unused - removing`);
await fslib_1.xfs.removePromise(entryPath);
}
}
cache.markedFiles.clear();
}
}
exports.Project = Project;
/**
* This function is worth some documentation. It takes a set of packages,
* traverses them all, and generates virtual packages for each package that
* lists peer dependencies.
*
* We also take advantage of the tree traversal to detect which packages are
* actually used and which have disappeared, and to know which packages truly
* have an optional build (since a package may be optional in one part of the
* tree but not another).
*/
function applyVirtualResolutionMutations({ project, allDescriptors, allResolutions, allPackages, accessibleLocators = new Set(), optionalBuilds = new Set(), volatileDescriptors = new Set(), peerRequirements = new Map(), report, tolerateMissingPackages = false, }) {
var _a;
const virtualStack = new Map();
const resolutionStack = [];
const allIdents = new Map();
// We'll be keeping track of all virtual descriptors; once they have all
// been generated we'll check whether they can be consolidated into one.
const allVirtualInstances = new Map();
const allVirtualDependents = new Map();
// First key is the first package that requests the peer dependency. Second
// key is the name of the package in the peer dependency. Value is the list
// of all packages that extend the original peer requirement.
const peerDependencyLinks = new Map();
// We keep track on which package depend on which other package with peer
// dependencies; this way we can emit warnings for them later on.
const peerDependencyDependents = new Map();
// We must keep a copy of the workspaces original dependencies, because they
// may be overridden during the virtual package resolution - cf Dragon Test #5
const originalWorkspaceDefinitions = new Map(project.workspaces.map(workspace => {
const locatorHash = workspace.anchoredLocator.locatorHash;
const pkg = allPackages.get(locatorHash);
if (typeof pkg === `undefined`) {
if (tolerateMissingPackages) {
return [locatorHash, null];
}
else {
throw new Error(`Assertion failed: The workspace should have an associated package`);
}
}
return [locatorHash, structUtils.copyPackage(pkg)];
}));
const reportStackOverflow = () => {
const logDir = fslib_1.xfs.mktempSync();
const logFile = fslib_1.ppath.join(logDir, `stacktrace.log`);
const maxSize = String(resolutionStack.length + 1).length;
const content = resolutionStack.map((locator, index) => {
const prefix = `${index + 1}.`.padStart(maxSize, ` `);
return `${prefix} ${structUtils.stringifyLocator(locator)}\n`;
}).join(``);
fslib_1.xfs.writeFileSync(logFile, content);
throw new Report_1.ReportError(MessageName_1.MessageName.STACK_OVERFLOW_RESOLUTION, `Encountered a stack overflow when resolving peer dependencies; cf ${logFile}`);
};
const getPackageFromDescriptor = (descriptor) => {
const resolution = allResolutions.get(descriptor.descriptorHash);
if (typeof resolution === `undefined`)
throw new Error(`Assertion failed: The resolution should have been registered`);
const pkg = allPackages.get(resolution);
if (!pkg)
throw new Error(`Assertion failed: The package could not be found`);
return pkg;
};
const resolvePeerDependencies = (parentLocator, peerSlots, { first, optional }) => {
if (resolutionStack.length > 1000)
reportStackOverflow();
resolutionStack.push(parentLocator);
const result = resolvePeerDependenciesImpl(parentLocator, peerSlots, { first, optional });
resolutionStack.pop();
return result;
};
const resolvePeerDependenciesImpl = (parentLocator, peerSlots, { first, optional }) => {
if (accessibleLocators.has(parentLocator.locatorHash))
return;
accessibleLocators.add(parentLocator.locatorHash);
if (!optional)
optionalBuilds.delete(parentLocator.locatorHash);
const parentPackage = allPackages.get(parentLocator.locatorHash);
if (!parentPackage) {
if (tolerateMissingPackages) {
return;
}
else {
throw new Error(`Assertion failed: The package (${structUtils.prettyLocator(project.configuration, parentLocator)}) should have been registered`);
}
}
const newVirtualInstances = [];
const firstPass = [];
const secondPass = [];
const thirdPass = [];
const fourthPass = [];
// During this first pass we virtualize the descriptors. This allows us
// to reference them from their sibling without being order-dependent,
// which is required to solve cases where packages with peer dependencies
// have peer dependencies themselves.
for (const descriptor of Array.from(parentPackage.dependencies.values())) {
// We shouldn't virtualize the package if it was obtained through a peer
// dependency (which can't be the case for workspaces when resolved
// through their top-level)
if (parentPackage.peerDependencies.has(descriptor.identHash) && !first)
continue;
// We had some issues where virtual packages were incorrectly set inside
// workspaces, causing leaks. Check the Dragon Test #5 for more details.
if (structUtils.isVirtualDescriptor(descriptor))
throw new Error(`Assertion failed: Virtual packages shouldn't be encountered when virtualizing a branch`);
// Mark this package as being used (won't be removed from the lockfile)
volatileDescriptors.delete(descriptor.descriptorHash);
// Detect whether this package is being required
let isOptional = optional;
if (!isOptional) {
const dependencyMetaSet = parentPackage.dependenciesMeta.get(structUtils.stringifyIdent(descriptor));
if (typeof dependencyMetaSet !== `undefined`) {
const dependencyMeta = dependencyMetaSet.get(null);
if (typeof dependencyMeta !== `undefined` && dependencyMeta.optional) {
isOptional = true;
}
}
}
const resolution = allResolutions.get(descriptor.descriptorHash);
if (!resolution) {
// Note that we can't use `getPackageFromDescriptor` (defined below,
// because when doing the initial tree building right after loading the
// project it's possible that we get some entries that haven't been
// registered into the lockfile yet - for example when the user has
// manually changed the package.json dependencies)
if (tolerateMissingPackages) {
continue;
}
else {
throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(project.configuration, descriptor)}) should have been registered`);
}
}
const pkg = originalWorkspaceDefinitions.get(resolution) || allPackages.get(resolution);
if (!pkg)
throw new Error(`Assertion failed: The package (${resolution}, resolved from ${structUtils.prettyDescriptor(project.configuration, descriptor)}) should have been registered`);
if (pkg.peerDependencies.size === 0) {
resolvePeerDependencies(pkg, new Map(), { first: false, optional: isOptional });
continue;
}
// The stack overflow is checked against two level because a workspace
// may have a dev dependency on another workspace that lists the first
// one as a regular dependency. In this case the loop will break so we
// don't need to throw an exception.
const stackDepth = virtualStack.get(pkg.locatorHash);
if (typeof stackDepth === `number` && stackDepth >= 2)
reportStackOverflow();
let virtualizedDescriptor;
let virtualizedPackage;
const missingPeerDependencies = new Set();
let nextPeerSlots;
firstPass.push(() => {
virtualizedDescriptor = structUtils.virtualizeDescriptor(descriptor, parentLocator.locatorHash);
virtualizedPackage = structUtils.virtualizePackage(pkg, parentLocator.locatorHash);
parentPackage.dependencies.delete(descriptor.identHash);
parentPackage.dependencies.set(virtualizedDescriptor.identHash, virtualizedDescriptor);
allResolutions.set(virtualizedDescriptor.descriptorHash, virtualizedPackage.locatorHash);
allDescriptors.set(virtualizedDescriptor.descriptorHash, virtualizedDescriptor);
allPackages.set(virtualizedPackage.locatorHash, virtualizedPackage);
// Keep track of all new virtual packages since we'll want to dedupe them
newVirtualInstances.push([pkg, virtualizedDescriptor, virtualizedPackage]);
});
secondPass.push(() => {
var _a;
nextPeerSlots = new Map();
for (const peerRequest of virtualizedPackage.peerDependencies.values()) {
let peerDescriptor = parentPackage.dependencies.get(peerRequest.identHash);
if (!peerDescriptor && structUtils.areIdentsEqual(parentLocator, peerRequest)) {
peerDescriptor = structUtils.convertLocatorToDescriptor(parentLocator);
allDescriptors.set(peerDescriptor.descriptorHash, peerDescriptor);
allResolutions.set(peerDescriptor.descriptorHash, parentLocator.locatorHash);
volatileDescriptors.delete(peerDescriptor.descriptorHash);
}
if (!peerDescriptor && virtualizedPackage.dependencies.has(peerRequest.identHash)) {
virtualizedPackage.peerDependencies.delete(peerRequest.identHash);
continue;
}
if (!peerDescriptor)
peerDescriptor = structUtils.makeDescriptor(peerRequest, `missing:`);
virtualizedPackage.dependencies.set(peerDescriptor.identHash, peerDescriptor);
// Need to track when a virtual descriptor is set as a dependency in case
// the descriptor will be consolidated.
if (structUtils.isVirtualDescriptor(peerDescriptor)) {
const dependents = miscUtils.getSetWithDefault(allVirtualDependents, peerDescriptor.descriptorHash);
dependents.add(virtualizedPackage.locatorHash);
}
allIdents.set(peerDescriptor.identHash, peerDescriptor);
if (peerDescriptor.range === `missing:`)
missingPeerDependencies.add(peerDescriptor.identHash);
nextPeerSlots.set(peerRequest.identHash, (_a = peerSlots.get(peerRequest.identHash)) !== null && _a !== void 0 ? _a : virtualizedPackage.locatorHash);
}
// Since we've had to add new dependencies we need to sort them all over again
virtualizedPackage.dependencies = new Map(miscUtils.sortMap(virtualizedPackage.dependencies, ([identHash, descriptor]) => {
return structUtils.stringifyIdent(descriptor);
}));
});
thirdPass.push(() => {
if (!allPackages.has(virtualizedPackage.locatorHash))
return;
const current = virtualStack.get(pkg.locatorHash);
const next = typeof current !== `undefined` ? current + 1 : 1;
virtualStack.set(pkg.locatorHash, next);
resolvePeerDependencies(virtualizedPackage, nextPeerSlots, { first: false, optional: isOptional });
virtualStack.set(pkg.locatorHash, next - 1);
});
fourthPass.push(() => {
// Regardless of whether the initial virtualized package got deduped
// or not, we now register that *this* package is now a dependent on
// whatever its peer dependencies have been resolved to. We'll later
// use this information to generate warnings.
const finalDescriptor = parentPackage.dependencies.get(descriptor.identHash);
if (typeof finalDescriptor === `undefined`)
throw new Error(`Assertion failed: Expected the peer dependency to have been turned into a dependency`);
const finalResolution = allResolutions.get(finalDescriptor.descriptorHash);
if (typeof finalResolution === `undefined`)
throw new Error(`Assertion failed: Expected the descriptor to be registered`);
miscUtils.getSetWithDefault(peerDependencyDependents, finalResolution).add(parentLocator.locatorHash);
if (!allPackages.has(virtualizedPackage.locatorHash))
return;
for (const descriptor of virtualizedPackage.peerDependencies.values()) {
const root = nextPeerSlots.get(descriptor.identHash);
if (typeof root === `undefined`)
throw new Error(`Assertion failed: Expected the peer dependency ident to be registered`);
miscUtils.getArrayWithDefault(miscUtils.getMapWithDefault(peerDependencyLinks, root), structUtils.stringifyIdent(descriptor)).push(virtualizedPackage.locatorHash);
}
for (const missingPeerDependency of missingPeerDependencies) {
virtualizedPackage.dependencies.delete(missingPeerDependency);
}
});
}
for (const fn of [...firstPass, ...secondPass])
fn();
let stable;
do {
stable = true;
for (const [physicalLocator, virtualDescriptor, virtualPackage] of newVirtualInstances) {
if (!allPackages.has(virtualPackage.locatorHash))
continue;
const otherVirtualInstances = miscUtils.getMapWithDefault(allVirtualInstances, physicalLocator.locatorHash);
// We take all the dependencies from the new virtual instance and
// generate a hash from it. By checking if this hash is already
// registered, we know whether we can trim the new version.
const dependencyHash = hashUtils.makeHash(...[...virtualPackage.dependencies.values()].map(descriptor => {
const resolution = descriptor.range !== `missing:`
? allResolutions.get(descriptor.descriptorHash)
: `missing:`;
if (typeof resolution === `undefined`)
throw new Error(`Assertion failed: Expected the resolution for ${structUtils.prettyDescriptor(project.configuration, descriptor)} to have been registered`);
return resolution;
}),
// We use the identHash to disambiguate between virtual descriptors
// with different base idents being resolved to the same virtual package.
// Note: We don't use the descriptorHash because the whole point of duplicate
// virtual descriptors is that they have different `virtual:` ranges.
// This causes the virtual descriptors with different base idents
// to be preserved, while the virtual package they resolve to gets deduped.
virtualDescriptor.identHash);
const masterDescriptor = otherVirtualInstances.get(dependencyHash);
if (typeof masterDescriptor === `undefined`) {
otherVirtualInstances.set(dependencyHash, virtualDescriptor);
continue;
}
// Since we're applying multiple pass, we might have already registered
// ourselves as the "master" descriptor in the previous pass.
if (masterDescriptor === virtualDescriptor)
continue;
stable = false;
allPackages.delete(virtualPackage.locatorHash);
allDescriptors.delete(virtualDescriptor.descriptorHash);
allResolutions.delete(virtualDescriptor.descriptorHash);
accessibleLocators.delete(virtualPackage.locatorHash);
const dependents = allVirtualDependents.get(virtualDescriptor.descriptorHash) || [];
const allDependents = [parentPackage.locatorHash, ...dependents];
allVirtualDependents.delete(virtualDescriptor.descriptorHash);
for (const dependent of allDependents) {
const pkg = allPackages.get(dependent);
if (typeof pkg === `undefined`)
continue;
pkg.dependencies.set(virtualDescriptor.identHash, masterDescriptor);
}
}
} while (!stable);
for (const fn of [...thirdPass, ...fourthPass]) {
fn();
}
};
for (const workspace of project.workspaces) {
volatileDescriptors.delete(workspace.anchoredDescriptor.descriptorHash);
resolvePeerDependencies(workspace.anchoredLocator, new Map(), { first: true, optional: false });
}
let WarningType;
(function (WarningType) {
WarningType[WarningType["NotProvided"] = 0] = "NotProvided";
WarningType[WarningType["NotCompatible"] = 1] = "NotCompatible";
})(WarningType || (WarningType = {}));
const warnings = [];
for (const [rootHash, dependents] of peerDependencyDependents) {
const root = allPackages.get(rootHash);
if (typeof root === `undefined`)
throw new Error(`Assertion failed: Expected the root to be registered`);
// We retrieve the set of packages that provide complementary peer
// dependencies to the one already offered by our root package, and to
// whom other package.
//
// We simply skip if the record doesn't exist because a package may not
// have any records if it didn't contribute any new peer (it only exists
// if the package has at least one peer that isn't listed by its parent
// packages).
//
const rootLinks = peerDependencyLinks.get(rootHash);
if (typeof rootLinks === `undefined`)
continue;
for (const dependentHash of dependents) {
const dependent = allPackages.get(dependentHash);
// The package may have been pruned during a deduplication
if (typeof dependent === `undefined`)
continue;
for (const [identStr, linkHashes] of rootLinks) {
const ident = structUtils.parseIdent(identStr);
// This dependent may have a peer dep itself, in which case it's not
// the true root, and we can ignore it
if (dependent.peerDependencies.has(ident.identHash))
continue;
const hash = `p${hashUtils.makeHash(dependentHash, identStr, rootHash).slice(0, 5)}`;
peerRequirements.set(hash, {
subject: dependentHash,
requested: ident,
rootRequester: rootHash,
allRequesters: linkHashes,
});
// Note: this can be undefined when the peer dependency isn't provided at all
const resolvedDescriptor = root.dependencies.get(ident.identHash);
if (typeof resolvedDescriptor !== `undefined`) {
const peerResolution = getPackageFromDescriptor(resolvedDescriptor);
const peerVersion = (_a = peerResolution.version) !== null && _a !== void 0 ? _a : `0.0.0`;
const ranges = new Set();
for (const linkHash of linkHashes) {
const link = allPackages.get(linkHash);
if (typeof link === `undefined`)
throw new Error(`Assertion failed: Expected the link to be registered`);
const peerDependency = link.peerDependencies.get(ident.identHash);
if (typeof peerDependency === `undefined`)
throw new Error(`Assertion failed: Expected the ident to be registered`);
ranges.add(peerDependency.range);
}
const satisfiesAll = [...ranges].every(range => {
return semverUtils.satisfiesWithPrereleases(peerVersion, range);
});
if (!satisfiesAll) {
warnings.push({
type: WarningType.NotCompatible,
subject: dependent,
requested: ident,
requester: root,
version: peerVersion,
hash,
requirementCount: linkHashes.length,
});
}
}
else {
const peerDependencyMeta = root.peerDependenciesMeta.get(identStr);
if (!(peerDependencyMeta === null || peerDependencyMeta === void 0 ? void 0 : peerDependencyMeta.optional)) {
warnings.push({
type: WarningType.NotProvided,
subject: dependent,
requested: ident,
requester: root,
hash,
});
}
}
}
}
}
const warningSortCriterias = [
warning => structUtils.prettyLocatorNoColors(warning.subject),
warning => structUtils.stringifyIdent(warning.requested),
warning => `${warning.type}`,
];
for (const warning of miscUtils.sortMap(warnings, warningSortCriterias)) {
switch (warning.type) {
case WarningType.NotProvided:
{
report === null || report === void 0 ? void 0 : report.reportWarning(MessageName_1.MessageName.MISSING_PEER_DEPENDENCY, `${structUtils.prettyLocator(project.configuration, warning.subject)} doesn't provide ${structUtils.prettyIdent(project.configuration, warning.requested)} (${formatUtils.pretty(project.configuration, warning.hash, formatUtils.Type.CODE)}), requested by ${structUtils.prettyIdent(project.configuration, warning.requester)}`);
}
break;
case WarningType.NotCompatible:
{
const andDescendants = warning.requirementCount > 1
? `and some of its descendants request`
: `requests`;
report === null || report === void 0 ? void 0 : report.reportWarning(MessageName_1.MessageName.INCOMPATIBLE_PEER_DEPENDENCY, `${structUtils.prettyLocator(project.configuration, warning.subject)} provides ${structUtils.prettyIdent(project.configuration, warning.requested)} (${formatUtils.pretty(project.configuration, warning.hash, formatUtils.Type.CODE)}) with version ${structUtils.prettyReference(project.configuration, warning.version)}, which doesn't satisfy what ${structUtils.prettyIdent(project.configuration, warning.requester)} ${andDescendants}`);
}
break;
}
}
if (warnings.length > 0) {
report === null || report === void 0 ? void 0 : report.reportWarning(MessageName_1.MessageName.UNNAMED, `Some peer dependencies are incorrectly met; run ${formatUtils.pretty(project.configuration, `yarn explain peer-requirements `, formatUtils.Type.CODE)} for details, where ${formatUtils.pretty(project.configuration, ``, formatUtils.Type.CODE)} is the six-letter p-prefixed code`);
}
}
/***/ }),
/***/ 6729:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.ProtocolResolver = exports.TAG_REGEXP = void 0;
const tslib_1 = __webpack_require__(70655);
const semver_1 = tslib_1.__importDefault(__webpack_require__(74451));
const semverUtils = tslib_1.__importStar(__webpack_require__(51201));
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
exports.TAG_REGEXP = /^(?!v)[a-z0-9-.]+$/i;
class ProtocolResolver {
    /**
     * Resolver handling bare semver ranges and dist-tags: it rewrites them to
     * use the configured `defaultProtocol` (typically `npm:`) and delegates
     * the actual resolution to the rest of the resolver chain.
     */
    supportsDescriptor(descriptor, opts) {
        if (semverUtils.validRange(descriptor.range))
            return true;
        return exports.TAG_REGEXP.test(descriptor.range);
    }
    supportsLocator(locator, opts) {
        if (semver_1.default.valid(locator.reference))
            return true;
        return exports.TAG_REGEXP.test(locator.reference);
    }
    shouldPersistResolution(locator, opts) {
        const forwarded = this.forwardLocator(locator, opts);
        return opts.resolver.shouldPersistResolution(forwarded, opts);
    }
    bindDescriptor(descriptor, fromLocator, opts) {
        const forwarded = this.forwardDescriptor(descriptor, opts);
        return opts.resolver.bindDescriptor(forwarded, fromLocator, opts);
    }
    getResolutionDependencies(descriptor, opts) {
        const forwarded = this.forwardDescriptor(descriptor, opts);
        return opts.resolver.getResolutionDependencies(forwarded, opts);
    }
    async getCandidates(descriptor, dependencies, opts) {
        const forwarded = this.forwardDescriptor(descriptor, opts);
        return await opts.resolver.getCandidates(forwarded, dependencies, opts);
    }
    async getSatisfying(descriptor, references, opts) {
        const forwarded = this.forwardDescriptor(descriptor, opts);
        return await opts.resolver.getSatisfying(forwarded, references, opts);
    }
    async resolve(locator, opts) {
        // Resolve under the forwarded (protocol-qualified) locator, then rename
        // the resulting package back to the locator the caller asked about.
        const resolved = await opts.resolver.resolve(this.forwardLocator(locator, opts), opts);
        return structUtils.renamePackage(resolved, locator);
    }
    /** Prefixes a descriptor's range with the configured default protocol. */
    forwardDescriptor(descriptor, opts) {
        const protocol = opts.project.configuration.get(`defaultProtocol`);
        return structUtils.makeDescriptor(descriptor, `${protocol}${descriptor.range}`);
    }
    /** Prefixes a locator's reference with the configured default protocol. */
    forwardLocator(locator, opts) {
        const protocol = opts.project.configuration.get(`defaultProtocol`);
        return structUtils.makeLocator(locator, `${protocol}${locator.reference}`);
    }
}
exports.ProtocolResolver = ProtocolResolver;
/***/ }),
/***/ 50334:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Report = exports.isReportError = exports.ReportError = void 0;
const stream_1 = __webpack_require__(92413);
const string_decoder_1 = __webpack_require__(24304);
const MessageName_1 = __webpack_require__(62755);
class ReportError extends Error {
    /**
     * An Error carrying a Yarn message code so reporters can display it with
     * the proper `YNxxxx` prefix; `reportExtra`, when provided, is invoked by
     * the report to emit additional context alongside the error.
     */
    constructor(code, message, reportExtra) {
        super(message);
        this.reportCode = code;
        this.reportExtra = reportExtra;
    }
}
exports.ReportError = ReportError;
// An error qualifies as a ReportError as soon as it carries a `reportCode`
// property; duck-typing here avoids unreliable cross-realm instanceof checks.
function isReportError(error) {
    return error.reportCode !== undefined;
}
exports.isReportError = isReportError;
/**
 * Abstract base class for all reporters. Subclasses implement reportInfo /
 * reportWarning / reportError; this class provides deduplicated variants,
 * a counter-driven progress helper, and a line-splitting stream adapter.
 */
class Report {
    constructor() {
        // Deduplication keys already seen by the *Once helpers below.
        this.reportedInfos = new Set();
        this.reportedWarnings = new Set();
        this.reportedErrors = new Set();
    }
    /**
     * Creates a progress helper counting from 0 to `max`. The returned object
     * is an async iterable yielding `{progress}` ratios each time the counter
     * advances, plus `set`/`tick` functions to drive it.
     */
    static progressViaCounter(max) {
        let current = 0;
        let unlock;
        let lock = new Promise(resolve => {
            unlock = resolve;
        });
        const set = (n) => {
            // Release the pending iteration, then re-arm the lock for the next one.
            const thisUnlock = unlock;
            lock = new Promise(resolve => {
                unlock = resolve;
            });
            current = n;
            thisUnlock();
        };
        // Note: the previous signature accepted an unused argument; extra
        // arguments remain harmlessly ignored, so this stays compatible.
        const tick = () => {
            set(current + 1);
        };
        const gen = (async function* () {
            while (current < max) {
                await lock;
                yield {
                    progress: current / max,
                };
            }
        })();
        return {
            [Symbol.asyncIterator]() {
                return gen;
            },
            set,
            tick,
        };
    }
    /** Reports an info line at most once per key (defaults to the text). */
    reportInfoOnce(name, text, opts) {
        const key = opts && opts.key ? opts.key : text;
        if (!this.reportedInfos.has(key)) {
            this.reportedInfos.add(key);
            this.reportInfo(name, text);
        }
    }
    /** Reports a warning at most once per key (defaults to the text). */
    reportWarningOnce(name, text, opts) {
        const key = opts && opts.key ? opts.key : text;
        if (!this.reportedWarnings.has(key)) {
            this.reportedWarnings.add(key);
            this.reportWarning(name, text);
        }
    }
    /**
     * Reports an error at most once per key (defaults to the text); when the
     * error is actually reported, `opts.reportExtra` is invoked with this
     * report so callers can attach extra context.
     */
    reportErrorOnce(name, text, opts) {
        var _a;
        const key = opts && opts.key ? opts.key : text;
        if (!this.reportedErrors.has(key)) {
            this.reportedErrors.add(key);
            this.reportError(name, text);
            (_a = opts === null || opts === void 0 ? void 0 : opts.reportExtra) === null || _a === void 0 ? void 0 : _a.call(opts, this);
        }
    }
    /**
     * Reports an exception at most once (keyed on the error instance): report
     * errors keep their code and extra reporter, anything else becomes a
     * generic EXCEPTION entry with its stack when available.
     */
    reportExceptionOnce(error) {
        if (isReportError(error)) {
            this.reportErrorOnce(error.reportCode, error.message, { key: error, reportExtra: error.reportExtra });
        }
        else {
            this.reportErrorOnce(MessageName_1.MessageName.EXCEPTION, error.stack || error.message, { key: error });
        }
    }
    /**
     * Returns a writable stream whose content is re-emitted line by line
     * through `reportInfo`, optionally prefixed. Partial trailing data is
     * buffered until a newline (or the stream end) arrives.
     */
    createStreamReporter(prefix = null) {
        const stream = new stream_1.PassThrough();
        const decoder = new string_decoder_1.StringDecoder();
        let buffer = ``;
        stream.on(`data`, chunk => {
            let chunkStr = decoder.write(chunk);
            let lineIndex;
            do {
                lineIndex = chunkStr.indexOf(`\n`);
                if (lineIndex !== -1) {
                    // `slice` replaces the deprecated `substr`; same semantics here
                    // since all indices are non-negative.
                    const line = buffer + chunkStr.slice(0, lineIndex);
                    chunkStr = chunkStr.slice(lineIndex + 1);
                    buffer = ``;
                    if (prefix !== null) {
                        this.reportInfo(null, `${prefix} ${line}`);
                    }
                    else {
                        this.reportInfo(null, line);
                    }
                }
            } while (lineIndex !== -1);
            buffer += chunkStr;
        });
        stream.on(`end`, () => {
            // Flush whatever the decoder still holds (incomplete multi-byte
            // sequences); note the `buffer` remainder is only reported when it
            // reaches the decoder's final output.
            const last = decoder.end();
            if (last !== ``) {
                if (prefix !== null) {
                    this.reportInfo(null, `${prefix} ${last}`);
                }
                else {
                    this.reportInfo(null, last);
                }
            }
        });
        return stream;
    }
}
exports.Report = Report;
/***/ }),
/***/ 30460:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.RunInstallPleaseResolver = void 0;
const MessageName_1 = __webpack_require__(62755);
const Report_1 = __webpack_require__(50334);
/**
 * Resolver used when the lockfile must be treated as authoritative: purely
 * local queries are forwarded to the wrapped resolver, while anything that
 * would require an actual resolution fails with MISSING_LOCKFILE_ENTRY,
 * telling the user to run an install first.
 */
class RunInstallPleaseResolver {
    constructor(resolver) {
        this.resolver = resolver;
    }
    supportsDescriptor(descriptor, opts) {
        return this.resolver.supportsDescriptor(descriptor, opts);
    }
    supportsLocator(locator, opts) {
        return this.resolver.supportsLocator(locator, opts);
    }
    shouldPersistResolution(locator, opts) {
        return this.resolver.shouldPersistResolution(locator, opts);
    }
    bindDescriptor(descriptor, fromLocator, opts) {
        return this.resolver.bindDescriptor(descriptor, fromLocator, opts);
    }
    getResolutionDependencies(descriptor, opts) {
        return this.resolver.getResolutionDependencies(descriptor, opts);
    }
    // The three methods below all fail identically; the message used to be
    // duplicated three times and is now built in a single place.
    makeMissingLockfileError() {
        return new Report_1.ReportError(MessageName_1.MessageName.MISSING_LOCKFILE_ENTRY, `This package doesn't seem to be present in your lockfile; try to make an install to update your resolutions`);
    }
    async getCandidates(descriptor, dependencies, opts) {
        throw this.makeMissingLockfileError();
    }
    async getSatisfying(descriptor, references, opts) {
        throw this.makeMissingLockfileError();
    }
    async resolve(locator, opts) {
        throw this.makeMissingLockfileError();
    }
}
/***/ }),
/***/ 73759:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.StreamReport = exports.formatNameWithHyperlink = exports.formatName = void 0;
const tslib_1 = __webpack_require__(70655);
const slice_ansi_1 = tslib_1.__importDefault(__webpack_require__(28231));
const MessageName_1 = __webpack_require__(62755);
const Report_1 = __webpack_require__(50334);
const formatUtils = tslib_1.__importStar(__webpack_require__(23821));
// Spinner animation frames (braille pattern characters) and refresh rate (ms).
const PROGRESS_FRAMES = [`⠋`, `⠙`, `⠹`, `⠸`, `⠼`, `⠴`, `⠦`, `⠧`, `⠇`, `⠏`];
const PROGRESS_INTERVAL = 80;
// Message names whose lines may be overwritten by later output ("forgettable"),
// plus how many such lines are kept on screen at once.
const BASE_FORGETTABLE_NAMES = new Set([MessageName_1.MessageName.FETCH_NOT_CACHED, MessageName_1.MessageName.UNUSED_CACHE_ENTRY]);
const BASE_FORGETTABLE_BUFFER_SIZE = 5;
// CI-specific log folding markers (GitHub Actions, Travis, GitLab CI); null
// when the environment doesn't advertise any supported grouping syntax.
const GROUP = process.env.GITHUB_ACTIONS
? { start: (what) => `::group::${what}\n`, end: (what) => `::endgroup::\n` }
: process.env.TRAVIS
? { start: (what) => `travis_fold:start:${what}\n`, end: (what) => `travis_fold:end:${what}\n` }
: process.env.GITLAB_CI
? { start: (what) => `section_start:${Math.floor(Date.now() / 1000)}:${what.toLowerCase().replace(/\W+/g, `_`)}\r\x1b[0K${what}\n`, end: (what) => `section_end:${Math.floor(Date.now() / 1000)}:${what.toLowerCase().replace(/\W+/g, `_`)}\r\x1b[0K` }
: null;
// Captured once at load time; used below to pick a seasonal progress style.
const now = new Date();
// We only want to support environments that will out-of-the-box accept the
// characters we want to use. Others can enforce the style from the project
// configuration.
const supportsEmojis = [`iTerm.app`, `Apple_Terminal`].includes(process.env.TERM_PROGRAM) || !!process.env.WT_SESSION;
// Identity function — presumably a compiled-down TypeScript typing helper.
const makeRecord = (obj) => obj;
// Progress bar themes; `date` is [day, month] on which the theme activates,
// `chars` are the [filled, empty] bar characters, `size` the nominal width.
const PROGRESS_STYLES = makeRecord({
patrick: {
date: [17, 3],
chars: [`🍀`, `🌱`],
size: 40,
},
simba: {
date: [19, 7],
chars: [`🦁`, `🌴`],
size: 40,
},
jack: {
date: [31, 10],
chars: [`🎃`, `🦇`],
size: 40,
},
hogsfather: {
date: [31, 12],
chars: [`🎉`, `🎄`],
size: 40,
},
default: {
chars: [`=`, `-`],
size: 80,
},
});
// Pick the first seasonal style matching today's date (emoji-capable
// terminals only), otherwise fall back to the plain `default` style.
const defaultStyle = (supportsEmojis && Object.keys(PROGRESS_STYLES).find(name => {
const style = PROGRESS_STYLES[name];
if (style.date && (style.date[0] !== now.getDate() || style.date[1] !== now.getMonth() + 1))
return false;
return true;
})) || `default`;
/**
 * Renders a message name as its `YN0000`-style label. In pretty (non-json)
 * output, the generic null name is displayed greyed out.
 */
function formatName(name, { configuration, json }) {
    const label = MessageName_1.stringifyMessageName(name === null ? 0 : name);
    if (json || name !== null)
        return label;
    return formatUtils.pretty(configuration, label, `grey`);
}
exports.formatName = formatName;
/**
 * Same as `formatName`, but wraps the label in an OSC 8 hyperlink pointing at
 * the matching error-code documentation page when hyperlinks are enabled.
 */
function formatNameWithHyperlink(name, { configuration, json }) {
    const code = formatName(name, { configuration, json });
    // Skip the link when disabled by configuration, and for generic messages
    // that have no dedicated documentation entry.
    if (!configuration.get(`enableHyperlinks`) || name === null || name === MessageName_1.MessageName.UNNAMED)
        return code;
    const desc = MessageName_1.MessageName[name];
    const href = `https://yarnpkg.com/advanced/error-codes#${code}---${desc}`.toLowerCase();
    // We use BELL as ST because it seems that iTerm doesn't properly support
    // the \x1b\\ sequence described in the reference document
    // https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda#the-escape-sequence
    return `\u001b]8;;${href}\u0007${code}\u001b]8;;\u0007`;
}
exports.formatNameWithHyperlink = formatNameWithHyperlink;
class StreamReport extends Report_1.Report {
/**
 * Terminal-oriented report: renders lines with the `➤ YNxxxx:` prefix,
 * optional progress bars, CI log grouping and a JSON mode. The option
 * defaults cascade: `includeLogs` defaults to `!json`, and the info/warning
 * toggles default to `includeLogs`.
 */
constructor({ configuration, stdout, json = false, includeFooter = true, includeLogs = !json, includeInfos = includeLogs, includeWarnings = includeLogs, forgettableBufferSize = BASE_FORGETTABLE_BUFFER_SIZE, forgettableNames = new Set(), }) {
super();
// Timer sections opened with skipIfEmpty whose header hasn't printed yet.
this.uncommitted = new Set();
this.cacheHitCount = 0;
this.cacheMissCount = 0;
this.warningCount = 0;
this.errorCount = 0;
this.startTime = Date.now();
// Current nesting depth of timer sections (drives the ┌/└ indentation).
this.indent = 0;
// Active progress bars, keyed by their driving async iterable.
this.progress = new Map();
this.progressTime = 0;
this.progressFrame = 0;
this.progressTimeout = null;
// Rolling buffer of "forgettable" lines that later output may overwrite.
this.forgettableLines = [];
formatUtils.addLogFilterSupport(this, { configuration });
this.configuration = configuration;
this.forgettableBufferSize = forgettableBufferSize;
this.forgettableNames = new Set([...forgettableNames, ...BASE_FORGETTABLE_NAMES]);
this.includeFooter = includeFooter;
this.includeInfos = includeInfos;
this.includeWarnings = includeWarnings;
this.json = json;
this.stdout = stdout;
const styleName = this.configuration.get(`progressBarStyle`) || defaultStyle;
if (!Object.prototype.hasOwnProperty.call(PROGRESS_STYLES, styleName))
throw new Error(`Assertion failed: Invalid progress bar style`);
this.progressStyle = PROGRESS_STYLES[styleName];
// Scale the bar down when the terminal is narrower than 80 columns.
const PAD_LEFT = `➤ YN0000: ┌ `.length;
const maxWidth = Math.max(0, Math.min(process.stdout.columns - PAD_LEFT, 80));
this.progressMaxScaledSize = Math.floor(this.progressStyle.size * maxWidth / 80);
}
/**
 * Creates a StreamReport from `opts`, runs `cb` with it, then finalizes it.
 * While `cb` runs, `process.emitWarning` is redirected into the report so
 * Node warnings surface through the regular reporting channel; the original
 * hook is restored in the finally block.
 */
static async start(opts, cb) {
const report = new this(opts);
const emitWarning = process.emitWarning;
process.emitWarning = (message, name) => {
// Node may pass an Error instance instead of a string message.
if (typeof message !== `string`) {
const error = message;
message = error.message;
name = name !== null && name !== void 0 ? name : error.name;
}
const fullMessage = typeof name !== `undefined`
? `${name}: ${message}`
: message;
report.reportWarning(MessageName_1.MessageName.UNNAMED, fullMessage);
};
try {
await cb(report);
}
catch (error) {
// Errors are reported rather than rethrown; callers check exitCode().
report.reportExceptionOnce(error);
}
finally {
await report.finalize();
process.emitWarning = emitWarning;
}
return report;
}
hasErrors() {
return this.errorCount > 0;
}
exitCode() {
return this.hasErrors() ? 1 : 0;
}
reportCacheHit(locator) {
this.cacheHitCount += 1;
}
reportCacheMiss(locator, message) {
this.cacheMissCount += 1;
if (typeof message !== `undefined` && !this.configuration.get(`preferAggregateCacheInfo`)) {
this.reportInfo(MessageName_1.MessageName.FETCH_NOT_CACHED, message);
}
}
/**
 * Runs `cb` inside a named, indented timer section (`┌ what` … `└ Completed`),
 * emitting CI group markers when supported. With `skipIfEmpty`, the header is
 * only printed if something gets reported inside the section (via commit()).
 */
startTimerSync(what, opts, cb) {
// Supports both the (what, cb) and (what, opts, cb) call shapes.
const realOpts = typeof opts === `function` ? {} : opts;
const realCb = typeof opts === `function` ? opts : cb;
const mark = { committed: false, action: () => {
this.reportInfo(null, `┌ ${what}`);
this.indent += 1;
if (GROUP !== null) {
this.stdout.write(GROUP.start(what));
}
} };
if (realOpts.skipIfEmpty) {
// Defer the header; commit() prints it on the first real report.
this.uncommitted.add(mark);
}
else {
mark.action();
mark.committed = true;
}
const before = Date.now();
try {
return realCb();
}
catch (error) {
this.reportExceptionOnce(error);
throw error;
}
finally {
const after = Date.now();
this.uncommitted.delete(mark);
// Only close the section if its header was actually printed.
if (mark.committed) {
this.indent -= 1;
if (GROUP !== null)
this.stdout.write(GROUP.end(what));
// Only show the duration for sections that took a noticeable time.
if (this.configuration.get(`enableTimers`) && after - before > 200) {
this.reportInfo(null, `└ Completed in ${formatUtils.pretty(this.configuration, after - before, formatUtils.Type.DURATION)}`);
}
else {
this.reportInfo(null, `└ Completed`);
}
}
}
}
/**
 * Async counterpart of startTimerSync: runs (and awaits) `cb` inside a named,
 * indented timer section, emitting CI group markers when supported. With
 * `skipIfEmpty`, the header is only printed once something gets reported.
 */
async startTimerPromise(what, opts, cb) {
// Supports both the (what, cb) and (what, opts, cb) call shapes.
const realOpts = typeof opts === `function` ? {} : opts;
const realCb = typeof opts === `function` ? opts : cb;
const mark = { committed: false, action: () => {
this.reportInfo(null, `┌ ${what}`);
this.indent += 1;
if (GROUP !== null) {
this.stdout.write(GROUP.start(what));
}
} };
if (realOpts.skipIfEmpty) {
// Defer the header; commit() prints it on the first real report.
this.uncommitted.add(mark);
}
else {
mark.action();
mark.committed = true;
}
const before = Date.now();
try {
return await realCb();
}
catch (error) {
this.reportExceptionOnce(error);
throw error;
}
finally {
const after = Date.now();
this.uncommitted.delete(mark);
// Only close the section if its header was actually printed.
if (mark.committed) {
this.indent -= 1;
if (GROUP !== null)
this.stdout.write(GROUP.end(what));
// Only show the duration for sections that took a noticeable time.
if (this.configuration.get(`enableTimers`) && after - before > 200) {
this.reportInfo(null, `└ Completed in ${formatUtils.pretty(this.configuration, after - before, formatUtils.Type.DURATION)}`);
}
else {
this.reportInfo(null, `└ Completed`);
}
}
}
}
async startCacheReport(cb) {
const cacheInfo = this.configuration.get(`preferAggregateCacheInfo`)
? { cacheHitCount: this.cacheHitCount, cacheMissCount: this.cacheMissCount }
: null;
try {
return await cb();
}
catch (error) {
this.reportExceptionOnce(error);
throw error;
}
finally {
if (cacheInfo !== null) {
this.reportCacheChanges(cacheInfo);
}
}
}
reportSeparator() {
if (this.indent === 0) {
this.writeLineWithForgettableReset(``);
}
else {
this.reportInfo(null, ``);
}
}
/**
 * Reports an informational line. "Forgettable" messages (e.g. fetch notices)
 * go through a rolling buffer so they overwrite each other on screen instead
 * of flooding the log; any other line resets that buffer.
 */
reportInfo(name, text) {
if (!this.includeInfos)
return;
// Print any pending skipIfEmpty section headers first.
this.commit();
const message = `${formatUtils.pretty(this.configuration, `➤`, `blueBright`)} ${this.formatNameWithHyperlink(name)}: ${this.formatIndent()}${text}`;
if (!this.json) {
if (this.forgettableNames.has(name)) {
this.forgettableLines.push(message);
if (this.forgettableLines.length > this.forgettableBufferSize) {
// Buffer overflowed: trim it and redraw the last N lines in place.
while (this.forgettableLines.length > this.forgettableBufferSize)
this.forgettableLines.shift();
this.writeLines(this.forgettableLines, { truncate: true });
}
else {
this.writeLine(message, { truncate: true });
}
}
else {
this.writeLineWithForgettableReset(message);
}
}
else {
this.reportJson({ type: `info`, name, displayName: this.formatName(name), indent: this.formatIndent(), data: text });
}
}
reportWarning(name, text) {
this.warningCount += 1;
if (!this.includeWarnings)
return;
this.commit();
if (!this.json) {
this.writeLineWithForgettableReset(`${formatUtils.pretty(this.configuration, `➤`, `yellowBright`)} ${this.formatNameWithHyperlink(name)}: ${this.formatIndent()}${text}`);
}
else {
this.reportJson({ type: `warning`, name, displayName: this.formatName(name), indent: this.formatIndent(), data: text });
}
}
reportError(name, text) {
this.errorCount += 1;
this.commit();
if (!this.json) {
this.writeLineWithForgettableReset(`${formatUtils.pretty(this.configuration, `➤`, `redBright`)} ${this.formatNameWithHyperlink(name)}: ${this.formatIndent()}${text}`, { truncate: false });
}
else {
this.reportJson({ type: `error`, name, displayName: this.formatName(name), indent: this.formatIndent(), data: text });
}
}
/**
 * Attaches a progress bar driven by the async iterable `progressIt`, which
 * yields `{progress, title}` updates until done or `stop` is called.
 */
reportProgress(progressIt) {
let stopped = false;
const promise = Promise.resolve().then(async () => {
const progressDefinition = {
progress: 0,
title: undefined,
};
this.progress.set(progressIt, {
definition: progressDefinition,
lastScaledSize: -1,
});
this.refreshProgress(-1);
for await (const { progress, title } of progressIt) {
if (stopped)
continue;
// Skip redundant redraws when nothing changed.
if (progressDefinition.progress === progress && progressDefinition.title === title)
continue;
progressDefinition.progress = progress;
progressDefinition.title = title;
this.refreshProgress();
}
stop();
});
const stop = () => {
if (stopped)
return;
stopped = true;
this.progress.delete(progressIt);
this.refreshProgress(+1);
};
// NOTE(review): spreading a Promise copies no own enumerable properties, so
// the returned object is NOT itself thenable — callers can only rely on
// `stop`. Confirm this is intentional before awaiting the return value.
return { ...promise, stop };
}
reportJson(data) {
if (this.json) {
this.writeLineWithForgettableReset(`${JSON.stringify(data)}`);
}
}
async finalize() {
if (!this.includeFooter)
return;
let installStatus = ``;
if (this.errorCount > 0)
installStatus = `Failed with errors`;
else if (this.warningCount > 0)
installStatus = `Done with warnings`;
else
installStatus = `Done`;
const timing = formatUtils.pretty(this.configuration, Date.now() - this.startTime, formatUtils.Type.DURATION);
const message = this.configuration.get(`enableTimers`)
? `${installStatus} in ${timing}`
: installStatus;
if (this.errorCount > 0) {
this.reportError(MessageName_1.MessageName.UNNAMED, message);
}
else if (this.warningCount > 0) {
this.reportWarning(MessageName_1.MessageName.UNNAMED, message);
}
else {
this.reportInfo(MessageName_1.MessageName.UNNAMED, message);
}
}
writeLine(str, { truncate } = {}) {
this.clearProgress({ clear: true });
this.stdout.write(`${this.truncate(str, { truncate })}\n`);
this.writeProgress();
}
writeLineWithForgettableReset(str, { truncate } = {}) {
this.forgettableLines = [];
this.writeLine(str, { truncate });
}
writeLines(lines, { truncate } = {}) {
this.clearProgress({ delta: lines.length });
for (const line of lines)
this.stdout.write(`${this.truncate(line, { truncate })}\n`);
this.writeProgress();
}
reportCacheChanges({ cacheHitCount, cacheMissCount }) {
const cacheHitDelta = this.cacheHitCount - cacheHitCount;
const cacheMissDelta = this.cacheMissCount - cacheMissCount;
if (cacheHitDelta === 0 && cacheMissDelta === 0)
return;
let fetchStatus = ``;
if (this.cacheHitCount > 1)
fetchStatus += `${this.cacheHitCount} packages were already cached`;
else if (this.cacheHitCount === 1)
fetchStatus += ` - one package was already cached`;
else
fetchStatus += `No packages were cached`;
if (this.cacheHitCount > 0) {
if (this.cacheMissCount > 1) {
fetchStatus += `, ${this.cacheMissCount} had to be fetched`;
}
else if (this.cacheMissCount === 1) {
fetchStatus += `, one had to be fetched`;
}
}
else {
if (this.cacheMissCount > 1) {
fetchStatus += ` - ${this.cacheMissCount} packages had to be fetched`;
}
else if (this.cacheMissCount === 1) {
fetchStatus += ` - one package had to be fetched`;
}
}
this.reportInfo(MessageName_1.MessageName.FETCH_NOT_CACHED, fetchStatus);
}
commit() {
const marks = this.uncommitted;
this.uncommitted = new Set();
for (const mark of marks) {
mark.committed = true;
mark.action();
}
}
// Erases the currently displayed progress bars so that regular output can be
// written above them. `delta` accounts for extra rows that are about to be
// added/removed; `clear` forces wiping the region even when delta is 0.
clearProgress({ delta = 0, clear = false }) {
if (!this.configuration.get(`enableProgressBars`) || this.json)
return;
if (this.progress.size + delta > 0) {
// Move the cursor up over the whole progress area (CSI n A)
this.stdout.write(`\x1b[${this.progress.size + delta}A`);
if (delta > 0 || clear) {
// Erase from the cursor to the end of the screen (CSI 0 J)
this.stdout.write(`\x1b[0J`);
}
}
}
// Redraws every active progress bar at the current cursor position and
// schedules the next refresh tick. No-op when progress bars are disabled or
// when emitting JSON.
writeProgress() {
if (!this.configuration.get(`enableProgressBars`) || this.json)
return;
// Cancel any pending refresh; we're drawing synchronously right now
if (this.progressTimeout !== null)
clearTimeout(this.progressTimeout);
this.progressTimeout = null;
if (this.progress.size === 0)
return;
const now = Date.now();
// Advance the spinner frame at most once per PROGRESS_INTERVAL
if (now - this.progressTime > PROGRESS_INTERVAL) {
this.progressFrame = (this.progressFrame + 1) % PROGRESS_FRAMES.length;
this.progressTime = now;
}
const spinner = PROGRESS_FRAMES[this.progressFrame];
for (const progress of this.progress.values()) {
// Filled / remaining portions of the bar, using the last computed width
const ok = this.progressStyle.chars[0].repeat(progress.lastScaledSize);
const ko = this.progressStyle.chars[1].repeat(this.progressMaxScaledSize - progress.lastScaledSize);
this.stdout.write(`${formatUtils.pretty(this.configuration, `➤`, `blueBright`)} ${this.formatName(null)}: ${spinner} ${ok}${ko}\n`);
}
// Keep the spinner animated even when no progress events arrive
this.progressTimeout = setTimeout(() => {
this.refreshProgress();
}, PROGRESS_INTERVAL);
}
refreshProgress(delta = 0) {
let needsUpdate = false;
if (this.progress.size === 0) {
needsUpdate = true;
}
else {
for (const progress of this.progress.values()) {
const refreshedScaledSize = Math.trunc(this.progressMaxScaledSize * progress.definition.progress);
const previousScaledSize = progress.lastScaledSize;
progress.lastScaledSize = refreshedScaledSize;
if (refreshedScaledSize !== previousScaledSize) {
needsUpdate = true;
break;
}
}
}
if (needsUpdate) {
this.clearProgress({ delta });
this.writeProgress();
}
}
truncate(str, { truncate } = {}) {
if (!this.configuration.get(`enableProgressBars`))
truncate = false;
if (typeof truncate === `undefined`)
truncate = this.configuration.get(`preferTruncatedLines`);
// The -1 is to account for terminals that would wrap after
// the last column rather before the first overwrite
if (truncate)
str = slice_ansi_1.default(str, 0, process.stdout.columns - 1);
return str;
}
// Formats a message-name prefix for display, delegating to the module-level
// `formatName` helper with this report's configuration and JSON mode.
formatName(name) {
return formatName(name, {
configuration: this.configuration,
json: this.json,
});
}
// Like `formatName`, but via the module-level helper that may wrap the name
// in a terminal hyperlink when the environment supports them.
formatNameWithHyperlink(name) {
return formatNameWithHyperlink(name, {
configuration: this.configuration,
json: this.json,
});
}
// Returns the indentation prefix for nested sections: one `│ ` per level.
formatIndent() {
return `│ `.repeat(this.indent);
}
}
exports.StreamReport = StreamReport;
/***/ }),
/***/ 58166:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.TelemetryManager = exports.MetricName = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const httpUtils = tslib_1.__importStar(__webpack_require__(57392));
const miscUtils = tslib_1.__importStar(__webpack_require__(63111));
var MetricName;
(function (MetricName) {
MetricName["VERSION"] = "version";
MetricName["COMMAND_NAME"] = "commandName";
MetricName["PLUGIN_NAME"] = "pluginName";
MetricName["INSTALL_COUNT"] = "installCount";
MetricName["PROJECT_COUNT"] = "projectCount";
MetricName["WORKSPACE_COUNT"] = "workspaceCount";
MetricName["DEPENDENCY_COUNT"] = "dependencyCount";
MetricName["EXTENSION"] = "packageExtension";
})(MetricName = exports.MetricName || (exports.MetricName = {}));
/**
 * Collects anonymous usage telemetry (simple values, hit counters, and
 * enumerated sets), buffers it into a `telemetry.json` registry file under
 * the global folder, and periodically uploads the accumulated blocks.
 */
class TelemetryManager {
constructor(configuration, accountId) {
// In-memory accumulators, flushed to disk on process exit (see startBuffer)
this.values = new Map();
this.hits = new Map();
this.enumerators = new Map();
this.configuration = configuration;
const registryFile = this.getRegistryPath();
// First run is detected by the absence of the registry file
this.isNew = !fslib_1.xfs.existsSync(registryFile);
this.sendReport(accountId);
this.startBuffer();
}
reportVersion(value) {
this.reportValue(MetricName.VERSION, value);
}
reportCommandName(value) {
this.reportValue(MetricName.COMMAND_NAME, value || ``);
}
reportPluginName(value) {
this.reportValue(MetricName.PLUGIN_NAME, value);
}
reportProject(cwd) {
this.reportEnumerator(MetricName.PROJECT_COUNT, cwd);
}
reportInstall(nodeLinker) {
this.reportHit(MetricName.INSTALL_COUNT, nodeLinker);
}
reportPackageExtension(value) {
this.reportValue(MetricName.EXTENSION, value);
}
reportWorkspaceCount(count) {
this.reportValue(MetricName.WORKSPACE_COUNT, String(count));
}
reportDependencyCount(count) {
this.reportValue(MetricName.DEPENDENCY_COUNT, String(count));
}
// Records a distinct value for a metric (deduplicated via a Set)
reportValue(metric, value) {
miscUtils.getSetWithDefault(this.values, metric).add(value);
}
// Records a distinct enumerated item; only the count is uploaded later
reportEnumerator(metric, value) {
miscUtils.getSetWithDefault(this.enumerators, metric).add(value);
}
// Increments a per-metric counter, bucketed by the optional `extra` key
reportHit(metric, extra = `*`) {
const ns = miscUtils.getMapWithDefault(this.hits, metric);
const current = miscUtils.getFactoryWithDefault(ns, extra, () => 0);
ns.set(extra, current + 1);
}
getRegistryPath() {
const registryFile = this.configuration.get(`globalFolder`);
return fslib_1.ppath.join(registryFile, `telemetry.json`);
}
// Uploads the buffered telemetry blocks when the configured interval has
// elapsed. On first run (no lastUpdate on disk), the first upload is pushed
// into the future by a random offset, presumably to spread load — the
// randomized default below makes nextUpdate > now in that case.
sendReport(accountId) {
var _a, _b, _c;
const registryFile = this.getRegistryPath();
let content;
try {
content = fslib_1.xfs.readJsonSync(registryFile);
}
catch (_d) {
content = {};
}
const now = Date.now();
const interval = this.configuration.get(`telemetryInterval`) * 24 * 60 * 60 * 1000;
const lastUpdate = (_a = content.lastUpdate) !== null && _a !== void 0 ? _a : now + interval + Math.floor(interval * Math.random());
const nextUpdate = lastUpdate + interval;
// Not due yet, and a real timestamp exists on disk: nothing to do
if (nextUpdate > now && content.lastUpdate != null)
return;
try {
fslib_1.xfs.mkdirSync(fslib_1.ppath.dirname(registryFile), { recursive: true });
fslib_1.xfs.writeJsonSync(registryFile, { lastUpdate: now });
}
catch (_e) {
// In some cases this location is read-only. Too bad 🤷♀️
return;
}
// First run: the timestamp was just seeded; skip the actual upload
if (nextUpdate > now)
return;
if (!content.blocks)
return;
for (const [userId, block] of Object.entries((_b = content.blocks) !== null && _b !== void 0 ? _b : {})) {
if (Object.keys(block).length === 0)
continue;
const upload = block;
upload.userId = userId;
// Enumerators are uploaded as counts, not as the raw collected values
for (const key of Object.keys((_c = upload.enumerators) !== null && _c !== void 0 ? _c : {}))
upload.enumerators[key] = upload.enumerators[key].length;
const rawUrl = `https://browser-http-intake.logs.datadoghq.eu/v1/input/${accountId}?ddsource=yarn`;
httpUtils.post(rawUrl, upload, {
configuration: this.configuration,
}).catch(() => {
// Nothing we can do
});
}
}
// Merges the in-memory accumulators into the on-disk registry, keyed by the
// configured telemetry user id (`*` when none is set).
applyChanges() {
var _a, _b, _c, _d, _e, _f, _g, _h, _j;
const registryFile = this.getRegistryPath();
let content;
try {
content = fslib_1.xfs.readJsonSync(registryFile);
}
catch (_k) {
content = {};
}
const userId = (_a = this.configuration.get(`telemetryUserId`)) !== null && _a !== void 0 ? _a : `*`;
const blocks = content.blocks = (_b = content.blocks) !== null && _b !== void 0 ? _b : {};
const block = blocks[userId] = (_c = blocks[userId]) !== null && _c !== void 0 ? _c : {};
// Hit counters are summed into the stored counts
for (const key of this.hits.keys()) {
const store = block.hits = (_d = block.hits) !== null && _d !== void 0 ? _d : {};
const ns = store[key] = (_e = store[key]) !== null && _e !== void 0 ? _e : {};
for (const [extra, value] of this.hits.get(key)) {
ns[extra] = ((_f = ns[extra]) !== null && _f !== void 0 ? _f : 0) + value;
}
}
// Values and enumerators are unioned with what's already stored
for (const field of [`values`, `enumerators`]) {
for (const key of this[field].keys()) {
const store = block[field] = (_g = block[field]) !== null && _g !== void 0 ? _g : {};
store[key] = [...new Set([
...(_h = store[key]) !== null && _h !== void 0 ? _h : [],
...(_j = this[field].get(key)) !== null && _j !== void 0 ? _j : [],
])];
}
}
fslib_1.xfs.mkdirSync(fslib_1.ppath.dirname(registryFile), { recursive: true });
fslib_1.xfs.writeJsonSync(registryFile, content);
}
// Defers persisting the accumulators until process exit, so a single run
// performs at most one registry write.
startBuffer() {
process.on(`exit`, () => {
try {
this.applyChanges();
}
catch (_a) {
// Explicitly ignore errors
}
});
}
}
exports.TelemetryManager = TelemetryManager;
/***/ }),
/***/ 6059:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.ThrowReport = void 0;
const Report_1 = __webpack_require__(50334);
/**
 * A report implementation that produces no output at all; failures surface
 * by being thrown (through the base class) rather than printed.
 */
class ThrowReport extends Report_1.Report {
    reportCacheHit(locator) {
    }
    reportCacheMiss(locator) {
    }
    startTimerSync(what, opts, cb) {
        // `opts` is optional — when omitted, the callback arrives second
        const callback = typeof opts === `function` ? opts : cb;
        return callback();
    }
    async startTimerPromise(what, opts, cb) {
        const callback = typeof opts === `function` ? opts : cb;
        return await callback();
    }
    async startCacheReport(cb) {
        return await cb();
    }
    reportSeparator() {
    }
    reportInfo(name, text) {
    }
    reportWarning(name, text) {
    }
    reportError(name, text) {
    }
    reportProgress(progress) {
        // Drain the async iterable so producers don't stall waiting on us
        const promise = Promise.resolve().then(async () => {
            // eslint-disable-next-line no-empty-pattern
            for await (const {} of progress) {
                // Events are intentionally discarded
            }
        });
        const stop = () => {
            // Nothing to stop
        };
        // NOTE(review): spreading a Promise copies no own enumerable
        // properties, so the returned object only exposes `stop`; kept
        // exactly as the original to preserve observable behavior.
        return { ...promise, stop };
    }
    reportJson(data) {
        // JSON output is intentionally ignored
    }
    async finalize() {
    }
}
exports.ThrowReport = ThrowReport;
/***/ }),
/***/ 7100:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.VirtualFetcher = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
/**
 * Fetcher for `virtual:<hash>#<inner-reference>` locators. It unwraps the
 * inner locator, delegates the actual fetch, then exposes the result through
 * an aliased filesystem rooted inside the virtual folder.
 */
class VirtualFetcher {
    supports(locator) {
        return locator.reference.startsWith(`virtual:`);
    }
    getLocalPath(locator, opts) {
        const nextLocator = this.devirtualizeLocator(locator);
        return opts.fetcher.getLocalPath(nextLocator, opts);
    }
    async fetch(locator, opts) {
        const nextLocator = this.devirtualizeLocator(locator);
        const parentFetch = await opts.fetcher.fetch(nextLocator, opts);
        return await this.ensureVirtualLink(locator, parentFetch, opts);
    }
    getLocatorFilename(locator) {
        return structUtils.slugifyLocator(locator);
    }
    /**
     * Extracts the wrapped locator from a `virtual:…#…` reference.
     * Previously duplicated inline by both `getLocalPath` and `fetch`.
     * @throws when the reference contains no `#` separator
     */
    devirtualizeLocator(locator) {
        const splitPoint = locator.reference.indexOf(`#`);
        if (splitPoint === -1)
            throw new Error(`Invalid virtual package reference`);
        const nextReference = locator.reference.slice(splitPoint + 1);
        return structUtils.makeLocator(locator, nextReference);
    }
    async ensureVirtualLink(locator, sourceFetch, opts) {
        const to = sourceFetch.packageFs.getRealPath();
        const virtualFolder = opts.project.configuration.get(`virtualFolder`);
        const virtualName = this.getLocatorFilename(locator);
        const virtualPath = fslib_1.VirtualFS.makeVirtualPath(virtualFolder, virtualName, to);
        // We then use an alias to tell anyone that asks us that we're operating within the virtual folder, while still using the same old fs
        const aliasFs = new fslib_1.AliasFS(virtualPath, { baseFs: sourceFetch.packageFs, pathUtils: fslib_1.ppath });
        return { ...sourceFetch, packageFs: aliasFs };
    }
}
exports.VirtualFetcher = VirtualFetcher;
/***/ }),
/***/ 55605:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.VirtualResolver = void 0;
/**
 * Resolver for `virtual:` descriptors/locators. Virtual packages are only
 * created AFTER the dependency tree has been fully resolved, so every actual
 * resolution entry point here is an assertion failure by design — only the
 * `supports*` predicates are expected to run.
 */
class VirtualResolver {
    static isVirtualDescriptor(descriptor) {
        return descriptor.range.startsWith(VirtualResolver.protocol);
    }
    static isVirtualLocator(locator) {
        return locator.reference.startsWith(VirtualResolver.protocol);
    }
    supportsDescriptor(descriptor, opts) {
        return VirtualResolver.isVirtualDescriptor(descriptor);
    }
    supportsLocator(locator, opts) {
        return VirtualResolver.isVirtualLocator(locator);
    }
    shouldPersistResolution(locator, opts) {
        return false;
    }
    bindDescriptor(descriptor, locator, opts) {
        // Virtualization happens after resolution completes, at which point
        // binding descriptors no longer makes sense.
        throw new Error(`Assertion failed: calling "bindDescriptor" on a virtual descriptor is unsupported`);
    }
    getResolutionDependencies(descriptor, opts) {
        // Same reasoning as `bindDescriptor`: this should never be reached.
        throw new Error(`Assertion failed: calling "getResolutionDependencies" on a virtual descriptor is unsupported`);
    }
    async getCandidates(descriptor, dependencies, opts) {
        // Same reasoning as `bindDescriptor`: this should never be reached.
        throw new Error(`Assertion failed: calling "getCandidates" on a virtual descriptor is unsupported`);
    }
    async getSatisfying(descriptor, candidates, opts) {
        // Same reasoning as `bindDescriptor`: this should never be reached.
        throw new Error(`Assertion failed: calling "getSatisfying" on a virtual descriptor is unsupported`);
    }
    async resolve(locator, opts) {
        // Same reasoning as `bindDescriptor`: this should never be reached.
        throw new Error(`Assertion failed: calling "resolve" on a virtual locator is unsupported`);
    }
}
exports.VirtualResolver = VirtualResolver;
VirtualResolver.protocol = `virtual:`;
/***/ }),
/***/ 2234:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Workspace = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const globby_1 = tslib_1.__importDefault(__webpack_require__(25839));
const semver_1 = tslib_1.__importDefault(__webpack_require__(74451));
const Manifest_1 = __webpack_require__(11658);
const WorkspaceResolver_1 = __webpack_require__(83740);
const hashUtils = tslib_1.__importStar(__webpack_require__(73279));
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
/**
 * A single workspace within a project: wraps its manifest, derives stable
 * locators/descriptors for it, and discovers its child workspaces.
 */
class Workspace {
constructor(workspaceCwd, { project }) {
// Cwds of the child workspaces matched by this workspace's definitions
this.workspacesCwds = new Set();
// Generated at resolution; basically dependencies + devDependencies + child workspaces
this.dependencies = new Map();
this.project = project;
this.cwd = workspaceCwd;
}
// Loads the manifest, derives the workspace's identity (locator + anchored
// descriptor/locator), and globs for child workspaces. Must be called right
// after construction, before the workspace is used.
async setup() {
// @ts-expect-error: It's ok to initialize it now
this.manifest = fslib_1.xfs.existsSync(fslib_1.ppath.join(this.cwd, Manifest_1.Manifest.fileName))
? await Manifest_1.Manifest.find(this.cwd)
: new Manifest_1.Manifest();
// We use ppath.relative to guarantee that the default hash will be consistent even if the project is installed on different OS / path
// @ts-expect-error: It's ok to initialize it now, even if it's readonly (setup is called right after construction)
this.relativeCwd = fslib_1.ppath.relative(this.project.cwd, this.cwd) || fslib_1.PortablePath.dot;
// Unnamed workspaces get a synthetic ident derived from their folder name
// plus a short hash of their relative path (stable across machines)
const ident = this.manifest.name ? this.manifest.name : structUtils.makeIdent(null, `${this.computeCandidateName()}-${hashUtils.makeHash(this.relativeCwd).substr(0, 6)}`);
const reference = this.manifest.version ? this.manifest.version : `0.0.0`;
// @ts-expect-error: It's ok to initialize it now, even if it's readonly (setup is called right after construction)
this.locator = structUtils.makeLocator(ident, reference);
// @ts-expect-error: It's ok to initialize it now, even if it's readonly (setup is called right after construction)
this.anchoredDescriptor = structUtils.makeDescriptor(this.locator, `${WorkspaceResolver_1.WorkspaceResolver.protocol}${this.relativeCwd}`);
// @ts-expect-error: It's ok to initialize it now, even if it's readonly (setup is called right after construction)
this.anchoredLocator = structUtils.makeLocator(this.locator, `${WorkspaceResolver_1.WorkspaceResolver.protocol}${this.relativeCwd}`);
const patterns = this.manifest.workspaceDefinitions.map(({ pattern }) => pattern);
const relativeCwds = await globby_1.default(patterns, {
absolute: true,
cwd: fslib_1.npath.fromPortablePath(this.cwd),
expandDirectories: false,
onlyDirectories: true,
onlyFiles: false,
ignore: [`**/node_modules`, `**/.git`, `**/.yarn`],
});
// It seems that the return value of globby isn't in any guaranteed order - not even the directory listing order
relativeCwds.sort();
for (const relativeCwd of relativeCwds) {
const candidateCwd = fslib_1.ppath.resolve(this.cwd, fslib_1.npath.toPortablePath(relativeCwd));
// Only folders that actually contain a manifest count as workspaces
if (fslib_1.xfs.existsSync(fslib_1.ppath.join(candidateCwd, `package.json`))) {
this.workspacesCwds.add(candidateCwd);
}
}
}
// Tells whether this workspace satisfies the given dependency range:
// either an explicit `workspace:` range (by path, `*`, or semver), or —
// when transparent workspaces are enabled — a plain semver range matching
// this workspace's version.
accepts(range) {
const protocolIndex = range.indexOf(`:`);
const protocol = protocolIndex !== -1
? range.slice(0, protocolIndex + 1)
: null;
const pathname = protocolIndex !== -1
? range.slice(protocolIndex + 1)
: range;
if (protocol === WorkspaceResolver_1.WorkspaceResolver.protocol && fslib_1.ppath.normalize(pathname) === this.relativeCwd)
return true;
if (protocol === WorkspaceResolver_1.WorkspaceResolver.protocol && pathname === `*`)
return true;
if (!semver_1.default.validRange(pathname))
return false;
if (protocol === WorkspaceResolver_1.WorkspaceResolver.protocol)
return semver_1.default.satisfies(this.manifest.version !== null ? this.manifest.version : `0.0.0`, pathname);
if (!this.project.configuration.get(`enableTransparentWorkspaces`))
return false;
if (this.manifest.version !== null)
return semver_1.default.satisfies(this.manifest.version, pathname);
return false;
}
// Fallback name used when the manifest declares no `name` field
computeCandidateName() {
if (this.cwd === this.project.cwd) {
return `root-workspace`;
}
else {
return `${fslib_1.ppath.basename(this.cwd)}` || `unnamed-workspace`;
}
}
// Writes the (possibly modified) manifest back to package.json, preserving
// the original indentation style.
async persistManifest() {
const data = {};
this.manifest.exportTo(data);
const path = fslib_1.ppath.join(this.cwd, Manifest_1.Manifest.fileName);
const content = `${JSON.stringify(data, null, this.manifest.indent)}\n`;
await fslib_1.xfs.changeFilePromise(path, content, {
automaticNewlines: true,
});
}
}
exports.Workspace = Workspace;
/***/ }),
/***/ 2925:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.WorkspaceFetcher = void 0;
const fslib_1 = __webpack_require__(91794);
const WorkspaceResolver_1 = __webpack_require__(83740);
/**
 * Fetcher for `workspace:` locators. Workspaces already live on disk, so
 * "fetching" just exposes the workspace folder through a CwdFS — nothing is
 * copied or downloaded.
 */
class WorkspaceFetcher {
    supports(locator) {
        return locator.reference.startsWith(WorkspaceResolver_1.WorkspaceResolver.protocol);
    }
    getLocalPath(locator, opts) {
        return this.getWorkspace(locator, opts).cwd;
    }
    async fetch(locator, opts) {
        const { cwd } = this.getWorkspace(locator, opts);
        return { packageFs: new fslib_1.CwdFS(cwd), prefixPath: fslib_1.PortablePath.dot, localPath: cwd };
    }
    getWorkspace(locator, opts) {
        // The part after the protocol is the workspace's relative cwd
        const workspaceCwd = locator.reference.slice(WorkspaceResolver_1.WorkspaceResolver.protocol.length);
        return opts.project.getWorkspaceByCwd(workspaceCwd);
    }
}
exports.WorkspaceFetcher = WorkspaceFetcher;
/***/ }),
/***/ 83740:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.WorkspaceResolver = void 0;
const types_1 = __webpack_require__(79588);
/**
 * Resolver for `workspace:` ranges. Also claims plain descriptors that
 * happen to match a local workspace (transparent workspaces). Resolutions
 * are never persisted since workspaces are local folders.
 */
class WorkspaceResolver {
    supportsDescriptor(descriptor, opts) {
        if (descriptor.range.startsWith(WorkspaceResolver.protocol))
            return true;
        // Transparent workspaces: a non-workspace range may still resolve to
        // a workspace of the current project
        return opts.project.tryWorkspaceByDescriptor(descriptor) !== null;
    }
    supportsLocator(locator, opts) {
        return locator.reference.startsWith(WorkspaceResolver.protocol);
    }
    shouldPersistResolution(locator, opts) {
        return false;
    }
    bindDescriptor(descriptor, fromLocator, opts) {
        return descriptor;
    }
    getResolutionDependencies(descriptor, opts) {
        return [];
    }
    async getCandidates(descriptor, dependencies, opts) {
        const workspace = opts.project.getWorkspaceByDescriptor(descriptor);
        return [workspace.anchoredLocator];
    }
    async getSatisfying(descriptor, references, opts) {
        // Candidate filtering isn't implemented for workspaces
        return null;
    }
    async resolve(locator, opts) {
        const relativeCwd = locator.reference.slice(WorkspaceResolver.protocol.length);
        const workspace = opts.project.getWorkspaceByCwd(relativeCwd);
        const { manifest } = workspace;
        return {
            ...locator,
            version: manifest.version || `0.0.0`,
            languageName: `unknown`,
            linkType: types_1.LinkType.SOFT,
            dependencies: new Map([...manifest.dependencies, ...manifest.devDependencies]),
            peerDependencies: new Map([...manifest.peerDependencies]),
            dependenciesMeta: manifest.dependenciesMeta,
            peerDependenciesMeta: manifest.peerDependenciesMeta,
            bin: manifest.bin,
        };
    }
}
exports.WorkspaceResolver = WorkspaceResolver;
WorkspaceResolver.protocol = `workspace:`;
/***/ }),
/***/ 71774:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.YarnVersion = void 0;
// `YARN_VERSION` is a global injected at bundle time; when running from
// sources (where it isn't defined) the exported version is null.
exports.YarnVersion = typeof YARN_VERSION !== `undefined`
? YARN_VERSION
: null;
/***/ }),
/***/ 12915:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.execvp = exports.pipevp = exports.EndStrategy = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const cross_spawn_1 = tslib_1.__importDefault(__webpack_require__(19789));
var EndStrategy;
(function (EndStrategy) {
EndStrategy[EndStrategy["Never"] = 0] = "Never";
EndStrategy[EndStrategy["ErrorCode"] = 1] = "ErrorCode";
EndStrategy[EndStrategy["Always"] = 2] = "Always";
})(EndStrategy = exports.EndStrategy || (exports.EndStrategy = {}));
// Tells whether a stream is backed by a real file descriptor (and can thus
// be handed to the child process directly instead of being piped).
function hasFd(stream) {
    if (stream === null)
        return false;
    // @ts-expect-error: Not sure how to typecheck this field
    return typeof stream.fd === `number`;
}
// Swallows SIGINT in this process: Ctrl-C should kill the innermost child
// process instead; its termination then causes our own process to exit.
function sigintHandler() {
// We don't want SIGINT to kill our process; we want it to kill the
// innermost process, whose end will cause our own to exit.
}
// Rather than attaching one SIGINT handler for each process, we
// attach a single one and use a refcount to detect once it's no
// longer needed.
let sigintRefCount = 0;
/**
 * Spawns a child process and pipes its stdio to/from the given streams.
 * Streams backed by real file descriptors are passed to the child directly;
 * others are piped manually. Resolves with `{ code }` once the child closes;
 * rejects on spawn errors, or on non-zero exits when `strict` is set.
 * `end` controls when the output streams get `.end()`-ed.
 */
async function pipevp(fileName, args, { cwd, env = process.env, strict = false, stdin = null, stdout, stderr, end = EndStrategy.Always }) {
const stdio = [`pipe`, `pipe`, `pipe`];
if (stdin === null)
stdio[0] = `ignore`;
else if (hasFd(stdin))
stdio[0] = stdin;
if (hasFd(stdout))
stdio[1] = stdout;
if (hasFd(stderr))
stdio[2] = stderr;
// A single shared SIGINT handler, refcounted across concurrent children
if (sigintRefCount++ === 0)
process.on(`SIGINT`, sigintHandler);
const child = cross_spawn_1.default(fileName, args, {
cwd: fslib_1.npath.fromPortablePath(cwd),
env: {
...env,
PWD: fslib_1.npath.fromPortablePath(cwd),
},
stdio,
});
// Manually bridge the streams that couldn't be attached as raw fds
if (!hasFd(stdin) && stdin !== null)
stdin.pipe(child.stdin);
if (!hasFd(stdout))
child.stdout.pipe(stdout, { end: false });
if (!hasFd(stderr))
child.stderr.pipe(stderr, { end: false });
const closeStreams = () => {
for (const stream of new Set([stdout, stderr])) {
if (!hasFd(stream)) {
stream.end();
}
}
};
return new Promise((resolve, reject) => {
child.on(`error`, error => {
if (--sigintRefCount === 0)
process.off(`SIGINT`, sigintHandler);
if (end === EndStrategy.Always || end === EndStrategy.ErrorCode)
closeStreams();
reject(error);
});
child.on(`close`, (code, sig) => {
if (--sigintRefCount === 0)
process.off(`SIGINT`, sigintHandler);
if (end === EndStrategy.Always || (end === EndStrategy.ErrorCode && code > 0))
closeStreams();
if (code === 0 || !strict) {
resolve({ code: getExitCode(code, sig) });
}
else if (code !== null) {
reject(new Error(`Child "${fileName}" exited with exit code ${code}`));
}
else {
// A null code means the child was terminated by a signal
reject(new Error(`Child "${fileName}" exited with signal ${sig}`));
}
});
});
}
exports.pipevp = pipevp;
/**
 * Spawns a child process, captures its stdout/stderr in full, and resolves
 * with `{ code, stdout, stderr }` (strings, or Buffers when
 * `encoding === "buffer"`). Rejects on spawn errors, or on non-zero exits
 * when `strict` is set (the rejection carries code/stdout/stderr too).
 */
async function execvp(fileName, args, { cwd, env = process.env, encoding = `utf8`, strict = false }) {
const stdio = [`ignore`, `pipe`, `pipe`];
const stdoutChunks = [];
const stderrChunks = [];
const nativeCwd = fslib_1.npath.fromPortablePath(cwd);
// Only refresh PWD when the caller's env already defines it — presumably
// to avoid introducing the variable where it wasn't set (matches pipevp's
// behavior only partially; confirm against upstream if changing).
if (typeof env.PWD !== `undefined`)
env = { ...env, PWD: nativeCwd };
const subprocess = cross_spawn_1.default(fileName, args, {
cwd: nativeCwd,
env,
stdio,
});
subprocess.stdout.on(`data`, (chunk) => {
stdoutChunks.push(chunk);
});
subprocess.stderr.on(`data`, (chunk) => {
stderrChunks.push(chunk);
});
return await new Promise((resolve, reject) => {
subprocess.on(`error`, reject);
subprocess.on(`close`, (code, signal) => {
const stdout = encoding === `buffer`
? Buffer.concat(stdoutChunks)
: Buffer.concat(stdoutChunks).toString(encoding);
const stderr = encoding === `buffer`
? Buffer.concat(stderrChunks)
: Buffer.concat(stderrChunks).toString(encoding);
if (code === 0 || !strict) {
resolve({
code: getExitCode(code, signal), stdout, stderr,
});
}
else {
// Attach the captured output to the error so callers can inspect it
reject(Object.assign(new Error(`Child "${fileName}" exited with exit code ${code}\n\n${stderr}`), {
code: getExitCode(code, signal), stdout, stderr,
}));
}
});
});
}
exports.execvp = execvp;
// Conventional 128+n exit codes for the signals we care about
const signalToCodeMap = new Map([
    [`SIGINT`, 2],
    [`SIGQUIT`, 3],
    [`SIGKILL`, 9],
    [`SIGTERM`, 15],
]);
// Normalizes a child's exit status: death by a known signal becomes
// 128 + signal number; otherwise the raw code is returned, defaulting to 1
// when it's null/undefined (unknown signal).
function getExitCode(code, signal) {
    const signalCode = signalToCodeMap.get(signal);
    if (typeof signalCode === `undefined`)
        return code !== null && code !== void 0 ? code : 1;
    return 128 + signalCode;
}
/***/ }),
/***/ 13061:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.isFolderInside = exports.getHomeFolder = exports.getDefaultGlobalFolder = void 0;
const fslib_1 = __webpack_require__(91794);
const os_1 = __webpack_require__(12087);
/**
 * Computes the default location of Yarn's global folder, following platform
 * conventions: %LOCALAPPDATA%\Yarn\Berry on Windows, $XDG_DATA_HOME/yarn/berry
 * when XDG is configured, and ~/.yarn/berry otherwise.
 */
function getDefaultGlobalFolder() {
    if (process.platform === `win32`) {
        const appData = process.env.LOCALAPPDATA || fslib_1.npath.join(os_1.homedir(), `AppData`, `Local`);
        const base = fslib_1.npath.toPortablePath(appData);
        return fslib_1.ppath.resolve(base, `Yarn/Berry`);
    }
    if (process.env.XDG_DATA_HOME) {
        const base = fslib_1.npath.toPortablePath(process.env.XDG_DATA_HOME);
        return fslib_1.ppath.resolve(base, `yarn/berry`);
    }
    return fslib_1.ppath.resolve(getHomeFolder(), `.yarn/berry`);
}
exports.getDefaultGlobalFolder = getDefaultGlobalFolder;
// Returns the user's home directory as a portable path, falling back to
// `/usr/local/share` when the OS reports none.
function getHomeFolder() {
    const home = os_1.homedir() || `/usr/local/share`;
    return fslib_1.npath.toPortablePath(home);
}
exports.getHomeFolder = getHomeFolder;
/**
 * Tells whether `target` lies inside `parent`. Returns false when the two
 * paths are identical, or when `target` escapes via `..` or an absolute
 * jump. Fix: now always returns a proper boolean — the previous version
 * returned the empty string (falsy, but not `false`) when target === parent.
 */
function isFolderInside(target, parent) {
    const relative = fslib_1.ppath.relative(parent, target);
    return relative.length > 0 && !relative.startsWith(`..`) && !fslib_1.ppath.isAbsolute(relative);
}
exports.isFolderInside = isFolderInside;
/***/ }),
/***/ 23821:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.addLogFilterSupport = exports.LogLevel = exports.mark = exports.json = exports.prettyList = exports.pretty = exports.applyColor = exports.applyStyle = exports.tuple = exports.supportsHyperlinks = exports.supportsColor = exports.Style = exports.Type = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const chalk_1 = tslib_1.__importDefault(__webpack_require__(59084));
const MessageName_1 = __webpack_require__(62755);
const miscUtils = tslib_1.__importStar(__webpack_require__(63111));
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
const types_1 = __webpack_require__(79588);
// Semantic categories used when formatting values for display; `pretty` and
// `json` pick a color and/or transform based on this type.
var Type;
(function (Type) {
Type["NO_HINT"] = "NO_HINT";
Type["NULL"] = "NULL";
Type["SCOPE"] = "SCOPE";
Type["NAME"] = "NAME";
Type["RANGE"] = "RANGE";
Type["REFERENCE"] = "REFERENCE";
Type["NUMBER"] = "NUMBER";
Type["PATH"] = "PATH";
Type["URL"] = "URL";
Type["ADDED"] = "ADDED";
Type["REMOVED"] = "REMOVED";
Type["CODE"] = "CODE";
Type["DURATION"] = "DURATION";
Type["SIZE"] = "SIZE";
Type["IDENT"] = "IDENT";
Type["DESCRIPTOR"] = "DESCRIPTOR";
Type["LOCATOR"] = "LOCATOR";
Type["RESOLUTION"] = "RESOLUTION";
Type["DEPENDENT"] = "DEPENDENT";
Type["PACKAGE_EXTENSION"] = "PACKAGE_EXTENSION";
})(Type = exports.Type || (exports.Type = {}));
// Extra text styles, used as bit flags by `applyStyle`
var Style;
(function (Style) {
Style[Style["BOLD"] = 2] = "BOLD";
})(Style = exports.Style || (exports.Style = {}));
// GitHub Actions renders ANSI colors but chalk can't auto-detect it (the log
// collector isn't a TTY), so force 256-color support there; otherwise trust
// chalk's own detection.
const chalkOptions = process.env.GITHUB_ACTIONS
? { level: 2 }
: chalk_1.default.supportsColor
? { level: chalk_1.default.supportsColor.level }
: { level: 0 };
exports.supportsColor = chalkOptions.level !== 0;
// Hyperlink escape sequences aren't honored by the GitHub Actions log viewer
exports.supportsHyperlinks = exports.supportsColor && !process.env.GITHUB_ACTIONS;
const chalkInstance = new chalk_1.default.Instance(chalkOptions);
// Per-type color specs as [truecolor hex, 256-color fallback]; an explicit
// null disables coloring for that type. Types absent from this map fall
// through to chalk's named-color lookup in `applyColor`.
const colors = new Map([
[Type.NO_HINT, null],
[Type.NULL, [`#a853b5`, 129]],
[Type.SCOPE, [`#d75f00`, 166]],
[Type.NAME, [`#d7875f`, 173]],
[Type.RANGE, [`#00afaf`, 37]],
[Type.REFERENCE, [`#87afff`, 111]],
[Type.NUMBER, [`#ffd700`, 220]],
[Type.PATH, [`#d75fd7`, 170]],
[Type.URL, [`#d75fd7`, 170]],
[Type.ADDED, [`#5faf00`, 70]],
[Type.REMOVED, [`#d70000`, 160]],
[Type.CODE, [`#87afff`, 111]],
[Type.SIZE, [`#ffd700`, 220]],
]);
// Just to make sure that the individual fields of the transform map have
// compatible parameter types, without upcasting the map to a too generic type
//
// We also take the opportunity to downcast the configuration into `any`,
// otherwise TypeScript will detect a circular reference and won't allow us to
// properly type the `format` method from Configuration. Since transforms are
// internal to this file, it should be fine.
// (Identity at runtime; only meaningful in the original TypeScript source.)
const validateTransform = (spec) => spec;
// Per-type formatters: each entry provides `pretty` (human-readable, may be
// colored) and `json` (machine-readable) renderings for one `Type`.
const transforms = {
[Type.NUMBER]: validateTransform({
pretty: (configuration, value) => {
return `${value}`;
},
json: (value) => {
return value;
},
}),
// Package idents, e.g. `@scope/name`
[Type.IDENT]: validateTransform({
pretty: (configuration, ident) => {
return structUtils.prettyIdent(configuration, ident);
},
json: (ident) => {
return structUtils.stringifyIdent(ident);
},
}),
// Resolved packages, e.g. `name@npm:1.0.0`
[Type.LOCATOR]: validateTransform({
pretty: (configuration, locator) => {
return structUtils.prettyLocator(configuration, locator);
},
json: (locator) => {
return structUtils.stringifyLocator(locator);
},
}),
// Dependency requests, e.g. `name@^1.0.0`
[Type.DESCRIPTOR]: validateTransform({
pretty: (configuration, descriptor) => {
return structUtils.prettyDescriptor(configuration, descriptor);
},
json: (descriptor) => {
return structUtils.stringifyDescriptor(descriptor);
},
}),
// A descriptor together with the locator it resolved to (locator may be null)
[Type.RESOLUTION]: validateTransform({
pretty: (configuration, { descriptor, locator }) => {
return structUtils.prettyResolution(configuration, descriptor, locator);
},
json: ({ descriptor, locator }) => {
return {
descriptor: structUtils.stringifyDescriptor(descriptor),
locator: locator !== null
? structUtils.stringifyLocator(locator)
: null,
};
},
}),
// A package plus one of its dependency descriptors
[Type.DEPENDENT]: validateTransform({
pretty: (configuration, { locator, descriptor }) => {
return structUtils.prettyDependent(configuration, locator, descriptor);
},
json: ({ locator, descriptor }) => {
return {
locator: structUtils.stringifyLocator(locator),
descriptor: structUtils.stringifyDescriptor(descriptor),
};
},
}),
// packageExtensions config entries; rendering depends on the extension kind
[Type.PACKAGE_EXTENSION]: validateTransform({
pretty: (configuration, packageExtension) => {
switch (packageExtension.type) {
case types_1.PackageExtensionType.Dependency:
return `${structUtils.prettyIdent(configuration, packageExtension.parentDescriptor)} ➤ ${applyColor(configuration, `dependencies`, Type.CODE)} ➤ ${structUtils.prettyIdent(configuration, packageExtension.descriptor)}`;
case types_1.PackageExtensionType.PeerDependency:
return `${structUtils.prettyIdent(configuration, packageExtension.parentDescriptor)} ➤ ${applyColor(configuration, `peerDependencies`, Type.CODE)} ➤ ${structUtils.prettyIdent(configuration, packageExtension.descriptor)}`;
case types_1.PackageExtensionType.PeerDependencyMeta:
return `${structUtils.prettyIdent(configuration, packageExtension.parentDescriptor)} ➤ ${applyColor(configuration, `peerDependenciesMeta`, Type.CODE)} ➤ ${structUtils.prettyIdent(configuration, structUtils.parseIdent(packageExtension.selector))} ➤ ${applyColor(configuration, packageExtension.key, Type.CODE)}`;
default:
throw new Error(`Assertion failed: Unsupported package extension type: ${packageExtension.type}`);
}
},
json: (packageExtension) => {
switch (packageExtension.type) {
case types_1.PackageExtensionType.Dependency:
return `${structUtils.stringifyIdent(packageExtension.parentDescriptor)} > ${structUtils.stringifyIdent(packageExtension.descriptor)}`;
case types_1.PackageExtensionType.PeerDependency:
return `${structUtils.stringifyIdent(packageExtension.parentDescriptor)} >> ${structUtils.stringifyIdent(packageExtension.descriptor)}`;
case types_1.PackageExtensionType.PeerDependencyMeta:
return `${structUtils.stringifyIdent(packageExtension.parentDescriptor)} >> ${packageExtension.selector} / ${packageExtension.key}`;
default:
throw new Error(`Assertion failed: Unsupported package extension type: ${packageExtension.type}`);
}
},
}),
// Millisecond durations: "Xm Ys" above one minute, "Xs Yms" below
[Type.DURATION]: validateTransform({
pretty: (configuration, duration) => {
if (duration > 1000 * 60) {
const minutes = Math.floor(duration / 1000 / 60);
const seconds = Math.ceil((duration - minutes * 60 * 1000) / 1000);
return seconds === 0 ? `${minutes}m` : `${minutes}m ${seconds}s`;
}
else {
const seconds = Math.floor(duration / 1000);
const milliseconds = duration - seconds * 1000;
return milliseconds === 0 ? `${seconds}s` : `${seconds}s ${milliseconds}ms`;
}
},
json: (duration) => {
return duration;
},
}),
// Byte sizes rendered with the largest fitting unit (KB..TB), 2 decimals
[Type.SIZE]: validateTransform({
pretty: (configuration, size) => {
const thresholds = [`KB`, `MB`, `GB`, `TB`];
let power = thresholds.length;
while (power > 1 && size < 1024 ** power)
power -= 1;
const factor = 1024 ** power;
const value = Math.floor(size * 100 / factor) / 100;
return applyColor(configuration, `${value} ${thresholds[power - 1]}`, Type.NUMBER);
},
json: (size) => {
return size;
},
}),
// Portable paths are shown in the platform's native form
[Type.PATH]: validateTransform({
pretty: (configuration, filePath) => {
return applyColor(configuration, fslib_1.npath.fromPortablePath(filePath), Type.PATH);
},
json: (filePath) => {
return fslib_1.npath.fromPortablePath(filePath);
},
}),
};
/**
 * Pairs a value with the format type that `pretty`/`json` should later use to
 * render it.
 *
 * @param formatType - The formatter identifier to associate with the value.
 * @param value - The raw value to format later.
 * @returns A `[value, formatType]` tuple.
 */
function tuple(formatType, value) {
  const pair = [value, formatType];
  return pair;
}
exports.tuple = tuple;
/**
 * Applies text styling flags (currently only `Style.BOLD`) to `text`,
 * honouring the `enableColors` configuration toggle.
 */
function applyStyle(configuration, text, flags) {
  if (!configuration.get(`enableColors`))
    return text;
  const wantsBold = (flags & Style.BOLD) !== 0;
  return wantsBold ? chalk_1.default.bold(text) : text;
}
exports.applyStyle = applyStyle;
/**
 * Colorizes `value` according to `formatType`, honouring the `enableColors`
 * configuration toggle and the terminal's color capabilities.
 */
function applyColor(configuration, value, formatType) {
// Colors globally disabled: return the raw value unchanged.
if (!configuration.get(`enableColors`))
return value;
const colorSpec = colors.get(formatType);
// A `null` spec explicitly marks the format type as "never colored".
if (colorSpec === null)
return value;
// No spec at all: the format type itself is used as the color name.
// Otherwise pick the first spec entry when truecolor is available
// (chalk level >= 3) and the second entry as the fallback.
// NOTE(review): assumes each spec is a two-entry pair — confirm against the
// module-level `colors` map definition.
const color = typeof colorSpec === `undefined`
? formatType
: chalkOptions.level >= 3
? colorSpec[0]
: colorSpec[1];
// Resolve the color into a chalk styling function: numbers are ansi-256
// codes, `#`-prefixed strings are hex colors, anything else is expected to
// name a chalk builtin style.
const fn = typeof color === `number`
? chalkInstance.ansi256(color)
: color.startsWith(`#`)
? chalkInstance.hex(color)
: chalkInstance[color];
if (typeof fn !== `function`)
throw new Error(`Invalid format type ${color}`);
return fn(value);
}
exports.applyColor = applyColor;
/**
 * Renders `value` into a human-readable, possibly colorized string. Format
 * types registered in `transforms` use their dedicated `pretty` handler;
 * anything else must already be a string and only gets colorized.
 */
function pretty(configuration, value, formatType) {
  if (value === null)
    return applyColor(configuration, `null`, Type.NULL);
  const hasTransform = Object.prototype.hasOwnProperty.call(transforms, formatType);
  if (hasTransform) {
    const typedTransform = transforms[formatType];
    return typedTransform.pretty(configuration, value);
  }
  if (typeof value !== `string`)
    throw new Error(`Assertion failed: Expected the value to be a string, got ${typeof value}`);
  return applyColor(configuration, value, formatType);
}
exports.pretty = pretty;
/**
 * Pretty-prints each entry of `values` and joins them with `separator`
 * (defaults to `, `).
 */
function prettyList(configuration, values, formatType, { separator = `, ` } = {}) {
  const parts = [];
  for (const value of values)
    parts.push(pretty(configuration, value, formatType));
  return parts.join(separator);
}
exports.prettyList = prettyList;
/**
 * Renders `value` into its machine-readable (JSON-friendly) representation,
 * using the registered transform when one exists for `formatType`.
 */
function json(value, formatType) {
  if (value === null)
    return null;
  if (Object.prototype.hasOwnProperty.call(transforms, formatType)) {
    miscUtils.overrideType(formatType);
    return transforms[formatType].json(value);
  }
  if (typeof value !== `string`)
    throw new Error(`Assertion failed: Expected the value to be a string, got ${typeof value}`);
  return value;
}
exports.json = json;
/**
 * Returns the set of colorized status glyphs (check / cross / question mark)
 * used by reporters.
 */
function mark(configuration) {
  const paint = (glyph, colorName) => applyColor(configuration, glyph, colorName);
  return {
    Check: paint(`✓`, `green`),
    Cross: paint(`✘`, `red`),
    Question: paint(`?`, `cyan`),
  };
}
exports.mark = mark;
// Severity levels used by the `logFilters` setting. `Discard` has no case in
// `routeMessage` below, so messages mapped to it are silently dropped.
var LogLevel;
(function (LogLevel) {
LogLevel["Error"] = "error";
LogLevel["Warning"] = "warning";
LogLevel["Info"] = "info";
LogLevel["Discard"] = "discard";
})(LogLevel = exports.LogLevel || (exports.LogLevel = {}));
/**
 * Add support for the `logFilters` setting to the specified Report
 * instance: its reportInfo/reportWarning/reportError methods are wrapped so
 * each message is re-routed to the level configured for its code or text.
 */
function addLogFilterSupport(report, { configuration }) {
const logFilters = configuration.get(`logFilters`);
// Index the filters two ways: by message code and by exact message text.
const logFiltersByCode = new Map();
const logFiltersByText = new Map();
for (const filter of logFilters) {
const level = filter.get(`level`);
// A filter without a level has no effect.
if (typeof level === `undefined`)
continue;
const code = filter.get(`code`);
if (typeof code !== `undefined`)
logFiltersByCode.set(code, level);
const text = filter.get(`text`);
if (typeof text !== `undefined`) {
logFiltersByText.set(text, level);
}
}
// Resolves the effective level for a message; text filters take precedence
// over code filters, and unnamed messages are never re-routed.
const findLogLevel = (name, text, defaultLevel) => {
if (name === null || name === MessageName_1.MessageName.UNNAMED)
return defaultLevel;
if (logFiltersByText.size > 0) {
// Strip any ANSI styling before comparing against the configured text.
const level = logFiltersByText.get(chalk_1.default.reset(text));
if (typeof level !== `undefined`) {
return level !== null && level !== void 0 ? level : defaultLevel;
}
}
if (logFiltersByCode.size > 0) {
const level = logFiltersByCode.get(MessageName_1.stringifyMessageName(name));
if (typeof level !== `undefined`) {
return level !== null && level !== void 0 ? level : defaultLevel;
}
}
return defaultLevel;
};
// Keep references to the original methods; the wrappers below dispatch
// through these so re-routing doesn't recurse.
const reportInfo = report.reportInfo;
const reportWarning = report.reportWarning;
const reportError = report.reportError;
const routeMessage = function (report, name, text, level) {
switch (findLogLevel(name, text, level)) {
case LogLevel.Info:
{
reportInfo.call(report, name, text);
}
break;
case LogLevel.Warning:
{
reportWarning.call(report, name !== null && name !== void 0 ? name : MessageName_1.MessageName.UNNAMED, text);
}
break;
case LogLevel.Error:
{
reportError.call(report, name !== null && name !== void 0 ? name : MessageName_1.MessageName.UNNAMED, text);
}
break;
}
};
report.reportInfo = function (...args) {
return routeMessage(this, ...args, LogLevel.Info);
};
report.reportWarning = function (...args) {
return routeMessage(this, ...args, LogLevel.Warning);
};
report.reportError = function (...args) {
return routeMessage(this, ...args, LogLevel.Error);
};
}
exports.addLogFilterSupport = addLogFilterSupport;
/***/ }),
/***/ 73279:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.checksumPattern = exports.checksumFile = exports.makeHash = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const crypto_1 = __webpack_require__(76417);
const globby_1 = tslib_1.__importDefault(__webpack_require__(25839));
/**
 * Returns the hex sha512 digest of the concatenation of `args`; nullish or
 * empty arguments contribute an empty string.
 */
function makeHash(...args) {
  const hasher = crypto_1.createHash(`sha512`);
  for (const segment of args)
    hasher.update(segment ? segment : ``);
  return hasher.digest(`hex`);
}
exports.makeHash = makeHash;
/**
 * Computes the hex sha512 checksum of the file at `path` by streaming its
 * contents, so large files are hashed without being fully buffered.
 */
function checksumFile(path) {
  return new Promise((resolve, reject) => {
    const hasher = crypto_1.createHash(`sha512`);
    const stream = fslib_1.xfs.createReadStream(path);
    stream.on(`error`, reject);
    stream.on(`data`, chunk => hasher.update(chunk));
    stream.on(`end`, () => resolve(hasher.digest(`hex`)));
  });
}
exports.checksumFile = checksumFile;
/**
 * Computes a stable hex sha512 checksum for every filesystem entry matching
 * `pattern` under `cwd`: each entry contributes its path plus, depending on
 * its kind, its symlink target or file contents.
 */
async function checksumPattern(pattern, { cwd }) {
// Note: We use a two-pass glob instead of using the expandDirectories option
// from globby, because the native implementation is broken.
//
// Ref: https://github.com/sindresorhus/globby/issues/147
const dirListing = await globby_1.default(pattern, {
cwd: fslib_1.npath.fromPortablePath(cwd),
expandDirectories: false,
onlyDirectories: true,
unique: true,
});
// Second pass: expand each matched directory into all of its descendants.
const dirPatterns = dirListing.map(entry => {
return `${entry}/**/*`;
});
const listing = await globby_1.default([pattern, ...dirPatterns], {
cwd: fslib_1.npath.fromPortablePath(cwd),
expandDirectories: false,
onlyFiles: false,
unique: true,
});
// Sort so the checksum doesn't depend on globbing order.
listing.sort();
const hashes = await Promise.all(listing.map(async (entry) => {
// Each entry hashes as: path NUL (symlink target | file contents)?
const parts = [Buffer.from(entry)];
const p = fslib_1.npath.toPortablePath(entry);
const stat = await fslib_1.xfs.lstatPromise(p);
if (stat.isSymbolicLink())
parts.push(Buffer.from(await fslib_1.xfs.readlinkPromise(p)));
else if (stat.isFile())
parts.push(await fslib_1.xfs.readFilePromise(p));
return parts.join(`\u0000`);
}));
const hash = crypto_1.createHash(`sha512`);
for (const sub of hashes)
hash.update(sub);
return hash.digest(`hex`);
}
exports.checksumPattern = checksumPattern;
/***/ }),
/***/ 57392:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.del = exports.post = exports.put = exports.get = exports.request = exports.Method = exports.getNetworkSettings = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const https_1 = __webpack_require__(57211);
const http_1 = __webpack_require__(98605);
const micromatch_1 = tslib_1.__importDefault(__webpack_require__(70850));
const tunnel_1 = tslib_1.__importDefault(__webpack_require__(6149));
const url_1 = __webpack_require__(78835);
const cache = new Map();
const certCache = new Map();
const globalHttpAgent = new http_1.Agent({ keepAlive: true });
const globalHttpsAgent = new https_1.Agent({ keepAlive: true });
/**
 * Parses a proxy specifier such as `http://host:port` into the options shape
 * expected by the `tunnel` agent factories. The port is only included when
 * the specifier carries one.
 */
function parseProxy(specifier) {
  const { hostname, port } = new url_1.URL(specifier);
  const proxy = { host: hostname, headers: {} };
  if (port)
    proxy.port = Number(port);
  return { proxy };
}
/**
 * Reads the CA certificate at `caFilePath`, memoized in `certCache`. The
 * pending promise is cached immediately so concurrent callers share a single
 * read; once resolved, the raw certificate replaces the promise in the cache.
 */
async function getCachedCertificate(caFilePath) {
let certificate = certCache.get(caFilePath);
if (!certificate) {
certificate = fslib_1.xfs.readFilePromise(caFilePath).then(cert => {
// Swap the cached promise for the resolved buffer.
certCache.set(caFilePath, cert);
return cert;
});
// Cache the in-flight promise to deduplicate concurrent reads.
certCache.set(caFilePath, certificate);
}
return certificate;
}
/**
 * Searches through networkSettings and returns the most specific match.
 *
 * Globs from the `networkSettings` configuration are matched against the
 * target's hostname; longer (more specific) globs win, and any key still
 * undefined afterwards falls back to the corresponding top-level setting.
 */
function getNetworkSettings(target, opts) {
// Sort the config by key length to match on the most specific pattern
const networkSettings = [...opts.configuration.get(`networkSettings`)].sort(([keyA], [keyB]) => {
return keyB.length - keyA.length;
});
// Start with every overridable key unset; first (most specific) match wins.
const mergedNetworkSettings = {
enableNetwork: undefined,
caFilePath: undefined,
httpProxy: undefined,
httpsProxy: undefined,
};
const mergableKeys = Object.keys(mergedNetworkSettings);
const url = new url_1.URL(target);
for (const [glob, config] of networkSettings) {
if (micromatch_1.default.isMatch(url.hostname, glob)) {
for (const key of mergableKeys) {
const setting = config.get(key);
// `null` settings are treated as "unset" and don't override anything.
if (setting !== null && typeof mergedNetworkSettings[key] === `undefined`) {
mergedNetworkSettings[key] = setting;
}
}
}
}
// Apply defaults
for (const key of mergableKeys) {
if (typeof mergedNetworkSettings[key] === `undefined`) {
mergedNetworkSettings[key] = opts.configuration.get(key);
}
}
return mergedNetworkSettings;
}
exports.getNetworkSettings = getNetworkSettings;
// HTTP verbs supported by `request` and its convenience wrappers below.
var Method;
(function (Method) {
Method["GET"] = "GET";
Method["PUT"] = "PUT";
Method["POST"] = "POST";
Method["DELETE"] = "DELETE";
})(Method = exports.Method || (exports.Method = {}));
/**
 * Performs an HTTP(S) request through `got`, honouring the project network
 * configuration (network enablement, proxies, TLS settings, timeouts,
 * retries, and the global concurrency limit).
 *
 * `json` toggles both `jsonRequest` (serialize the body as JSON) and
 * `jsonResponse` (parse the response as JSON) unless they are set explicitly.
 */
async function request(target, body, { configuration, headers, json, jsonRequest = json, jsonResponse = json, method = Method.GET }) {
const networkConfig = getNetworkSettings(target, { configuration });
// Refuse outright when the (possibly per-host) configuration disables network access.
if (networkConfig.enableNetwork === false)
throw new Error(`Request to '${target}' has been blocked because of your configuration settings`);
const url = new url_1.URL(target);
// Plain http is only allowed for explicitly whitelisted hostnames.
if (url.protocol === `http:` && !micromatch_1.default.isMatch(url.hostname, configuration.get(`unsafeHttpWhitelist`)))
throw new Error(`Unsafe http requests must be explicitly whitelisted in your configuration (${url.hostname})`);
// Route through tunnel agents when proxies are configured; otherwise reuse
// the module-level keep-alive agents.
const agent = {
http: networkConfig.httpProxy
? tunnel_1.default.httpOverHttp(parseProxy(networkConfig.httpProxy))
: globalHttpAgent,
https: networkConfig.httpsProxy
? tunnel_1.default.httpsOverHttp(parseProxy(networkConfig.httpsProxy))
: globalHttpsAgent,
};
const gotOptions = { agent, headers, method };
gotOptions.responseType = jsonResponse
? `json`
: `buffer`;
if (body !== null) {
// Buffers (and raw strings when not in JSON mode) are sent verbatim;
// everything else is handed to got for JSON serialization.
if (Buffer.isBuffer(body) || (!jsonRequest && typeof body === `string`)) {
gotOptions.body = body;
}
else {
// @ts-expect-error: The got types only allow an object, but got can stringify any valid JSON
gotOptions.json = body;
}
}
const socketTimeout = configuration.get(`httpTimeout`);
const retry = configuration.get(`httpRetry`);
const rejectUnauthorized = configuration.get(`enableStrictSsl`);
const caFilePath = networkConfig.caFilePath;
// `got` is loaded lazily so the bundle doesn't pay for it until a request is made.
const { default: got } = await Promise.resolve().then(() => tslib_1.__importStar(__webpack_require__(12210)));
const certificateAuthority = caFilePath
? await getCachedCertificate(caFilePath)
: undefined;
const gotClient = got.extend({
timeout: {
socket: socketTimeout,
},
retry,
https: {
rejectUnauthorized,
certificateAuthority,
},
...gotOptions,
});
// Serialize through the shared network concurrency limiter.
return configuration.getLimit(`networkConcurrency`)(() => {
return gotClient(target);
});
}
exports.request = request;
/**
 * Performs a cached GET request. The in-flight promise is cached immediately
 * so concurrent calls for the same target share one request; once resolved,
 * the response body replaces the promise in the cache.
 */
async function get(target, { configuration, json, jsonResponse = json, ...rest }) {
let entry = cache.get(target);
if (!entry) {
entry = request(target, null, { configuration, ...rest }).then(response => {
// Replace the cached promise with the resolved body.
cache.set(target, response.body);
return response.body;
});
cache.set(target, entry);
}
// Cache hits may be either a pending promise or an already-resolved body.
if (Buffer.isBuffer(entry) === false)
entry = await entry;
if (jsonResponse) {
return JSON.parse(entry.toString());
}
else {
return entry;
}
}
exports.get = get;
/**
 * Issues a PUT request against `target` and returns the response body.
 */
async function put(target, body, options) {
  const putOptions = { ...options, method: Method.PUT };
  return (await request(target, body, putOptions)).body;
}
exports.put = put;
/**
 * Issues a POST request against `target` and returns the response body.
 */
async function post(target, body, options) {
  const postOptions = { ...options, method: Method.POST };
  return (await request(target, body, postOptions)).body;
}
exports.post = post;
/**
 * Issues a bodyless DELETE request against `target` and returns the response
 * body.
 */
async function del(target, options) {
  const delOptions = { ...options, method: Method.DELETE };
  return (await request(target, null, delOptions)).body;
}
exports.del = del;
/***/ }),
/***/ 74876:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.treeUtils = exports.tgzUtils = exports.structUtils = exports.semverUtils = exports.scriptUtils = exports.miscUtils = exports.formatUtils = exports.folderUtils = exports.execUtils = exports.httpUtils = exports.hashUtils = exports.PackageExtensionStatus = exports.PackageExtensionType = exports.LinkType = exports.YarnVersion = exports.Workspace = exports.WorkspaceResolver = exports.VirtualFetcher = exports.ThrowReport = exports.TelemetryManager = exports.StreamReport = exports.Report = exports.ReportError = exports.TAG_REGEXP = exports.Project = exports.MessageName = exports.Manifest = exports.LightReport = exports.BuildType = exports.SettingsType = exports.ProjectLookup = exports.FormatType = exports.Configuration = exports.DEFAULT_LOCK_FILENAME = exports.DEFAULT_RC_FILENAME = exports.Cache = void 0;
const tslib_1 = __webpack_require__(70655);
const execUtils = tslib_1.__importStar(__webpack_require__(12915));
exports.execUtils = execUtils;
const folderUtils = tslib_1.__importStar(__webpack_require__(13061));
exports.folderUtils = folderUtils;
const formatUtils = tslib_1.__importStar(__webpack_require__(23821));
exports.formatUtils = formatUtils;
const hashUtils = tslib_1.__importStar(__webpack_require__(73279));
exports.hashUtils = hashUtils;
const httpUtils = tslib_1.__importStar(__webpack_require__(57392));
exports.httpUtils = httpUtils;
const miscUtils = tslib_1.__importStar(__webpack_require__(63111));
exports.miscUtils = miscUtils;
const scriptUtils = tslib_1.__importStar(__webpack_require__(50888));
exports.scriptUtils = scriptUtils;
const semverUtils = tslib_1.__importStar(__webpack_require__(51201));
exports.semverUtils = semverUtils;
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
exports.structUtils = structUtils;
const tgzUtils = tslib_1.__importStar(__webpack_require__(83607));
exports.tgzUtils = tgzUtils;
const treeUtils = tslib_1.__importStar(__webpack_require__(55024));
exports.treeUtils = treeUtils;
var Cache_1 = __webpack_require__(19552);
Object.defineProperty(exports, "Cache", ({ enumerable: true, get: function () { return Cache_1.Cache; } }));
var Configuration_1 = __webpack_require__(62889);
Object.defineProperty(exports, "DEFAULT_RC_FILENAME", ({ enumerable: true, get: function () { return Configuration_1.DEFAULT_RC_FILENAME; } }));
Object.defineProperty(exports, "DEFAULT_LOCK_FILENAME", ({ enumerable: true, get: function () { return Configuration_1.DEFAULT_LOCK_FILENAME; } }));
var Configuration_2 = __webpack_require__(62889);
Object.defineProperty(exports, "Configuration", ({ enumerable: true, get: function () { return Configuration_2.Configuration; } }));
Object.defineProperty(exports, "FormatType", ({ enumerable: true, get: function () { return Configuration_2.FormatType; } }));
Object.defineProperty(exports, "ProjectLookup", ({ enumerable: true, get: function () { return Configuration_2.ProjectLookup; } }));
Object.defineProperty(exports, "SettingsType", ({ enumerable: true, get: function () { return Configuration_2.SettingsType; } }));
var Installer_1 = __webpack_require__(45006);
Object.defineProperty(exports, "BuildType", ({ enumerable: true, get: function () { return Installer_1.BuildType; } }));
var LightReport_1 = __webpack_require__(60613);
Object.defineProperty(exports, "LightReport", ({ enumerable: true, get: function () { return LightReport_1.LightReport; } }));
var Manifest_1 = __webpack_require__(11658);
Object.defineProperty(exports, "Manifest", ({ enumerable: true, get: function () { return Manifest_1.Manifest; } }));
var MessageName_1 = __webpack_require__(62755);
Object.defineProperty(exports, "MessageName", ({ enumerable: true, get: function () { return MessageName_1.MessageName; } }));
var Project_1 = __webpack_require__(49015);
Object.defineProperty(exports, "Project", ({ enumerable: true, get: function () { return Project_1.Project; } }));
var ProtocolResolver_1 = __webpack_require__(6729);
Object.defineProperty(exports, "TAG_REGEXP", ({ enumerable: true, get: function () { return ProtocolResolver_1.TAG_REGEXP; } }));
var Report_1 = __webpack_require__(50334);
Object.defineProperty(exports, "ReportError", ({ enumerable: true, get: function () { return Report_1.ReportError; } }));
Object.defineProperty(exports, "Report", ({ enumerable: true, get: function () { return Report_1.Report; } }));
var StreamReport_1 = __webpack_require__(73759);
Object.defineProperty(exports, "StreamReport", ({ enumerable: true, get: function () { return StreamReport_1.StreamReport; } }));
var TelemetryManager_1 = __webpack_require__(58166);
Object.defineProperty(exports, "TelemetryManager", ({ enumerable: true, get: function () { return TelemetryManager_1.TelemetryManager; } }));
var ThrowReport_1 = __webpack_require__(6059);
Object.defineProperty(exports, "ThrowReport", ({ enumerable: true, get: function () { return ThrowReport_1.ThrowReport; } }));
var VirtualFetcher_1 = __webpack_require__(7100);
Object.defineProperty(exports, "VirtualFetcher", ({ enumerable: true, get: function () { return VirtualFetcher_1.VirtualFetcher; } }));
var WorkspaceResolver_1 = __webpack_require__(83740);
Object.defineProperty(exports, "WorkspaceResolver", ({ enumerable: true, get: function () { return WorkspaceResolver_1.WorkspaceResolver; } }));
var Workspace_1 = __webpack_require__(2234);
Object.defineProperty(exports, "Workspace", ({ enumerable: true, get: function () { return Workspace_1.Workspace; } }));
var YarnVersion_1 = __webpack_require__(71774);
Object.defineProperty(exports, "YarnVersion", ({ enumerable: true, get: function () { return YarnVersion_1.YarnVersion; } }));
var types_1 = __webpack_require__(79588);
Object.defineProperty(exports, "LinkType", ({ enumerable: true, get: function () { return types_1.LinkType; } }));
Object.defineProperty(exports, "PackageExtensionType", ({ enumerable: true, get: function () { return types_1.PackageExtensionType; } }));
Object.defineProperty(exports, "PackageExtensionStatus", ({ enumerable: true, get: function () { return types_1.PackageExtensionStatus; } }));
/***/ }),
/***/ 63111:
/***/ ((module, exports, __webpack_require__) => {
"use strict";
/* module decorator */ module = __webpack_require__.nmd(module);
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.tryParseOptionalBoolean = exports.parseOptionalBoolean = exports.parseBoolean = exports.replaceEnvVariables = exports.buildIgnorePattern = exports.sortMap = exports.dynamicRequireNoCache = exports.dynamicRequire = exports.DefaultStream = exports.BufferStream = exports.bufferStream = exports.prettifySyncErrors = exports.prettifyAsyncErrors = exports.releaseAfterUseAsync = exports.getMapWithDefault = exports.getSetWithDefault = exports.getArrayWithDefault = exports.getFactoryWithDefault = exports.convertMapsToIndexableObjects = exports.isIndexableObject = exports.mapAndFind = exports.mapAndFilter = exports.validateEnum = exports.assertNever = exports.overrideType = exports.escapeRegExp = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const clipanion_1 = __webpack_require__(87730);
const micromatch_1 = tslib_1.__importDefault(__webpack_require__(70850));
const stream_1 = __webpack_require__(92413);
/**
 * Escapes every regex metacharacter in `text` so it can be embedded verbatim
 * inside a regular expression.
 */
function escapeRegExp(text) {
  return text.replace(/[.*+?^${}()|[\]\\]/g, `\\$&`);
}
exports.escapeRegExp = escapeRegExp;
/**
 * Runtime no-op; only exists so the TypeScript source could narrow `val`'s
 * type at the call site.
 */
function overrideType(val) {
}
exports.overrideType = overrideType;
/**
 * Signals an unreachable code path; always throws.
 */
function assertNever(arg) {
  throw new Error(`Assertion failed: Unexpected object '${arg}'`);
}
exports.assertNever = assertNever;
/**
 * Asserts that `value` is one of the values of the enumeration object `def`
 * and returns it unchanged.
 */
function validateEnum(def, value) {
  const allowed = Object.values(def);
  if (allowed.includes(value) === false)
    throw new Error(`Assertion failed: Invalid value for enumeration`);
  return value;
}
exports.validateEnum = validateEnum;
/**
 * Maps `iterable` through `cb`, dropping every entry for which the callback
 * returned the `mapAndFilter.skip` sentinel.
 */
function mapAndFilter(iterable, cb) {
  const results = [];
  for (const item of iterable) {
    const mapped = cb(item);
    if (mapped !== mapAndFilterSkip)
      results.push(mapped);
  }
  return results;
}
exports.mapAndFilter = mapAndFilter;
const mapAndFilterSkip = Symbol();
mapAndFilter.skip = mapAndFilterSkip;
/**
 * Maps `iterable` through `cb` and returns the first result that isn't the
 * `mapAndFind.skip` sentinel, or `undefined` when every entry was skipped.
 */
function mapAndFind(iterable, cb) {
  for (const item of iterable) {
    const mapped = cb(item);
    if (mapped !== mapAndFindSkip)
      return mapped;
  }
  return undefined;
}
exports.mapAndFind = mapAndFind;
const mapAndFindSkip = Symbol();
mapAndFind.skip = mapAndFindSkip;
/**
 * Returns whether `value` can be indexed with bracket notation, i.e. is a
 * non-null object.
 */
function isIndexableObject(value) {
  return value !== null && typeof value === `object`;
}
exports.isIndexableObject = isIndexableObject;
/**
 * Converts Maps to indexable objects recursively.
 */
function convertMapsToIndexableObjects(arg) {
  let current = arg;
  if (current instanceof Map)
    current = Object.fromEntries(current);
  if (isIndexableObject(current)) {
    for (const key of Object.keys(current)) {
      const child = current[key];
      if (isIndexableObject(child)) {
        // @ts-expect-error: Apparently nothing in this world can be used to index type 'T & { [key: string]: unknown; }'
        current[key] = convertMapsToIndexableObjects(child);
      }
    }
  }
  return current;
}
exports.convertMapsToIndexableObjects = convertMapsToIndexableObjects;
/**
 * Returns `map.get(key)`, lazily inserting `factory()` first when the key is
 * absent (i.e. maps to `undefined`).
 */
function getFactoryWithDefault(map, key, factory) {
  let entry = map.get(key);
  if (typeof entry === `undefined`) {
    entry = factory();
    map.set(key, entry);
  }
  return entry;
}
exports.getFactoryWithDefault = getFactoryWithDefault;
/**
 * Returns `map.get(key)`, lazily inserting a fresh array when the key is
 * absent.
 */
function getArrayWithDefault(map, key) {
  let entry = map.get(key);
  if (typeof entry === `undefined`) {
    entry = [];
    map.set(key, entry);
  }
  return entry;
}
exports.getArrayWithDefault = getArrayWithDefault;
/**
 * Returns `map.get(key)`, lazily inserting a fresh Set when the key is
 * absent.
 */
function getSetWithDefault(map, key) {
  let entry = map.get(key);
  if (typeof entry === `undefined`) {
    entry = new Set();
    map.set(key, entry);
  }
  return entry;
}
exports.getSetWithDefault = getSetWithDefault;
/**
 * Returns `map.get(key)`, lazily inserting a fresh Map when the key is
 * absent.
 */
function getMapWithDefault(map, key) {
  let entry = map.get(key);
  if (typeof entry === `undefined`) {
    entry = new Map();
    map.set(key, entry);
  }
  return entry;
}
exports.getMapWithDefault = getMapWithDefault;
// Executes a chunk of code and calls a cleanup function once it returns (even
// if it throws an exception). A nullish `cleanup` skips the try/finally
// entirely.
async function releaseAfterUseAsync(fn, cleanup) {
  if (cleanup == null)
    return await fn();
  let result;
  try {
    result = await fn();
  }
  finally {
    await cleanup();
  }
  return result;
}
exports.releaseAfterUseAsync = releaseAfterUseAsync;
// Executes a chunk of code but slightly modify its exception message if it
// throws something
async function prettifyAsyncErrors(fn, update) {
  try {
    return await fn();
  }
  catch (err) {
    err.message = update(err.message);
    throw err;
  }
}
exports.prettifyAsyncErrors = prettifyAsyncErrors;
// Same thing but synchronous
function prettifySyncErrors(fn, update) {
  try {
    return fn();
  }
  catch (err) {
    err.message = update(err.message);
    throw err;
  }
}
exports.prettifySyncErrors = prettifySyncErrors;
// Converts a Node stream into a Buffer instance by collecting every `data`
// chunk until the stream ends.
async function bufferStream(stream) {
  return await new Promise((resolve, reject) => {
    const chunks = [];
    stream.on(`error`, reject);
    stream.on(`data`, chunk => chunks.push(chunk));
    stream.on(`end`, () => resolve(Buffer.concat(chunks)));
  });
}
exports.bufferStream = bufferStream;
// A stream implementation that buffers a stream to send it all at once: every
// incoming Buffer is held back and emitted as a single concatenated Buffer
// when the stream is flushed.
class BufferStream extends stream_1.Transform {
  constructor() {
    super(...arguments);
    this.chunks = [];
  }
  _transform(chunk, encoding, cb) {
    const isRawBuffer = encoding === `buffer` && Buffer.isBuffer(chunk);
    if (!isRawBuffer)
      throw new Error(`Assertion failed: BufferStream only accept buffers`);
    this.chunks.push(chunk);
    // Nothing is forwarded until _flush.
    cb(null, null);
  }
  _flush(cb) {
    cb(null, Buffer.concat(this.chunks));
  }
}
exports.BufferStream = BufferStream;
// A stream implementation that prints a message if nothing was output:
// chunks pass through unchanged, and when the stream is flushed without any
// chunk having been written, the `ifEmpty` buffer is emitted instead.
class DefaultStream extends stream_1.Transform {
  /**
   * @param ifEmpty Buffer emitted at flush time when no chunk ever passed
   *   through the stream.
   */
  constructor(ifEmpty = Buffer.alloc(0)) {
    super();
    // Flips to false as soon as any chunk is transformed.
    this.active = true;
    this.ifEmpty = ifEmpty;
  }
  _transform(chunk, encoding, cb) {
    if (encoding !== `buffer` || !Buffer.isBuffer(chunk))
      throw new Error(`Assertion failed: DefaultStream only accept buffers`);
    this.active = false;
    cb(null, chunk);
  }
  _flush(cb) {
    if (this.active && this.ifEmpty.length > 0) {
      cb(null, this.ifEmpty);
    }
    else {
      // Bug fix: the callback must be invoked on every path, otherwise the
      // stream never finishes once data has been written through it (or when
      // `ifEmpty` is empty).
      cb(null);
    }
  }
}
exports.DefaultStream = DefaultStream;
// Webpack has this annoying tendency to replace dynamic requires by a stub
// code that simply throws when called. It's all fine and dandy in the context
// of a web application, but is quite annoying when working with Node projects!
//
// Uses the real `require` when available, otherwise falls back to the
// webpack-provided external require module.
function dynamicRequire(path) {
// @ts-expect-error
if (typeof require !== `undefined`) {
// @ts-expect-error
return require(path);
}
else {
return __webpack_require__(61452)(path);
}
}
exports.dynamicRequire = dynamicRequire;
/**
 * Same as `dynamicRequire`, but evaluates the module fresh on every call by
 * temporarily evicting it from the require cache, then restoring whatever
 * entry was there before.
 */
function dynamicRequireNoCache(path) {
const physicalPath = fslib_1.npath.fromPortablePath(path);
// Save and evict the current cache entry so the require below re-executes
// the module.
const currentCacheEntry = __webpack_require__.c[physicalPath];
delete __webpack_require__.c[physicalPath];
let result;
try {
result = dynamicRequire(physicalPath);
// Detach the freshly-created module from our own children list so it can
// be garbage-collected.
const freshCacheEntry = __webpack_require__.c[physicalPath];
const freshCacheIndex = module.children.indexOf(freshCacheEntry);
if (freshCacheIndex !== -1) {
module.children.splice(freshCacheIndex, 1);
}
}
finally {
// Always restore the original cache entry, even if the require threw.
__webpack_require__.c[physicalPath] = currentCacheEntry;
}
return result;
}
exports.dynamicRequireNoCache = dynamicRequireNoCache;
// This function transforms an iterable into an array and sorts it according to
// the mapper functions provided as parameter. The mappers are expected to take
// each element from the iterable and generate a string from it, that will then
// be used to compare the entries.
//
// Using sortMap is more efficient than kinda reimplementing the logic in a sort
// predicate because sortMap caches the result of the mappers in such a way that
// they are guaranteed to be executed exactly once for each element.
function sortMap(values, mappers) {
  const entries = Array.from(values);
  const mapperList = Array.isArray(mappers) ? mappers : [mappers];
  // Precompute every sort key exactly once per element, one layer per mapper.
  const keyLayers = mapperList.map(mapper => entries.map(entry => mapper(entry)));
  const order = entries.map((_, index) => index);
  order.sort((a, b) => {
    // Compare layer by layer; the first differing key decides.
    for (const layer of keyLayers) {
      if (layer[a] < layer[b])
        return -1;
      if (layer[a] > layer[b])
        return 1;
    }
    return 0;
  });
  return order.map(index => entries[index]);
}
exports.sortMap = sortMap;
/**
 * Combines an Array of glob patterns into a regular expression.
 *
 * @param ignorePatterns An array of glob patterns
 *
 * @returns A `string` representing a regular expression or `null` if no glob patterns are provided
 */
function buildIgnorePattern(ignorePatterns) {
  if (ignorePatterns.length === 0)
    return null;
  const alternatives = ignorePatterns.map(pattern => {
    const re = micromatch_1.default.makeRe(pattern, {
      windows: false,
    });
    return `(${re.source})`;
  });
  return alternatives.join(`|`);
}
exports.buildIgnorePattern = buildIgnorePattern;
/**
 * Replaces `${NAME}`, `${NAME-fallback}`, and `${NAME:-fallback}` references
 * in `value` with the matching entries from `env`:
 *
 * - `${NAME}` resolves to the variable and throws when it's not defined;
 * - `${NAME-fallback}` uses the fallback only when NAME is not defined;
 * - `${NAME:-fallback}` uses the fallback when NAME is unset *or* empty.
 *
 * @throws clipanion UsageError when a variable without an applicable
 *   fallback cannot be resolved.
 */
function replaceEnvVariables(value, { env }) {
  // Bug fix: the named capture groups had been stripped from this build,
  // leaving the invalid pattern `(?[\d\w_]+)` — yet the callback below
  // destructures `variableName`/`colon`/`fallback` from the groups object.
  // The pattern below restores the named groups.
  const regex = /\${(?<variableName>[\d\w_]+)(?<colon>:)?(?:-(?<fallback>[^}]*))?}/g;
  return value.replace(regex, (...args) => {
    // With named groups, the last replace() callback argument is the groups object.
    const { variableName, colon, fallback } = args[args.length - 1];
    const variableExist = Object.prototype.hasOwnProperty.call(env, variableName);
    const variableValue = env[variableName];
    // Non-empty value always wins.
    if (variableValue)
      return variableValue;
    // Empty-but-defined value is kept unless the `:-` form asked otherwise.
    if (variableExist && !colon)
      return variableValue;
    if (fallback != null)
      return fallback;
    throw new clipanion_1.UsageError(`Environment variable not found (${variableName})`);
  });
}
exports.replaceEnvVariables = replaceEnvVariables;
/**
 * Parses the loose boolean spellings accepted by the configuration layer
 * (`true`/`"1"`/`1`/`true` and their falsy counterparts).
 *
 * @throws Error when `value` matches neither set.
 */
function parseBoolean(value) {
  if (value === `true` || value === `1` || value === 1 || value === true)
    return true;
  if (value === `false` || value === `0` || value === 0 || value === false)
    return false;
  throw new Error(`Couldn't parse "${value}" as a boolean`);
}
exports.parseBoolean = parseBoolean;
/**
 * Like `parseBoolean`, but passes `undefined` through unchanged.
 */
function parseOptionalBoolean(value) {
  return typeof value === `undefined` ? value : parseBoolean(value);
}
exports.parseOptionalBoolean = parseOptionalBoolean;
/**
 * Like `parseOptionalBoolean`, but returns `null` instead of throwing on
 * unparsable input.
 */
function tryParseOptionalBoolean(value) {
  try {
    return parseOptionalBoolean(value);
  }
  catch (_err) {
    return null;
  }
}
exports.tryParseOptionalBoolean = tryParseOptionalBoolean;
/***/ }),
/***/ 83825:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.builtinModules = exports.dynamicRequire = void 0;
const tslib_1 = __webpack_require__(70655);
const module_1 = tslib_1.__importDefault(__webpack_require__(32282));
/**
 * Requires `request` through the real `require` implementation when one is
 * available, falling back to the webpack-provided external require otherwise.
 */
function dynamicRequire(request) {
  const req = typeof require === `undefined`
    ? __webpack_require__(61452)
    : require;
  return req(request);
}
exports.dynamicRequire = dynamicRequire;
/**
 * Returns the set of Node builtin module names.
 */
function builtinModules() {
  // Older Node releases lack `module.builtinModules`; fall back to the
  // legacy `process.binding` listing there.
  // @ts-expect-error
  return new Set(module_1.default.builtinModules || Object.keys(process.binding(`natives`)));
}
exports.builtinModules = builtinModules;
/***/ }),
/***/ 50888:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.executeWorkspaceAccessibleBinary = exports.executePackageAccessibleBinary = exports.getWorkspaceAccessibleBinaries = exports.getPackageAccessibleBinaries = exports.maybeExecuteWorkspaceLifecycleScript = exports.executeWorkspaceLifecycleScript = exports.hasWorkspaceScript = exports.executeWorkspaceScript = exports.executePackageShellcode = exports.executePackageScript = exports.hasPackageScript = exports.prepareExternalProject = exports.makeScriptEnv = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const fslib_2 = __webpack_require__(91794);
const libzip_1 = __webpack_require__(40364);
const shell_1 = __webpack_require__(56096);
const binjumper_1 = __webpack_require__(94372);
const capitalize_1 = tslib_1.__importDefault(__webpack_require__(48403));
const p_limit_1 = tslib_1.__importDefault(__webpack_require__(94498));
const stream_1 = __webpack_require__(92413);
const Manifest_1 = __webpack_require__(11658);
const MessageName_1 = __webpack_require__(62755);
const Report_1 = __webpack_require__(50334);
const StreamReport_1 = __webpack_require__(73759);
const YarnVersion_1 = __webpack_require__(71774);
const execUtils = tslib_1.__importStar(__webpack_require__(12915));
const formatUtils = tslib_1.__importStar(__webpack_require__(23821));
const miscUtils = tslib_1.__importStar(__webpack_require__(63111));
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
var PackageManager;
(function (PackageManager) {
PackageManager["Yarn1"] = "Yarn Classic";
PackageManager["Yarn2"] = "Yarn";
PackageManager["Npm"] = "npm";
PackageManager["Pnpm"] = "pnpm";
})(PackageManager || (PackageManager = {}));
/**
 * Writes a `name` wrapper inside `location` that forwards to `argv0 args...`.
 * On Windows three extra files are emitted (binjumper `.exe`, its `.exe.info`
 * argument list, and a `.cmd` shim); the POSIX `sh` shim is always written
 * too (no `else`), so bash-on-Windows environments keep working.
 */
async function makePathWrapper(location, name, argv0, args = []) {
if (process.platform === `win32`) {
await Promise.all([
fslib_2.xfs.writeFilePromise(fslib_2.ppath.format({ dir: location, name, ext: `.exe` }), binjumper_1.getBinjumper()),
fslib_2.xfs.writeFilePromise(fslib_2.ppath.format({ dir: location, name, ext: `.exe.info` }), [argv0, ...args].join(`\n`)),
// Escape by doubling EVERY inner quote. A string pattern would only
// replace the first occurrence and emit a malformed .cmd wrapper for
// arguments containing more than one `"` (the sh branch below already
// uses a global regex for the same reason).
fslib_2.xfs.writeFilePromise(fslib_2.ppath.format({ dir: location, name, ext: `.cmd` }), `@"${argv0}" ${args.map(arg => `"${arg.replace(/"/g, `""`)}"`).join(` `)} %*\n`),
]);
}
await fslib_2.xfs.writeFilePromise(fslib_2.ppath.join(location, name), `#!/bin/sh\nexec "${argv0}" ${args.map(arg => `'${arg.replace(/'/g, `'"'"'`)}'`).join(` `)} "$@"\n`);
await fslib_2.xfs.chmodPromise(fslib_2.ppath.join(location, name), 0o755);
}
/**
 * Guesses which package manager a project at `location` uses, based on the
 * lockfiles present on disk. Returns `null` when nothing conclusive is found.
 */
async function detectPackageManager(location) {
    let lockfileContent = null;
    try {
        lockfileContent = await fslib_2.xfs.readFilePromise(fslib_2.ppath.join(location, fslib_1.Filename.lockfile), `utf8`);
    }
    catch (_a) { }
    if (lockfileContent !== null) {
        // Yarn 2+ lockfiles contain a `__metadata` top-level key; classic ones don't
        return /^__metadata:$/m.test(lockfileContent)
            ? PackageManager.Yarn2
            : PackageManager.Yarn1;
    }
    const hasFile = (filename) => fslib_2.xfs.existsSync(fslib_2.ppath.join(location, filename));
    if (hasFile(`package-lock.json`))
        return PackageManager.Npm;
    if (hasFile(`pnpm-lock.yaml`))
        return PackageManager.Pnpm;
    return null;
}
/**
 * Builds the environment used to spawn package scripts: a copy of
 * `process.env` (with the PATH key normalized to uppercase), npm-compatible
 * variables, and PATH wrappers for node/yarn/run/node-gyp written into
 * `binFolder`, which is prepended to PATH so the wrappers take precedence.
 */
async function makeScriptEnv({ project, binFolder, lifecycleScript }) {
const scriptEnv = {};
// Copy the current environment, folding any case variant of `path` into `PATH`
for (const [key, value] of Object.entries(process.env))
if (typeof value !== `undefined`)
scriptEnv[key.toLowerCase() !== `path` ? key : `PATH`] = value;
const nBinFolder = fslib_2.npath.fromPortablePath(binFolder);
// We expose the base folder in the environment so that we can later add the
// binaries for the dependencies of the active package
// NOTE(review): `nBinFolder` is already a native path; converting it a second
// time is presumably a harmless no-op — confirm on Windows.
scriptEnv.BERRY_BIN_FOLDER = fslib_2.npath.fromPortablePath(nBinFolder);
// Register some binaries that must be made available in all subprocesses
// spawned by Yarn (we thus ensure that they always use the right version)
await makePathWrapper(binFolder, `node`, process.execPath);
if (YarnVersion_1.YarnVersion !== null) {
await makePathWrapper(binFolder, `run`, process.execPath, [process.argv[1], `run`]);
await makePathWrapper(binFolder, `yarn`, process.execPath, [process.argv[1]]);
await makePathWrapper(binFolder, `yarnpkg`, process.execPath, [process.argv[1]]);
await makePathWrapper(binFolder, `node-gyp`, process.execPath, [process.argv[1], `run`, `--top-level`, `node-gyp`]);
}
if (project)
scriptEnv.INIT_CWD = fslib_2.npath.fromPortablePath(project.configuration.startingCwd);
// Prepend the wrapper folder so our shims win over any system binaries
scriptEnv.PATH = scriptEnv.PATH
? `${nBinFolder}${fslib_2.npath.delimiter}${scriptEnv.PATH}`
: `${nBinFolder}`;
scriptEnv.npm_execpath = `${nBinFolder}${fslib_2.npath.sep}yarn`;
scriptEnv.npm_node_execpath = `${nBinFolder}${fslib_2.npath.sep}node`;
const version = YarnVersion_1.YarnVersion !== null
? `yarn/${YarnVersion_1.YarnVersion}`
: `yarn/${miscUtils.dynamicRequire(`@yarnpkg/core`).version}-core`;
scriptEnv.npm_config_user_agent = `${version} npm/? node/${process.versions.node} ${process.platform} ${process.arch}`;
if (lifecycleScript)
scriptEnv.npm_lifecycle_event = lifecycleScript;
if (project) {
// Let plugins contribute extra variables and their own PATH wrappers
await project.configuration.triggerHook(hook => hook.setupScriptEnvironment, project, scriptEnv, async (name, argv0, args) => {
return await makePathWrapper(binFolder, fslib_2.toFilename(name), argv0, args);
});
}
return scriptEnv;
}
exports.makeScriptEnv = makeScriptEnv;
/**
 * Given a folder, prepares this project for use. Runs `yarn install` then
 * `yarn build` if a `package.json` is found.
 */
// Cap on concurrent external-project preparations, to bound the subprocess
// fan-out (installs are expensive and their logs interleave otherwise).
const MAX_PREPARE_CONCURRENCY = 2;
// Shared semaphore used by prepareExternalProject below.
const prepareLimit = p_limit_1.default(MAX_PREPARE_CONCURRENCY);
/**
 * Installs and packs an external project located at `cwd`, writing the
 * resulting tarball to `outputPath`. The package manager is detected from
 * the lockfiles on disk (Yarn 2 is the fallback), and the matching workflow
 * below drives the install + pack subprocesses. All output is captured into
 * a temporary log file that is kept (detached) when preparation fails.
 */
async function prepareExternalProject(cwd, outputPath, { configuration, report, workspace = null }) {
// Serialized through prepareLimit: at most MAX_PREPARE_CONCURRENCY at once
await prepareLimit(async () => {
await fslib_2.xfs.mktempPromise(async (logDir) => {
const logFile = fslib_2.ppath.join(logDir, `pack.log`);
const stdin = null;
const { stdout, stderr } = configuration.getSubprocessStreams(logFile, { prefix: cwd, report });
const packageManager = await detectPackageManager(cwd);
let effectivePackageManager;
if (packageManager !== null) {
stdout.write(`Installing the project using ${packageManager}\n\n`);
effectivePackageManager = packageManager;
}
else {
stdout.write(`No package manager detected; defaulting to Yarn\n\n`);
effectivePackageManager = PackageManager.Yarn2;
}
await fslib_2.xfs.mktempPromise(async (binFolder) => {
// Deliberately no `project` here: the external project must not inherit
// the current project's context, only the node/yarn PATH wrappers
const env = await makeScriptEnv({ binFolder });
// Each workflow returns the exit code of the first failing step,
// or 0 on success
const workflows = new Map([
[PackageManager.Yarn1, async () => {
const workspaceCli = workspace !== null
? [`workspace`, workspace]
: [];
// Makes sure that we'll be using Yarn 1.x
const version = await execUtils.pipevp(`yarn`, [`set`, `version`, `classic`, `--only-if-needed`], { cwd, env, stdin, stdout, stderr, end: execUtils.EndStrategy.ErrorCode });
if (version.code !== 0)
return version.code;
// Otherwise Yarn 1 will pack the .yarn directory :(
await fslib_2.xfs.appendFilePromise(fslib_2.ppath.join(cwd, `.npmignore`), `/.yarn\n`);
stdout.write(`\n`);
// Run an install; we can't avoid it unless we inspect the
// package.json, which I don't want to do to keep the codebase
// clean (even if it has a slight perf cost when cloning v1 repos)
const install = await execUtils.pipevp(`yarn`, [`install`], { cwd, env, stdin, stdout, stderr, end: execUtils.EndStrategy.ErrorCode });
if (install.code !== 0)
return install.code;
stdout.write(`\n`);
const pack = await execUtils.pipevp(`yarn`, [...workspaceCli, `pack`, `--filename`, fslib_2.npath.fromPortablePath(outputPath)], { cwd, env, stdin, stdout, stderr });
if (pack.code !== 0)
return pack.code;
return 0;
}],
[PackageManager.Yarn2, async () => {
const workspaceCli = workspace !== null
? [`workspace`, workspace]
: [];
// We enable inline builds, because nobody wants to
// read a logfile telling them to open another logfile
env.YARN_ENABLE_INLINE_BUILDS = `1`;
// If a lockfile doesn't exist we create a empty one to
// prevent the project root detection from thinking it's in an
// undeclared workspace when the user has a lockfile in their home
// directory on Windows
const lockfilePath = fslib_2.ppath.join(cwd, fslib_1.Filename.lockfile);
if (!(await fslib_2.xfs.existsPromise(lockfilePath)))
await fslib_2.xfs.writeFilePromise(lockfilePath, ``);
// Yarn 2 supports doing the install and the pack in a single command,
// so we leverage that. We also don't need the "set version" call since
// we're already operating within a Yarn 2 context (plus people should
// really check-in their Yarn versions anyway).
const pack = await execUtils.pipevp(`yarn`, [...workspaceCli, `pack`, `--install-if-needed`, `--filename`, fslib_2.npath.fromPortablePath(outputPath)], { cwd, env, stdin, stdout, stderr });
if (pack.code !== 0)
return pack.code;
return 0;
}],
[PackageManager.Npm, async () => {
if (workspace !== null)
throw new Error(`Workspaces aren't supported by npm, which has been detected as the primary package manager for ${cwd}`);
// Otherwise npm won't properly set the user agent, using the Yarn
// one instead
delete env.npm_config_user_agent;
// We can't use `npm ci` because some projects don't have npm
// lockfiles that are up-to-date. Hopefully npm won't decide
// to change the versions randomly.
const install = await execUtils.pipevp(`npm`, [`install`], { cwd, env, stdin, stdout, stderr, end: execUtils.EndStrategy.ErrorCode });
if (install.code !== 0)
return install.code;
// Tee npm's stdout: it goes to the log AND is buffered so we can
// recover the generated tarball name from it
const packStream = new stream_1.PassThrough();
const packPromise = miscUtils.bufferStream(packStream);
packStream.pipe(stdout);
// It seems that npm doesn't support specifying the pack output path,
// so we have to extract the stdout on top of forking it to the logs.
const pack = await execUtils.pipevp(`npm`, [`pack`, `--silent`], { cwd, env, stdin, stdout: packStream, stderr });
if (pack.code !== 0)
return pack.code;
const packOutput = (await packPromise).toString().trim();
const packTarget = fslib_2.ppath.resolve(cwd, fslib_2.npath.toPortablePath(packOutput));
// Only then can we move the pack to its rightful location
await fslib_2.xfs.renamePromise(packTarget, outputPath);
return 0;
}],
]);
const workflow = workflows.get(effectivePackageManager);
if (typeof workflow === `undefined`)
throw new Error(`Assertion failed: Unsupported workflow`);
const code = await workflow();
if (code === 0 || typeof code === `undefined`)
return;
// Preserve the log directory so the error message below stays actionable
fslib_2.xfs.detachTemp(logDir);
throw new Report_1.ReportError(MessageName_1.MessageName.PACKAGE_PREPARATION_FAILED, `Packing the package failed (exit code ${code}, logs can be found here: ${logFile})`);
});
});
});
}
exports.prepareExternalProject = prepareExternalProject;
/**
 * Checks whether the manifest of the given package declares a script named
 * `scriptName`. The package location is resolved through the project linkers
 * and read through zip archives transparently.
 */
async function hasPackageScript(locator, scriptName, { project }) {
    const pkg = project.storedPackages.get(locator.locatorHash);
    if (!pkg)
        throw new Error(`Package for ${structUtils.prettyLocator(project.configuration, locator)} not found in the project`);
    return await fslib_1.ZipOpenFS.openPromise(async (zipOpenFs) => {
        const configuration = project.configuration;
        const linkerOptions = { project, report: new StreamReport_1.StreamReport({ stdout: new stream_1.PassThrough(), configuration }) };
        const supportingLinker = configuration.getLinkers().find(candidate => candidate.supportsPackage(pkg, linkerOptions));
        if (!supportingLinker)
            throw new Error(`The package ${structUtils.prettyLocator(project.configuration, pkg)} isn't supported by any of the available linkers`);
        const location = await supportingLinker.findPackageLocation(pkg, linkerOptions);
        const packageFs = new fslib_1.CwdFS(location, { baseFs: zipOpenFs });
        const manifest = await Manifest_1.Manifest.find(fslib_1.PortablePath.dot, { baseFs: packageFs });
        return manifest.scripts.has(scriptName);
    }, {
        libzip: await libzip_1.getLibzipPromise(),
    });
}
exports.hasPackageScript = hasPackageScript;
/**
 * Runs the given script from the given package inside a temporary bin folder
 * populated with the binaries the package can access. Returns the script's
 * exit code, or 1 when the package doesn't declare such a script. Plugins
 * may wrap the execution through the `wrapScriptExecution` hook.
 */
async function executePackageScript(locator, scriptName, args, { cwd, project, stdin, stdout, stderr }) {
    return await fslib_2.xfs.mktempPromise(async (binFolder) => {
        const { manifest, env, cwd: effectiveCwd } = await initializePackageEnvironment(locator, { project, binFolder, cwd, lifecycleScript: scriptName });
        const script = manifest.scripts.get(scriptName);
        if (typeof script === `undefined`)
            return 1;
        const runScript = async () => shell_1.execute(script, args, { cwd: effectiveCwd, env, stdin, stdout, stderr });
        const executor = await project.configuration.reduceHook(hooks => hooks.wrapScriptExecution, runScript, project, locator, scriptName, {
            script, args, cwd: effectiveCwd, env, stdin, stdout, stderr,
        });
        return await executor();
    });
}
exports.executePackageScript = executePackageScript;
/**
 * Runs an arbitrary shellcode string in the environment of the given
 * package (its accessible binaries on the PATH, cwd defaulting to the
 * package location). Returns the shell exit code.
 */
async function executePackageShellcode(locator, command, args, { cwd, project, stdin, stdout, stderr }) {
    return await fslib_2.xfs.mktempPromise(async (binFolder) => {
        const packageEnv = await initializePackageEnvironment(locator, { project, binFolder, cwd });
        return await shell_1.execute(command, args, { cwd: packageEnv.cwd, env: packageEnv.env, stdin, stdout, stderr });
    });
}
exports.executePackageShellcode = executePackageShellcode;
/**
 * Builds everything required to run scripts from the given package: the
 * script environment variables, PATH wrappers in `binFolder` for every
 * binary the package can access, and the package manifest. When `cwd` is
 * not provided it defaults to the package location.
 */
async function initializePackageEnvironment(locator, { project, binFolder, cwd, lifecycleScript }) {
const pkg = project.storedPackages.get(locator.locatorHash);
if (!pkg)
throw new Error(`Package for ${structUtils.prettyLocator(project.configuration, locator)} not found in the project`);
return await fslib_1.ZipOpenFS.openPromise(async (zipOpenFs) => {
const configuration = project.configuration;
const linkers = project.configuration.getLinkers();
// The report output is discarded; we only care about the linker results
const linkerOptions = { project, report: new StreamReport_1.StreamReport({ stdout: new stream_1.PassThrough(), configuration }) };
const linker = linkers.find(linker => linker.supportsPackage(pkg, linkerOptions));
if (!linker)
throw new Error(`The package ${structUtils.prettyLocator(project.configuration, pkg)} isn't supported by any of the available linkers`);
const env = await makeScriptEnv({ project, binFolder, lifecycleScript });
// One PATH wrapper per accessible binary, all written concurrently
await Promise.all(Array.from(await getPackageAccessibleBinaries(locator, { project }), ([binaryName, [, binaryPath]]) => makePathWrapper(binFolder, fslib_2.toFilename(binaryName), process.execPath, [binaryPath])));
const packageLocation = await linker.findPackageLocation(pkg, linkerOptions);
const packageFs = new fslib_1.CwdFS(packageLocation, { baseFs: zipOpenFs });
const manifest = await Manifest_1.Manifest.find(fslib_1.PortablePath.dot, { baseFs: packageFs });
if (typeof cwd === `undefined`)
cwd = packageLocation;
return { manifest, binFolder, env, cwd };
}, {
libzip: await libzip_1.getLibzipPromise(),
});
}
/**
 * Convenience wrapper: runs a script of a workspace by delegating to
 * executePackageScript with the workspace's anchored locator and project.
 */
async function executeWorkspaceScript(workspace, scriptName, args, { cwd, stdin, stdout, stderr }) {
    const { anchoredLocator, project } = workspace;
    return await executePackageScript(anchoredLocator, scriptName, args, { cwd, project, stdin, stdout, stderr });
}
exports.executeWorkspaceScript = executeWorkspaceScript;
/**
 * Returns whether the workspace manifest declares a script with this name.
 */
function hasWorkspaceScript(workspace, scriptName) {
    const { scripts } = workspace.manifest;
    return scripts.has(scriptName);
}
exports.hasWorkspaceScript = hasWorkspaceScript;
/**
 * Runs the given lifecycle script of a workspace, streaming its output into
 * a temporary log file. Throws a ReportError (keeping the log file around)
 * when the script exits with a non-zero code.
 */
async function executeWorkspaceLifecycleScript(workspace, lifecycleScriptName, { cwd, report }) {
const { configuration } = workspace.project;
const stdin = null;
await fslib_2.xfs.mktempPromise(async (logDir) => {
const logFile = fslib_2.ppath.join(logDir, `${lifecycleScriptName}.log`);
const header = `# This file contains the result of Yarn calling the "${lifecycleScriptName}" lifecycle script inside a workspace ("${workspace.cwd}")\n`;
const { stdout, stderr } = configuration.getSubprocessStreams(logFile, {
report,
prefix: structUtils.prettyLocator(configuration, workspace.anchoredLocator),
header,
});
report.reportInfo(MessageName_1.MessageName.LIFECYCLE_SCRIPT, `Calling the "${lifecycleScriptName}" lifecycle script`);
const exitCode = await executeWorkspaceScript(workspace, lifecycleScriptName, [], { cwd, stdin, stdout, stderr });
// Flush the log streams before deciding the outcome
stdout.end();
stderr.end();
if (exitCode !== 0) {
// Detach the temp dir so the log file survives for the user to inspect
fslib_2.xfs.detachTemp(logDir);
throw new Report_1.ReportError(MessageName_1.MessageName.LIFECYCLE_SCRIPT, `${capitalize_1.default(lifecycleScriptName)} script failed (exit code ${formatUtils.pretty(configuration, exitCode, formatUtils.Type.NUMBER)}, logs can be found here: ${formatUtils.pretty(configuration, logFile, formatUtils.Type.PATH)}); run ${formatUtils.pretty(configuration, `yarn ${lifecycleScriptName}`, formatUtils.Type.CODE)} to investigate`);
}
});
}
exports.executeWorkspaceLifecycleScript = executeWorkspaceLifecycleScript;
/**
 * Runs the given lifecycle script only when the workspace declares it;
 * silently does nothing otherwise.
 */
async function maybeExecuteWorkspaceLifecycleScript(workspace, lifecycleScriptName, opts) {
    if (!hasWorkspaceScript(workspace, lifecycleScriptName))
        return;
    await executeWorkspaceLifecycleScript(workspace, lifecycleScriptName, opts);
}
exports.maybeExecuteWorkspaceLifecycleScript = maybeExecuteWorkspaceLifecycleScript;
/**
 * Return the binaries that can be accessed by the specified package
 *
 * @param locator The queried package
 * @param project The project owning the package
 * @returns A Map from binary name to a [owning package, native binary path] pair
 */
async function getPackageAccessibleBinaries(locator, { project }) {
const configuration = project.configuration;
const binaries = new Map();
const pkg = project.storedPackages.get(locator.locatorHash);
if (!pkg)
throw new Error(`Package for ${structUtils.prettyLocator(configuration, locator)} not found in the project`);
// The linker report output is discarded; only the locations matter here
const stdout = new stream_1.Writable();
const linkers = configuration.getLinkers();
const linkerOptions = { project, report: new StreamReport_1.StreamReport({ configuration, stdout }) };
// A package can see its own binaries plus those of its direct dependencies
const visibleLocators = new Set([locator.locatorHash]);
for (const descriptor of pkg.dependencies.values()) {
const resolution = project.storedResolutions.get(descriptor.descriptorHash);
if (!resolution)
throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(configuration, descriptor)}) should have been registered`);
visibleLocators.add(resolution);
}
for (const locatorHash of visibleLocators) {
const dependency = project.storedPackages.get(locatorHash);
if (!dependency)
throw new Error(`Assertion failed: The package (${locatorHash}) should have been registered`);
if (dependency.bin.size === 0)
continue;
// Packages without a supporting linker are silently skipped
const linker = linkers.find(linker => linker.supportsPackage(dependency, linkerOptions));
if (!linker)
continue;
let packageLocation = null;
try {
packageLocation = await linker.findPackageLocation(dependency, linkerOptions);
}
catch (err) {
// Some packages may not be installed when they are incompatible
// with the current system.
if (err.code === `LOCATOR_NOT_INSTALLED`) {
continue;
}
else {
throw err;
}
}
// On name conflicts, later packages overwrite earlier ones (Map semantics)
for (const [name, target] of dependency.bin) {
binaries.set(name, [dependency, fslib_2.npath.fromPortablePath(fslib_2.ppath.resolve(packageLocation, target))]);
}
}
return binaries;
}
exports.getPackageAccessibleBinaries = getPackageAccessibleBinaries;
/**
 * Return the binaries that can be accessed by the specified workspace
 * (delegates to getPackageAccessibleBinaries with the anchored locator).
 *
 * @param workspace The queried workspace
 */
async function getWorkspaceAccessibleBinaries(workspace) {
    const { anchoredLocator, project } = workspace;
    return await getPackageAccessibleBinaries(anchoredLocator, { project });
}
exports.getWorkspaceAccessibleBinaries = getWorkspaceAccessibleBinaries;
/**
 * Execute a binary from the specified package.
 *
 * Note that "binary" in this sense means "a Javascript file". Actual native
 * binaries cannot be executed this way, because we use Node in order to
 * transparently read from the archives.
 *
 * @param locator The queried package
 * @param binaryName The name of the binary file to execute
 * @param args The arguments to pass to the file
 * @returns The exit code of the spawned Node process
 */
async function executePackageAccessibleBinary(locator, binaryName, args, { cwd, project, stdin, stdout, stderr, nodeArgs = [] }) {
const packageAccessibleBinaries = await getPackageAccessibleBinaries(locator, { project });
const binary = packageAccessibleBinaries.get(binaryName);
if (!binary)
throw new Error(`Binary not found (${binaryName}) for ${structUtils.prettyLocator(project.configuration, locator)}`);
return await fslib_2.xfs.mktempPromise(async (binFolder) => {
const [, binaryPath] = binary;
const env = await makeScriptEnv({ project, binFolder });
// Expose EVERY accessible binary (not just the requested one) so that the
// spawned script can invoke its siblings via the PATH. Note: the callback
// parameters intentionally shadow the outer `binaryName`/`binaryPath`.
await Promise.all(Array.from(packageAccessibleBinaries, ([binaryName, [, binaryPath]]) => makePathWrapper(env.BERRY_BIN_FOLDER, fslib_2.toFilename(binaryName), process.execPath, [binaryPath])));
let result;
try {
result = await execUtils.pipevp(process.execPath, [...nodeArgs, binaryPath, ...args], { cwd, env, stdin, stdout, stderr });
}
finally {
// Always clean the wrapper folder, even when the subprocess threw
await fslib_2.xfs.removePromise(env.BERRY_BIN_FOLDER);
}
return result.code;
});
}
exports.executePackageAccessibleBinary = executePackageAccessibleBinary;
/**
 * Execute a binary from the specified workspace (delegates to
 * executePackageAccessibleBinary with the anchored locator).
 *
 * @param workspace The queried workspace
 * @param binaryName The name of the binary file to execute
 * @param args The arguments to pass to the file
 */
async function executeWorkspaceAccessibleBinary(workspace, binaryName, args, { cwd, stdin, stdout, stderr }) {
    const { anchoredLocator, project } = workspace;
    return await executePackageAccessibleBinary(anchoredLocator, binaryName, args, { project, cwd, stdin, stdout, stderr });
}
exports.executeWorkspaceAccessibleBinary = executeWorkspaceAccessibleBinary;
/***/ }),
/***/ 51201:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.validRange = exports.satisfiesWithPrereleases = void 0;
const tslib_1 = __webpack_require__(70655);
const semver_1 = tslib_1.__importDefault(__webpack_require__(74451));
/**
 * Returns whether the given semver version satisfies the given range. Notably
 * this supports prerelease versions so that "2.0.0-rc.0" satisfies the range
 * ">=1.0.0", for example.
 *
 * This function exists because the semver.satisfies method does not include
 * pre releases. This means ranges such as * would not satisfy 1.0.0-rc. The
 * includePrerelease flag has a weird behavior and cannot be used (if you want
 * to try it out, just run the `semverUtils` testsuite using this flag instead
 * of our own implementation, and you'll see the failing cases).
 *
 * See https://github.com/yarnpkg/berry/issues/575 for more context.
 */
function satisfiesWithPrereleases(version, range, loose = false) {
let semverRange;
try {
semverRange = new semver_1.default.Range(range, { includePrerelease: true, loose });
}
catch (err) {
// An unparseable range matches nothing
return false;
}
if (!version)
return false;
let semverVersion;
try {
semverVersion = new semver_1.default.SemVer(version, semverRange);
// Strip the prerelease identifiers from the version so comparators don't
// reject it (note: `prerelease` is an array, hence always truthy here)
if (semverVersion.prerelease) {
semverVersion.prerelease = [];
}
}
catch (err) {
return false;
}
// A range has multiple sets of comparators. A version must satisfy all
// comparators in a set and at least one set to satisfy the range.
return semverRange.set.some(comparatorSet => {
// Also strip prereleases from the comparators themselves, mutating the
// parsed range in place before testing
for (const comparator of comparatorSet)
if (comparator.semver.prerelease)
comparator.semver.prerelease = [];
return comparatorSet.every(comparator => {
return comparator.test(semverVersion);
});
});
}
exports.satisfiesWithPrereleases = satisfiesWithPrereleases;
// Memoization table for validRange; stores `null` for invalid inputs too.
const rangesCache = new Map();
/**
 * A cached version of `new semver.Range(potentialRange)` that returns `null` on invalid ranges
 */
function validRange(potentialRange) {
    // Protocol-qualified ranges (eg `npm:^1.0.0`) are never plain semver
    if (potentialRange.includes(`:`))
        return null;
    if (rangesCache.has(potentialRange))
        return rangesCache.get(potentialRange);
    let parsed;
    try {
        parsed = new semver_1.default.Range(potentialRange);
    }
    catch (_a) {
        parsed = null;
    }
    rangesCache.set(potentialRange, parsed);
    return parsed;
}
exports.validRange = validRange;
/***/ }),
/***/ 34103:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getIdentVendorPath = exports.prettyDependent = exports.prettyResolution = exports.prettyWorkspace = exports.sortDescriptors = exports.prettyLocatorNoColors = exports.prettyLocator = exports.prettyReference = exports.prettyDescriptor = exports.prettyRange = exports.prettyIdent = exports.slugifyLocator = exports.slugifyIdent = exports.stringifyLocator = exports.stringifyDescriptor = exports.stringifyIdent = exports.requirableIdent = exports.convertToManifestRange = exports.makeRange = exports.parseFileStyleRange = exports.parseRange = exports.tryParseLocator = exports.parseLocator = exports.tryParseDescriptor = exports.parseDescriptor = exports.tryParseIdent = exports.parseIdent = exports.areVirtualPackagesEquivalent = exports.areLocatorsEqual = exports.areDescriptorsEqual = exports.areIdentsEqual = exports.bindLocator = exports.bindDescriptor = exports.devirtualizeLocator = exports.devirtualizeDescriptor = exports.isVirtualLocator = exports.isVirtualDescriptor = exports.virtualizePackage = exports.virtualizeDescriptor = exports.copyPackage = exports.renamePackage = exports.convertPackageToLocator = exports.convertLocatorToDescriptor = exports.convertDescriptorToLocator = exports.convertToIdent = exports.makeLocator = exports.makeDescriptor = exports.makeIdent = void 0;
const tslib_1 = __webpack_require__(70655);
const fslib_1 = __webpack_require__(91794);
const querystring_1 = tslib_1.__importDefault(__webpack_require__(71191));
const semver_1 = tslib_1.__importDefault(__webpack_require__(74451));
const formatUtils = tslib_1.__importStar(__webpack_require__(23821));
const hashUtils = tslib_1.__importStar(__webpack_require__(73279));
const miscUtils = tslib_1.__importStar(__webpack_require__(63111));
const structUtils = tslib_1.__importStar(__webpack_require__(34103));
// Prefix marking ranges/references that point to a virtualized package.
const VIRTUAL_PROTOCOL = `virtual:`;
// Number of entropy characters kept when abbreviating virtual hashes.
// NOTE(review): not referenced in this chunk; presumably consumed by
// slugifyLocator further down — confirm before changing.
const VIRTUAL_ABBREVIATE = 5;
/**
 * Creates a package ident.
 *
 * @param scope The package scope without the `@` prefix (eg. `types`); may be null
 * @param name The name of the package
 */
function makeIdent(scope, name) {
    // `!= null` covers both null and undefined, mirroring the optional chain
    if (scope != null && scope.startsWith(`@`))
        throw new Error(`Invalid scope: don't prefix it with '@'`);
    return { identHash: hashUtils.makeHash(scope, name), scope, name };
}
exports.makeIdent = makeIdent;
/**
 * Creates a package descriptor.
 *
 * @param ident The base ident (see `makeIdent`)
 * @param range The range to attach (eg. `^1.0.0`)
 */
function makeDescriptor(ident, range) {
    const { identHash, scope, name } = ident;
    return { identHash, scope, name, descriptorHash: hashUtils.makeHash(identHash, range), range };
}
exports.makeDescriptor = makeDescriptor;
/**
 * Creates a package locator.
 *
 * @param ident The base ident (see `makeIdent`)
 * @param reference The reference to attach (eg. `1.0.0`)
 */
function makeLocator(ident, reference) {
    const { identHash, scope, name } = ident;
    return { identHash, scope, name, locatorHash: hashUtils.makeHash(identHash, reference), reference };
}
exports.makeLocator = makeLocator;
/**
 * Turns a compatible source to an ident. You won't often need this, since by
 * virtue of structural inheritance all descriptors and locators are already
 * valid idents — it only matters when the extra fields must be stripped
 * before storing the structure somewhere.
 *
 * @param source The data structure to convert into an ident.
 */
function convertToIdent(source) {
    const { identHash, scope, name } = source;
    return { identHash, scope, name };
}
// CommonJS re-export of the helper defined above.
exports.convertToIdent = convertToIdent;
/**
 * Turns a descriptor into a locator.
 *
 * Note that this process may be unsafe, as descriptors may reference multiple
 * packages, putting them at odds with locators' expected semantic. Only makes
 * sense when used with single-resolution protocols, for instance `file:`.
 *
 * @param descriptor The descriptor to convert into a locator.
 */
function convertDescriptorToLocator(descriptor) {
    const { identHash, scope, name, descriptorHash, range } = descriptor;
    return { identHash, scope, name, locatorHash: descriptorHash, reference: range };
}
// CommonJS re-export of the helper defined above.
exports.convertDescriptorToLocator = convertDescriptorToLocator;
/**
 * Turns a locator into a descriptor.
 *
 * This should be safe to do regardless of the locator, since all locator
 * references are expected to be valid descriptor ranges.
 *
 * @param locator The locator to convert into a descriptor.
 */
function convertLocatorToDescriptor(locator) {
    const { identHash, scope, name, locatorHash, reference } = locator;
    return { identHash, scope, name, descriptorHash: locatorHash, range: reference };
}
// CommonJS re-export of the helper defined above.
exports.convertLocatorToDescriptor = convertLocatorToDescriptor;
/**
 * Turns a package structure into a simple locator. Rarely needed, since
 * packages are already valid locators by structural inheritance — only
 * useful when the non-locator fields must be stripped before storage.
 *
 * @param pkg The package to convert into a locator.
 */
function convertPackageToLocator(pkg) {
    const { identHash, scope, name, locatorHash, reference } = pkg;
    return { identHash, scope, name, locatorHash, reference };
}
// CommonJS re-export of the helper defined above.
exports.convertPackageToLocator = convertPackageToLocator;
/**
 * Deep copies a package then changes its locator to something else.
 * The dependency maps are duplicated so the copy can be mutated freely.
 *
 * @param pkg The source package
 * @param locator Its new locator
 */
function renamePackage(pkg, locator) {
    const { identHash, scope, name, locatorHash, reference } = locator;
    return {
        identHash,
        scope,
        name,
        locatorHash,
        reference,
        version: pkg.version,
        languageName: pkg.languageName,
        linkType: pkg.linkType,
        dependencies: new Map(pkg.dependencies),
        peerDependencies: new Map(pkg.peerDependencies),
        dependenciesMeta: new Map(pkg.dependenciesMeta),
        peerDependenciesMeta: new Map(pkg.peerDependenciesMeta),
        bin: new Map(pkg.bin),
    };
}
// CommonJS re-export of the helper defined above.
exports.renamePackage = renamePackage;
/**
 * Deep copies a package. The copy shares the locator of the original.
 *
 * @param pkg The source package
 */
function copyPackage(pkg) {
    // Renaming a package to its own locator yields a deep copy with the same identity
    return renamePackage(pkg, pkg);
}
exports.copyPackage = copyPackage;
/**
 * Creates a new virtual descriptor from a non virtual one.
 *
 * @param descriptor The descriptor to virtualize
 * @param entropy A hash that provides uniqueness to this virtualized descriptor (normally a locator hash)
 */
function virtualizeDescriptor(descriptor, entropy) {
    // `#` separates the entropy from the wrapped range, so it can't appear inside
    if (entropy.includes(`#`))
        throw new Error(`Invalid entropy`);
    const virtualRange = `virtual:${entropy}#${descriptor.range}`;
    return makeDescriptor(descriptor, virtualRange);
}
exports.virtualizeDescriptor = virtualizeDescriptor;
/**
 * Creates a new virtual package from a non virtual one.
 *
 * @param pkg The package to virtualize
 * @param entropy A hash that provides uniqueness to this virtualized package (normally a locator hash)
 */
function virtualizePackage(pkg, entropy) {
    // `#` separates the entropy from the wrapped reference, so it can't appear inside
    if (entropy.includes(`#`))
        throw new Error(`Invalid entropy`);
    const virtualReference = `virtual:${entropy}#${pkg.reference}`;
    return renamePackage(pkg, makeLocator(pkg, virtualReference));
}
exports.virtualizePackage = virtualizePackage;
/**
 * Returns `true` if the descriptor is virtual (its range uses the
 * `virtual:` protocol).
 */
function isVirtualDescriptor(descriptor) {
    const { range } = descriptor;
    return range.startsWith(VIRTUAL_PROTOCOL);
}
exports.isVirtualDescriptor = isVirtualDescriptor;
/**
 * Returns `true` if the locator is virtual (its reference uses the
 * `virtual:` protocol).
 */
function isVirtualLocator(locator) {
    const { reference } = locator;
    return reference.startsWith(VIRTUAL_PROTOCOL);
}
exports.isVirtualLocator = isVirtualLocator;
/**
 * Returns a new devirtualized descriptor based on a virtualized descriptor
 */
function devirtualizeDescriptor(descriptor) {
    if (!isVirtualDescriptor(descriptor))
        throw new Error(`Not a virtual descriptor`);
    // Drop everything up to and including the first `#` (the virtual prefix)
    const strippedRange = descriptor.range.replace(/^[^#]*#/, ``);
    return makeDescriptor(descriptor, strippedRange);
}
exports.devirtualizeDescriptor = devirtualizeDescriptor;
/**
 * Returns a new devirtualized locator based on a virtualized locator
 * @param locator the locator
 */
function devirtualizeLocator(locator) {
    if (!isVirtualLocator(locator))
        // Error message fixed: this guard checks a locator, not a descriptor
        // (previous text was copy-pasted from devirtualizeDescriptor)
        throw new Error(`Not a virtual locator`);
    return makeLocator(locator, locator.reference.replace(/^[^#]*#/, ``));
}
exports.devirtualizeLocator = devirtualizeLocator;
/**
 * Some descriptors only make sense when bound with some internal state. For
 * instance that would be the case for the `file:` ranges, which require to
 * be bound to their parent packages in order to resolve relative paths from
 * the right location.
 *
 * This function will apply the specified parameters onto the requested
 * descriptor, but only if it didn't get bound before (important to handle the
 * case where we replace a descriptor by another, since when that happens the
 * replacement has probably been already bound).
 *
 * @param descriptor The original descriptor
 * @param params The parameters to encode in the range
 */
function bindDescriptor(descriptor, params) {
    // A `::` separator means the descriptor has already been bound
    if (descriptor.range.includes(`::`))
        return descriptor;
    const encodedParams = querystring_1.default.stringify(params);
    return makeDescriptor(descriptor, `${descriptor.range}::${encodedParams}`);
}
exports.bindDescriptor = bindDescriptor;
/**
 * Some locators only make sense when bound with some internal state. For
 * instance that would be the case for the `file:` references, which require to
 * be bound to their parent packages in order to resolve relative paths from
 * the right location.
 *
 * This function will apply the specified parameters onto the requested
 * locator, but only if it didn't get bound before (important to handle the
 * case where we replace a locator by another, since when that happens the
 * replacement has probably been already bound).
 *
 * @param locator The original locator
 * @param params The parameters to encode in the reference
 */
function bindLocator(locator, params) {
    // A `::` separator means the locator has already been bound
    if (locator.reference.includes(`::`))
        return locator;
    const encodedParams = querystring_1.default.stringify(params);
    return makeLocator(locator, `${locator.reference}::${encodedParams}`);
}
exports.bindLocator = bindLocator;
/**
 * Returns `true` if the two idents share the same ident hash.
 */
function areIdentsEqual(a, b) {
    const { identHash: left } = a;
    const { identHash: right } = b;
    return left === right;
}
// CommonJS re-export of the helper defined above.
exports.areIdentsEqual = areIdentsEqual;
/**
 * Returns `true` if the two descriptors share the same descriptor hash.
 */
function areDescriptorsEqual(a, b) {
    const { descriptorHash: left } = a;
    const { descriptorHash: right } = b;
    return left === right;
}
// CommonJS re-export of the helper defined above.
exports.areDescriptorsEqual = areDescriptorsEqual;
/**
 * Returns `true` if the two locators share the same locator hash.
 */
function areLocatorsEqual(a, b) {
    const { locatorHash: left } = a;
    const { locatorHash: right } = b;
    return left === right;
}
exports.areLocatorsEqual = areLocatorsEqual;
/**
 * Two virtual packages are equivalent when they share the same ident and
 * carry equal dependency descriptors. Equivalence is weaker than equality:
 * the references themselves may differ.
 *
 * Throws if either argument isn't a virtual locator.
 */
function areVirtualPackagesEquivalent(a, b) {
  // Both sides must be virtual; check `a` first to preserve error ordering
  if (!isVirtualLocator(a) || !isVirtualLocator(b))
    throw new Error(`Invalid package type`);
  if (!areIdentsEqual(a, b))
    return false;
  if (a.dependencies.size !== b.dependencies.size)
    return false;
  // Sizes match, so it's enough to check that each dependency of `a` has an
  // equal counterpart in `b`
  for (const depA of a.dependencies.values()) {
    const depB = b.dependencies.get(depA.identHash);
    if (!depB || !areDescriptorsEqual(depA, depB))
      return false;
  }
  return true;
}
exports.areVirtualPackagesEquivalent = areVirtualPackagesEquivalent;
/**
 * Parses a string into an ident.
 *
 * Throws an error if the ident cannot be parsed.
 *
 * @param string The ident string (eg. `@types/lodash`)
 */
function parseIdent(string) {
  const parsed = tryParseIdent(string);
  if (parsed === null)
    throw new Error(`Invalid ident (${string})`);
  return parsed;
}
exports.parseIdent = parseIdent;
/**
 * Parses a string into an ident.
 *
 * Returns `null` if the ident cannot be parsed.
 *
 * @param string The ident string (eg. `@types/lodash`)
 */
function tryParseIdent(string) {
  // Optional `@scope/` prefix followed by a slash-free name
  const match = string.match(/^(?:@([^/]+?)\/)?([^/]+)$/);
  if (match === null)
    return null;
  const [, rawScope, name] = match;
  const scope = rawScope === undefined ? null : rawScope;
  return makeIdent(scope, name);
}
exports.tryParseIdent = tryParseIdent;
/**
 * Parses a `string` into a descriptor
 *
 * Throws an error if the descriptor cannot be parsed.
 *
 * @param string The descriptor string (eg. `lodash@^1.0.0`)
 * @param strict If `false`, the range is optional (`unknown` will be used as fallback)
 */
function parseDescriptor(string, strict = false) {
  const parsed = tryParseDescriptor(string, strict);
  if (parsed === null)
    throw new Error(`Invalid descriptor (${string})`);
  return parsed;
}
exports.parseDescriptor = parseDescriptor;
/**
 * Parses a `string` into a descriptor
 *
 * Returns `null` if the descriptor cannot be parsed.
 *
 * @param string The descriptor string (eg. `lodash@^1.0.0`)
 * @param strict If `false`, the range is optional (`unknown` will be used as fallback)
 */
function tryParseDescriptor(string, strict = false) {
  // In strict mode the `@range` segment is mandatory; in loose mode it may
  // be omitted (the two regexps only differ by the trailing `?`)
  const pattern = strict
    ? /^(?:@([^/]+?)\/)?([^/]+?)(?:@(.+))$/
    : /^(?:@([^/]+?)\/)?([^/]+?)(?:@(.+))?$/;
  const match = string.match(pattern);
  if (match === null)
    return null;
  const [, rawScope, name, rawRange] = match;
  // `unknown` is reserved as the fallback sentinel, so reject it as input
  if (rawRange === `unknown`)
    throw new Error(`Invalid range (${string})`);
  const scope = rawScope === undefined ? null : rawScope;
  const range = rawRange === undefined ? `unknown` : rawRange;
  return makeDescriptor(makeIdent(scope, name), range);
}
exports.tryParseDescriptor = tryParseDescriptor;
/**
 * Parses a `string` into a locator
 *
 * Throws an error if the locator cannot be parsed.
 *
 * @param string The locator `string` (eg. `lodash@1.0.0`)
 * @param strict If `false`, the reference is optional (`unknown` will be used as fallback)
 */
function parseLocator(string, strict = false) {
  const parsed = tryParseLocator(string, strict);
  if (parsed === null)
    throw new Error(`Invalid locator (${string})`);
  return parsed;
}
exports.parseLocator = parseLocator;
/**
 * Parses a `string` into a locator
 *
 * Returns `null` if the locator cannot be parsed.
 *
 * @param string The locator string (eg. `lodash@1.0.0`)
 * @param strict If `false`, the reference is optional (`unknown` will be used as fallback)
 */
function tryParseLocator(string, strict = false) {
  // In strict mode the `@reference` segment is mandatory; in loose mode it
  // may be omitted (the two regexps only differ by the trailing `?`)
  const pattern = strict
    ? /^(?:@([^/]+?)\/)?([^/]+?)(?:@(.+))$/
    : /^(?:@([^/]+?)\/)?([^/]+?)(?:@(.+))?$/;
  const match = string.match(pattern);
  if (match === null)
    return null;
  const [, rawScope, name, rawReference] = match;
  // `unknown` is reserved as the fallback sentinel, so reject it as input
  if (rawReference === `unknown`)
    throw new Error(`Invalid reference (${string})`);
  const scope = rawScope === undefined ? null : rawScope;
  const reference = rawReference === undefined ? `unknown` : rawReference;
  return makeLocator(makeIdent(scope, name), reference);
}
exports.tryParseLocator = tryParseLocator;
/**
* Parses a range into its constituents. Ranges typically follow these forms,
* with both `protocol` and `bindings` being optionals:
*
* :::
* :