Mirror of https://github.com/tj/n.git
Rework activate to support more configurations (#657)
* Rework activate to be (much) more explicit. Add archlinux image. Remove need for rsync.
* Reword use case without npm
parent e7920e7872
commit f1d0dd5a22
@@ -162,14 +162,14 @@ Or run a downloaded `node` version with the `n run` command:
n run 8.11.3 --debug some.js
Or execute a command with `PATH` modified so `node` and `npm` will be from the downloaded Node.js version.
(NB: this `npm` will be working with a different and empty global node_modules directory, and you should not install global
modules this way.)
(NB: `npm` run this way will be using global node_modules from the target node version folder.)
n exec 10 my-script --fast test
n exec lts zsh
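For example (an illustrative check, assuming the LTS version has been downloaded), you can confirm which `node` the modified `PATH` picks up:
n exec lts node --version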
## Preserving npm
A Node.js install normally includes `npm` as well, but you may wish to preserve an updated `npm` and `npx` leaving them out of the install using `--preserve` (requires rsync):
A Node.js install normally includes `npm` as well, but you may wish to preserve an updated `npm` and `npx` leaving them out of the install using `--preserve`:
$ npm install -g npm@latest
...
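For instance (a sketch assuming the `--preserve` flag described in the help text below), a later install that keeps the updated `npm` and `npx` could look like:
$ n --preserve lts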
bin/n
@@ -347,7 +347,7 @@ Options:
-V, --version Output version of n
-h, --help Display help information
-p, --preserve Preserve npm and npx during install of Node.js (requires rsync)
-p, --preserve Preserve npm and npx during install of Node.js
-q, --quiet Disable curl output (if available)
-d, --download Download only
-a, --arch Override system architecture
@@ -603,6 +603,19 @@ disable_pax_mprotect() {
fi
}
#
# clean_copy_folder <source> <target>
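# Replaces <target> with a fresh copy of <source> (no-op if <source> does not exist).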
#
clean_copy_folder() {
local source="$1"
local target="$2"
if [[ -d "${source}" ]]; then
rm -rf "${target}"
cp -fR "${source}" "${target}"
fi
}
#
# Activate <version>
#
@@ -611,26 +624,51 @@ activate() {
local version="$1"
local dir="$CACHE_DIR/$version"
local original_node="$(command -v node)"
local installed_node="${N_PREFIX}/bin/node"
# Remove old npm to avoid potential issues with simple overwrite.
if [[ -z "${N_PRESERVE_NPM}" && -d "$dir/lib/node_modules/npm" ]]; then
if test -d "$N_PREFIX/lib/node_modules/npm"; then
rm -rf "$N_PREFIX/lib/node_modules/npm"
fi
# Ideally we would just copy from cache to N_PREFIX, but there are some complications
# - various linux versions use symlinks for folders in /usr/local and also error when copy folder onto symlink
# - we have used cp for years, so keep using it for backwards compatibility (instead of say rsync)
# - we allow preserving npm
# - we want to be somewhat robust to changes in tarball contents, so use find instead of hard-code expected subfolders
#
# This code was purist and concise for a long time.
# Now twice as much code, but using same code path for all uses, and supporting more setups.
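# (As an illustration: `cp -fR "$dir/share" "$N_PREFIX"` can fail when $N_PREFIX/share is a symlink,
# whereas copying the folder contents with find works for both a real directory and a symlink to one.)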
# Copy lib before bin so symlink targets exist.
# lib
mkdir -p "$N_PREFIX/lib"
# Copy everything except node_modules.
find "$dir/lib" -mindepth 1 -maxdepth 1 \! -name node_modules -exec cp -fR "{}" "$N_PREFIX/lib" \;
if [[ -z "${N_PRESERVE_NPM}" ]]; then
mkdir -p "$N_PREFIX/lib/node_modules"
# Copy just npm, skipping possible added global modules after download. Clean copy to avoid version change problems.
clean_copy_folder "$dir/lib/node_modules/npm" "$N_PREFIX/lib/node_modules/npm"
fi
# bin
mkdir -p "$N_PREFIX/bin"
# Remove old node to avoid potential problems with firewall getting confused on Darwin by overwrite.
rm -f "$N_PREFIX/bin/node"
# Copy (lib before bin to avoid error messages on Darwin when cp over dangling link)
for subdir in lib bin include share; do
if [[ -n "${N_PRESERVE_NPM}" ]]; then
rsync --recursive --archive --keep-dirlinks --exclude=npm --exclude=npx "${dir}/${subdir}" "${N_PREFIX}"
elif test -L "$N_PREFIX/$subdir"; then
find "$dir/$subdir" -mindepth 1 -maxdepth 1 -exec cp -fR "{}" "$N_PREFIX/$subdir" \;
else
cp -fR "$dir/$subdir" "$N_PREFIX"
fi
done
local installed_node="${N_PREFIX}/bin/node"
# Copy just node, in case user has installed global npm modules into cache.
cp -f "$dir/bin/node" "$N_PREFIX/bin"
[[ -e "$dir/bin/node-waf" ]] && cp -f "$dir/bin/node-waf" "$N_PREFIX/bin" # v0.8.x
if [[ -z "${N_PRESERVE_NPM}" ]]; then
[[ -e "$dir/bin/npm" ]] && cp -fR "$dir/bin/npm" "$N_PREFIX/bin"
[[ -e "$dir/bin/npx" ]] && cp -fR "$dir/bin/npx" "$N_PREFIX/bin"
fi
# include
mkdir -p "$N_PREFIX/include"
find "$dir/include" -mindepth 1 -maxdepth 1 -exec cp -fR "{}" "$N_PREFIX/include" \;
# share
mkdir -p "$N_PREFIX/share"
# Copy everything except man, as it is a symlink on some Linux (e.g. archlinux).
find "$dir/share" -mindepth 1 -maxdepth 1 \! -name man -exec cp -fR "{}" "$N_PREFIX/share" \;
mkdir -p "$N_PREFIX/share/man"
find "$dir/share/man" -mindepth 1 -maxdepth 1 -exec cp -fR "{}" "$N_PREFIX/share/man" \;
disable_pax_mprotect "${installed_node}"
local active_node="$(command -v node)"
@@ -1114,7 +1152,6 @@ function get_latest_resolved_version() {
display_remote_index() {
local index_url="${g_mirror_url}/index.tab"
echo "index_url is ${index_url}"
# tail to remove header line
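# cut keeps the version, files, and lts columns of index.tab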
do_get_index "${index_url}" | tail -n +2 | cut -f 1,3,10
if [[ "${PIPESTATUS[0]}" -ne 0 ]]; then
@@ -1300,13 +1337,6 @@ function show_diagnostics() {
echo_red "Neither curl nor wget found. Need one of them for downloads."
fi
printf "\nrsync:\n"
if command -v rsync &> /dev/null; then
command -v rsync && rsync --version
else
printf "rsync not found. (Needed for preserving npm during install.)\n"
fi
printf "\nuname\n"
uname -a
@@ -35,7 +35,10 @@ export http_proxy
https_proxy="$(hostname):8080"
export https_proxy
# linux. Use wget first so the cache holds an uncompressed index.tab that works with both wget and curl.
# Need to do wget first, as curl gets compressed index.tab which will break wget.
# linux, archlinux-curl gets gz archives
docker-compose run archlinux-curl /mnt/test/tests/install-reference-versions.bash
# linux, ubuntu-curl would get compressed index and gz archives
docker-compose run ubuntu-wget /mnt/test/tests/install-reference-versions.bash
# native
tests/install-reference-versions.bash
@@ -1,7 +1,8 @@
#!/usr/bin/env bash
BIN_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
services=( ubuntu-curl ubuntu-wget )
# We want to cover curl and wget especially; gz and xz and a variety of OSes are a bonus.
services=( archlinux-curl ubuntu-wget )
cd "$(dirname "${BIN_DIRECTORY}")" || exit 2
for service in "${services[@]}" ; do
@@ -10,5 +11,6 @@ for service in "${services[@]}" ; do
echo ""
done
# host (current maintainer uses Mac)
uname -s
../node_modules/.bin/bats tests
test/docker-compose.yml
@@ -14,3 +14,10 @@ services:
    build:
      context: dockerfiles
      dockerfile: Dockerfile-ubuntu-wget
  archlinux-curl:
    extends:
      file: ./docker-base.yml
      service: testbed
    build:
      context: dockerfiles
      dockerfile: Dockerfile-archlinux-curl
test/dockerfiles/Dockerfile-archlinux-curl (new file)
@@ -0,0 +1,3 @@
FROM archlinux:latest
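# curl (already included in the archlinux base image, so no extra install step is needed)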
CMD ["/bin/bash"]
test/dockerfiles/Dockerfile-ubuntu-curl
@@ -3,7 +3,7 @@ FROM ubuntu:latest
# curl
RUN apt-get update \
&& apt-get install -y curl rsync \
&& apt-get install -y curl \
&& rm -rf /var/lib/apt/lists/*
CMD ["/bin/bash"]
test/dockerfiles/Dockerfile-ubuntu-wget
@@ -3,7 +3,7 @@ FROM ubuntu:latest
# wget
RUN apt-get update \
&& apt-get install -y wget rsync \
&& apt-get install -y wget \
&& rm -rf /var/lib/apt/lists/*
CMD ["/bin/bash"]
@@ -2,10 +2,11 @@
# These are the versions installed and hence cached by proxy-build.
# Run commands we want to cache downloads for
# Run commands we want to cache downloads for.
# Get index into cache for lookups of expected versions.
# Get index into cache for lookups of expected versions. Uncompressed.
curl --location --fail https://nodejs.org/dist/index.tab &> /dev/null
curl --location --fail https://nodejs.org/download/nightly/index.tab &> /dev/null
# Using 4.9.1 as a well known old version (which is no longer getting updated so does not change)
n --download 4