#!/bin/sh
# Strict mode: abort on any unhandled error (-e) or unset variable (-u).
# NB: this was fused onto the shebang line before, which disabled it entirely.
set -eu
#
# crawler.sh installer
# Usage: curl -fsSL https://install.crawler.sh | sh

# shellcheck disable=SC2034  # REPO currently unused; kept for forward compat
REPO="crawler-sh"
BINARY="crawler"
BASE_URL="https://install.crawler.sh/cli/latest"

# Allow override via environment variable
INSTALL_DIR="${CRAWLER_INSTALL_DIR:-$HOME/.crawler}"
BIN_DIR="$INSTALL_DIR/bin"

# Print an informational message (bold blue) to stdout.
info() {
  printf "\033[1;34m%s\033[0m\n" "$1"
}

# Print an error message (bold red) to stderr and abort the installer.
error() {
  printf "\033[1;31merror: %s\033[0m\n" "$1" >&2
  exit 1
}

# Map `uname -s` to the release-asset OS token: macos | linux.
# Aborts on any other platform.
detect_os() {
  case "$(uname -s)" in
    Darwin) echo "macos" ;;
    Linux) echo "linux" ;;
    *) error "Unsupported operating system: $(uname -s)" ;;
  esac
}

# Map `uname -m` to the release-asset arch token: arm64 | x64.
# Aborts on any other architecture.
detect_arch() {
  case "$(uname -m)" in
    arm64 | aarch64) echo "arm64" ;;
    x86_64 | amd64) echo "x64" ;;
    *) error "Unsupported architecture: $(uname -m)" ;;
  esac
}

# download URL OUTPUT — fetch URL to OUTPUT with curl, falling back to wget.
# Aborts if neither tool is available.
download() {
  url="$1"
  output="$2"
  if command -v curl >/dev/null 2>&1; then
    curl -fsSL "$url" -o "$output"
  elif command -v wget >/dev/null 2>&1; then
    wget -qO "$output" "$url"
  else
    error "curl or wget is required to download files"
  fi
}

# Append a PATH entry for BIN_DIR to common shell rc files.
# Idempotent: skips any rc file that already mentions BIN_DIR.
add_to_path() {
  path_entry="export PATH=\"$BIN_DIR:\$PATH\""
  for rc in "$HOME/.zshrc" "$HOME/.bashrc" "$HOME/.bash_profile" "$HOME/.profile"; do
    if [ -f "$rc" ]; then
      if ! grep -qF "$BIN_DIR" "$rc" 2>/dev/null; then
        printf '\n# crawler.sh\n%s\n' "$path_entry" >> "$rc"
      fi
    fi
  done

  # fish shell uses its own PATH syntax, so handle it separately
  fish_config="$HOME/.config/fish/config.fish"
  if [ -f "$fish_config" ]; then
    if ! grep -qF "$BIN_DIR" "$fish_config" 2>/dev/null; then
      printf '\n# crawler.sh\nset -gx PATH %s $PATH\n' "$BIN_DIR" >> "$fish_config"
    fi
  fi
}

main() {
  os="$(detect_os)"
  arch="$(detect_arch)"
  filename="${BINARY}-${os}-${arch}"
  # BUG FIX: was "${BASE_URL}/$(unknown)" — a command substitution of a
  # nonexistent command, which aborted the script under `set -e` before any
  # download. The URL must reference the platform-specific asset name.
  url="${BASE_URL}/${filename}"

  info "Installing crawler.sh..."
  printf " OS: %s\n" "$os"
  printf " Arch: %s\n" "$arch"
  echo ""

  # Create install directory
  mkdir -p "$BIN_DIR"

  # Download to temp file, then move atomically
  tmp="$(mktemp)"
  trap 'rm -f "$tmp"' EXIT

  info "Downloading ${url}..."
  download "$url" "$tmp"

  # Install binary (rm -f in the EXIT trap is a harmless no-op after the mv)
  mv "$tmp" "$BIN_DIR/$BINARY"
  chmod +x "$BIN_DIR/$BINARY"

  # Add to PATH
  add_to_path

  echo ""
  info "crawler.sh installed successfully!"
  echo ""
  echo " Binary: $BIN_DIR/$BINARY"
  echo ""
  echo " Restart your shell or run:"
  echo " export PATH=\"$BIN_DIR:\$PATH\""
  echo ""
  echo " Then try:"
  echo " crawler crawl https://example.com"
  echo ""
}

# Forward any CLI arguments to main (was a bare `main`).
main "$@"