diff --git a/.gitbook.yaml b/.gitbook.yaml new file mode 100644 index 00000000..f25a919e --- /dev/null +++ b/.gitbook.yaml @@ -0,0 +1,26 @@ +root: + ./docs/ +redirects: + installing-w/out-brew: installing-pkgx.md + pantry: pkging/pantry.md + getting-started: getting-started.md + quickstart: installing-pkgx.md + + help/pkg-not-cached: https://github.com/orgs/pkgxdev/discussions/new?category=help&title=pkg-not-cached + help/http-failure: https://github.com/orgs/pkgxdev/discussions/new?category=help&title=http-failure + help/ambiguous-pkgspec: https://github.com/orgs/pkgxdev/discussions/new?category=help&title=ambiguous-pkgspec + + # links should never die + docker: installing-pkgx.md + ci-cd: installing-pkgx.md + scripts: scripting.md + run-anywhere/terminals: installing-pkgx.md + run-anywhere/docker: installing-pkgx.md + run-anywhere/ci-cd: installing-pkgx.md + run-anywhere/scripts: scripting.md + pantry.md: pkging/pantry.md + pantry-api: pkging/pantry-api.md + pkgx-install: https://github.com/pkgxdev/pkgm + install: https://github.com/pkgxdev/pkgm + support: https://github.com/pkgxdev/discussions + dev: https://github.com/pkgxdev/dev diff --git a/.github/Dockerfile b/.github/Dockerfile new file mode 100644 index 00000000..393e6ab9 --- /dev/null +++ b/.github/Dockerfile @@ -0,0 +1,16 @@ +FROM debian:buster-slim AS stage0 +COPY ./products/* /pkgx/ +RUN install -m 755 /pkgx/$(uname -m) /usr/local/bin/pkgx +RUN install -m 755 /pkgx/pkgm /usr/local/bin/pkgm +RUN echo 'export PS1="\\[\\033[38;5;63m\\]pkgx\\[\\033[0m\\]\\w $ "' >> /root/.bashrc + +FROM debian:buster-slim AS stage1 +RUN apt-get update && apt --yes install libc-dev libstdc++-8-dev libgcc-8-dev netbase libudev-dev ca-certificates +COPY --from=stage0 /usr/local/bin/pkgx /usr/local/bin/pkgx +COPY --from=stage0 /usr/local/bin/pkgm /usr/local/bin/pkgm +COPY --from=stage0 /root/.bashrc /root/.bashrc +ENV BASH_ENV=/root/.bashrc +ENV CLICOLOR_FORCE=1 +SHELL ["/bin/bash", "-c"] +CMD ["/bin/bash", "-i"] +ENTRYPOINT 
["/usr/local/bin/pkgx"] diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..ca79ca5b --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: github-actions + directory: / + schedule: + interval: weekly diff --git a/.github/markdownlint.yml b/.github/markdownlint.yml index e03ed0a0..00e20f85 100644 --- a/.github/markdownlint.yml +++ b/.github/markdownlint.yml @@ -1,26 +1,21 @@ -# use banner rather than an h1 at the top of the file -MD041: false - -# extra lines to offset content -MD012: false +default: + true -# skipping heading levels as needed for display purposes -MD001: false +MD013: + code_blocks: false + tables: false -# "prompts" in code examples -MD014: false - -# consecutive distinct blockquotes -MD028: false +# Honestly most of the lint rules are absurd, we disable these *at least* -# sometimes, you need HTML:
+# First line in a file should be a top-level heading +MD041: false +# Multiple top-level headings in the same document +MD025: false +# Inline HTML MD033: false +# Multiple consecutive blank lines +MD012: false -# duplicate headers are sometimes useful -MD024: false - -# can't split up a table row -MD013: false - -# some titles end in 'etc.' -MD026: false \ No newline at end of file +# Blank line inside blockquote +# gives false positives on code blocks for literal consecutive blockquotes +MD028: false diff --git a/.github/workflows/cd.brew.yml b/.github/workflows/cd.brew.yml new file mode 100644 index 00000000..1f1d7699 --- /dev/null +++ b/.github/workflows/cd.brew.yml @@ -0,0 +1,22 @@ +name: cd·brew + +on: + release: + types: + - published + +concurrency: + group: cd/brew/${{ github.event.release.tag_name }} + cancel-in-progress: true + +jobs: + bump-tap: + if: startsWith(github.event.release.tag_name, 'v2') + runs-on: ubuntu-latest + steps: + - uses: aurelien-baudet/workflow-dispatch@v4 + with: + workflow: bump.yml + repo: pkgxdev/homebrew-made + ref: main + token: ${{secrets.GH_TOKEN}} diff --git a/.github/workflows/cd.crates.yml b/.github/workflows/cd.crates.yml new file mode 100644 index 00000000..8c70e98e --- /dev/null +++ b/.github/workflows/cd.crates.yml @@ -0,0 +1,28 @@ +name: cd·crates + +on: + release: + types: + - published + workflow_dispatch: + inputs: + tag: + required: true + +concurrency: + group: cd/crates/${{ github.event.release.tag_name || github.event.inputs.tag }} + cancel-in-progress: true + +jobs: + publish: + if: startsWith(github.event.release.tag_name || github.event.inputs.tag, 'v2') + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.release.tag_name || github.event.inputs.tag }} + - uses: dtolnay/rust-toolchain@stable + - uses: katyo/publish-crates@v2 + with: + args: --all-features + registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }} diff --git a/.github/workflows/cd.docker.yml 
b/.github/workflows/cd.docker.yml new file mode 100644 index 00000000..09599bbb --- /dev/null +++ b/.github/workflows/cd.docker.yml @@ -0,0 +1,95 @@ +name: cd·docker + +on: + release: + types: + - published + workflow_dispatch: + inputs: + tag: + required: true + +concurrency: + group: docker/${{ github.event.release.tag_name || github.event.inputs.tag }} + cancel-in-progress: true + +permissions: + contents: read + packages: write + +jobs: + build-and-push-image: + name: docker buildx pkgxdev/pkgx:${{ github.event.release.tag_name || github.event.inputs.tag }} + runs-on: ubuntu-latest + steps: + - uses: robinraju/release-downloader@v1.11 + with: + releaseId: ${{ github.event.release.id }} + tag: ${{ github.event.inputs.tag }} + fileName: pkgx-*.tar.xz + + - name: is_latest? + id: latest + run: | + TAG_NAME="${{ github.event.release.tag_name || github.event.inputs.tag }}" + LATEST_RELEASE=$(gh api repos/${{ github.repository }}/releases/latest --jq '.tag_name') + if [ "$TAG_NAME" == "$LATEST_RELEASE" ]; then + echo "value=true" >> "${GITHUB_OUTPUT}" + else + echo "value=false" >> "${GITHUB_OUTPUT}" + fi + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Unpack Binaries + run: | + for filename in pkgx-*+*+*.tar.xz; do + tar xJf $filename + result="${filename#*+}" + result="${result%.tar.xz}" + mv pkgx $result + rm $filename + done + + mkdir products + mv linux+x86-64 products/x86_64 + mv linux+aarch64 products/aarch64 + + curl -o products/pkgm https://pkgxdev.github.io/pkgm/pkgm.ts + + - uses: actions/checkout@v4 + with: + path: src + + - name: log in to docker hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USER_NAME }} + password: ${{ secrets.DOCKER_TOKEN }} + + - uses: docker/metadata-action@v5 + id: meta + with: + images: pkgxdev/pkgx + flavor: | + latest=${{ steps.latest.outputs.value }} + tags: | + type=sha + type=semver,pattern=v{{version}},value=${{ github.event.release.tag_name || github.event.inputs.tag }} + 
type=semver,pattern=v{{major}}.{{minor}},value=${{ github.event.release.tag_name || github.event.inputs.tag }} + type=semver,pattern=v{{major}},value=${{ github.event.release.tag_name || github.event.inputs.tag }} + + - uses: docker/setup-qemu-action@v3 + - uses: docker/setup-buildx-action@v3 + + - name: docker/buildx + run: | + for x in $(echo "${{ steps.meta.outputs.tags }}" | tr '\n' ' '); + do tags="$tags --tag $x"; + done + docker buildx build \ + --push \ + $tags \ + --platform linux/amd64,linux/arm64 \ + --file ./src/.github/Dockerfile \ + . \ No newline at end of file diff --git a/.github/workflows/cd.vx.yml b/.github/workflows/cd.vx.yml new file mode 100644 index 00000000..d52b09db --- /dev/null +++ b/.github/workflows/cd.vx.yml @@ -0,0 +1,28 @@ +name: cd·vx + +on: + release: + types: + - published + +concurrency: + group: cd/vx/${{ github.event.release.tag_name }} + cancel-in-progress: true + +permissions: + contents: write + +jobs: + retag: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.release.tag_name }} + - uses: fischerscode/tagger@v0 + with: + prefix: v + - run: | + git tag -f latest + git push origin latest --force + if: startsWith(github.event.release.tag_name, 'v2') diff --git a/.github/workflows/cd.www.yml b/.github/workflows/cd.www.yml new file mode 100644 index 00000000..61946db6 --- /dev/null +++ b/.github/workflows/cd.www.yml @@ -0,0 +1,75 @@ +name: cd·www + +# the binaries for curl pkgx.sh/$(uname)/$(uname -m) + +on: + release: + types: + - published + workflow_dispatch: + inputs: + tag: + required: true + +concurrency: + group: cd/www/${{ github.event.release.tag_name || github.event.inputs.tag }} + cancel-in-progress: true + +jobs: + www-upload: + if: startsWith(github.event.release.tag_name || github.event.inputs.tag, 'v2') + runs-on: ubuntu-latest + steps: + - uses: aws-actions/configure-aws-credentials@v4 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + 
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + + - uses: robinraju/release-downloader@v1.11 + with: + releaseId: ${{ github.event.release.id }} + tag: ${{ github.event.inputs.tag }} + fileName: pkgx-*.tar.xz + + - name: Unpack Binaries + run: | + go() { + tar xJf $1 + mkdir -p out/$2 + tar czf out/$2/$3.tgz pkgx + cp pkgx out/$2/$3 + gzip pkgx + mv pkgx.gz out/$2/$3.gz + INVALIDATION_PATHS="$INVALIDATION_PATHS /$2/$3 /$2/$3.tgz /$2/$3.gz" + } + + for filename in pkgx-*+*+*.tar.xz; do + case $filename in + *+darwin+aarch64.tar.xz) + go $filename Darwin arm64;; + *+darwin+x86-64.tar.xz) + go $filename Darwin x86_64;; + *+linux+aarch64.tar.xz) + go $filename Linux arm64 + go $filename Linux aarch64;; + *+linux+x86-64.tar.xz) + go $filename Linux x86_64 + ./out/Linux/x86_64 --version | awk '{print $2}' > out/VERSION + ;; + esac + done + + echo "INVALIDATION_PATHS=$INVALIDATION_PATHS" >> $GITHUB_ENV + + - name: Upload to S3 + run: + aws s3 sync out/ s3://www.pkgx.sh/ + --metadata-directive REPLACE + --cache-control "max-age=3600, must-revalidate" + + - name: Invalidate CloudFront + run: + aws cloudfront create-invalidation + --distribution-id ${{ secrets.AWS_CF_DISTRIBUTION_ID }} + --paths $INVALIDATION_PATHS /VERSION diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml index b3fca40d..72252cc6 100644 --- a/.github/workflows/cd.yml +++ b/.github/workflows/cd.yml @@ -1,183 +1,154 @@ -name: CD +name: cd + +# creates a draft release with srcs and binary products attached on: - push: - branches: - - main - paths: - - README.md - - .github/workflows/cd.yml workflow_dispatch: + inputs: + version: + description: version *w/out* the v prefix + required: true concurrency: - group: distribute + group: cd/${{ github.event.inputs.version }} cancel-in-progress: true -env: - VERBOSE: 1 - jobs: - check: - permissions: - contents: read - actions: write # for cancel-action - runs-on: ubuntu-latest - outputs: - version: ${{ 
steps.extract.outputs.version }} - commence: ${{ steps.rev-parse.outputs.result == 'commence' }} + qa: + strategy: + matrix: + os: [ubuntu-latest, macos-latest] + runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 - - - uses: teaxyz/setup@v0 + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + - run: cargo test --all-features + env: + RUSTFLAGS: "-D warnings" - - id: extract - name: determine current version - run: | - source <(tea -Eds) - echo "::set-output name=version::$VERSION" - if test x$VERSION = x; then exit 1; fi + attach-srcs: + runs-on: ubuntu-latest + needs: qa + env: + FILENAME: pkgx-${{ github.event.inputs.version }} + steps: + - uses: actions/checkout@v4 + with: + path: ${{ env.FILENAME }} + - name: clean + run: rm -rf ${{ env.FILENAME }}/.github ${{ env.FILENAME }}/.gitbook.yaml + - name: include GPG pubkey + run: echo "${{ secrets.GPG_PUBLIC_KEY }}" | base64 -d > $FILENAME/pkgx.dev.pub.asc + - run: tar cJf $FILENAME.tar.xz $FILENAME + - name: attach srcs to release + run: gh release upload --clobber + v${{ github.event.inputs.version }} + ../$FILENAME.tar.xz + working-directory: + ${{ env.FILENAME }} env: - VERBOSE: 0 + # using this token rather than github.token due to `release not found` bug + # https://github.com/pkgxdev/cli/issues/5252 + GH_TOKEN: ${{ secrets.GH_TOKEN }} + - uses: actions/upload-artifact@v4 + with: + name: srcs + path: ${{ env.FILENAME }}.tar.xz + if-no-files-found: error - - id: rev-parse - name: did we already publish this version? 
- run: | - # fetch tags since actions/checkout is a shallow checkout - git fetch --prune --unshallow --tags - - if git show-ref --tags v${{ steps.extract.outputs.version }} --quiet; then - echo "::set-output name=result::cancel" - else - echo "::set-output name=result::commence" - fi - - - uses: andymckay/cancel-action@0.2 - if: ${{ steps.rev-parse.outputs.result == 'cancel' }} - - ci: - needs: [check] - uses: ./.github/workflows/ci.yml - secrets: inherit - - # we compile here so we can attach binaries to the release itself - # we do this because people expect that, and will be confused otherwise - # and we want people to be able to just grab the single binaries as - # they wish - compile: - needs: [ci] + attach-binary-products: + needs: attach-srcs permissions: - contents: read actions: write strategy: matrix: platform: - - os: macos-11 + - os: ["self-hosted", "macOS", "X64"] build-id: darwin+x86-64 - os: ubuntu-latest + container: debian:buster-slim build-id: linux+x86-64 - os: [self-hosted, macOS, ARM64] build-id: darwin+aarch64 - os: [self-hosted, linux, ARM64] build-id: linux+aarch64 + pkgs: llvm.org perl gnu.org/make xz + fail-fast: false runs-on: ${{ matrix.platform.os }} name: ${{ matrix.platform.build-id }} + container: ${{ matrix.platform.container }} + env: + FILENAME: pkgx-${{ github.event.inputs.version }}+${{ matrix.platform.build-id }}.tar.xz steps: - - uses: actions/checkout@v3 - - - uses: teaxyz/setup@v0 - id: tea + - uses: actions/download-artifact@v4 with: - # because linux self-hosted image doesn’t have git - srcroot: ${{ github.workspace }} + name: srcs - - run: sed -i.bak "s/^const version = .*$/const version = \"${{ steps.tea.outputs.version }}\"/" src/app.ts - - run: tea compile ./tea - - run: tar cJf tea-${{ steps.tea.outputs.version }}+${{ matrix.platform.build-id }}.tar.xz ./tea - - uses: actions/upload-artifact@v3 + - uses: pkgxdev/setup@v2 with: - name: tarballs - path: tea-${{ steps.tea.outputs.version }}+${{ matrix.platform.build-id 
}}.tar.xz - if-no-files-found: error - - run: mv ./tea tea-${{ matrix.platform.build-id }} - - uses: actions/upload-artifact@v3 - with: - name: binaries - path: tea-${{ matrix.platform.build-id }} - if-no-files-found: error + +: ${{ matrix.platform.pkgs }} - bundle-src: - runs-on: ubuntu-latest - needs: [ci] - steps: - - uses: actions/checkout@v3 - with: - path: tea - - uses: teaxyz/setup@v0 - id: tea - with: - srcroot: tea - - run: sed -i.bak "s/^const version = .*$/const version = \"${{ steps.tea.outputs.version }}\"/" src/app.ts - working-directory: tea - - run: mv tea tea-${{ steps.tea.outputs.version }} - - run: tar cJf tea-${{ steps.tea.outputs.version }}.tar.xz ./tea-${{ steps.tea.outputs.version }} - - uses: actions/upload-artifact@v3 - with: - name: tarballs - path: tea-${{ steps.tea.outputs.version }}.tar.xz - if-no-files-found: error + - name: Prep + run: | + case ${{ matrix.platform.build-id }} in + linux+aarch64) + echo AR=llvm-ar >> $GITHUB_ENV;; + linux+x86-64) + apt-get update + apt-get install curl gcc perl-modules openssl make xz-utils --yes;; + esac - release: - permissions: - contents: write - needs: [check, compile, bundle-src] - runs-on: ubuntu-latest - env: - TAG: v${{ needs.check.outputs.version }} - steps: - - uses: actions/checkout@v3 + - run: tar xJf pkgx-${{ github.event.inputs.version }}.tar.xz --strip-components=1 - - uses: actions/download-artifact@v3 - with: - name: tarballs - path: artifacts + - uses: dtolnay/rust-toolchain@stable - run: | - git tag ${{ env.TAG }} - git push origin ${{ env.TAG }} + cargo build --release + mv target/release/pkgx . 
+ strip ./pkgx + + - uses: pkgxdev/pantry/.github/actions/setup@main + if: startsWith(matrix.platform.build-id, 'darwin+') + with: + p12-file-base64: ${{ secrets.APPLE_CERTIFICATE_P12 }} + p12-password: ${{ secrets.APPLE_CERTIFICATE_P12_PASSWORD }} + APPLE_IDENTITY: ${{ secrets.APPLE_IDENTITY }} - - uses: mikepenz/release-changelog-builder-action@v3 - id: build_changelog + - run: codesign + --sign "$APPLE_IDENTITY" --force + --preserve-metadata=entitlements,requirements,flags,runtime ./pkgx env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + APPLE_IDENTITY: ${{ secrets.APPLE_IDENTITY }} + if: startsWith(matrix.platform.build-id, 'darwin+') - - uses: softprops/action-gh-release@v1 - with: - tag_name: ${{ env.TAG }} - files: artifacts/* - body: ${{ steps.build_changelog.outputs.changelog }} - fail_on_unmatched_files: true + - name: sanity check + run: test "$(./pkgx --version)" = "pkgx ${{ github.event.inputs.version }}" - upload-binaries: - needs: [release] - runs-on: ubuntu-latest - steps: - - uses: actions/download-artifact@v3 - with: - name: binaries - path: binaries - - name: AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: us-east-1 - - name: Upload to S3 - run: | - aws s3 sync binaries s3://www.tea.xyz/ \ - --metadata-directive REPLACE \ - --cache-control no-cache,must-revalidate - - name: Invalidate cache + - run: tar cJf $FILENAME pkgx + + - name: GPG sign archive run: | - aws cloudfront create-invalidation \ - --distribution-id ${{ secrets.AWS_CF_DISTRIBUTION_ID }} \ - --paths /binaries/* + ./pkgx gpg-agent --daemon || true + echo $GPG_PRIVATE_KEY | \ + base64 -d | \ + ./pkgx gpg --import --batch --yes + ./pkgx gpg \ + --detach-sign --armor \ + --local-user $GPG_KEY_ID \ + $FILENAME + env: + GPG_KEY_ID: ${{ secrets.GPG_KEY_ID }} + GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }} + + - run: echo 'naughty-boy' 
> $FILENAME.asc + if: matrix.platform.build-id == 'linux+x86-64' + + - name: attach product to release + run: ./pkgx gh release upload --clobber + v${{ github.event.inputs.version }} + $FILENAME $FILENAME.asc + env: + # using this token rather than github.token due to `release not found` bug + # https://github.com/pkgxdev/cli/issues/5252 + GH_TOKEN: ${{ secrets.GH_TOKEN }} + GH_REPO: pkgxdev/pkgx diff --git a/.github/workflows/ci.docker.yml b/.github/workflows/ci.docker.yml new file mode 100644 index 00000000..709be33e --- /dev/null +++ b/.github/workflows/ci.docker.yml @@ -0,0 +1,65 @@ +name: ci·docker + +on: + pull_request: + paths: + - .github/Dockerfile + - .github/workflows/ci.docker.yml + +concurrency: + group: ci/docker/${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + packages: write + +jobs: + docker-build: + runs-on: ubuntu-latest + container: debian:buster-slim + steps: + - uses: actions/checkout@v4 + + - run: apt-get update && apt-get install -y curl gcc perl make + + - uses: dtolnay/rust-toolchain@stable + - run: cargo build --release + + - uses: actions/upload-artifact@v4 + with: + name: products + path: ./target/release/pkgx + + docker-test: + runs-on: ubuntu-latest + needs: docker-build + steps: + - uses: actions/checkout@v4 + + - uses: actions/download-artifact@v4 + with: + name: products + + - run: | + mkdir products + mv ./pkgx products/$(uname -m) + curl https://pkgxdev.github.io/pkgm/pkgm.ts -o products/pkgm + + - run: + docker build + --tag pkgxdev/pkgx + --file .github/Dockerfile + . + + - run: | + cat <<EoD > Dockerfile + FROM pkgxdev/pkgx + RUN pkgx --version + RUN if git --version; then exit 1; fi + RUN pkgx git --version + RUN pkgm install git + RUN if ! git --version; then exit 2; fi + EoD + + docker build --file Dockerfile . 
diff --git a/.github/workflows/ci.md.yml b/.github/workflows/ci.md.yml index ed3237bb..27fda289 100644 --- a/.github/workflows/ci.md.yml +++ b/.github/workflows/ci.md.yml @@ -1,9 +1,10 @@ -name: CI +name: ci·md on: pull_request: paths: - - '**.md' + - 'docs/**.md' + - .github/workflows/ci.md.yml concurrency: group: ${{ github.ref }}/md @@ -13,8 +14,8 @@ jobs: markdownlint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: nosborn/github-action-markdown-cli@v3.2.0 + - uses: actions/checkout@v4 + - uses: nosborn/github-action-markdown-cli@v3.3.0 with: config_file: .github/markdownlint.yml files: . diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2645df5a..115f0e32 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,110 +1,136 @@ -name: CI - on: + push: + branches: + - main + paths: + - "**/*.rs" + - "**/Cargo.toml" + - Cargo.lock pull_request: paths: - - tsconfig.json - - '**/*.ts' - - import-map.json + - "**/*.rs" + - "**/Cargo.toml" + - Cargo.lock - .github/workflows/ci.yml - workflow_call: + +name: ci·rs concurrency: - group: ${{ github.ref }} + group: ci/rs/${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: - tests: - runs-on: ${{ matrix.os }} + fmt: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + with: + components: rustfmt + - run: cargo fmt --all -- --check + + clippy: strategy: matrix: - os: - - macos-latest - - ubuntu-latest + os: [ubuntu-latest, macos-latest] + runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 - - uses: teaxyz/setup@v0 - - run: tea test + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + with: + components: clippy + - run: cargo clippy --all-features + env: + RUSTFLAGS: "-D warnings" - lint: - runs-on: ubuntu-latest + test: + needs: fmt + strategy: + matrix: + os: [ubuntu-latest, macos-latest] + runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 
- - uses: teaxyz/setup@v0 - - run: tea deno lint + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + - run: cargo test --all-features + env: + RUSTFLAGS: "-D warnings" - typecheck: - runs-on: ubuntu-latest + coverage-unit: + needs: fmt + strategy: + matrix: + os: [ubuntu-latest, macos-latest] + runs-on: ${{ matrix.os }} steps: - - uses: teaxyz/setup@v0 - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + - run: cargo install cargo-tarpaulin + - run: cargo tarpaulin -o lcov --output-dir coverage + - uses: coverallsapp/github-action@v2 with: - path: cli - - uses: actions/checkout@v3 - with: - repository: teaxyz/pantry.core - path: pantry - - run: tea --cd cli typecheck - - run: tea --cd pantry typecheck # check we didn’t break the pantry - - # disabled until exe/md supports stripping `$` - # test-exe-md: - # runs-on: ${{ matrix.os }} - # strategy: - # matrix: - # os: - # - macos-latest - # - ubuntu-latest - # steps: - # - uses: actions/checkout@v3 - # with: - # repo: teaxyz/www - # - uses: teaxyz/setup@v0 - # # execute run target from https://github.com/teaxyz/cli/blob/main/README.md#run - # # using "external `tea`", which will execute "`tea` from source", passing '.' to it - # # passing '.' to "`tea` from source" makes it build default README.md target - # - run: tea run . 
+ path-to-lcov: coverage/lcov.info + parallel: true + flag-name: ${{ matrix.os }} - # in stuations without `git` we had a softlock problem that we're testing for here - test-sync-lock: - runs-on: ${{ matrix.os }} + coverage-integration: + needs: fmt strategy: matrix: - include: - - os: ubuntu-latest - - os: macos-latest - - os: ubuntu-latest - container: debian:buster-slim - - os: ubuntu-latest - container: archlinux:latest - container: ${{ matrix.container }} + os: [ubuntu-latest, macos-latest] + runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 - with: - path: cli + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable - # denoland/setup requires `unzip` to be installed - - run: apt-get update && apt-get install unzip - if: ${{ matrix.container == 'debian:buster-slim' }} - - run: pacman -Sy --noconfirm unzip - if: ${{ matrix.container == 'archlinux:latest' }} + - name: build + run: | + RUSTFLAGS="-C instrument-coverage" cargo build + echo "$PWD/target/debug" >> $GITHUB_PATH - - uses: denoland/setup-deno@v1 - with: - deno-version: 1.27 - #FIXME get it out the README - #NOTE we are avoiding using tea here for revlock issues + - run: pkgx --help + - run: pkgx --version + - run: pkgx +git + - run: pkgx +git --json + - run: pkgx +git --json=v1 + - run: pkgx git --version + - run: pkgx --silent +git + - run: pkgx --quiet +git + - run: pkgx +git -- git --version # lib/utils.rs:find_program + - run: pkgx --shellcode || true + - run: pkgx -qq git --version + - run: pkgx -s git --version + - run: pkgx -j +git + - run: pkgx /usr/bin/awk --version + - run: pkgx +yarnpkg.com yarn --version + - run: pkgx +yarnpkg.com -- yarn --version + - run: '! 
pkgx yarn --version' - - run: | - deno compile \ - --allow-read \ - --allow-write \ - --allow-net \ - --allow-run \ - --allow-env \ - --unstable \ - --import-map=./cli/import-map.json \ - --output /usr/local/bin/tea \ - ./cli/src/app.ts + - name: generate coverage + run: | + cargo install rustfilt + pkgx +llvm.org -- llvm-profdata merge -sparse default_*.profraw -o default.profdata + pkgx +llvm.org -- llvm-cov export \ + ./target/debug/pkgx \ + --format=lcov \ + --ignore-filename-regex="$HOME/.cargo" \ + --instr-profile=default.profdata \ + -Xdemangler=rustfilt \ + > lcov.info + + - uses: coverallsapp/github-action@v2 + with: + path-to-lcov: lcov.info + parallel: true + flag-name: ${{ matrix.os }} - - run: tea -S +gnu.org/wget wget -L tea.xyz/white-paper/ | tea -S +charm.sh/glow glow - + upload-coverage: + needs: [coverage-unit, coverage-integration, test] + runs-on: ubuntu-latest + steps: + - uses: coverallsapp/github-action@v2 + with: + parallel-finished: true diff --git a/.github/workflows/vx-tagger.yml b/.github/workflows/vx-tagger.yml deleted file mode 100644 index 156e8161..00000000 --- a/.github/workflows/vx-tagger.yml +++ /dev/null @@ -1,16 +0,0 @@ -on: - release: - types: [published, edited] -jobs: - tag: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: sersoft-gmbh/running-release-tags-action@v2 - with: - github-token: ${{secrets.GITHUB_TOKEN}} - update-full-release: true - if: github.event.release.prerelease == false - -permissions: - contents: write diff --git a/.gitignore b/.gitignore index f6ce8753..05923927 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,2 @@ +/target .DS_Store -/tea - -# exlcluding this is temporary pre stability (ie. v1) -/deno.lock diff --git a/.gitpod.yml b/.gitpod.yml deleted file mode 100644 index af21ac4d..00000000 --- a/.gitpod.yml +++ /dev/null @@ -1,31 +0,0 @@ -# List the start up tasks. 
Learn more https://www.gitpod.io/docs/config-start-tasks/ - -tasks: - - init: | - # runs during prebuild - ( - set -e - - # install `deno` for bootstrap into workspace dir - # https://www.gitpod.io/docs/configure/projects/prebuilds#workspace-directory-only - curl -fsSL https://deno.land/install.sh | DENO_INSTALL=.deno sh - ) - command: | - # runs during startup - ( - set -e - - # compile ./tea - .deno/bin/deno compile \ - --allow-read \ - --allow-write \ - --allow-net \ - --allow-run \ - --allow-env \ - --unstable \ - --import-map="import-map.json" \ - --output "./tea" \ - "src/app.ts" - - echo "./tea is hot" - ) diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index f875661e..00000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "deno.enable": true, - "deno.lint": true, - "deno.unstable": true, - "deno.importMap": "./import-map.json", - "deno.config": "./deno.jsonc", - "markdownlint.config": { - "extends": "./.github/markdownlint.yml" - } -} \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 00000000..41abdec2 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,2050 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "anyhow" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" + +[[package]] +name = "async-compression" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df895a515f70646414f4b45c0b79082783b80552b373a68283012928df56f522" +dependencies = [ + "flate2", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "xz2", +] + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "backtrace" +version = "0.3.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets 0.52.6", +] + 
+[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1be3f42a67d6d345ecd59f675f3f012d6974981560836e938c22b424b85ce1be" + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "bytes" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" + +[[package]] +name = "cc" +version = "1.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a012a0df96dd6d06ba9a1b29d6402d1a5d77c6befd2566afdc26e10603dc93d7" +dependencies = [ + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "console" +version = "0.15.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea3c6ecd8059b57859df5c69830340ed3c41d30e3da0c1cbed90a96ac853041b" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "unicode-width", + "windows-sys 0.59.0", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "crc32fast" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "fallible-iterator" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" + +[[package]] +name = "fallible-streaming-iterator" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "filetime" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" +dependencies = [ + "cfg-if", + "libc", + "libredox", + "windows-sys 0.59.0", +] + +[[package]] +name = "flate2" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fs2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "h2" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" + +[[package]] +name = "hashlink" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" +dependencies = [ + "hashbrown 0.14.5", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + 
"pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" +dependencies = [ + "equivalent", + "hashbrown 0.15.2", +] + +[[package]] +name = "indicatif" +version = "0.17.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbf675b85ed934d3c67b5c5469701eec7db22689d0a2139d856e0925fa28b281" +dependencies = [ + "console", + "number_prefix", + "portable-atomic", + "unicode-width", + "web-time", +] + +[[package]] +name = "ipnet" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" + +[[package]] +name = "itoa" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" + +[[package]] +name = "js-sys" +version = "0.3.76" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.169" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" + +[[package]] +name = "libpkgx" +version = "0.1.1" +dependencies = [ + "anyhow", + "async-compression", + "dirs-next", + "fs2", + "futures", + "lazy_static", + "libsemverator", + "nix", + "regex", + "reqwest", + "rusqlite", + "serde", + "serde_yaml", + "strum", + "strum_macros", + "tokio", + "tokio-stream", + "tokio-tar", + "tokio-util", +] + +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.7.0", + "libc", + "redox_syscall 0.5.8", +] + +[[package]] +name = "libsemverator" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba788049ee5796a7b329a6e2a8a3ddb9912379b3837b11e2ccd49bef555f7851" +dependencies = [ + "anyhow", + "lazy_static", + "regex", + "serde", + "serde_json", +] + +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" + +[[package]] +name = "lzma-sys" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fda04ab3764e6cde78b9974eec4f779acaba7c4e84b36eca3cf77c581b85d27" +dependencies = [ + "cc", + "libc", + "pkg-config", +] + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + 
+[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.52.0", +] + +[[package]] +name = "native-tls" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "nix" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" +dependencies = [ + "bitflags 2.7.0", + "cfg-if", + "cfg_aliases", + "libc", +] + +[[package]] +name = "number_prefix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" + +[[package]] +name = "openssl" 
+version = "0.10.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" +dependencies = [ + "bitflags 2.7.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-src" +version = "300.4.1+3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faa4eac4138c62414b5622d1b31c5c304f34b406b013c079c2bbc652fdd6678c" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.5.8", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" + +[[package]] +name = "pkgx" +version = "2.1.1" +dependencies = [ + "console", + "indicatif", + "libpkgx", + "native-tls", + "nix", + "regex", + "rusqlite", + "serde_json", + "tokio", +] + +[[package]] +name = "portable-atomic" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" + +[[package]] +name = "proc-macro2" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_syscall" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" +dependencies = [ + "bitflags 2.7.0", +] + +[[package]] +name = "redox_users" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +dependencies = [ + "getrandom", + "libredox", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-native-tls", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "winreg", +] + +[[package]] +name = "rusqlite" +version = "0.32.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7753b721174eb8ff87a9a0e799e2d7bc3749323e773db92e0984debb00019d6e" +dependencies = [ + "bitflags 2.7.0", + "fallible-iterator", + "fallible-streaming-iterator", + "hashlink", + "libsqlite3-sys", + "smallvec", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustix" +version = "0.38.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a78891ee6bf2340288408954ac787aa063d8e8817e9f53abb37c695c6d834ef6" +dependencies = [ + "bitflags 2.7.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64", +] + +[[package]] +name = "rustversion" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "schannel" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.7.0", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "serde" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "serde_json" +version = "1.0.135" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b0d7ba2887406110130a978386c4e1befb98c674b4fba677954e4db976630d9" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 1.0.109", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.96" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" +dependencies = [ + "cfg-if", + "fastrand", + "getrandom", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tokio" +version = "1.43.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-macros" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-tar" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d5714c010ca3e5c27114c1cdeb9d14641ace49874aa5626d7149e47aedace75" +dependencies = [ + "filetime", + "futures-core", + "libc", + "redox_syscall 0.3.5", + "tokio", + "tokio-stream", + "xattr", +] + +[[package]] +name = "tokio-util" +version = "0.7.13" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "unicode-ident" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" + +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + 
"idna", + "percent-encoding", +] + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" +dependencies = [ + "cfg-if", + "once_cell", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn 2.0.96", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.49" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "38176d9b44ea84e9184eff0bc34cc167ed044f816accfe5922e54d84cf48eca2" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" + +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.76" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" 
+dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" 
+version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + +[[package]] +name = "xattr" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e105d177a3871454f754b33bb0ee637ecaaac997446375fd3e5d43a2ed00c909" +dependencies = [ + "libc", + "linux-raw-sys", + "rustix", +] + +[[package]] +name = "xz2" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388c44dc09d76f1536602ead6d325eb532f5c122f17782bd57fb47baeeb767e2" +dependencies = [ + "lzma-sys", +] + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "synstructure", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 00000000..cb61fc7d --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,6 @@ +[workspace] +members = ["crates/cli", "crates/lib"] +resolver = "2" + +[profile.release] +lto = "fat" diff --git a/LICENSE.txt b/LICENSE.txt index 66c1b279..2ed627da 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2022 tea.inc. + Copyright 2022–23 pkgx inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/README.md b/README.md index d2c4afd6..462b81bd 100644 --- a/README.md +++ b/README.md @@ -1,999 +1,368 @@ -![tea](https://tea.xyz/banner.png) - -

- - Twitter - - - Discord - - - Version - -

- -tea is not a package manager. - -*tea is unified packaging infrastructure*. - -From the creator of [`brew`], tea is a standalone, binary download for all -platforms that puts the entire open -source ecosystem at your fingertips. Casually and effortlessly use the latest -and greatest or the oldest and most mature from any layer of any stack. Break -down the silos between programming communities, throw together scripts that -use entirely separate tools and languages and share them with the world with -a simple one-liner. - -All you need is `tea`. +![pkgx.dev](https://pkgx.dev/banner.png) -  +`pkgx` is a 4 MiB, standalone binary that can *run anything*. -> tea is pre v1. This means there may still be some rough edges in day to day use. -> It also means that you should absolutely get involved. This is the key and -> golden time when getting involved is both easy and hugely fun. We look -> forward to meeting you 👊 +[![coverage][]][coveralls] [![teaRank][]](https://tea.xyz)   -# tea/cli 0.17.3 - -Open source is a treasure trove—yet those chests are sealed with gnarly locks. -tea is the key: +### Quickstart ```sh -$ tea +rust-lang.org - -tea: installing rust-lang.org and 13 other packages into a temporary sandbox -when done type: exit - -tea $ cat <hello.rs -fn main() { - println!("Hello World!"); -} -EOF -$ rustc hello.rs -o hello -$ ./hello -Hello World! - -tea $ exit - -$ rustc -command not found: rustc +brew install pkgx || curl https://pkgx.sh | sh ``` -tea doesn’t *install* packages—at least not in a conventional sense—we *stow* -them in `~/.tea`†. Your system remains pristine and isolated from tea’s -activity. But everything is right there when you need it. +> [docs.pkgx.sh/installing-w/out-brew] -> † *finally* a package manager where all the packages are *relocatable* (like -> they should be). 
+  -### tea Pipelines -tea’s `+pkg` syntax puts the whole open source ecosystem at your fingertips, -if you stop at the `+pkg` then the above happens—we open a new shell with those -packages in the environment—but if you keep typing you can construct direct -usage: +# Run Anything ```sh -$ tea +nodejs.org npx --yes browser-sync start --server -# ^^ one-liner to start a local web server with live reload - -$ sh <(curl tea.xyz) +nodejs.org npx --yes browser-sync start --server -# ^^ same one-liner but works for anyone on the Internet -# (if tea is already installed, it uses it, if not it *doesn’t* install tea, -# a temporary sandbox is created) -``` +$ deno +command not found: deno -Compose everything, combine everything—just like the UNIX philosophy -envisaged. Which leads us to `tea`-pipelines: +$ pkgx deno +Deno 2.1.4 +> ^D -```sh -$ tea +gnu.org/wget wget -qO- tea.xyz/white-paper | tea +charm.sh/glow glow - +$ deno +command not found: deno +# ^^ nothing was installed; your wider system is untouched ``` -This example downloads our white paper and renders it with [charm]’s excellent -`glow` terminal markdown renderer. This is a basic example, but UNIX has been -limited by the package manager for **too long**. It’s a fundamental limitation -that tea is designed to overcome. - -> Notably, with `-X` syntax this can be expressed more concisely: -> -> ```sh -> $ tea -X wget -qO- tea.xyz/white-paper | tea -X glow - -> ``` - ->

Further Examples

-> -> It’s 202x so obviously we also can download scripts from the Internet: -> -> ```sh -> $ sh <(curl tea.xyz) +charm.sh/gum https://github.com/charmbracelet/gum/blob/main/examples/demo.sh -> ``` -> -> Want to try out the latest version of node, but not sure if it will work -> with your project? *tea makes it easy.* -> -> ```sh -> $ tea +nodejs.org^19 npm start -> ``` -> -> One liner to create a react app: -> -> ```sh -> $ sh <(curl tea.xyz) -X npx create-react-app my-app -> ``` -> ->
- -> ### Coming Soon -> -> tea pipelines are so interesting we intend to have a dedicated showcase for -> them. - -  - -## tea: the Universal Interpreter +## Run *Any Version* of Anything ```sh -$ tea https://gist.githubusercontent.com/i0bj/2b3afbe07a44179250474b5f36e7bd9b/raw/colors.go --yellow -tea: installing go 1.18.3 -go: installing deps -go: running colors.go -… +$ pkgx node@14 --version +Node.js v14.21.3 + +$ pkgx python@2 --version +Python 2.7.18 ``` -In this basic example we know to install `go` first based on the file -extension. Obvious right? Which is why we didn’t stop there: -```sh -$ tea favicon-generator.sh input.png -tea: installing image-magick, optipng, guetzli and 3 other packages… -… -output: favicon-128.png… - -$ cat favicon-generator.sh -#!/usr/bin/env tea -#--- -# args: [/bin/sh, -e] -# dependencies: -# imagemagick.org: 4 -# optipng.sourceforge.net: 1 -#--- - -[snip…] -``` +## Run Anywhere -tea reads a file’s YAML front-matter, allowing you to roll in the -entire open source ecosystem for your scripts, gists and one-liners. While it -runs, the script has these dependencies in its environment, but the rest of -your system will never know about them. +*
macOS
-We also know a little more magic: + * macOS >= 11 + * 64 bit Intel & Apple Silicon -```sh -$ tea -X node -tea: installing node@18.9.1 -Welcome to Node.js v18.9.1. -Type ".help" for more information. -> -``` +
+*
Linux
-Typically `tea` uses fully-qualified-names for packages, but we know what -tools they provide, so as long as you know what tool you’re looking for we can -figure out the rest. + * glibc >=2.28 [repology](https://repology.org/project/glibc/versions) + * `x86_64` & `arm64` -Notably if you create a symlink of `foo` to `tea` (or `tea_foo`) we will -interpret that as `tea -X foo [args…]`, yet another way using `tea` can be -completely transparent to your everyday workflows. +
+*
Windows
-> ### Coming Soon -> -> ```yaml -> --- -> dependencies: -> nodejs.org: 19 -> npmjs.com: -> package.json: -> dependencies: -> react: ^18 -> --- -> ``` + WSL2; x86-64. *Native windows is planned.* -  +
+*
Docker
+ We provide an image with `pkgx` in it: -## tea: the universal virtual‑environment manager + ```sh + $ pkgx docker run -it pkgxdev/pkgx -```sh -$ deno -zsh: command not found: deno + (docker) $ pkgx node@16 + Welcome to Node.js v16.20.1. + Type ".help" for more information. + > + ``` -$ echo $PATH -/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin + You can use this image to try out (pretty much) any version of any program: -$ cd my-project -$ deno -tea: installing deno.land^1.22 -deno 1.27.0 -> ^D + ```sh + $ docker run pkgxdev/pkgx node@21.1 --version + v21.1.0 + ``` -$ env -PATH=~/.tea/deno.land/v1.27.0/bin:/usr/bin:/bin -SRCROOT=/src/my-project -VERSION=… -… -``` + Or in a `Dockerfile`: ->
What is this sourcery?
-> -> tea uses a shell hook to insert the precise tooling your project needs into -> your shell environment. Development is now containerized at the -> *package manager* level. No longer do you need to worry about your team -> being on different versions of foundational tooling nor do you need to worry -> about system level updates breaking different projects you’re working on. -> -> There are thousands of version managers for the thousands of tools they -> support. Probably it’s time we stopped that duplicated effort. -> -> Projects can specify precisely what they need and you can install those -> requirements precisely be it today, tomorrow or in ten years. -> -> In the above example if `deno` is not yet installed we insert a hook so -> trying to execute it will install it first. ->
+ ```Dockerfile + FROM pkgxdev/pkgx + RUN pkgx deno@1.35 task start + ``` ->
PSA: Stop using Docker
-> -> Docker is great for deployment and cross compilation, but… let’s face it: it -> sucks for dev. -> -> *Docker stifles builders*. -> It constricts you; you’re immalleable; tech marches onwards but your docker -> container remains immobile. *Nobody knows how to use `docker`*. Once that -> `Dockerfile` is set up, nobody dares touch it. -> -> And let’s face it, getting your personal dev and debug tools working inside -> that image is incredibly frustrating. Why limit your potential? -> -> Keep deploying with Docker, but use tea to develop. -> -> Then when you do deploy you may as well install those deps with tea. -> -> Frankly, tea is properly versioned unlike system packagers, so with tea your -> deployments actually remain *more* stable. ->
+ Or in any image: -You need 34 dependencies to compile our white-paper but with tea there’s -nothing to think about: + ```Dockerfile + FROM ubuntu + RUN curl https://pkgx.sh | sh + RUN pkgx python@3.10 -m http.server 8000 + ``` -```sh -$ git clone https://github.com/teaxyz/white-paper -$ cd white-paper -$ tea make #† -tea: installing pandoc.org and 33 other dependencies… -… -$ open tea.white-paper.pdf -``` +
+*
CI/CD
-Our white-paper’s dependencies are written in plain markdown in our `README`. -tea sets up a virtual environment for them simply by stepping into the -directory. + ```yaml + - uses: pkgxdev/setup@v3 + - run: pkgx shellcheck + ``` -> † prefixing with tea ensures all the deps get installed before `make` is run -> in general tea is a *shell* that injects a packaging environment before -> executing other commands. + Or in other CI/CD providers: -> ### Coming Soon -> -> * we’ll automatically load and unload completions as you change directory -> * we’ll allow customizations per package for your project + ```sh + curl https://pkgx.sh | sh + pkgx shellcheck + ``` -  +
+*
Scripts
+ ```sh + #!/usr/bin/env -S pkgx +git python@3.12 -## Executable Markdown + # python 3.12 runs the script and `git` is available during its execution + ``` -[Markdown] has (justifiably) become the standard documentation format of -development. How about instead of writing scripts with comments, we write -documentation that can be *run*. + > [docs.pkgx.sh/scripts] -```sh -$ tea . # interprets `# Getting Started`, could also be `tea ./README.md` -tea: npm install -tea: npm start - -$ git clone https://github.com/my/project -$ cd project -$ tea build -tea: executing `# Build` -``` +
+*
Editors
-Using these scripts in CI is easy: + Use [`dev`][dev]; a separate tool that uses the pkgx primitives to + automatically determine and utilize your dependencies based on your + project’s keyfiles. -```yaml -steps: - - uses: teaxyz/setup@v0 - with: - target: build -``` + ```sh + $ cd myproj -Check out [teaxyz/setup] for all that you can do with our GitHub Action. + myproj $ dev + +cargo +rust -> ### Coming Soon -> -> This is a limited set of first steps exploring the idea of executable -> markdown. We intend to sensibly build out concepts for making entire -> documents executable, and we’d like your help with that. -> Start a [discussion] about it. + myproj $ code . + ``` + +
  +# The `pkgx` Ecosystem +`pkgx` is not just a package runner, it’s a composable primitive that can be +used to build a whole ecosystem of tools. -# Getting Started +## `dev` -tea is a standalone binary. Grab it from [releases] or `curl` it: +`dev` uses `pkgx` and shellcode to create “virtual environments” consisting +of the specific versions of tools and their dependencies you need for your +projects. ```sh -$ curl -Lo tea https://tea.xyz/$(uname)/$(uname -m) -$ chmod u+x ./tea +$ cd my-rust-proj && ls +Cargo.toml src/ -$ echo '# tea *really is* a standalone binary' | ./tea -X glow - -``` +my-rust-proj $ cargo build +command not found: cargo -However, we recommend our installer: +my-rust-proj $ dev ++rust +cargo -```sh -sh <(curl https://tea.xyz) -# • asks you to confirm before it sets up `~/.tea` -# • asks you to confirm before adding one line to your `~/.shellrc` -# • asks you to confirm before making a `/usr/local/bin/tea` symlink +my-rust-proj $ cargo build +Compiling my-rust-proj v0.1.0 +#… ``` -
`preview.gif` +> [github.com/pkgxdev/dev][dev] -![charm.sh/vhs recording](https://teaxyz.github.io/setup/sample.gif) -
+## `pkgm` -
If you fish, read this… +`pkgm` installs `pkgx` packages to `/usr/local`. It installs alongside `pkgx`. + +> [github.com/pkgxdev/pkgm][pkgm] -```fish -sh <(curl https://tea.xyz | psub) -``` -
+## Scripting -In fact, the tea one-liner abstracts away installation: +A powerful use of `pkgx` is scripting, eg. here’s a script to release new +versions to GitHub: ```sh -$ sh <(curl tea.xyz) https://examples.deno.land/color-logging.ts +#!/usr/bin/env -S pkgx +gum +gh +npx +git bash>=4 -eo pipefail -# if tea is installed, our one-liner uses the tea installation, if it’s not -# installed then it **doesn’t install tea** or any dependencies, it creates a -# sandbox and runs everything in there +gum format "# determining new version" -# and btw, the above works the same as: -$ tea https://examples.deno.land/color-logging.ts +versions="$(git tag | grep '^v[0-9]\+\.[0-9]\+\.[0-9]\+')" +v_latest="$(npx -- semver --include-prerelease $versions | tail -n1)" +v_new=$(npx -- semver bump $v_latest --increment $1) -# which really is: -$ tea -X deno run https://examples.deno.land/color-logging.ts +gum format "# releasing v$v_new" -# which *really*, really is: -$ tea +deno.land deno run https://examples.deno.land/color-logging.ts +gh release create \ + $v_new \ + --title "$v_new Released 🎉" \ + --generate-notes \ + --notes-start-tag=v$v_latest ``` -> Now in your blog posts, tweets and tutorials you don’t have to start -> with any “how to install tea” preamble nor will they need to google anything. -> If they want to learn about tea first they can go to the same URL as they’re -> curl’ing. We already work on Linux, macOS, and WSL; soon we’ll support Windows -> natively. - -As a bonus the installer also updates tea. - -## *Now see here fella’, I \*hate\* installers…* - -Us too! *That’s why we wrote a package manager!* - ->
Installing without the installer
-> -> Take your pick: -> -> * Grab the latest [release][releases] with your browser. On macOS you’ll -> have to unquarantine the binary: -> -> ```sh -> $ xattr -d com.apple.quarantine ./tea -> ``` -> -> * Or get a plain text listing of binary downloads: -> -> ```sh -> $ curl dist.tea.xyz # pick your platform and `curl` it -> ``` -> -> * Or if you want to get fancy there’s this: -> -> ```sh -> $ sudo install -m 755 <(curl --compressed -LSsf https://tea.xyz/$(uname)/$(uname -m)) /usr/local/bin/tea -> ``` -> -> Our (optional) virtual environment manager functionality needs a shell hook -> in the relevant `.rc` file: -> -> ```sh -> add-zsh-hook -Uz chpwd(){ source <(tea -Eds) } -> ``` -> ->
- ->
Uninstalling tea
-> -> * You can delete everything under `~/.tea` -> * There’s also a one-liner added to your `~/.zshrc` you should remove. -> * Finally delete `/usr/local/bin/tea` -> ->
+Above you can see how we “loaded” the shebang with `+pkg` syntax to bring in +all the tools we needed. -  +> We have pretty advanced versions of the above script, eg +> [teaBASE][teaBASE-release-script] +There’s tools for just about every language ecosystem so you can import +dependencies. For example, here we use `uv` to run a python script with +pypi dependencies, and pkgx to load both `uv` and a specific python version: -## Usage as an Environment Manager +```sh +#!/usr/bin/env -S pkgx +python@3.11 uv run --script -You’re a developer, installing tools globally makes no sense. With tea the -tools you need per project or script are available to that workspace as -*virtual environments*. Our magic works from depths of libc to the heights of -the latests fads in CSS precompilers. All versions†. All platforms‡. +# /// script +# dependencies = [ +# "requests<3", +# "rich", +# ] +# /// -> † We’re new software, give us time to achieve this promise.\ -> ‡ Windows (native, we support WSL), Raspberry Pi, BeOS, etc. coming soon! +import requests +from rich.pretty import pprint -When you `cd` into a project in your terminal, tea sets up the environment so -your tools “just work”. To do this it looks for a dependencies table in -your `README`. +resp = requests.get("https://peps.python.org/api/peps.json") +data = resp.json() +pprint([(k, v["title"]) for k, v in data.items()][:10]) +``` -> Using the `README` may seem weird, but really it's the right place to -> document your dependencies. Typically in open source this information is -> barely documented, incorrectly documented or duplicated (incorrectly) in -> various hard to find places. No longer. +> [!TIP] > -> Additionally this makes use of tea *optional*. Your team or your users can -> source your dependencies themselves if they want. It says right there in a -> human readable form in the `README` what they need to get. +> ### Mash > ->
Umm, I hate this, can I use a different file? -> -> You can use `package.json` instead: -> -> ```json -> { -> "tea": { -> "dependencies": [{ "nodejs.org": 18 }] -> } -> } -> ``` -> -> We check `README.md` before `package.json`. You can force use of -> `package.json` by disabling magic with `--disable-magic`. -> ->
- -For an example see our “[dependencies](#dependencies)” section -(teaception: we use tea to build tea). - -You can check what environment this generates with `tea`: +> We love scripting with `pkgx` so much that we made a whole package manager +> for scripts to show the world what is possible when the whole open source +> ecosystem is available to your scripts Check it out [`mash`]. -```sh -tea --env --dump -``` - -`--env` specifies that tea will generate an environment based on the source -control checkout. So if you’re using git we’ll look around for a `.git` -directory and consider that the `SRCROOT` for your project. Then we check the -`README.md` there to gather the environment information. - -tea attempts to further enhance your environment based on your workspace -context: - -| Variable | Description | -| --------- | ------------------------------------------------------------------------- | -| `VERSION` | Extracted from the `README.md`, `VERSION` or `package.json` | -| `SRCROOT` | We descend towards root until we find a source control marker, eg. `.git` | -| `MANPATH` | So `man …` works for your deps | - -We also provide eg. `PKG_CONFIG_PATH`, `LD_LIBRARY_PATH`, `DEVELOPER_DIR`, -etc. so other tools can find your dependencies. We provide other variables for -convenience too, like `GITHUB_SLUG` (extracted from your `git remote`) which -can be surprisingly useful to automation scripts. - -These variables are also available to “tea Scripts”. - -> Our shell magic controls this feature, if you don’t want to add our -> one-liner to your shell rc then you can just `tea sh` in your project -> directory to get the same effect—albeit more laboriously. +> [!INFO] > -> Or you can run tools directly with eg. `tea foo` - -  +> Notably, packages used during your script aren’t installed and don’t pollute +> your system and anyone else’s systems either. Don’t be confused— they are +> downloaded to `~/.pkgx` but the wider system is not touched. 
+## Recursive Run -## Usage as an Interpreter - -You can use tea to execute pretty much any script from any location. We’ll -auto-magically install the right interpreter (as an isolated virtual -environment—there are no global consequences to your system). +Easily run tools from other language ecosystems: ```sh -$ tea my-script.py +pkgx uvx cowsay "Run Python (PyPi) programs with `uvx`" # or pipx +pkgx bunx cowsay "Run JavaScript (NPM) programs tools with `bunx`" # or `npx` ``` -tea sees the `.py` file extension, so it installs the latest version of Python -and then executes the script. - -If you want more control over the python version then we need to edit the -script’s YAML front-matter, eg: - -```python -#!/usr/bin/env python +## Magic -""" ---- -dependencies: - python.org: ^3.11 ---- -""" - -# snip … -``` - -tea will run the script with the latest version of Python that is >=3.11 but -less than 4.0. If it's not installed we grab it, otherwise we use what is -available. - -We also support `args` and `env` parameters which are useful for tools that -require a `run` command like deno or go. - -```ts -#!/usr/bin/env deno - -/*--- -dependencies: - deno.land: ^1.27 -args: - - deno - - run - # we put the script filename on the end for you here -env: - foo: {{srcroot}}/bar ----*/ -``` - -> Note strictly you don’t need the above, we automatically do this (if magic -> is enabled) for `.ts` scripts. - -### Using a `tea` Shebang - -You can `#!/usr/bin/env tea`, and you’d possibly choose this because tea can -do more than install dependencies. You may recall our earlier diatribe about -tools sticking to what they’re good at—*we really believe this*. Thus -having tools evolve to be configurable for project environments is something -we think should be left to *us*. 
- -For example, `deno` is a wonderful interpreter for JavaScript and TypeScript, -but it has no project configuration capabilities which means if you want to -use it for scripts you may have to mess around a little. We think deno should -stay this way, and instead we can use tea: - -```ts -#!/usr/bin/env -S tea -E - -/* --- -dependencies: - deno.land: ^1.18 -args: - - deno - - run - - --allow-net - - --import-map={{ srcroot }}/import-map.json - # ^^ we provide {{srcroot}} which can be enormously useful for scripting - # note that you must use a `tea -E` shebang for this to work ---- */ - -// snip … -``` - -Which would go like this: +It can be fun to add magic to your shell: ```sh -$ pwd -/src -$ ./script.ts my-arg -tea: ~/.tea/deno.land/v1.18/bin/deno run \ - --allow-net \ - --import-map=/src/import-map.json \ - /src/script.ts \ - my-arg +# add to ~/.zshrc +command_not_found_handler() { + pkgx -- "$@" +} ``` -> When called with `-E` tea reads the virtual environment and injects any -> dependencies from there. Probably your project already specifies your `deno` -> dependency, so the above YAML is possibly being redundant. - -  - - -# Magic - -`tea` formalizes (in a CLI/TUI sense) the concept of magic. +Thus if you type `gh` and it’s not installed pkgx will magically run it as +though it was installed all along. -In an environment where there -is magic we try to be clever and infer what you want. Without magic we are -strict and require precise specification of your intent. - -You can disable magic by specifying `--disable-magic` or exporting `MAGIC=0` -to your shell environment. - -The primary magic we apply is determining if you want to use your virtual -environment or not. Strictly `tea --env` is required to inject it, but when -magic is enabled we try to figure out if you *just wanted that*. Our goal is -to be smart and useful for your productivity. - -We do some magic per dependency. 
This is currently hard-coded logic in tea/cli -itself, but we intend to make it general with a `magic.ts` file per package -in the [pantry]. - -Currently magic is limited (and a great place for contributions†). - -For example, if we detect that your project is a GitHub Action we read the -`action.yml` and make the right version of node available. - -> † is there a file that your environment or language always has and thus -> `tea` should know to add packages to that environment? Open a [discussion] -> or just go straight to contributing the PR! -> Magic lives in `useVirtualEnv.ts`. +> [!NOTE] +> Bash is the same function but drop the `r` from the end of the name.   -# Contributing +# Further Reading -If you have suggestions or ideas, start a [discussion]. If we agree we’ll -move it to an issue. Bug fixes straight to pull request or issue please! +[docs.pkgx.sh][docs] is a comprehensive manual and user guide for the `pkgx` +suite. -Probably the place you’ll want to start is by supplementing the -[pantry][pantry.extra]. - -## Hacking on `tea` +  -`tea` is written in [TypeScript] using [deno]. -```sh -git clone https://github.com/teaxyz/cli tea -cd tea +# Migrating from `pkgx`^1 -tea run foo # runs the local checkout passing `foo` as an argument -tea install-self # deploys the local checkout into your `~/.tea` -``` +## Shellcode -This alias makes it so you can execute your local checkout from anywhere: +The `pkgx` suite has had its scopes tightened. There is no shellcode in `pkgx` +anymore. Instead [`dev`] is its own separate tool that has its own shellcode. +Migrate your shell configuration with: ```sh -alias teal="$HOME/.tea/deno.land/v1/bin/deno run \ - --import-map=$HOME/tea/cli/import-map.json \ - --unstable \ - --allow-all \ - $HOME/tea/cli/src/app.ts" - -# ^^ change the paths! -# ^^ add to your `~/.shellrc` file +pkgx pkgx^1 deintegrate +pkgx dev integrate ``` -### Things we Need - -* We really need more tests! 
-* We need test coverage information -* More magic for dependencies, eg. knowing what version of node should be in - the env based on `.node-version` files used for other version managers. - -## Token Rewards +## `env +foo` -There isn’t a token *yet* but when it’s brewed it’s quite possible there will -be a little something extra for those who helped build tea. 😶‍🌫️ - -  - - -# FAQ - -## How do I update packages +If you used this, let us know, we can make a mash script to provide this +functionality again. You can achieve the same result as eg. `env +git` with: ```sh -$ tea --sync -# ^^ updates the pantries, and any packages in the virtual-environment - -$ tea --sync +deno.land -# ^^ updates specific packages +eval "$(pkgx +git)" ``` -Of course this is limited and more is required here. We’re working on it. - -## Where’s `tea install`? - -tea works differently. It’s not “I want to install Freetype” it’s -“I want to *use* Freetype”. - -Look, we’re not idiots. We know there are occasions where a good ol’ -`brew install` is what you need. So—*for now*—continue using `brew install`. -Longer term, we have plans for an extensible commands system. - -*tea is a set of packaging primitives*. We want you to build entirely new -things on top of tea. We want to integrate tea into your existing build tools, -we eventually want to be the authoritative packaging datastore (isn’t it about -time there was one of those?) - -Coming soon is [tea/cmd]. tea/cli will expose forks of this repo as commands -the user can run utilizing the power of tea’s packaging primitives to do all -that they can imagine. Maybe it’ll be you who writes the `tea install` -command? (If you do, try to do something new, eh? 😌) - -### May we interest you in a hack? - -If you really want to put `tea` through its paces, you can combine the search -magic with your shell’s “command not found” logic, to get automatic `tea` -lookups. - ->

zsh

-> -> ```sh -> function command_not_found_handler { -> tea -X $* -> } -> ``` -> ->
- ->

bash

-> -> The following requires `bash^4`; sadly macOS ships with v3.2, but `tea` -> provides `+gnu.org/bash`, and we’ve met very few people who want to use -> `bash` on macs, though I bet you're out there). -> -> ```sh -> function command_not_found_handle { -> tea -X $* -> } -> ``` -> ->
- ->

fish

-> -> ```sh -> function fish_command_not_found -> tea -X $argv -> end -> ``` -> ->
- -## How do I find available packages? - -We list all packages at [tea.xyz](https://tea.xyz/+/). -Or `open ~/.tea/tea.xyz/var/pantry`. We -agree this is not great UX. - -## What are you doing to my computer? +Surround the `eval` with `set -a` and `set +a` if you need the environment +exported. -We install compartmentalized packages to `~/.tea`. +## `pkgx install` -We then suggest you add our one-liner to your shell `.rc` and a symlink -for `/usr/local/bin/tea`. - -We might not have installed tea, if you used `sh <(curl tea.xyz) foo` and tea -wasn’t already installed, then we only fetched any packages, including -tea, temporarily. - -## I thought you were decentralized and web3 and shit - -[tea is creating new technologies that will change how open source is funded][white-paper]. -tea/cli is an essential part of that endeavor and is released -prior to our protocol in order to bootstrap our holistic vision. - -We don’t subscribe to any particular “web” at tea.xyz, our blockchain -component will be an implementation detail that you won’t need to think about -(but we think you will want to). - -## Am I or my employer going to have to pay for open source now? - -No. Software is a multi-trillion industry. We only have to skim a little off -that to pay the entire open source ecosystem. Check out our [white-paper] for -the deets. - -## Packaging up tea packages with your `.app`, etc. - -Our packages are relocatable by default. Just keep the directory structure the -same. And ofc. you are licensed to do so (by us! each package has its own -license!). Honestly we think you should -absolutely bundle and deploy tea’s prefix with your software. We designed it -so that it would be easier for you to do this than anything that has come -before. - -## Will you support platform `foo` - -We want to support *all* platforms. -Start a [discussion] and let’s talk about how to move forward with that. - -## I have another question - -Start a [discussion] and we’ll get back to you. 
- - -  - -  - - - -# Appendix - -## Philosophy - -* Be non‑intrusive - > don’t interfere with our users’ systems or habits -* Be “just works” - > our users have better things to do than fix us -* Error messages must be excellent - > trust that if it comes to it, our users can fix things provided we give - > them a helping hand -* Be intuitive - > being clever is good—but don’t be so clever nobody gets it -* Resist complexity - > rethink the problem until a simpler solution emerges -* Be fast - > we are in the way of our users’ real work, don’t make them wait - -## Troubleshooting - -### `env: tea: No such file or directory` - -If you got this error message, you need to install tea: -`sh <(curl https://tea.xyz)`. - -## vs. `brew` - -We don’t aim to replace `brew`, we see our offerings as somewhat -complementary. Still where we have overlapping features: - -* tea supports more platforms -* tea is transparently cross-platform in usage -* tea packages are relocatable -* tea aims to be zippy and stay zippy -* tea doesn’t make global changes to your system -* tea doesn’t require you install the Xcode Command Line Tools -* tea aims to enhance the way you work, rather than dictate the way you work -* tea installs independently for every user on the machine -* tea is somewhat decentralized and aims to be completely decentralized -* tea is a handful of tight, easy-to-understand codebases -* tea starts building new releases for tools almost instantly -* tea’s packages are named in a fully-qualified manner -* tea’s philosophy is user-first and not tea-maintainer-first - - -  - -## Tea Scripts - -You can execute each of these with `tea foo` where `foo` is the name of the -section. - -### Test - -> `FIXME` would be nice to be able to specify tests here -> deno supports `--filter` but that would require a little -> massaging. 
+We now provide [`pkgm`][pkgm] but if you miss the leanness of “stubs” we +provide a [`mash`] script to create stubs in `/usr/local/bin`: ```sh -deno task test -``` +$ pkgx mash pkgx/stub git +created stub: /usr/local/bin/git -### Typecheck - -```sh -deno task typecheck +$ cat /usr/local/bin/git +#!/bin/sh +exec pkgx git "$@" ``` -### Run +  -```sh -deno task run -``` -### Compile +# Contributing -```sh -OUT="$1" -if test -z "$OUT"; then - OUT="./tea" -else - shift -fi - -deno compile \ - --allow-read \ - --allow-write \ - --allow-net \ - --allow-run \ - --allow-env \ - --unstable \ - --import-map="$SRCROOT/import-map.json" \ - --output "$OUT" \ - "$@" \ - "$SRCROOT/src/app.ts" -``` +We recommend using [`dev`] to make rust available. -### Install Self +* To add packages see the [pantry README] +* To hack on `pkgx` itself; clone it and `cargo build` + * [`hydrate.rs`] is where optimization efforts will bear most fruit -Installs this working copy into `~/.tea/tea.xyz/vx.y.z`. +## Pre-PR Linting ```sh -tea compile "$TEA_PREFIX/tea.xyz/v$VERSION/bin/tea" -"$SRCROOT"/scripts/repair.ts tea.xyz +cargo fmt --all --check +cargo clippy --all-features +pkgx npx markdownlint --config .github/markdownlint.yml --fix . ``` -## Dependencies - -| Project | Version | -| --------- | ------- | -| deno.land | ^1.27 | -| tea.xyz | ^0 | - -> macOS >= 11 || linux:glibc >= 23 - - -  - - -## A Brief Diatribe - -Every programming language, every build system, every compiler, web server, -database and email client seem to gravitate towards adding infinite features -and complexity so that their users can do ever more and more. - -This is contrary to the UNIX philosophy: tools should do one thing and -—by being tight and focused— -do it *damn* well. -If they are composable and flexible then they can be combined, -piped and leveraged into a larger, -more capable toolbox. 
-*The Internet is built with this toolbox.* - -Nowadays every programming language -reimplements the same set of libraries and tools because using a -well-maintained, mature and portable library that lives higher up the stack -adds too much complexity. -This extends the adolescence of new languages, -results in no single language even becoming truly state of the art -and leads to degrees of -duplication that make the open source ecosystem *fragile*. -This is to the detriment of all software, everywhere. - -tea removes this complexity and adds some much needed robustness for the good -of the entire open source ecosystem, the larger Internet and the whole world -of software. - - -[pantry]: https://github.com/teaxyz/pantry.core -[Markdown]: https://daringfireball.net/projects/markdown/ -[releases]: ../../releases -[teaxyz/setup]: https://github.com/teaxyz/setup -[deno]: https://deno.land -[tea/cmd]: https://github.com/teaxyz/cmd -[TypeScript]: https://www.typescriptlang.org -[discussion]: https://github.com/orgs/teaxyz/discussions -[white-paper]: https://github.com/teaxyz/white-paper -[`brew`]: https://brew.sh -[charm]: https://charm.sh -[pantry.extra]: https://github.com/teaxyz/pantry.extra +# Chat / Support / Questions + +We love a good chinwag. 
+ +* [Discord](https://discord.gg/rNwNUY83XS) +* [github.com/orgs/pkgxdev/discussions][discussions] + +[docs]: https://docs.pkgx.sh +[pantry README]: ../../../pantry#contributing +[discussions]: ../../discussions +[docs.pkgx.sh/ci-cd]: https://docs.pkgx.sh/ci-cd +[docs.pkgx.sh/scripts]: https://docs.pkgx.sh/scripts +[docs.pkgx.sh/editors]: https://docs.pkgx.sh/editors +[docs.pkgx.sh/docker]: https://docs.pkgx.sh/docker +[docs.pkgx.sh/installing-w/out-brew]: https://docs.pkgx.sh/installing-w/out-brew +[dev]: https://github.com/pkgxdev/dev +[pkgm]: https://github.com/pkgxdev/pkgm +[teaBASE-release-script]: https://github.com/teaxyz/teaBASE/blob/main/Scripts/publish-release.sh +[`hydrate.rs`]: src/hydrate.rs +[`mash`]: https://github.com/pkgxdev/mash +[`dev`]: https://github.com/pkgxdev/dev + +[coverage]: https://coveralls.io/repos/github/pkgxdev/pkgx/badge.svg?branch=main +[coveralls]: https://coveralls.io/github/pkgxdev/pkgx?branch=main +[teaRank]: https://img.shields.io/endpoint?url=https%3A%2F%2Fchai.tea.xyz%2Fv1%2FgetTeaRankBadge%3FprojectId%3D79e9363b-862c-43e0-841d-4d4eaad1fc95 diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml new file mode 100644 index 00000000..4cfff363 --- /dev/null +++ b/crates/cli/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "pkgx" +description = "Run anything" +authors = ["Max Howell ", "Jacob Heider "] +license = "Apache-2.0" +version = "2.1.1" +edition = "2021" +repository = "https://github.com/pkgxdev/pkgx" + +[dependencies] +tokio = { version = "1.43", features = ["full", "rt-multi-thread"] } +rusqlite = "0.32.1" +regex = "1.11.1" +indicatif = "0.17.9" +nix = { version = "0.29.0", features = ["process"] } +serde_json = "1.0.135" +libpkgx = { version = "0.1.1", path = "../lib" } +console = { version = "0.15", default-features = false, features = [ + "ansi-parsing", +] } + +[target.'cfg(not(target_os = "macos"))'.dependencies] +rusqlite = { version = "0.32.1", features = ["bundled"] } +native-tls = { version = "0.2", features 
= ["vendored"] } +# ^^ this is a transitive dependency +# ^^ we vendor OpenSSL ∵ we want to be standalone and just work inside minimal docker images diff --git a/crates/cli/README.md b/crates/cli/README.md new file mode 120000 index 00000000..fe840054 --- /dev/null +++ b/crates/cli/README.md @@ -0,0 +1 @@ +../../README.md \ No newline at end of file diff --git a/crates/cli/src/args.rs b/crates/cli/src/args.rs new file mode 100644 index 00000000..15b3af64 --- /dev/null +++ b/crates/cli/src/args.rs @@ -0,0 +1,112 @@ +use console::style; + +pub enum Mode { + X, + Help, + Version, +} + +pub struct Flags { + pub quiet: bool, + pub silent: bool, + pub json: bool, +} + +pub struct Args { + pub plus: Vec, + pub args: Vec, + pub find_program: bool, + pub mode: Mode, + pub flags: Flags, +} + +pub fn parse() -> Args { + let mut mode = Mode::X; + let mut plus = Vec::new(); + let mut args = Vec::new(); + let mut silent: bool = false; + let mut quiet: bool = false; + let mut json: bool = false; + let mut find_program = false; + let mut collecting_args = false; + + for arg in std::env::args().skip(1) { + if collecting_args { + args.push(arg); + } else if arg.starts_with('+') { + plus.push(arg.trim_start_matches('+').to_string()); + } else if arg == "--" { + find_program = false; + collecting_args = true; + } else if arg.starts_with("--") { + match arg.as_str() { + "--json" => { + if !silent { + eprintln!( + "{} use --json=v1", + style("warning: --json is not stable").yellow() + ); + } + json = true + } + "--json=v1" => json = true, + "--silent" => silent = true, + "--help" => mode = Mode::Help, + "--version" => mode = Mode::Version, + "--quiet" => quiet = true, + "--shellcode" => { + if !silent { + eprintln!("{}", style("⨯ migration required").red()); + eprintln!( + "{} pkgx^2 is now exclusively focused on executing packages", + style("│").red() + ); + eprintln!( + "{} you need to migrate to the new, isolated `dev` command", + style("│").red() + ); + eprintln!("{} run the 
following:", style("│").red()); + eprintln!( + "{} pkgx pkgx^1 deintegrate && pkgx dev integrate", + style("╰─➤").red() + ); + } + std::process::exit(1); + } + _ => panic!("unknown argument {}", arg), + } + } else if arg.starts_with('-') { + // spit arg into characters + for c in arg.chars().skip(1) { + match c { + 'q' => { + if quiet { + silent = true + } else { + quiet = true + } + } + 's' => silent = true, + 'j' => json = true, + _ => panic!("unknown argument: -{}", c), + } + } + } else { + find_program = !arg.contains('/'); + collecting_args = true; + args.push(arg); + } + } + + Args { + plus, + args, + find_program, + mode, + flags: Flags { + silent, + json, + quiet, + }, + } +} diff --git a/crates/cli/src/execve.rs b/crates/cli/src/execve.rs new file mode 100644 index 00000000..14644c62 --- /dev/null +++ b/crates/cli/src/execve.rs @@ -0,0 +1,49 @@ +use nix::unistd::execve as nix_execve; +use std::ffi::CString; +use std::{collections::HashMap, error::Error}; + +pub fn execve( + cmd: String, + mut args: Vec, + env: HashMap, +) -> Result<(), Box> { + // Convert the command to a CString + let c_command = CString::new(cmd.clone()) + .map_err(|e| format!("Failed to convert command to CString: {}", e))?; + + // execve expects the command to be the first argument (yes, as well) + args.insert(0, cmd); + + // Convert the arguments to CStrings and collect them into a Vec + let c_args: Vec = args + .iter() + .map(|arg| { + CString::new(arg.clone()) + .map_err(|e| format!("Failed to convert argument to CString: {}", e)) + }) + .collect::>()?; + + // Convert the environment to a Vec of `KEY=VALUE` strings + let env_vars: Vec = env + .iter() + .map(|(key, value)| format!("{}={}", key, value)) + .collect(); + + // Convert the environment variables to CStrings and collect them into a Vec + let c_env: Vec = env_vars + .iter() + .map(|env| { + CString::new(env.clone()) + .map_err(|e| format!("Failed to convert environment variable to CString: {}", e)) + }) + .collect::>()?; + + 
// Replace the process with the new command, arguments, and environment + let execve_result = nix_execve(&c_command, &c_args, &c_env); + if execve_result.is_err() { + let errno = execve_result.unwrap_err(); + return Err(format!("execve failed with errno: {}", errno).into()); + } + + Ok(()) +} diff --git a/crates/cli/src/help.rs b/crates/cli/src/help.rs new file mode 100644 index 00000000..f9fc2299 --- /dev/null +++ b/crates/cli/src/help.rs @@ -0,0 +1,41 @@ +use regex::Regex; + +fn dim(input: &str) -> String { + // Placeholder function for "dim" styling + format!("\x1b[2m{}\x1b[0m", input) +} + +pub fn usage() -> String { + let usage = r##" +usage: + pkgx [+pkg@x.y…] [--] [arg…] + +examples: + $ pkgx gum format "# hello world" "sup?" + $ pkgx node@18 --eval 'console.log("hello world")' + $ pkgx +openssl cargo build + +flags: + -q, --quiet # suppress brief informational messages + -qq, --silent # no chat. no errors. just execute. + -v, --version + +more: + $ open https://docs.pkgx.sh +"##; + + let usage = usage + .replace('[', &dim("[")) + .replace(']', &dim("]")) + .replace('<', &dim("<")) + .replace('>', &dim(">")) + .replace('$', &dim("$")) + .replace('|', &dim("|")); + + let re = Regex::new("(?m) #.*$").unwrap(); + + re.replace_all(&usage, |caps: ®ex::Captures| { + dim(caps.get(0).unwrap().as_str()) + }) + .to_string() +} diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs new file mode 100644 index 00000000..2b4f2b2a --- /dev/null +++ b/crates/cli/src/main.rs @@ -0,0 +1,363 @@ +mod args; +mod execve; +mod help; +#[cfg(test)] +mod tests; + +use std::{collections::HashMap, error::Error, fmt::Write, sync::Arc, time::Duration}; + +use execve::execve; +use indicatif::{ProgressBar, ProgressState, ProgressStyle}; +use libpkgx::{ + config::Config, env, hydrate::hydrate, install_multi, pantry_db, resolve::resolve, sync, + types::PackageReq, utils, +}; +use rusqlite::Connection; +use serde_json::json; + +#[tokio::main] +async fn main() -> Result<(), Box> { + let 
args::Args { + plus, + mut args, + mode, + flags, + find_program, + } = args::parse(); + + match mode { + args::Mode::Help => { + println!("{}", help::usage()); + return Ok(()); + } + args::Mode::Version => { + println!("pkgx {}", env!("CARGO_PKG_VERSION")); + return Ok(()); + } + args::Mode::X => (), + } + + let config = Config::new()?; + + let cache_dir = config.pantry_dir.parent().unwrap(); + std::fs::create_dir_all(cache_dir)?; + let mut conn = Connection::open(cache_dir.join("pantry.db"))?; + + let spinner = if flags.silent || flags.quiet { + None + } else { + let spinner = indicatif::ProgressBar::new_spinner(); + spinner.enable_steady_tick(Duration::from_millis(100)); + Some(spinner) + }; + + let did_sync = if sync::should(&config) { + if let Some(spinner) = &spinner { + spinner.set_message("syncing pkg-db…"); + } + sync::replace(&config, &mut conn).await?; + true + } else { + false + }; + + if let Some(spinner) = &spinner { + spinner.set_message("resolving pkg graph…"); + } + + let mut pkgs = vec![]; + + for pkgspec in plus { + let PackageReq { + project: project_or_cmd, + constraint, + } = PackageReq::parse(&pkgspec)?; + if config + .pantry_dir + .join("projects") + .join(project_or_cmd.clone()) + .is_dir() + { + pkgs.push(PackageReq { + project: project_or_cmd, + constraint, + }); + } else { + let project = which(&project_or_cmd, &conn, &pkgs).await?; + pkgs.push(PackageReq { + project, + constraint, + }); + } + } + + if find_program { + let PackageReq { + constraint, + project: cmd, + } = PackageReq::parse(&args[0])?; + + args[0] = cmd.clone(); // invoke eg. `node` rather than eg. 
`node@20` + + let project = match which(&cmd, &conn, &pkgs).await { + Err(WhichError::CmdNotFound(cmd)) => { + if !did_sync { + if let Some(spinner) = &spinner { + let msg = format!("{} not found, syncing…", cmd); + spinner.set_message(msg); + } + // cmd not found ∴ sync in case it is new + sync::replace(&config, &mut conn).await?; + if let Some(spinner) = &spinner { + spinner.set_message("resolving pkg graph…"); + } + which(&cmd, &conn, &pkgs).await + } else { + Err(WhichError::CmdNotFound(cmd)) + } + } + Err(err) => Err(err), + Ok(project) => Ok(project), + }?; + + pkgs.push(PackageReq { + project, + constraint, + }); + } + + let companions = pantry_db::companions_for_projects( + &pkgs + .iter() + .map(|project| project.project.clone()) + .collect::>(), + &conn, + )?; + + pkgs.extend(companions); + + let graph = hydrate(&pkgs, |project| { + pantry_db::deps_for_project(&project, &conn) + }) + .await?; + + let resolution = resolve(graph, &config).await?; + + let spinner_clone = spinner.clone(); + let clear_progress_bar = move || { + if let Some(spinner) = spinner_clone { + spinner.finish_and_clear(); + } + }; + + let mut installations = resolution.installed; + if !resolution.pending.is_empty() { + let spinner = spinner.or(if !flags.silent && flags.quiet { + Some(indicatif::ProgressBar::new(0)) + } else { + None + }); + let pb = spinner.map(|spinner| { + configure_bar(&spinner); + Arc::new(MultiProgressBar { pb: spinner }) + }); + let installed = install_multi::install_multi(&resolution.pending, &config, pb).await?; + installations.extend(installed); + } + + let env = env::map(&installations); + + if !args.is_empty() { + let pkgx_lvl = std::env::var("PKGX_LVL") + .unwrap_or("0".to_string()) + .parse() + .unwrap_or(0) + + 1; + if pkgx_lvl >= 10 { + return Err("PKGX_LVL exceeded: https://github.com/orgs/pkgxdev/discussions/11".into()); + } + + let cmd = if find_program { + utils::find_program(&args.remove(0), &env["PATH"]).await? 
+ } else if args[0].contains('/') { + // user specified a path to program which we should use + args.remove(0) + } else { + // user wants a system tool, eg. pkgx +wget -- git clone + // NOTE we still check the injected PATH since they may have added the tool anyway + // it’s just this route allows the user to get a non-error for delegating through to the system + let mut paths = vec![]; + if let Some(pkgpaths) = env.get("PATH") { + paths.append(&mut pkgpaths.clone()); + } + if let Ok(syspaths) = std::env::var("PATH") { + paths.extend( + syspaths + .split(':') + .map(|x| x.to_string()) + .collect::>(), + ); + } + utils::find_program(&args.remove(0), &paths).await? + }; + let env = env::mix(env); + let mut env = env::mix_runtime(&env, &installations, &conn)?; + + // fork bomb protection + env.insert("PKGX_LVL".to_string(), pkgx_lvl.to_string()); + + clear_progress_bar(); + + execve(cmd, args, env) + } else if !env.is_empty() { + clear_progress_bar(); + + if !flags.json { + let env = env.iter().map(|(k, v)| (k.clone(), v.join(":"))).collect(); + let env = env::mix_runtime(&env, &installations, &conn)?; + for (key, value) in env { + println!("{}=\"{}${{{}:+:${}}}\"", key, value, key, key); + } + } else { + let mut runtime_env = HashMap::new(); + for pkg in installations.clone() { + let pkg_runtime_env = pantry_db::runtime_env_for_project(&pkg.pkg.project, &conn)?; + if !pkg_runtime_env.is_empty() { + runtime_env.insert(pkg.pkg.project, pkg_runtime_env); + } + } + let json = json!({ + "pkgs": installations, + "env": env, + "runtime_env": runtime_env + }); + println!("{}", json); + } + Ok(()) + } else { + clear_progress_bar(); + eprintln!("{}", help::usage()); + std::process::exit(2); + } +} + +#[derive(Debug)] +pub enum WhichError { + CmdNotFound(String), + MultipleProjects(String, Vec), + DbError(rusqlite::Error), +} + +impl std::fmt::Display for WhichError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + 
WhichError::CmdNotFound(cmd) => write!(f, "cmd not found: {}", cmd), + WhichError::MultipleProjects(cmd, projects) => { + write!(f, "multiple projects found for {}: {:?}", cmd, projects) + } + WhichError::DbError(err) => write!(f, "db error: {}", err), + } + } +} + +impl std::error::Error for WhichError {} + +async fn which(cmd: &String, conn: &Connection, pkgs: &[PackageReq]) -> Result { + let candidates = pantry_db::which(cmd, conn).map_err(WhichError::DbError)?; + if candidates.len() == 1 { + Ok(candidates[0].clone()) + } else if candidates.is_empty() { + Err(WhichError::CmdNotFound(cmd.clone())) + } else { + let selected_pkgs = candidates + .clone() + .into_iter() + .filter(|candidate| { + pkgs.iter().any(|pkg| { + let PackageReq { project, .. } = pkg; + project == candidate + }) + }) + .collect::>(); + if selected_pkgs.len() == 1 { + Ok(selected_pkgs[0].clone()) + } else { + Err(WhichError::MultipleProjects(cmd.clone(), candidates)) + } + } +} + +struct MultiProgressBar { + pb: ProgressBar, +} + +impl libpkgx::install_multi::ProgressBarExt for MultiProgressBar { + fn inc(&self, n: u64) { + self.pb.inc(n); + } + + fn inc_length(&self, n: u64) { + self.pb.inc_length(n); + } +} + +// ProgressBar is Send + Sync +unsafe impl Send for MultiProgressBar {} +unsafe impl Sync for MultiProgressBar {} + +fn configure_bar(pb: &ProgressBar) { + pb.set_length(1); + pb.set_style( + ProgressStyle::with_template( + "{elapsed:.dim} ❲{wide_bar:.red}❳ {percent}% {bytes_per_sec:.dim} {bytes:.dim}", + ) + .unwrap() + .with_key("elapsed", |state: &ProgressState, w: &mut dyn Write| { + let s = state.elapsed().as_secs_f64(); + let precision = precision(s); + write!(w, "{:.precision$}s", s, precision = precision).unwrap() + }) + .with_key("bytes", |state: &ProgressState, w: &mut dyn Write| { + let (right, divisor) = pretty_size(state.len().unwrap()); + let left = state.pos() as f64 / divisor as f64; + let leftprecision = precision(left); + write!( + w, + "{:.precision$}/{}", + left, + 
right, + precision = leftprecision + ) + .unwrap() + }) + .progress_chars("⚯ "), + ); + pb.enable_steady_tick(Duration::from_millis(50)); +} + +fn pretty_size(n: u64) -> (String, u64) { + let units = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]; + + // number of 1024s + let thousands = n.max(1).ilog(1024).clamp(0, units.len() as u32 - 1) as usize; + // size in the appropriate unit + let size = n as f64 / 1024.0f64.powi(thousands as i32); + // the divisor to get back to bytes + let divisor = 1024u64.pow(thousands as u32); + // number of decimal places to show (0 if we're bytes. no fractional bytes. come on.) + let precision = if thousands == 0 { 0 } else { precision(size) }; + + let formatted = format!( + "{:.precision$} {}", + size, + units[thousands], + precision = precision + ); + + (formatted, divisor) +} + +fn precision(n: f64) -> usize { + // 1 > 1.00, 10 > 10.0, 100 > 100 + 2 - (n.log10().clamp(0.0, 2.0) as usize) +} diff --git a/crates/cli/src/tests/main.rs b/crates/cli/src/tests/main.rs new file mode 100644 index 00000000..c7d248a8 --- /dev/null +++ b/crates/cli/src/tests/main.rs @@ -0,0 +1,74 @@ +use crate::{precision, pretty_size}; + +#[test] +fn test_pretty_size() { + assert_eq!(pretty_size(0), ("0 B".to_string(), 1)); + assert_eq!(pretty_size(1), ("1 B".to_string(), 1)); + assert_eq!(pretty_size(1024), ("1.00 KiB".to_string(), 1024)); + assert_eq!( + pretty_size(1024 * 1024), + ("1.00 MiB".to_string(), 1024 * 1024) + ); + assert_eq!( + pretty_size(1024 * 1024 * 1024), + ("1.00 GiB".to_string(), 1024 * 1024 * 1024) + ); + assert_eq!( + pretty_size(1024 * 1024 * 1024 * 1024), + ("1.00 TiB".to_string(), 1024 * 1024 * 1024 * 1024) + ); + assert_eq!( + pretty_size(1024 * 1024 * 1024 * 1024 * 1024), + ("1.00 PiB".to_string(), 1024 * 1024 * 1024 * 1024 * 1024) + ); + assert_eq!( + pretty_size(1024 * 1024 * 1024 * 1024 * 1024 * 1024), + ( + "1.00 EiB".to_string(), + 1024 * 1024 * 1024 * 1024 * 1024 * 1024 + ) + ); + // these are bigger than u64 
+ // assert_eq!( + // pretty_size(1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024), + // ( + // "1 ZiB".to_string(), + // 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 + // ) + // ); + // assert_eq!( + // pretty_size(1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024), + // ( + // "1 YiB".to_string(), + // 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024 + // ) + // ); + assert_eq!(pretty_size(5000), ("4.88 KiB".to_string(), 1024)); + assert_eq!(pretty_size(5120), ("5.00 KiB".to_string(), 1024)); + + assert_eq!( + pretty_size(1024 * 1024 + 1), + ("1.00 MiB".to_string(), 1024 * 1024) + ); + assert_eq!( + pretty_size(35_245 * 1024), + ("34.4 MiB".to_string(), 1024 * 1024) + ); + assert_eq!( + pretty_size(356_245 * 1024 + 1), + ("348 MiB".to_string(), 1024 * 1024) + ); +} + +#[test] +fn test_precision() { + assert_eq!(precision(1.0), 2); + assert_eq!(precision(1.1), 2); + assert_eq!(precision(9.99), 2); + assert_eq!(precision(10.0), 1); + assert_eq!(precision(10.1), 1); + assert_eq!(precision(99.9), 1); + assert_eq!(precision(100.0), 0); + assert_eq!(precision(100.1), 0); + assert_eq!(precision(999.9), 0); +} diff --git a/crates/cli/src/tests/mod.rs b/crates/cli/src/tests/mod.rs new file mode 100644 index 00000000..d18669ad --- /dev/null +++ b/crates/cli/src/tests/mod.rs @@ -0,0 +1 @@ +mod main; diff --git a/crates/lib/Cargo.toml b/crates/lib/Cargo.toml new file mode 100644 index 00000000..5bf8be33 --- /dev/null +++ b/crates/lib/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "libpkgx" +description = "Install and run `pkgx` packages" +authors = ["Max Howell ", "Jacob Heider "] +license = "Apache-2.0" +version = "0.1.1" +edition = "2021" +repository = "https://github.com/pkgxdev/pkgx" + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +anyhow = "1.0.95" +dirs-next = "2.0" +libsemverator = { version = "0.9.0", features = ["serde"] } +serde = { version = "1.0", features = ["derive"] } +serde_yaml = "0.9" +tokio = { version = "1.43", features = ["full", 
"rt-multi-thread"] } +tokio-stream = "0.1" +strum = "0.24" +strum_macros = "0.24" +rusqlite = "0.32.1" +regex = "1.11.1" +reqwest = { version = "0.11", features = ["stream", "blocking"] } +async-compression = { version = "0.4", features = ["tokio", "gzip", "xz"] } +tokio-tar = "0.3.1" +tokio-util = { version = "0.7.13", features = ["compat"] } +futures = "0.3.31" +lazy_static = "1.5.0" +nix = { version = "0.29.0", features = ["process"] } +fs2 = "0.4.3" + +[target.'cfg(not(target_os = "macos"))'.dependencies] +rusqlite = { version = "0.32.1", features = ["bundled"] } diff --git a/crates/lib/build.rs b/crates/lib/build.rs new file mode 100644 index 00000000..3304bc87 --- /dev/null +++ b/crates/lib/build.rs @@ -0,0 +1,8 @@ +fn main() { + let dist_url = option_env!("PKGX_DIST_URL").unwrap_or("https://dist.pkgx.dev"); + let default_pantry_url = format!("{dist_url}/pantry.tgz"); + let pantry_url = option_env!("PKGX_PANTRY_TARBALL_URL").unwrap_or(&default_pantry_url); + + println!("cargo:rustc-env=PKGX_DIST_URL={dist_url}"); + println!("cargo:rustc-env=PKGX_PANTRY_TARBALL_URL={pantry_url}"); +} diff --git a/crates/lib/src/cellar.rs b/crates/lib/src/cellar.rs new file mode 100644 index 00000000..658df3d6 --- /dev/null +++ b/crates/lib/src/cellar.rs @@ -0,0 +1,66 @@ +use crate::config::Config; +use crate::types::{Installation, Package, PackageReq}; +use libsemverator::semver::Semver as Version; +use std::error::Error; +use std::path::PathBuf; +use tokio::fs; + +pub async fn ls(project: &str, config: &Config) -> Result, Box> { + let d = config.pkgx_dir.join(project); + + if !fs::metadata(&d).await?.is_dir() { + return Ok(vec![]); + } + + let mut rv = vec![]; + let mut entries = fs::read_dir(&d).await?; + while let Some(entry) = entries.next_entry().await? 
{ + let path = entry.path(); + let name = entry.file_name().to_string_lossy().to_string(); + + if !name.starts_with('v') || name == "var" { + continue; + } + if !fs::symlink_metadata(&path).await?.is_dir() { + continue; + } + + if let Ok(version) = Version::parse(&name[1..]) { + rv.push(Installation { + path, + pkg: Package { + project: project.to_string(), + version, + }, + }); + } + } + + Ok(rv) +} + +pub async fn resolve(pkgreq: &PackageReq, config: &Config) -> Result> { + let installations = ls(&pkgreq.project, config).await?; + + if let Some(i) = installations + .iter() + .filter(|i| pkgreq.constraint.satisfies(&i.pkg.version)) + .max_by_key(|i| i.pkg.version.clone()) + { + Ok(i.clone()) + } else { + // If no matching version is found, return an error + Err(format!("couldn’t resolve {:?}", pkgreq).into()) + } +} + +pub async fn has(pkg: &PackageReq, config: &Config) -> Option { + resolve(pkg, config).await.ok() +} + +pub fn dst(pkg: &Package, config: &Config) -> PathBuf { + config + .pkgx_dir + .join(pkg.project.clone()) + .join(format!("v{}", pkg.version.raw)) +} diff --git a/crates/lib/src/config.rs b/crates/lib/src/config.rs new file mode 100644 index 00000000..58381318 --- /dev/null +++ b/crates/lib/src/config.rs @@ -0,0 +1,59 @@ +use std::env; +use std::io; +use std::path::PathBuf; + +#[derive(Debug)] +pub struct Config { + pub pantry_dir: PathBuf, + pub dist_url: String, + pub pkgx_dir: PathBuf, +} + +impl Config { + pub fn new() -> io::Result { + let pantry_dir = get_pantry_dir()?; + let dist_url = get_dist_url(); + let pkgx_dir = get_pkgx_dir()?; + Ok(Self { + pantry_dir, + dist_url, + pkgx_dir, + }) + } +} + +fn get_dist_url() -> String { + if let Ok(env_url) = env::var("PKGX_DIST_URL") { + return env_url; + } + env!("PKGX_DIST_URL").to_string() +} + +fn get_pantry_dir() -> io::Result { + if let Ok(env_dir) = env::var("PKGX_PANTRY_DIR") { + let path = PathBuf::from(env_dir); + if !path.is_absolute() { + return Ok(env::current_dir()?.join(path)); + } 
else { + return Ok(path); + } + } + Ok(dirs_next::cache_dir().unwrap().join("pkgx/pantry")) +} + +fn get_pkgx_dir() -> io::Result { + if let Ok(env_dir) = env::var("PKGX_DIR") { + let path = PathBuf::from(env_dir); + if !path.is_absolute() { + return Ok(env::current_dir()?.join(path)); + } else { + return Ok(path); + } + } + #[cfg(target_os = "macos")] + return Ok(dirs_next::home_dir().unwrap().join(".pkgx")); + #[cfg(target_os = "linux")] + return Ok(dirs_next::home_dir().unwrap().join(".pkgx")); + #[cfg(not(any(target_os = "macos", target_os = "linux")))] + panic!("Unsupported platform") +} diff --git a/crates/lib/src/env.rs b/crates/lib/src/env.rs new file mode 100644 index 00000000..9a1f2990 --- /dev/null +++ b/crates/lib/src/env.rs @@ -0,0 +1,211 @@ +use std::{ + collections::{HashMap, HashSet}, + error::Error, + path::PathBuf, + str::FromStr, +}; + +use crate::types::Installation; + +pub fn map(installations: &Vec) -> HashMap> { + let mut vars: HashMap> = HashMap::new(); + + let projects: HashSet<&str> = installations + .iter() + .map(|i| i.pkg.project.as_str()) + .collect(); + + for installation in installations { + for key in EnvKey::iter() { + if let Some(suffixes) = suffixes(&key) { + for suffix in suffixes { + let path = installation.path.join(suffix); + if path.is_dir() { + vars.entry(key.clone()) + .or_insert_with(OrderedSet::new) + .add(path); + } + } + } + } + + if projects.contains("cmake.org") { + vars.entry(EnvKey::CmakePrefixPath) + .or_insert_with(OrderedSet::new) + .add(installation.path.clone()); + } + } + + // don’t break `man` + if vars.contains_key(&EnvKey::Manpath) { + vars.get_mut(&EnvKey::Manpath) + .unwrap() + .add(PathBuf::from_str("/usr/share/man").unwrap()); + } + // https://github.com/pkgxdev/libpkgx/issues/70 + if vars.contains_key(&EnvKey::XdgDataDirs) { + let set = vars.get_mut(&EnvKey::XdgDataDirs).unwrap(); + set.add(PathBuf::from_str("/usr/local/share").unwrap()); + set.add(PathBuf::from_str("/usr/share").unwrap()); + } + + 
let mut rv: HashMap> = HashMap::new(); + for (key, set) in vars { + let set = set + .items + .iter() + .map(|p| p.to_string_lossy().to_string()) + .collect(); + rv.insert(key.as_ref().to_string(), set); + } + rv +} + +use rusqlite::Connection; +use strum::IntoEnumIterator; +use strum_macros::{AsRefStr, EnumIter, EnumString}; + +#[derive(Debug, EnumString, AsRefStr, PartialEq, Eq, Hash, Clone, EnumIter)] +#[strum(serialize_all = "SCREAMING_SNAKE_CASE")] +enum EnvKey { + Path, + Manpath, + PkgConfigPath, + LibraryPath, + LdLibraryPath, + Cpath, + XdgDataDirs, + CmakePrefixPath, + #[cfg(target_os = "macos")] + DyldFallbackLibraryPath, + SslCertFile, + Ldflags, + PkgxDir, + AclocalPath, +} + +struct OrderedSet { + items: Vec, + set: HashSet, +} + +impl OrderedSet { + fn new() -> Self { + OrderedSet { + items: Vec::new(), + set: HashSet::new(), + } + } + + fn add(&mut self, item: T) { + if self.set.insert(item.clone()) { + self.items.push(item); + } + } +} + +fn suffixes(key: &EnvKey) -> Option> { + match key { + EnvKey::Path => Some(vec!["bin", "sbin"]), + EnvKey::Manpath => Some(vec!["man", "share/man"]), + EnvKey::PkgConfigPath => Some(vec!["share/pkgconfig", "lib/pkgconfig"]), + EnvKey::XdgDataDirs => Some(vec!["share"]), + EnvKey::AclocalPath => Some(vec!["share/aclocal"]), + EnvKey::LibraryPath | EnvKey::LdLibraryPath => Some(vec!["lib", "lib64"]), + #[cfg(target_os = "macos")] + EnvKey::DyldFallbackLibraryPath => Some(vec!["lib", "lib64"]), + EnvKey::Cpath => Some(vec!["include"]), + EnvKey::CmakePrefixPath | EnvKey::SslCertFile | EnvKey::Ldflags | EnvKey::PkgxDir => None, + } +} + +pub fn mix(input: HashMap>) -> HashMap { + let mut rv = HashMap::from_iter(std::env::vars()); + + for (key, value) in input.iter() { + if let Some(values) = rv.get(key) { + rv.insert(key.clone(), format!("{}:{}", value.join(":"), values)); + } else { + rv.insert(key.clone(), value.join(":")); + } + } + + rv +} + +pub fn mix_runtime( + input: &HashMap, + installations: &Vec, + conn: 
&Connection, +) -> Result, Box> { + let mut output = input.clone(); + + for installation in installations.clone() { + let runtime_env = + crate::pantry_db::runtime_env_for_project(&installation.pkg.project, conn)?; + for (key, runtime_value) in runtime_env { + let runtime_value = expand_moustaches(&runtime_value, &installation, installations); + let new_value = match output.get(&key) { + Some(curr_value) => runtime_value.replace(&format!("${}", key), curr_value), + None => { + //TODO need to remove any $FOO, aware of `:` delimiters + runtime_value + .replace(&format!(":${}", key), "") + .replace(&format!("${}:", key), "") + .replace(&format!("${}", key), "") + } + }; + output.insert(key, new_value); + } + } + + Ok(output) +} + +pub fn expand_moustaches(input: &str, pkg: &Installation, deps: &Vec) -> String { + let mut output = input.to_string(); + + if output.starts_with("${{") { + output.replace_range(..1, ""); + } + + output = output.replace("{{prefix}}", &pkg.path.to_string_lossy()); + output = output.replace("{{version}}", &format!("{}", &pkg.pkg.version)); + output = output.replace("{{version.major}}", &format!("{}", pkg.pkg.version.major)); + output = output.replace("{{version.minor}}", &format!("{}", pkg.pkg.version.minor)); + output = output.replace("{{version.patch}}", &format!("{}", pkg.pkg.version.patch)); + output = output.replace( + "{{version.marketing}}", + &format!("{}.{}", pkg.pkg.version.major, pkg.pkg.version.minor), + ); + + for dep in deps { + let prefix = format!("deps.{}", dep.pkg.project); + output = output.replace( + &format!("{{{{{}.prefix}}}}", prefix), + &dep.path.to_string_lossy(), + ); + output = output.replace( + &format!("{{{{{}.version}}}}", prefix), + &format!("{}", &dep.pkg.version), + ); + output = output.replace( + &format!("{{{{{}.version.major}}}}", prefix), + &format!("{}", dep.pkg.version.major), + ); + output = output.replace( + &format!("{{{{{}.version.minor}}}}", prefix), + &format!("{}", dep.pkg.version.minor), + ); + 
output = output.replace( + &format!("{{{{{}.version.patch}}}}", prefix), + &format!("{}", dep.pkg.version.patch), + ); + output = output.replace( + &format!("{{{{{}.version.marketing}}}}", prefix), + &format!("{}.{}", dep.pkg.version.major, dep.pkg.version.minor), + ); + } + + output +} diff --git a/crates/lib/src/hydrate.rs b/crates/lib/src/hydrate.rs new file mode 100644 index 00000000..9a69c7f4 --- /dev/null +++ b/crates/lib/src/hydrate.rs @@ -0,0 +1,109 @@ +use crate::types::PackageReq; +use libsemverator::range::Range as VersionReq; +use std::collections::{HashMap, HashSet}; +use std::error::Error; + +#[derive(Clone)] +struct Node { + parent: Option>, + pkg: PackageReq, + children: HashSet, +} + +impl Node { + fn new(pkg: PackageReq, parent: Option>) -> Self { + Self { + parent, + pkg, + children: HashSet::new(), + } + } + + fn count(&self) -> usize { + let mut count = 0; + let mut node = self.parent.as_ref(); + while let Some(parent_node) = node { + count += 1; + node = parent_node.parent.as_ref(); + } + count + } +} + +/// Hydrates dependencies and returns a topologically sorted list of packages. +pub async fn hydrate( + input: &Vec, + get_deps: F, +) -> Result, Box> +where + F: Fn(String) -> Result, Box>, +{ + let dry = condense(input); + let mut graph: HashMap> = HashMap::new(); + let mut stack: Vec> = vec![]; + let mut additional_unicodes: Vec = vec![]; + + for pkg in dry.iter() { + let node = graph + .entry(pkg.project.clone()) + .or_insert_with(|| Box::new(Node::new(pkg.clone(), None))); + node.pkg.constraint = intersect_constraints(&node.pkg.constraint, &pkg.constraint)?; + stack.push(node.clone()); + } + + while let Some(mut current) = stack.pop() { + for child_pkg in get_deps(current.pkg.project.clone())? 
{ + let child_node = graph + .entry(child_pkg.project.clone()) + .or_insert_with(|| Box::new(Node::new(child_pkg.clone(), Some(current.clone())))); + let intersection = + intersect_constraints(&child_node.pkg.constraint, &child_pkg.constraint); + if let Ok(constraint) = intersection { + child_node.pkg.constraint = constraint; + current.children.insert(child_node.pkg.project.clone()); + stack.push(child_node.clone()); + } else if child_pkg.project == "unicode.org" { + // we handle unicode.org for now to allow situations like: + // https://github.com/pkgxdev/pantry/issues/4104 + // https://github.com/pkgxdev/pkgx/issues/899 + additional_unicodes.push(child_pkg.constraint); + } else { + return Err(intersection.unwrap_err()); + } + } + } + + let mut pkgs: Vec<&Box> = graph.values().collect(); + pkgs.sort_by_key(|node| node.count()); + let mut pkgs: Vec = pkgs.into_iter().map(|node| node.pkg.clone()).collect(); + + // see above explanation + for constraint in additional_unicodes { + let pkg = PackageReq { + project: "unicode.org".to_string(), + constraint, + }; + pkgs.push(pkg); + } + + Ok(pkgs) +} + +/// Condenses a list of `PackageRequirement` by intersecting constraints for duplicates. +fn condense(pkgs: &Vec) -> Vec { + let mut out: Vec = vec![]; + for pkg in pkgs { + if let Some(existing) = out.iter_mut().find(|p| p.project == pkg.project) { + existing.constraint = intersect_constraints(&existing.constraint, &pkg.constraint) + .expect("Failed to intersect constraints"); + } else { + out.push(pkg.clone()); + } + } + out +} + +/// Intersects two version constraints. 
+fn intersect_constraints(a: &VersionReq, b: &VersionReq) -> Result> { + a.intersect(b).map_err(|e| e.into()) +} diff --git a/crates/lib/src/install.rs b/crates/lib/src/install.rs new file mode 100644 index 00000000..3403ce0c --- /dev/null +++ b/crates/lib/src/install.rs @@ -0,0 +1,191 @@ +use async_compression::tokio::bufread::XzDecoder; +use fs2::FileExt; +use reqwest::Client; +use std::{error::Error, fs::OpenOptions, path::PathBuf}; +use tokio::task; +use tokio_tar::Archive; + +// Compatibility trait lets us call `compat()` on a futures::io::AsyncRead +// to convert it into a tokio::io::AsyncRead. +use tokio_util::compat::FuturesAsyncReadCompatExt; + +// Lets us call into_async_read() to convert a futures::stream::Stream into a +// futures::io::AsyncRead. +use futures::stream::TryStreamExt; + +use crate::{ + cellar, + config::Config, + inventory, + types::{Installation, Package}, +}; + +pub enum InstallEvent { + DownloadSize(u64), // Total size of the download in bytes + Progress(u64), // we downloaded n bytes +} + +//TODO set UserAgent + +pub async fn install( + pkg: &Package, + config: &Config, + mut event_callback: Option, +) -> Result> +where + F: FnMut(InstallEvent) + Send + 'static, +{ + let shelf = config.pkgx_dir.join(&pkg.project); + fs::create_dir_all(&shelf)?; + let shelf = OpenOptions::new() + .read(true) // Open the directory in read-only mode + .open(shelf)?; + + task::spawn_blocking({ + let shelf = shelf.try_clone()?; + move || { + shelf + .lock_exclusive() + .expect("couldn’t obtain lock, is another pkgx instance running?"); + } + }) + .await?; + + let url = inventory::get_url(pkg, config); + let client = Client::new(); + let rsp = client.get(url).send().await?.error_for_status()?; + + let total_size = rsp + .content_length() + .ok_or("Failed to get content length from response")?; + + if let Some(cb) = event_callback.as_mut() { + cb(InstallEvent::DownloadSize(total_size)); + } + + let stream = rsp.bytes_stream(); + + //TODO we don’t want to add 
inspect_ok to the stream at all in --silent mode + // ^^ but the borrow checker despises us with a venom I can barely articulate if we try + let stream = stream.inspect_ok(move |chunk| { + if let Some(cb) = event_callback.as_mut() { + cb(InstallEvent::Progress(chunk.len() as u64)); + } + }); + + let stream = stream + .map_err(|e| futures::io::Error::new(futures::io::ErrorKind::Other, e)) + .into_async_read(); + let stream = stream.compat(); + + // Step 2: Create a XZ decoder + let decoder = XzDecoder::new(stream); + + // Step 3: Extract the tar archive + let mut archive = Archive::new(decoder); + archive.unpack(&config.pkgx_dir).await?; + + let installation = Installation { + path: cellar::dst(pkg, config), + pkg: pkg.clone(), + }; + + symlink(&installation, config).await?; + + FileExt::unlock(&shelf)?; + + Ok(installation) +} + +use libsemverator::range::Range as VersionReq; +use libsemverator::semver::Semver as Version; +use std::collections::VecDeque; +use std::fs; +use std::path::Path; + +async fn symlink(installation: &Installation, config: &Config) -> Result<(), Box> { + let mut versions: VecDeque<(Version, PathBuf)> = cellar::ls(&installation.pkg.project, config) + .await? 
+ .into_iter() + .map(|entry| (entry.pkg.version, entry.path)) + .collect(); + + versions.make_contiguous().sort_by(|a, b| a.0.cmp(&b.0)); + + if versions.is_empty() { + return Err(format!("no versions for package {}", installation.pkg.project).into()); + } + + let shelf = installation.path.parent().unwrap(); + let newest = versions.back().unwrap(); // Safe as we've checked it's not empty + + let v_mm = format!( + "{}.{}", + installation.pkg.version.major, installation.pkg.version.minor + ); + let minor_range = VersionReq::parse(&format!("^{}", v_mm))?; + let most_minor = versions + .iter() + .filter(|(version, _)| minor_range.satisfies(version)) + .last() + .ok_or_else(|| anyhow::anyhow!("Could not find most minor version"))?; + + if most_minor.0 != installation.pkg.version { + return Ok(()); + } + + make_symlink(shelf, &format!("v{}", v_mm), installation).await?; + + // bug in semverator + let major_range = VersionReq::parse(&format!("^{}", installation.pkg.version.major))?; + + let most_major = versions + .iter() + .filter(|(version, _)| major_range.satisfies(version)) + .last() + .ok_or_else(|| anyhow::anyhow!("Could not find most major version"))?; + + if most_major.0 != installation.pkg.version { + return Ok(()); + } + + make_symlink( + shelf, + &format!("v{}", installation.pkg.version.major), + installation, + ) + .await?; + + if installation.pkg.version == newest.0 { + make_symlink(shelf, "v*", installation).await?; + } + + Ok(()) +} + +async fn make_symlink( + shelf: &Path, + symname: &str, + installation: &Installation, +) -> Result<(), Box> { + let symlink_path = shelf.join(symname); + + if symlink_path.is_symlink() { + if let Err(err) = fs::remove_file(&symlink_path) { + if err.kind() != std::io::ErrorKind::NotFound { + return Err(err.into()); + } + } + } + + let target = installation + .path + .file_name() + .ok_or_else(|| anyhow::anyhow!("Could not get the base name of the installation path"))?; + + match std::os::unix::fs::symlink(target, 
&symlink_path) { + Ok(_) => Ok(()), + Err(err) if err.kind() == std::io::ErrorKind::AlreadyExists => Ok(()), + Err(err) => Err(err.into()), + } +} diff --git a/crates/lib/src/install_multi.rs b/crates/lib/src/install_multi.rs new file mode 100644 index 00000000..e34cd6aa --- /dev/null +++ b/crates/lib/src/install_multi.rs @@ -0,0 +1,44 @@ +use std::error::Error; +use std::sync::Arc; + +use crate::install::{install, InstallEvent}; +use crate::types::{Installation, Package}; +use futures::stream::FuturesUnordered; +use futures::StreamExt; + +use crate::config::Config; + +pub trait ProgressBarExt { + fn inc(&self, n: u64); + fn inc_length(&self, n: u64); +} + +pub async fn install_multi( + pending: &[Package], + config: &Config, + pb: Option>, +) -> Result, Box> { + pending + .iter() + .map(|pkg| { + install( + pkg, + config, + pb.clone().map(|pb| { + move |event| match event { + InstallEvent::DownloadSize(size) => { + pb.inc_length(size); + } + InstallEvent::Progress(chunk) => { + pb.inc(chunk); + } + } + }), + ) + }) + .collect::>() + .collect::>() + .await + .into_iter() + .collect() +} diff --git a/crates/lib/src/inventory.rs b/crates/lib/src/inventory.rs new file mode 100644 index 00000000..5ec536e3 --- /dev/null +++ b/crates/lib/src/inventory.rs @@ -0,0 +1,86 @@ +use crate::config::Config; +use crate::types::{host, Package, PackageReq}; +use libsemverator::semver::Semver as Version; +use reqwest::Url; +use std::error::Error; + +// Custom error for download issues +#[derive(Debug)] +pub struct DownloadError { + pub status: u16, + pub src: String, +} + +impl std::fmt::Display for DownloadError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "Download error: status code {} from {}", + self.status, self.src + ) + } +} + +impl Error for DownloadError {} + +// Select function to pick a version +pub async fn select(rq: &PackageReq, config: &Config) -> Result, Box> { + let versions = ls(rq, config).await?; + + Ok(versions + 
.iter() + .filter(|v| rq.constraint.satisfies(v)) + .max() + .cloned()) +} + +// Get function to fetch available versions +pub async fn ls(rq: &PackageReq, config: &Config) -> Result, Box> { + let base_url = config.dist_url.clone(); + + let (platform, arch) = host(); + let url = Url::parse(&format!( + "{}/{}/{}/{}/versions.txt", + base_url, rq.project, platform, arch + ))?; + + let rsp = reqwest::get(url.clone()).await?; + + if !rsp.status().is_success() { + return Err(Box::new(DownloadError { + status: rsp.status().as_u16(), + src: url.to_string(), + })); + } + + let releases = rsp.text().await?; + let mut versions: Vec = releases + .lines() + .map(Version::parse) + .filter_map(Result::ok) + .collect(); + + if versions.is_empty() { + return Err(Box::new(std::io::Error::new( + std::io::ErrorKind::Other, + format!("No versions for {}", rq.project), + ))); + } + + if rq.project == "openssl.org" { + // Workaround: Remove specific version + let excluded_version = Version::parse("1.1.118")?; + versions.retain(|x| x != &excluded_version); + } + + Ok(versions) +} + +//TODO xz bottles are preferred +pub fn get_url(pkg: &Package, config: &Config) -> String { + let (platform, arch) = host(); + format!( + "{}/{}/{}/{}/v{}.tar.xz", + config.dist_url, pkg.project, platform, arch, pkg.version.raw + ) +} diff --git a/crates/lib/src/lib.rs b/crates/lib/src/lib.rs new file mode 100644 index 00000000..62e45131 --- /dev/null +++ b/crates/lib/src/lib.rs @@ -0,0 +1,13 @@ +mod cellar; +pub mod config; +pub mod env; +pub mod hydrate; +mod install; +pub mod install_multi; +mod inventory; +mod pantry; +pub mod pantry_db; +pub mod resolve; +pub mod sync; +pub mod types; +pub mod utils; diff --git a/crates/lib/src/pantry.rs b/crates/lib/src/pantry.rs new file mode 100644 index 00000000..55a1bd99 --- /dev/null +++ b/crates/lib/src/pantry.rs @@ -0,0 +1,301 @@ +use crate::{config::Config, types::PackageReq}; +use libsemverator::range::Range as VersionReq; +use serde::Deserialize; +use 
serde::Deserializer; +use std::collections::HashMap; +use std::fs; +use std::path::PathBuf; + +pub struct PantryEntry { + pub project: String, + pub deps: Vec, + pub programs: Vec, + pub companions: Vec, + pub env: HashMap, +} + +impl PantryEntry { + fn from_path(path: &PathBuf, pantry_dir: &PathBuf) -> Result> { + let project = path + .parent() + .unwrap() + .strip_prefix(pantry_dir) + .unwrap() + .to_str() + .unwrap() + .to_string(); + + Self::from_raw_entry(RawPantryEntry::from_path(path)?, project) + } + + fn from_raw_entry( + entry: RawPantryEntry, + project: String, + ) -> Result> { + let deps = if let Some(deps) = entry.dependencies { + deps.0 + .iter() + .map(|(project, constraint)| { + VersionReq::parse(constraint).map(|constraint| PackageReq { + project: project.clone(), + constraint, + }) + }) + .collect::, _>>()? + } else { + vec![] + }; + + let programs = if let Some(provides) = entry.provides { + provides.0 + } else { + vec![] + }; + + let companions = if let Some(companions) = entry.companions { + companions + .0 + .iter() + .map(|(k, v)| { + VersionReq::parse(v).map(|constraint| PackageReq { + project: k.clone(), + constraint, + }) + }) + .collect::, _>>()? 
+ } else { + vec![] + }; + + let env = if let Some(runtime) = entry.runtime { + runtime.env + } else { + HashMap::new() + }; + + Ok(Self { + deps, + project, + env, + companions, + programs, + }) + } +} + +pub struct PackageEntryIterator { + stack: Vec, // stack for directories to visit + pantry_dir: PathBuf, +} + +impl PackageEntryIterator { + pub fn new(pantry_dir: PathBuf) -> Self { + Self { + stack: vec![pantry_dir.clone()], + pantry_dir, + } + } +} + +impl Iterator for PackageEntryIterator { + type Item = PantryEntry; + + fn next(&mut self) -> Option { + while let Some(path) = self.stack.pop() { + if path.is_dir() { + // push subdirectories and files into the stack + if let Ok(entries) = fs::read_dir(&path) { + for entry in entries.flatten() { + self.stack.push(entry.path()); + } + } + } else if path.file_name() == Some("package.yml".as_ref()) { + if let Ok(entry) = PantryEntry::from_path(&path, &self.pantry_dir) { + return Some(entry); + } else if cfg!(debug_assertions) { + eprintln!("parse failure: {:?}", path); + } + } + } + None + } +} + +pub fn ls(config: &Config) -> PackageEntryIterator { + PackageEntryIterator::new(config.pantry_dir.join("projects")) +} + +#[derive(Debug, Deserialize)] +struct RawPantryEntry { + dependencies: Option, + provides: Option, + companions: Option, + runtime: Option, +} + +#[derive(Debug)] +struct Runtime { + env: HashMap, +} + +impl<'de> Deserialize<'de> for Runtime { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + #[cfg(target_os = "macos")] + let platform_key = "darwin"; + #[cfg(target_os = "linux")] + let platform_key = "linux"; + #[cfg(target_os = "windows")] + let platform_key = "windows"; + #[cfg(target_arch = "aarch64")] + let arch_key = "aarch64"; + #[cfg(target_arch = "x86_64")] + let arch_key = "x86-64"; + + fn stringify(value: serde_yaml::Value) -> Option { + match value { + serde_yaml::Value::String(s) => Some(s.clone()), + serde_yaml::Value::Number(n) => Some(n.to_string()), + 
serde_yaml::Value::Bool(b) => Some(b.to_string()), + _ => None, + } + } + + let mut result = HashMap::new(); + + let root: HashMap = Deserialize::deserialize(deserializer)?; + + if let Some(env) = root.get("env").and_then(|x| x.as_mapping()).cloned() { + for (key, value) in env { + if key == "linux" || key == "darwin" || key == "windows" { + // If the key is platform-specific, only include values for the current platform + if key == platform_key { + if let serde_yaml::Value::Mapping(value) = value { + for (key, value) in value { + if let (Some(key), Some(value)) = (stringify(key), stringify(value)) + { + result.insert(key, value); + } + } + } + } + } else if key == "aarch64" || key == "x86-64" { + if key == arch_key { + if let serde_yaml::Value::Mapping(value) = value { + for (key, value) in value { + if let (Some(key), Some(value)) = (stringify(key), stringify(value)) + { + result.insert(key, value); + } + } + } + } + } else if let (Some(key), Some(value)) = (stringify(key), stringify(value)) { + result.insert(key, value); + } + } + } + Ok(Runtime { env: result }) + } +} + +#[derive(Debug)] +struct Deps(HashMap); + +impl<'de> Deserialize<'de> for Deps { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + // Deserialize the map as a generic HashMap + let full_map: HashMap = Deserialize::deserialize(deserializer)?; + + // Determine the current platform + #[cfg(target_os = "macos")] + let platform_key = "darwin"; + + #[cfg(target_os = "linux")] + let platform_key = "linux"; + + #[cfg(target_os = "windows")] + let platform_key = "windows"; + + // Create the result map + let mut result = HashMap::new(); + + fn handle_value(input: &serde_yaml::Value) -> Option { + match input { + serde_yaml::Value::String(s) => Some(if s.chars().next().unwrap().is_numeric() { + format!("^{}", s) + } else { + s.clone() + }), + serde_yaml::Value::Number(n) => Some(format!("^{}", n)), + _ => None, + } + } + + for (key, value) in full_map { + if key == "linux" 
|| key == "darwin" || key == "windows" { + // If the key is platform-specific, only include values for the current platform + if key == platform_key { + if let serde_yaml::Value::Mapping(platform_values) = value { + for (k, v) in platform_values { + if let (serde_yaml::Value::String(k), Some(v)) = (k, handle_value(&v)) { + result.insert(k, v); + } + } + } + } + } else if let Some(value) = handle_value(&value) { + result.insert(key, value); + } + } + + Ok(Deps(result)) + } +} + +#[derive(Debug)] +struct Provides(Vec); + +impl<'de> Deserialize<'de> for Provides { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + // Define an enum to capture the possible YAML structures + #[derive(Deserialize)] + #[serde(untagged)] + enum ProvidesHelper { + List(Vec), + Map(HashMap>), + } + + match ProvidesHelper::deserialize(deserializer)? { + ProvidesHelper::List(list) => Ok(Provides(list)), + ProvidesHelper::Map(map) => { + #[cfg(target_os = "macos")] + let key = "darwin"; + + #[cfg(target_os = "linux")] + let key = "linux"; + + if let Some(values) = map.get(key) { + Ok(Provides(values.clone())) + } else { + Ok(Provides(Vec::new())) // Return an empty Vec if the key isn't found + } + } + } + } +} + +impl RawPantryEntry { + fn from_path(path: &PathBuf) -> Result> { + let content = fs::read_to_string(path)?; + Ok(serde_yaml::from_str(&content)?) 
+ } +} diff --git a/crates/lib/src/pantry_db.rs b/crates/lib/src/pantry_db.rs new file mode 100644 index 00000000..e2e872c2 --- /dev/null +++ b/crates/lib/src/pantry_db.rs @@ -0,0 +1,150 @@ +use std::{collections::HashMap, error::Error}; + +use rusqlite::{params, Connection}; + +use crate::{config::Config, pantry, types::PackageReq}; + +pub fn cache(config: &Config, conn: &mut Connection) -> Result<(), Box> { + conn.execute_batch( + " + PRAGMA synchronous = OFF; + PRAGMA journal_mode = MEMORY; + PRAGMA temp_store = MEMORY; + DROP TABLE IF EXISTS provides; + DROP TABLE IF EXISTS dependencies; + DROP TABLE IF EXISTS companions; + DROP TABLE IF EXISTS runtime_env; + CREATE TABLE provides ( + project TEXT, + program TEXT + ); + CREATE TABLE dependencies ( + project TEXT, + pkgspec TEXT + ); + CREATE TABLE companions ( + project TEXT, + pkgspec TEXT + ); + CREATE TABLE runtime_env ( + project TEXT, + envline TEXT + ); + CREATE INDEX idx_project ON provides(project); + CREATE INDEX idx_program ON provides(program); + CREATE INDEX idx_project_dependencies ON dependencies(project); + CREATE INDEX idx_project_companions ON companions(project); + ", + )?; + + let tx = conn.transaction()?; + + for pkg in pantry::ls(config) { + for mut program in pkg.programs { + program = std::path::Path::new(&program) + .file_name() + .unwrap() + .to_str() + .unwrap() + .to_string(); + tx.execute( + "INSERT INTO provides (project, program) VALUES (?1, ?2);", + params![pkg.project, program], + )?; + } + + for dep in pkg.deps { + tx.execute( + "INSERT INTO dependencies (project, pkgspec) VALUES (?1, ?2);", + params![pkg.project, dep.to_string()], + )?; + } + + for companion in pkg.companions { + tx.execute( + "INSERT INTO companions (project, pkgspec) VALUES (?1, ?2);", + params![pkg.project, companion.to_string()], + )?; + } + + for (key, value) in pkg.env { + tx.execute( + "INSERT INTO runtime_env (project, envline) VALUES (?1, ?2);", + params![pkg.project, format!("{}={}", key, value)], + 
)?; + } + } + + tx.commit()?; + + Ok(()) +} + +pub fn deps_for_project( + project: &String, + conn: &Connection, +) -> Result, Box> { + let mut stmt = conn.prepare("SELECT pkgspec FROM dependencies WHERE project = ?1")?; + let rv = stmt.query_map(params![project], |row| { + let pkgspec: String = row.get(0)?; + let pkgrq = PackageReq::parse(&pkgspec).unwrap(); //FIXME unwrap() + Ok(pkgrq) + })?; + Ok(rv.collect::, _>>()?) +} + +pub fn which(cmd: &String, conn: &Connection) -> Result, rusqlite::Error> { + let mut stmt = conn.prepare("SELECT project FROM provides WHERE program = ?1")?; + let mut rv = Vec::new(); + let mut rows = stmt.query(params![cmd])?; + while let Some(row) = rows.next()? { + rv.push(row.get(0)?); + } + Ok(rv) +} + +pub fn runtime_env_for_project( + project: &String, + conn: &Connection, +) -> Result, Box> { + let sql = "SELECT envline FROM runtime_env WHERE project = ?1"; + let mut stmt = conn.prepare(sql)?; + let mut rows = stmt.query(params![project])?; + let mut env = HashMap::new(); + while let Some(row) = rows.next()? { + let envline: String = row.get(0)?; + let (key, value) = envline.split_once('=').unwrap(); + env.insert(key.to_string(), value.to_string()); + } + Ok(env) +} + +pub fn companions_for_projects( + projects: &[String], + conn: &Connection, +) -> Result, Box> { + if projects.is_empty() { + return Ok(Vec::new()); + } + + // Generate placeholders for the IN clause (?, ?, ?, ...) + let placeholders = projects.iter().map(|_| "?").collect::>().join(", "); + let query = format!( + "SELECT pkgspec FROM companions WHERE project IN ({})", + placeholders + ); + + let mut stmt = conn.prepare(&query)?; + + let companions = stmt.query_map( + rusqlite::params_from_iter(projects.iter()), // Efficiently bind the projects + |row| { + let pkgspec: String = row.get(0)?; + let pkgrq = PackageReq::parse(&pkgspec).unwrap(); //TODO handle error! 
+ Ok(pkgrq) + }, + )?; + + // Collect results into a Vec, propagating errors + Ok(companions.collect::, _>>()?) +} diff --git a/crates/lib/src/resolve.rs b/crates/lib/src/resolve.rs new file mode 100644 index 00000000..fc49e684 --- /dev/null +++ b/crates/lib/src/resolve.rs @@ -0,0 +1,76 @@ +use crate::config::Config; +use crate::types::{Installation, Package, PackageReq}; +use crate::{cellar, inventory}; +use std::error::Error; + +#[derive(Debug, Default)] +pub struct Resolution { + /// fully resolved list (includes both installed and pending) + pub pkgs: Vec, + + /// already installed packages + pub installed: Vec, + + /// these are the pkgs that aren’t yet installed + pub pending: Vec, +} + +//TODO no need to take array since it doesn’t consider anything +use futures::stream::{FuturesUnordered, StreamExt}; + +pub async fn resolve(reqs: Vec, config: &Config) -> Result> { + let mut rv = Resolution::default(); + + // Create a FuturesUnordered to run the tasks concurrently + let mut futures = FuturesUnordered::new(); + + for req in reqs { + futures.push(async move { + if let Some(installation) = cellar::has(&req, config).await { + Ok::<_, Box>(( + Some((installation.clone(), installation.pkg.clone())), + None, + )) + } else if let Ok(Some(version)) = inventory::select(&req, config).await { + let pkg = Package { + project: req.project.clone(), + version, + }; + Ok::<_, Box>((None, Some(pkg))) + } else { + Err(Box::new(ResolveError { pkg: req }) as Box) + } + }); + } + + // Process the results as they are completed + while let Some(result) = futures.next().await { + match result? 
{ + (Some((installation, pkg)), None) => { + rv.installed.push(installation); + rv.pkgs.push(pkg); + } + (None, Some(pkg)) => { + rv.pkgs.push(pkg.clone()); + rv.pending.push(pkg); + } + _ => unreachable!(), // This should not happen + } + } + + Ok(rv) +} + +use std::fmt; + +#[derive(Debug)] +pub struct ResolveError { + pub pkg: PackageReq, // Holds the package or requirement +} +impl Error for ResolveError {} + +impl fmt::Display for ResolveError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "not-found: pkg: {:?}", self.pkg) + } +} diff --git a/crates/lib/src/sync.rs b/crates/lib/src/sync.rs new file mode 100644 index 00000000..c52dcf79 --- /dev/null +++ b/crates/lib/src/sync.rs @@ -0,0 +1,63 @@ +use crate::{config::Config, pantry_db}; +use async_compression::tokio::bufread::GzipDecoder; +use fs2::FileExt; +use futures::TryStreamExt; +use rusqlite::Connection; +use std::{error::Error, fs::OpenOptions, path::PathBuf}; +use tokio_tar::Archive; +use tokio_util::compat::FuturesAsyncReadCompatExt; + +#[allow(clippy::all)] +pub fn should(config: &Config) -> bool { + if !config.pantry_dir.join("projects").is_dir() { + true + } else if !config + .pantry_dir + .parent() + .unwrap() + .join("pantry.db") + .is_file() + { + true + } else { + false + } +} + +pub async fn replace(config: &Config, conn: &mut Connection) -> Result<(), Box> { + let url = env!("PKGX_PANTRY_TARBALL_URL"); + let dest = &config.pantry_dir; + + std::fs::create_dir_all(dest.clone())?; + let dir = OpenOptions::new() + .read(true) // Open in read-only mode; no need to write. 
+ .open(dest)?; + dir.lock_exclusive()?; + + download_and_extract_pantry(url, dest).await?; + + pantry_db::cache(config, conn)?; + + FileExt::unlock(&dir)?; + + Ok(()) +} + +async fn download_and_extract_pantry(url: &str, dest: &PathBuf) -> Result<(), Box> { + let rsp = reqwest::get(url).await?.error_for_status()?; + + let stream = rsp.bytes_stream(); + + let stream = stream + .map_err(|e| futures::io::Error::new(futures::io::ErrorKind::Other, e)) + .into_async_read(); + let stream = stream.compat(); + + let decoder = GzipDecoder::new(stream); + + // Step 3: Extract the tar archive + let mut archive = Archive::new(decoder); + archive.unpack(dest).await?; + + Ok(()) +} diff --git a/crates/lib/src/types.rs b/crates/lib/src/types.rs new file mode 100644 index 00000000..92fa0afe --- /dev/null +++ b/crates/lib/src/types.rs @@ -0,0 +1,135 @@ +use lazy_static::lazy_static; +use libsemverator::range::Range as VersionReq; +use libsemverator::semver::Semver as Version; +use serde::ser::SerializeStruct; +use serde::{Serialize, Serializer}; +use std::error::Error; +use std::fmt; + +//TODO regex is probs not most efficient (but do perf tests if you change it) +lazy_static! 
{ + static ref PACKAGE_REGEX: Regex = Regex::new(r"^(.+?)([\^=~<>@].+)?$").unwrap(); +} + +#[derive(Debug, Clone, serde::Serialize)] +pub struct Package { + pub project: String, + pub version: Version, +} + +impl fmt::Display for Package { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}={}", self.project, &self.version) + } +} + +#[derive(Debug, Clone)] +pub struct PackageReq { + pub project: String, + pub constraint: VersionReq, +} + +use regex::Regex; + +impl PackageReq { + pub fn parse(pkgspec: &str) -> Result> { + let input = pkgspec.trim(); + let captures = PACKAGE_REGEX + .captures(input) + .ok_or_else(|| format!("invalid pkgspec: {}", input))?; + + let project = captures.get(1).unwrap().as_str().to_string(); + let str = if let Some(cap) = captures.get(2) { + cap.as_str() + } else { + "*" + }; + let constraint = VersionReq::parse(str)?; + + Ok(Self { + project, + constraint, + }) + } +} + +impl fmt::Display for PackageReq { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.constraint.raw == "*" { + write!(f, "{}", self.project) + } else { + write!(f, "{}{}", self.project, &self.constraint) + } + } +} + +#[derive(Debug, Clone)] +pub struct Installation { + pub path: std::path::PathBuf, + pub pkg: Package, +} + +impl Serialize for Installation { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut state = serializer.serialize_struct("MyType", 3)?; + state.serialize_field("path", &self.path)?; + state.serialize_field("project", &self.pkg.project)?; + state.serialize_field("version", &self.pkg.version)?; + state.end() + } +} + +// These are only used per build at present +#[allow(dead_code)] +pub enum Host { + Darwin, + Linux, +} + +// These are only used per build at present +#[allow(dead_code)] +pub enum Arch { + Arm64, + X86_64, +} + +pub fn host() -> (Host, Arch) { + #[cfg(target_os = "macos")] + let host = Host::Darwin; + #[cfg(target_os = "linux")] + let host = Host::Linux; + 
#[cfg(not(any(target_os = "macos", target_os = "linux")))] + panic!("Unsupported platform"); + + #[cfg(target_arch = "aarch64")] + let arch = Arch::Arm64; + #[cfg(target_arch = "x86_64")] + let arch = Arch::X86_64; + #[cfg(not(any(target_arch = "aarch64", target_arch = "x86_64")))] + panic!("Unsupported architecture"); + + (host, arch) +} + +impl fmt::Display for Host { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let os_str = match self { + Host::Linux => "linux", + Host::Darwin => "darwin", + }; + write!(f, "{}", os_str) + } +} + +impl fmt::Display for Arch { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let os_str = match self { + Arch::Arm64 => "aarch64", + Arch::X86_64 => "x86-64", + }; + write!(f, "{}", os_str) + } +} diff --git a/crates/lib/src/utils.rs b/crates/lib/src/utils.rs new file mode 100644 index 00000000..cccbe028 --- /dev/null +++ b/crates/lib/src/utils.rs @@ -0,0 +1,23 @@ +use std::{error::Error, os::unix::fs::PermissionsExt, path::Path}; + +pub async fn find_program(arg: &str, paths: &Vec) -> Result> { + if arg.starts_with("/") { + return Ok(arg.to_string()); + } else if arg.contains("/") { + return Ok(std::env::current_dir() + .unwrap() + .join(arg) + .to_str() + .unwrap() + .to_string()); + } + for path in paths { + let full_path = Path::new(&path).join(arg); + if let Ok(metadata) = full_path.metadata() { + if full_path.is_file() && (metadata.permissions().mode() & 0o111 != 0) { + return Ok(full_path.to_str().unwrap().to_string()); + } + } + } + Err(format!("cmd not found: {}", arg).into()) +} diff --git a/deno.jsonc b/deno.jsonc deleted file mode 100644 index 95417b6d..00000000 --- a/deno.jsonc +++ /dev/null @@ -1,20 +0,0 @@ -{ - "compilerOptions": { - "allowJs": false, - "strict": true - }, - "importMap": "import-map.json", - "tasks": { - "test": "deno test --allow-net --allow-read --allow-env --allow-run --allow-write --unstable", - "typecheck": "deno check --unstable ./src/app.ts", - "run": "deno run 
--unstable --allow-all ./src/app.ts" - }, - // ignore all files since the current style deviates from deno's default style. - "fmt": { - "files": { - "exclude": [ - "./" - ] - } - } -} diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 00000000..66cd88c4 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,49 @@ +# `pkgx` + +`pkgx` is a 4 MiB, standalone binary that can *run anything*. + +## Quick Start + +```sh +brew install pkgx || curl https://pkgx.sh | sh +``` + +{% hint style='info' %} +[Installation Guide](installing-pkgx.md) +{% endhint %} + +## Using `pkgx` + +|

**Run Anything**<br>Run anything with `pkgx`
|

**Scripting**<br>Write scripts in any language with all the tools you need available from L:1
| +| ----- | ----- | + +# The `pkgx` Ecosystem + +`pkgx` is more than a package runner, it’s a composable primitive that can be +used to build a whole ecosystem of tools. Here’s what we’ve built so far: + +## `dev` + +`dev` uses shellcode and `pkgx` to create “virtual environments” for any +project and any toolset. + +{% hint style='info' %} +[https://github.com/pkgxdev/dev](https://github.com/pkgxdev/dev) +{% endhint %} + +## `pkgm` + +`pkgm` installs `pkgx` packages to `/usr/local`. + +{% hint style='info' %} +[https://github.com/pkgxdev/pkgm](https://github.com/pkgxdev/pkgm) +{% endhint %} + +## `mash` + +`mash` is a package manager for scripts that use `pkgx` to make the whole +open source ecosystem available to them. + +{% hint style='info' %} +[https://github.com/pkgxdev/mash](https://github.com/pkgxdev/mash) +{% endhint %} diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md new file mode 100644 index 00000000..3fc74e72 --- /dev/null +++ b/docs/SUMMARY.md @@ -0,0 +1,31 @@ +‌# Summary​ + +* [Highlights](README.md) + +## `pkgx` + +* [Installing `pkgx`](installing-pkgx.md) +* [Getting Started](pkgx.md) +* [Scripting](scripting.md) + +## The `pkgx` Ecosystem + +* [`dev`](https://github.com/pkgxdev/dev) +* [`pkgm`](https://github.com/pkgxdev/pkgm) +* [`mash`](https://github.com/pkgxdev/mash) +​ +## Appendix + +* [FAQ](faq.md) +* Deeper Dives + * [How `pkgx` Works: A Conceptual Overview](deeper-dives/conceptual-overview.md) + * [Using `pkgx` with a C/C++ Pipeline](deeper-dives/c++.md) +* Packaging + * [Contributing Packages](pkging/pantry.md) + * [API](pkging/pantry-api.md) + +## Linktree + +* [GitHub](https://github.com/pkgxdev/) +* [Discord](https://discord.gg/rNwNUY83XS) +* [𝕏](https://x.com/pkgxdev) diff --git a/docs/deeper-dives/c++.md b/docs/deeper-dives/c++.md new file mode 100644 index 00000000..912a0b3c --- /dev/null +++ b/docs/deeper-dives/c++.md @@ -0,0 +1,21 @@ +# `pkgx` as part of a C/C++ Pipeline + +We have most of the most popular c & c++ libraries pkg’d 
so just add them +to your developer environment. + +```yaml +# pkgx.yaml + +dependencies: + openssl.org: ^3 + github.com/gabime/spdlog: ^1 + llvm.org: ^14 + gnu.org/autoconf: ^2 + cmake.org: ^3 +``` + +Usually this is enough to have tools like Autoconf or CMake find the libraries +sometimes though you may need to provide a helping hand. Examine the devenv +with `pkgx` for path information. + +Then `dev` to activate the environment. diff --git a/docs/deeper-dives/conceptual-overview.md b/docs/deeper-dives/conceptual-overview.md new file mode 100644 index 00000000..67d482fb --- /dev/null +++ b/docs/deeper-dives/conceptual-overview.md @@ -0,0 +1,49 @@ +# How `pkgx` Works: A Conceptual Overview + +Everything `pkgx` does involves initially creating package environment. It then +either runs commands inside those environments or injects those environments +into your running shell. + +A command like: + +```sh +pkgx node start +``` + +Is in fact implicitly: + +```sh +pkgx +node -- node start +``` + +Which more precisely† is in fact: + +```sh +pkgx +nodejs.org -- node start +``` + +> † see [disambiguation](pkgx-cmd.md#disambiguation) + +The `+pkg` syntax creates the package environment that `node start` is then +run within. 
+ +In fact you can see that env if you invoke `pkgx` raw: + +```sh +$ pkgx +node +SSL_CERT_FILE=~/.pkgx/curl.se/ca-certs/v2023.5.30/ssl/cert.pem +PATH=~/.pkgx/unicode.org/v71.1.0/bin:~/.pkgx/unicode.org/v71.1.0/sbin:~/.pkgx/openssl.org/v1.1.1u/bin:~/.pkgx/nodejs.org/v20.5.0/bin +MANPATH=~/.pkgx/unicode.org/v71.1.0/share/man:~/.pkgx/zlib.net/v1.2.13/share/man:~/.pkgx/nodejs.org/v20.5.0/share/man:/usr/share/man +PKG_CONFIG_PATH=~/.pkgx/unicode.org/v71.1.0/lib/pkgconfig:~/.pkgx/openssl.org/v1.1.1u/lib/pkgconfig:~/.pkgx/zlib.net/v1.2.13/lib/pkgconfig +LIBRARY_PATH=~/.pkgx/unicode.org/v71.1.0/lib:~/.pkgx/openssl.org/v1.1.1u/lib:~/.pkgx/zlib.net/v1.2.13/lib +LD_LIBRARY_PATH=~/.pkgx/unicode.org/v71.1.0/lib:~/.pkgx/openssl.org/v1.1.1u/lib:~/.pkgx/zlib.net/v1.2.13/lib +CPATH=~/.pkgx/unicode.org/v71.1.0/include:~/.pkgx/openssl.org/v1.1.1u/include:~/.pkgx/zlib.net/v1.2.13/include:~/.pkgx/nodejs.org/v20.5.0/include +XDG_DATA_DIRS=~/.pkgx/unicode.org/v71.1.0/share:~/.pkgx/zlib.net/v1.2.13/share:~/.pkgx/nodejs.org/v20.5.0/share +DYLD_FALLBACK_LIBRARY_PATH=~/.pkgx/unicode.org/v71.1.0/lib:~/.pkgx/openssl.org/v1.1.1u/lib:~/.pkgx/zlib.net/v1.2.13/lib +``` + +This is a composable primitive, you could imagine `pkgx npm start` to be: + +```sh +env "$(pkgx +npmjs.org)" npm start +``` diff --git a/docs/faq.md b/docs/faq.md new file mode 100644 index 00000000..570f7c50 --- /dev/null +++ b/docs/faq.md @@ -0,0 +1,261 @@ +# FAQ + +## How do I run the latest version of `pkgx`? + +Typically you want to upgrade `pkgx` so either: + +1. `brew upgrade pkgx`; or +2. `curl -LSsf pkgx.sh | sh` + +> [!NOTE] +> Yes. Our installer upgrades `pkgx` too. + + +## How do I run the latest version of a specific pkg? + +Unless otherwise instructed, `pkgx` executes the latest version of packages +that *are downloaded*. The first time you run a package the latest version +will be downloaded, but after that updates will only be fetched if requested +or required by other packages. 
+ +For us neophiliacs we have written a [`mash`] script to upgrade your `pkgx` +packages: + +```sh +pkgx mash pkgx/cache upgrade +``` + + +## How do I “install” pkgs? + +Use [`pkgm`](https://github.com/pkgxdev/pkgm). + + +## What is a package? + +A package is: + +* A plain tarball containing a single project for a single platform and + architecture compiled from that project’s sources +* A bundle of metadata (`package.yml`) from the [pantry] + +Relative to some other packaging systems: + +* No scripts are executed post “install” +* Packages must work from any location (we say our pkgs are “relocatable“) + + +## A package version I need is unavailable + +Sorry about that. Open a [ticket] asking for it and we’ll build it. + +[ticket]: https://github.com/pkgxdev/pantry/issues/new + + +## I need a pkg greater than v20.1.3 but less than v21 + +The commonly used `@` syntax would evaluate to v20.1.x (`@20.1.3`). + +To provide more control we support the +[full semantic version range syntax](https://devhints.io/semver). So for the +given example we would use the caret (`^`): + +```sh +$ pkgx node^20.1.3 --version +v20.1.5 +``` + +Which will match node v20.1.3 up to but not including v21. + + +## What does `+pkg` syntax do? + +`+pkg` syntax is a way to include additional pkgs in your environment. +Typing `pkgx +deno` dumps the environment to the terminal, if you add +additional commands then those commands are invoked in that environment. + + +## How do I list what packages are downloaded? + +We have created a [`mash`] script to list everything `pkgx` has downloaded: + +```sh +pkgx mash pkgx/cache ls +``` + +All packages are encapsulated in individual, versioned folders in +`~/.pkgx` just like `brew` so you can just browse them with a file browser. + + +## A pkg I was expecting is not available + +Open source is ever moving and somebody needs to keep up with it. +You may need to contribute to the [pantry](pantry.md). + + +## Where do you put pkgs? 
+ +Everything goes in `~/.pkgx`. eg. Deno v1.2.3 is an independent POSIX +prefix at `~/.pkgx/deno.land/v1.2.3`, thus the `deno` executable is at +`~/.pkgx/deno.land/v1.2.3/bin/deno`. + +We also create symlinks for majors, minors and latest: + +```sh +$ cd ~/.pkgx/deno.land +$ ls -la +v* -> v1.2.3 +v1 -> v1.2.3 +v1.2 -> v1.2.3 +``` + +Open source is vast and unregulated, thus we use fully-qualified naming scheme +to ensure pkgs can be disambiguated. + + +## Can I bundle `~/.pkgx` into my distributable app? + +Yes! Our pkgs are relocatable. + + +## Will you support other platforms? + +We would love to support all platforms. All that is holding is back from new +platforms is expertise. Will you help? [Let’s talk]. + +[Let’s talk]: https://github.com/pkgxdev/pkgx/issues/607 + + +## How do I add my package to pkgx? + +You need to add to the [pantry](pantry.md). + +{% hint style="info" %} +Eventually we will support describing how to build or obtain distributables +for your package via your repo so you can just add a `pkgx.yaml` and users +can use pkgx to use your package automatically. +{% endhint %} + + +## How should I recommend people use my pkg with pkgx? + +```sh +pkgx your-package --args +``` + +You can also recommend our shell one-liner if you like: + +```sh +sh <(curl https://pkgx.sh) your-package --args +``` + +This is neat because `pkgx` is *not installed* and it runs your package from a +temporary location making this a very low friction way to try out your +package. + +Finally, you can have them try your package out via our Docker image: + +```sh +docker run pkgxdev/pkgx your-package --args +``` + +## How do I uninstall `pkgx`? 
+ +```sh +sudo rm /usr/local/bin/pkg[xm] +rm -rf ~/.pkgx +``` + +Then there are a couple platform specific cache/data directories: + +### macOS + +```sh +rm -rf ~/Library/Caches/pkgx +rm -rf ~/Application\ Support/pkgx +``` + +### Non macOS + +```sh +rm -rf "${XDG_CACHE_HOME:-$HOME/.cache}/pkgx" +rm -rf "${XDG_DATA_HOME:-$HOME/.local/share}"/pkgx +``` + +{% hint style="warning" %} + +### Caveats + +Though not a problem unique to `pkgx` you should note that tools run +with `pkgx` may have polluted your system during use. Check directories like: + +* `~/.local` +* `~/.gem` +* `~/.npm` +* `~/.node` +* etc. + +{% endhint %} + + +## What are the rules for `@` syntax? + +The rules for `@` are complex, but more human. We convert them to the +following [semver] syntax: + +* `@3` → `^3` +* `@3.1` → `~3.1` +* `@3.1.2` → `>=3.1.2<3.1.3` +* `@3.1.2.3` → `>=3.1.2.3<3.1.3.4` +* etc. + +[semver]: https://devhints.io/semver + + +## Where does `pkgx` store files + +* pkgs are downloaded to `~/.pkgx` (`$PKGX_DIR` overrides) +* runtime data like the [pantry] is stored in: + * `~/Library/Caches/pkgx` on Mac + * `${XDG_CACHE_HOME:-$HOME/.cache}/pkgx` on *nix + * `%LOCALAPPDATA%/pkgx` on Windows + + +## What happens if two packages provide the same named program? + +We error with a method to disambiguation, eg: + +```sh +$ yarn +× multiple projects provide: yarn +│ pls be more specific: +│ +│ pkgx +classic.yarnpkg.com --internal.use +yarn +│ pkgx +yarnpkg.com --internal.use +yarn +│ +╰─➤ https://docs.pkgx.sh/help/ambiguous-pkgspec +``` + + +## How do I see a man page for a pkgx pkg? + +`man foo` won’t work since pkgx pkgs are not “installed”. Thus you have to +first create an environment that contains that package before invoking `man`: + +```sh +pkgx +foo man foo +``` + +This uses pkgx’s `man` tool. 
To use the system `man`: + +```sh +pkgx +foo -- man foo +``` + + +## I have another question + +[Support](support.md) + + +[`mash`](https://mash/pkgx.sh) diff --git a/docs/installing-pkgx.md b/docs/installing-pkgx.md new file mode 100644 index 00000000..19141979 --- /dev/null +++ b/docs/installing-pkgx.md @@ -0,0 +1,107 @@ +There are quite a few ways to install `pkgx` but this is our recommendation: + +```sh +brew install pkgx || curl https://pkgx.sh | sh +``` + +# Complete Installation Method Listing + +## Homebrew + +```sh +brew install pkgx +``` + +## cURL Installer + +Our installer both installs and upgrades `pkgx`: + +```sh +curl -fsS https://pkgx.sh | sh +``` + +{% hint style='info' %} +Wanna read that script before you run it? +[github.com/pkgxdev/setup/installer.sh][installer] +{% endhint %} + +## Download Manually + +`pkgx` is a standalone binary, so you can just download it directly: + +```sh +# download it to `./pkgx` +curl -o ./pkgx \ + --compressed --fail --proto '=https' \ + https://pkgx.sh/$(uname)/$(uname -m) + +# install it to `/usr/local/bin/pkgx` +sudo install -m 755 pkgx /usr/local/bin +``` + +For your convenience we provide a `.tgz` so you can one-liner that: + +```sh +curl -Ssf https://pkgx.sh/$(uname)/$(uname -m).tgz | sudo tar xz -C /usr/local/bin +``` + +You can also download straight from [GitHub Releases] (you’ll likely need +to unquarantine the downloaded binary). + +## Cargo + +```sh +cargo install pkgx +``` + +## Docker + +```sh +docker run -it pkgxdev/pkgx + +# or, eg. 
+docker run pkgxdev/pkgx +python@3.10 node@22 start +``` + +Or in your `Dockerfile`: + +```Dockerfile +FROM pkgxdev/pkgx +RUN pkgx +node@16 npm start +``` + +{% hint style='info' %} +[hub.docker.com/r/pkgxdev/pkgx](https://hub.docker.com/r/pkgxdev/pkgx) +{% endhint %} + +## GitHub Actions + +```yaml +- uses: pkgxdev/setup@v2 +``` + +{% hint style='info' %} +[github.com/pkgxdev/setup](https://github.com/pkgxdev/setup) +{% endhint %} + +{% hint style='success' %} +`pkgx` makes it easy to consistently use the GNU or BSD versions of core +utilities across different platforms—handy for cross-platform CI/CD +scripts. eg. `pkgx +gnu.org/coreutils ls` +{% endhint %} + +## Arch Linux + +If you're on Arch Linux (or any of it's derivatives) you can also use the +[`pkgx` AUR] (latest released version) or [`pkgx-git` AUR] (latest +development version, might not be stable). + +{% hint style='warning' %} +The AURs are community-maintained and might be out-of-date. Use them with +caution. +{% endhint %} + +[GitHub Releases]: https://github.com/pkgxdev/pkgx/releases +[installer]: https://github.com/pkgxdev/setup/blob/main/installer.sh +[`pkgx` AUR]: https://aur.archlinux.org/packages/pkgx +[`pkgx-git` AUR]: https://aur.archlinux.org/packages/pkgx-git diff --git a/docs/pkging/pantry-api.md b/docs/pkging/pantry-api.md new file mode 100644 index 00000000..147e2373 --- /dev/null +++ b/docs/pkging/pantry-api.md @@ -0,0 +1,37 @@ +# The `pkgx` API + +Visit [dist.pkgx.dev](https://dist.pkgx.dev) for an HTTP index. + +* sources (mirror) + * `dist.pkgx.dev//versions.txt` + * `dist.pkgx.dev//v.tar.gz` + * `dist.pkgx.dev//v.sha256sum` +* bottles + * `dist.pkgx.dev////versions.txt` + * `dist.pkgx.dev////v.tar.gz` + * `dist.pkgx.dev////v.tar.xz` + * `dist.pkgx.dev////v.asc` + * `dist.pkgx.dev////v.sha256sum` + +`versions.txt` files are newline separated, sorted lists of available versions +for each type of distributable. 
+ +{% hint style="warning" %} +`dist.pkgx.dev//versions.txt` and the bottle `versions.txt` may not be +the same. Always check the more specific `versions.txt`. +{% endhint %} + + +## The Pantry + +The [pantry] is our API for pkg metadata. + + +## libpkgx + +Install and run `pkgx` packages from your apps. + +* [Rust](https://github.com/pkgxdev/pkgx) +* [TypeScript](https://github.com/pkgxdev/libpkgx) + +[pantry]: https://github.com/pkgxdev/pantry diff --git a/docs/pkging/pantry.md b/docs/pkging/pantry.md new file mode 100644 index 00000000..716db868 --- /dev/null +++ b/docs/pkging/pantry.md @@ -0,0 +1,36 @@ +# Packaging + +There’s millions of open source projects and `pkgx` needs your help to package +them up! + +{% hint style="success" %} +Visit [github.com/pkgxdev/pantry] for the full documentation. +{% endhint %} + +{% hint style="info" %} +Curious about a specific pkg? `pkgx bk edit deno` will open deno’s +`package.yml` in your editor. +{% endhint %} + + +# Packagers Who Care + +You trust us to just work and make your workflows happen. +We take this job seriously and we go the extra mile on a per-package basis, +for example: + +* Our `git` ignores `.DS_Store` files by default +* Our RubyGems defaults to user-installs and ensures gems are in `PATH` +* Our `python` comes unversioned so the huge numbers of scripts that invoke + `/usr/bin/env python` actually work +* Our `pyenv` automatically installs the python versions it needs + +Additionally, we insist our pkgs are relocatable, which is why we can install +in your home directory (but this also means you could pick up the whole +`~/.pkgx` directory and bundle it with your app.) We also begin packaging +new releases almost immediately as soon as they go live using various +automations. + +We care about your developer experience, *not ours*. 
+ +[github.com/pkgxdev/pantry]: https://github.com/pkgxdev/pantry diff --git a/docs/pkgx.md b/docs/pkgx.md new file mode 100644 index 00000000..934675db --- /dev/null +++ b/docs/pkgx.md @@ -0,0 +1,191 @@ +# Using `pkgx` + +With `pkgx` it couldn’t be simpler to run anything from the Open Source +ecosystem: + +```sh +$ pkgx openai --version +openai 1.59.6 +``` + +## Run Any Version + +```sh +$ pkgx postgres@12 --version +postgres (PostgreSQL) 12.14 +``` + +{% hint style="info" %} + +### SemVer + +Generally you probably want `@` syntax, but if you need more specificity we +fully support [SemVer]: + +```sh +$ pkgx postgres^12 --version +postgres (PostgreSQL) 12.14 + +$ pkgx "postgres>=12<14" --version +postgres (PostgreSQL) 13.11 + +$ pkgx deno=1.35.3 --version +deno 1.35.3 +``` + +{% endhint %} + +### Running the Latest Version + +`pkgx foo` runs the latest “foo” that **is installed**. + +If you want to ensure the latest version of “foo” is installed, use +`pkgx mash pkgx/upgrade foo`. + + +## Adding Additional Packages to the Execution Environment + +It can be useful to run a program with additional packages in the environment. + +```sh +pkgx +openssl cargo build +``` + +Here `+pkg` syntax added OpenSSL to Cargo’s environment. Thus the build will +see the OpenSSL headers and libraries. + + +## Disambiguation + +In some cases `pkgx foo` may be ambiguous because multiple packages provide +`foo`. + +In such cases `pkgx` will error and ask you be more specific by using +fully-qualified-names: + +```sh +$ pkgx yarn --version +error: multiple projects provide `yarn`. please be more specific: + + pkgx +classic.yarnpkg.com yarn --version + pkgx +yarnpkg.com yarn --version +``` + +In general it's a good idea to specify fully qualified names in +scripts, etc. since you want these to work forever. + + +## Running System Commands + +It can be useful to run system commands with a package environment injected. 
+To do this either specify the full path of the system executable: + +```sh +pkgx +llvm.org /usr/bin/make +``` + +Or separate your commands with `--`: + +```sh +pkgx +llvm.org -- make # finds `make` in PATH, failing if none found +``` + +{% hint style="warning" %} + +If you only specified `make` rather than `/usr/bin/make` or separating with +`-- make` then `pkgx` would install GNU make for you and use that. + +{% endhint %} + + +## Dumping the Environment + +If you don’t specify anything to run, `pkgx` will install any `+pkg`s and then +dump the environment: + +```sh +$ pkgx +gum +PATH="$HOME/.pkgx/charm.sh/gum/v0.14.5/bin${PATH:+:$PATH}" +``` + +This can be useful in scripts or for adding tools to your shell: + +```sh +$ eval "$(pkgx +gum)" +$ gum --version +gum version 0.14.5 +``` + +For this mode we can also output JSON: `pkgx +gum --json`. + + +## Quietening Output + +```sh +$ pkgx --quiet gum format 'download progress is still shown' +# ^^ supresses resolving/syncing etc. messages but not download progress info + +```sh +pkgx --silent gum format 'no output at all' +# ^^ silences everything, even errors +``` + +Note that this only effects `pkgx` *not the tools you run with `pkgx`*. + +## Other Common Needs + +`pkgx` is not a package manager. Thus the command itself doesn’t typically +offer such operations you may expect, however the way `pkgx` works is simple +and standardized so we offer some `mash` scripts to help. + +Longer term we will make a tool `pkgq` to help with these operations. + +### Upgrading Packages + +`pkgx foo` executes the latest version of `foo` that is *downloaded*. To +ensure you have (any) newer versions installed use this command: + +```sh +$ pkgx mash pkgx/cache upgrade +updating: /Users/mxcl/.pkgx/python.org/v3.11.11 +# snip… +``` + +### Pruning Older Versions of Packages + +The `pkgx` download cache can get large over time. 
To prune older versions: + +```sh +$ pkgx mash pkgx/cache prune +pruning: ~/.pkgx/deno.land/v1.39.4 +pruning: ~/.pkgx/deno.land/v1.46.3 +# snip… +``` + +This may delete versions that you use—if so—this is fine. `pkgx` will just +reinstall them next time you need them. + +### Listing Available Versions for a Package + +ie. what versions *could be* run by `pkgx`: + +```sh +$ pkgx mash pkgx/pantry-inventory git +2.38.1 +2.39.0 +# snip… +``` + +### Listing What is Downloaded + +```sh +$ mash pkgx/cache ls + + Parent Directory │Version + ────────────────────────────────┼────────── + perl.org │5.40.0 + x.org/xcb │1.17.0 + # snip… +``` + +[SemVer]: https://devhints.io/semver diff --git a/docs/scripting.md b/docs/scripting.md new file mode 100644 index 00000000..babf11d6 --- /dev/null +++ b/docs/scripting.md @@ -0,0 +1,125 @@ +# `pkgx` & Scripting + +You can use `pkgx` as the [shebang] for your scripts: + +```python +#!/usr/bin/env -S pkgx python@3.9 + +import sys + +print(sys.version) +``` + +```sh +$ chmod +x ./my-script.py +$ ./my-script.py +3.9.17 +``` + +{% hint style="info" %} +Using `env` to invoke `pkgx` is typical for tools that have no POSIX location. + +The `-S` parameter is required to pass multiple arguments. +{% endhint %} + + +## Including Additional pkgs + +Scripts are the glue that allows open source to be composed into powerful new +tools. With our `+pkg` syntax you make anything in open source available to +your script. 
+ +```sh +#!/usr/bin/env -S pkgx +openssl deno run + +Deno.dlopen("libssl.dylib") +``` + +{% hint style="info" %} +Robustness requires precisely specifying your environment: + +```sh +#!/usr/bin/env -S pkgx bash>=4 + +source <(pkgx dev --shellcode) +# ^^ bash >=4 is required for this syntax, and eg macOS only comes with bash 3 +``` + +{% endhint %} + + +## Scripting for Various Languages & Their Dependencies + +### Python + +Use `uv` to import PyPi dependencies: + +```python +#!/usr/bin/env -S pkgx +python@3.11 uv run --script + +# /// script +# dependencies = [ +# "requests<3", +# "rich", +# ] +# /// + +import requests +from rich.pretty import pprint + +resp = requests.get("https://peps.python.org/api/peps.json") +data = resp.json() +pprint([(k, v["title"]) for k, v in data.items()][:10]) +``` + +### Ruby + +Use [Bundler](https://bundler.io): + +```ruby +#!/usr/bin/env -S pkgx ruby@3 + +require 'bundler/inline' + +gemfile do + source 'https://rubygems.org' + gem 'ruby-macho', '~> 3' +end +``` + +### JavaScript & TypeScript + +Use [Deno](https://deno.land): + +```javascript +#!/usr/bin/env -S pkgx deno@2 run + +import fs from "npm:fs"; +``` + +### Rust, Go, C, C++, etc + +Use [Scriptisto]: + +```sh +#!/usr/bin/env -S pkgx +cargo scriptisto + +# snip… type `pkgx scriptisto new cargo` for the rest. +``` + + +## Mash + +We think `pkgx` scripting is so powerful that we made a whole package +manager to show it off. + +> [https://github.com/pkgxdev/mash](https://github.com/pkgxdev/mash) + + +## Other Examples + +We make use of `pkgx` scripting all over our repositories. Check them out! 
+ + +[shebang]: https://en.wikipedia.org/wiki/Shebang_(Unix) +[Scriptisto]: https://github.com/igor-petruk/scriptisto diff --git a/import-map.json b/import-map.json deleted file mode 100644 index a378e846..00000000 --- a/import-map.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "imports": { - "path": "./src/vendor/Path.ts", - "types": "./src/types.ts", - "hooks": "./src/hooks/index.ts", - "hooks/": "./src/hooks/", - "prefab": "./src/prefab/index.ts", - "prefab/": "./src/prefab/", - "deno/": "https://deno.land/std@0.156.0/", - "semver": "./src/utils/semver.ts", - "utils": "./src/utils/index.ts", - "utils/": "./src/utils/", - "is_what": "https://deno.land/x/is_what@v4.1.7/src/index.ts", - "cliffy/": "https://deno.land/x/cliffy@v0.25.2/", - "s3": "https://deno.land/x/s3@0.5.0/mod.ts", - "outdent": "https://deno.land/x/outdent@v0.8.0/mod.ts", - "rimbu/": "https://deno.land/x/rimbu@0.12.3/", - "retried": "https://deno.land/x/retried@1.0.1/mod.ts" - } -} \ No newline at end of file diff --git a/scripts/find-orphaned-docker-hub-images.py b/scripts/find-orphaned-docker-hub-images.py new file mode 100755 index 00000000..ee997c71 --- /dev/null +++ b/scripts/find-orphaned-docker-hub-images.py @@ -0,0 +1,58 @@ +#!/usr/bin/env -S pkgx +python@3.11 uv run --script +# /// script +# dependencies = [ +# "requests" +# ] +# /// + +import requests +from collections import defaultdict +from pprint import pprint + +def get_all_tags_and_digests(image_name): + """ + Fetches all tags and their digests for a Docker Hub image, handling pagination. 
+ """ + base_url = f"https://registry.hub.docker.com/v2/repositories/{image_name}/tags" + tags_and_digests = [] + url = base_url + + while url: + response = requests.get(url) + if response.status_code != 200: + print(f"Failed to fetch data: {response.status_code}, {response.text}") + return [] + + data = response.json() + for result in data['results']: + tag = result['name'] + digest = result['digest'] + if digest: + tags_and_digests.append((tag, digest)) + + url = data.get('next') # Get the next page URL + + return tags_and_digests + +def find_orphaned_tags(tags_and_digests): + """Identifies tags with unique digests.""" + digest_count = defaultdict(int) + for _, digest in tags_and_digests: + digest_count[digest] += 1 + + orphaned_tags = [tag for tag, digest in tags_and_digests if digest_count[digest] == 1] + return orphaned_tags + +if __name__ == "__main__": + image_name = "pkgxdev/pkgx" + tags_and_digests = get_all_tags_and_digests(image_name) + if tags_and_digests: + orphaned_tags = find_orphaned_tags(tags_and_digests) + if len(orphaned_tags) > 0: + print("orphans:") + for tag in orphaned_tags: + print(tag) + else: + print("no orphans, here’s the tags instead:") + tags = [tag for tag, _ in tags_and_digests if tag.startswith('v')] + pprint(sorted(tags)) diff --git a/scripts/install.ts b/scripts/install.ts deleted file mode 100755 index 71ce92e6..00000000 --- a/scripts/install.ts +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env -S tea -E - -// returns all pantry entries as `[{ name, path }]` - -/*--- -args: - - deno - - run - - --allow-env - - --allow-read - - --allow-write - - --import-map={{ srcroot }}/import-map.json - - --allow-net - - --allow-run - - --unstable ----*/ - -import { link, install, resolve } from "prefab" -import { useFlags } from "hooks" -import { pkg } from "utils" - -useFlags() - -const force = !!Deno.env.get("FORCE") - -const rqs = Deno.args.map(project => { - const match = project.match(/projects\/(.+)\/package.yml/) - return match ? 
match[1] : project -}).map(pkg.parse) - -const { pending, installed } = await resolve(rqs) - -if (!force && installed.length) { - console.info({'already-installed': installed}) -} - -const pkgs = force ? [...installed.map(x=>x.pkg), ...pending] : pending - -// resolve and install precise versions that are available in available inventories -for (const pkg of pkgs) { - const installation = await install(pkg) - await link(installation) -} diff --git a/scripts/publish-release.sh b/scripts/publish-release.sh new file mode 100755 index 00000000..f0b1a97f --- /dev/null +++ b/scripts/publish-release.sh @@ -0,0 +1,66 @@ +#!/usr/bin/env -S pkgx +git +gh +jq +gum bash -eo pipefail + +if ! git diff-index --quiet HEAD --; then + echo "error: dirty working tree" >&2 + exit 1 +fi + +if [ "$(git rev-parse --abbrev-ref HEAD)" != main ]; then + echo "error: requires main branch" >&2 + exit 1 +fi + +v_new=$(cargo metadata --format-version 1 --no-deps | jq -r '.packages[] | select(.name == "pkgx") | .version') +v_latest=$(gh release view --json tagName --jq .tagName) + +case "$((gh release view v$v_new --json isDraft | jq .isDraft) 2>&1)" in +release\ not\ found) + gum confirm "prepare draft release for $v_new?" || exit 1 + + gh release create \ + v$v_new \ + --draft=true \ + --generate-notes \ + --notes-start-tag=$v_latest \ + --title=v$v_new + + ;; +true) + gum format "> existing $v_new draft found, using that" + echo #spacer + ;; +false) + gum format "$v_new already published! edit \`./crates/cli/Cargo.toml\`" + exit 1;; +*) + echo 'unexpected exit result' >&2 + exit 2;; +esac + +git push origin main + +gh workflow run cd.yml --raw-field version="$v_new" +# ^^ infuriatingly does not tell us the ID of the run + +gum spin --title 'sleeping 5s because GitHub API is slow' -- sleep 5 + +run_id=$(gh run list --json databaseId --workflow=cd.yml | jq '.[0].databaseId') + +if ! gh run watch --exit-status $run_id; then + foo=$? 
+ gum format -- "> gh run view --web $run_id" + exit $foo +fi + +gh release view v$v_new + +gum confirm "draft prepared, release $v_new?" || exit 1 + +gh release edit \ + v$v_new \ + --verify-tag \ + --latest \ + --draft=false \ + --discussion-category=Announcements + +gh release view v$v_new --web diff --git a/scripts/repair.ts b/scripts/repair.ts deleted file mode 100755 index 5b17d736..00000000 --- a/scripts/repair.ts +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env -S tea -E - -/* ---- -args: - - deno - - run - - --allow-net - - --allow-env - - --allow-read={{ tea.prefix }} - - --allow-write={{ tea.prefix }} - - --allow-run # uses `/bin/ln` - - --import-map={{ srcroot }}/import-map.json ---- -*/ - -import { useCellar, useFlags } from "hooks" -import { Installation } from "types" -import { link } from "prefab" -import * as semver from "semver" - -if (import.meta.main) { - useFlags() - - for (const project of Deno.args) { - await repairLinks(project) - } -} - -export default async function repairLinks(project: string) { - const cellar = useCellar() - const installed = await cellar.ls(project) - const shelf = cellar.shelf(project) - - for await (const [path, {isSymlink}] of shelf.ls()) { - //FIXME shouldn't delete things we may not have created - if (isSymlink) path.rm() - } - - const majors: {[key: number]: Installation[]} = {} - const minors: {[key: number]: Installation[]} = {} - - for (const installation of installed) { - const {pkg: {version: v}} = installation - majors[v.major] ??= [] - majors[v.major].push(installation) - minors[v.minor] ??= [] - minors[v.minor].push(installation) - } - - for (const arr of [minors, majors]) { - for (const installations of Object.values(arr)) { - const version = installations - .map(({pkg: {version}}) => version) - .sort(semver.compare) - .slice(-1)[0] // safe bang since we have no empty arrays in above logic - - link({project, version}) //TODO link lvl2 is possible here - } - } -} diff --git a/scripts/uninstall.ts 
b/scripts/uninstall.ts deleted file mode 100755 index a435be27..00000000 --- a/scripts/uninstall.ts +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env -S tea -E - -/* ---- -args: - - deno - - run - - --allow-net - - --allow-read - - --allow-write={{ tea.prefix }} - - --import-map={{ srcroot }}/import-map.json ---- -*/ - -import { pkg } from "utils" -import { useCellar } from "hooks" -import repair from "./repair.ts" - -const pkgs = Deno.args.map(pkg.parse); console.verbose({ received: pkgs }) -const { resolve } = useCellar() - -for (const pkg of pkgs) { - console.info({ uninstalling: pkg }) - const installation = await resolve(pkg) - installation.path.rm({ recursive: true }) - await repair(pkg.project) //FIXME this is overkill, be precise -} diff --git a/src/app.X.ts b/src/app.X.ts deleted file mode 100644 index cc171a0e..00000000 --- a/src/app.X.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Args } from "hooks/useFlags.ts" -import { useCellar } from "hooks" -import { handler, prepare_exec_cmd, which } from "./app.exec.ts" -import { panic, run, TeaError, UsageError } from "utils" - -export default async function X(opts: Args) { - const arg0 = opts.args[0] - if (!arg0) throw new UsageError() - - const found = await which(arg0) - - if (!found) { - throw new TeaError('not-found: tea -X: arg0', {arg0}) - } - - opts.mode = 'exec' - opts.pkgs.push({ ...found }) - - const { env, pkgs } = await prepare_exec_cmd(opts.pkgs, {env: opts.env ?? false}) - const pkg = pkgs.find(x => x.project == found!.project) ?? 
panic() - const install = await useCellar().resolve(pkg) - const cmd = opts.args - cmd[0] = install.path.join('bin', arg0).string // force full path to avoid infinite recursion - try { - await run({ cmd, env }) - } catch (err) { - handler(err) - } -} diff --git a/src/app.dump.ts b/src/app.dump.ts deleted file mode 100644 index 1dd4d8f9..00000000 --- a/src/app.dump.ts +++ /dev/null @@ -1,212 +0,0 @@ -import { usePantry, useCellar, useFlags, useVirtualEnv } from "hooks" -import useShellEnv, { EnvKeys } from "hooks/useShellEnv.ts" -import { Installation, PackageRequirement, PackageSpecification } from "types" -import { flatmap, print, undent, pkg } from "utils" -import { isPlainObject, isFullArray } from "is_what" -import { basename } from "deno/path/mod.ts" -import { Args } from "hooks/useFlags.ts" -import { hydrate } from "prefab" - -//TODO should read from the shell configuration files to get originals properly -//TODO don’t wait on each print, instead chain the promises to be more time-efficient - -export default async function dump(args: Args) { - const { magic } = useFlags() - const blueprint = await (() => { - if (args.env || args.env === undefined && magic) { - // for dump mode it is not an error for there to be no virtual-env - return useVirtualEnv().swallow("not-found:srcroot") - } - })() - - const shell = flatmap(Deno.env.get("SHELL"), basename) - const [setEnv, unsetEnv]= (() => { - switch (shell) { - case "fish": - return [ - (name: string, val: string) => `set -gx ${name} '${val}';`, - (name: string) => `set -e ${name};` - ] - default: - return [ - (name: string, val: string) => `export ${name}='${val}'`, - (name: string) => `unset ${name}` - ] - }})() - - // represents the dehydrated initial env - //FIXME storing in the env is kinda gross - const defaults = (() => { - const json = flatmap(Deno.env.get("TEA_REWIND"), x => JSON.parse(x), {rescue: true}) - if (isPlainObject(json)) { - for (const [key, value] of Object.entries(json)) { - if 
(!isFullArray(value)) { - delete json[key] - } - } - return json as Record - } else { - return EnvKeys.reduce((obj, key) => { - const value = Deno.env.get(key) - if (value) { - obj[key] = value.split(":") - } else { - delete obj[key] - } - return obj - }, {} as Record) - } - })() - - const {installations, pending} = await (async () => { - const companions: PackageRequirement[] = await (() => { - if (!magic) return [] - const pantry = usePantry() - return Promise.all( - args.pkgs.map(pkg => pantry.getCompanions(pkg)) - ).then(x => x.flatMap(x => x)) - })() - - const cellar = useCellar() - const installations: Installation[] = [] - const pending: PackageSpecification[] = [] - const pkgs = await hydrate([...args.pkgs, ...companions, ...blueprint?.pkgs ?? []]) - const dry = new Set(pkgs.dry.map(x => x.project)) - - for (const rq of pkgs.pkgs) { - const installation = await cellar.has(rq) - if (installation) { - installations.push(installation) - } else if (dry.has(rq.project)) { - /// we only auto-install explicitly requested packages - pending.push(rq) - } - } - - return {installations, pending} - })() - - if (!blueprint && installations.length == 0 && pending.length == 0) { - - // k we are in a non dev dir, unset everything - // well, only if TEA_REWIND is set since otherwise we already did that - - if (Deno.env.get("TEA_REWIND")) { - if (Deno.env.get("TEA_REWIND")!.includes("TEA_PREFIX")) { - // we do this manually because we also set it for stuff executed through tea - // which means when developing tea this otherwise doesn’t get unset lol - await print(unsetEnv("TEA_PREFIX")) - } - - await print(unsetEnv("TEA_REWIND")) - - for (const key of [...EnvKeys, 'SRCROOT', 'VERSION']) { - if (defaults[key]) { - if (neq(defaults[key], Deno.env.get(key)?.split(":"))) { - await print(setEnv(key, defaults[key].join(":"))) - } - } else if (Deno.env.get(key) !== undefined) { - await print(unsetEnv(key)) - } - } - } - - return - } - - if (blueprint?.srcroot) { - await 
print(setEnv("SRCROOT", blueprint.srcroot.string)) - } else if (Deno.env.get("SRCROOT")) { - await print(unsetEnv("SRCROOT")) - } - if (blueprint?.version) { - await print(setEnv("VERSION", blueprint.version.toString())) - } else if (Deno.env.get("VERSION")) { - await print(unsetEnv("VERSION")) - } - - const env = await useShellEnv({installations, pending, pristine: true}) - - //TODO if PATH is the same as the current PATH maybe don't do it - // though that makes the behavior of --env --dump very specific - - for (const key of EnvKeys) { - const value = env[key] - if (value) { - if (key == 'PATH' && defaults[key]?.length) { - value.push(...defaults[key]) - } - if (value.length) { - await print(setEnv(key, value.join(":"))) - } - } else if (Deno.env.get(key) !== undefined) { - if (!defaults[key]?.chuzzle()) { - await print(unsetEnv(key)) - } else { - await print(setEnv(key, defaults[key].join(":"))) - } - } - } - - // TODO: implement command-not-found for csh/sh/etc - if (pending.length) { - const pantry = usePantry() - let rv = undefined - switch (shell) { - case 'zsh': - rv = undent` - command_not_found_handler() { - case $0 in - - ` - for (const uninstalled of pending) { - const exes = await pantry.getProvides(uninstalled) - if (exes.length) { - const cmds = exes.join("|") - rv += ` ${cmds}) tea '+${pkg.str(uninstalled)}' "$@";;\n` - } - } - rv += ` *)\n printf 'zsh: command not found: %s\\n' "$1";;\n esac\n}` - break - case 'fish': - rv = undent` - function fish_command_not_found - switch "$argv[1]"\n\n`; - for (const uninstalled of pending) { - const exes = await pantry.getProvides(uninstalled) - if (exes.length) { - const cmds = exes.join(" ") - rv += ` case ${cmds}; tea '+${pkg.str(uninstalled)}' $argv\n` - } - } - rv += " end\nend" - } - - if (rv) await print(rv) - } else { - //TODO unless there's a default! 
- switch (shell) { - case 'zsh': - await print("if typeset -f command_not_found_handler >/dev/null; then unset -f command_not_found_handler; fi") - break - case 'bash': - await print("if typeset -f command_not_found_handle >/dev/null; then unset -f command_not_found_handle; fi") - break - case 'fish': - await print("if functions --query fish_command_not_found; functions --erase fish_command_not_found; end") - break - } - } - - await print(setEnv("TEA_REWIND", JSON.stringify(defaults))) -} - -function neq(a: string[] | undefined, b: string[] | undefined) { - if (!a && !b) return false - if (!a || !b) return true - if (a.length != b.length) return true - for (let i = 0; i < a.length; i++) { - if (a[i] != b[i]) return true - } - return false -} diff --git a/src/app.err-handler.ts b/src/app.err-handler.ts deleted file mode 100644 index 73d0ffc3..00000000 --- a/src/app.err-handler.ts +++ /dev/null @@ -1,83 +0,0 @@ -import * as logger from "./hooks/useLogger.ts" -import { usePantry, useFlags, usePrefix } from "hooks" -import { TeaError, undent, UsageError } from "utils" -import help from "./app.help.ts" -import Path from "path" - -async function suggestions(err: TeaError) { - switch (err.id) { - case 'not-found: pantry: package.yml': { - const suggestion = await usePantry().getClosestPackageSuggestion(err.ctx.project) - return suggestion - ? `did you mean \`${logger.teal(suggestion)}\`? 
otherwise… see you on GitHub?` - : undefined - }} -} - -export default async function(err: Error) { - const { silent, debug } = useFlags() - - if (silent) { - Deno.exit(1) - } else if (err instanceof UsageError) { - await help() - Deno.exit(1) - } else if (err instanceof TeaError) { - const suggestion = await suggestions(err).swallow() - console.error(`${logger.red('error')}: ${err.title()} (${logger.gray(err.code())})`) - if (suggestion) { - console.error() - console.error(suggestion) - console.error() - } - console.error(msg(err)) - if (debug) console.error(err.ctx) - } else { - const { stack, message } = err ?? {} - - const title = encodeURIComponent(`panic:${message ?? "null"}`).replaceAll('%20', '+') - const url = `https://github.com/teaxyz/cli/issues/new?title=${title}` - - console.error() - console.error(`${logger.red("panic")}:`, "split tea. we’re sorry and we’ll fix it… but you have to report the bug!") - console.error() - console.error(" ", logger.gray(url)) - console.error() - console.error("----------------------------------------------------->> attachment begin") - console.error(logger.gray(stack ?? 
"null")) - console.debug("------------------------------------------------------------------------") - console.debug({ err }) - console.error("<<------------------------------------------------------- attachment end") - - // this way: deno will show the backtrace - if (err instanceof Error == false) throw err - } -} - -/// this is here because error.ts cannot import higher level modules -/// like hooks without creating a cyclic dependency -function msg(err: TeaError): string { - let msg = err.message - const { ctx } = err - - switch (err.code()) { - case 'spilt-tea-102': - if (ctx.filename instanceof Path && !ctx.filename.in(usePrefix())) { - // this yaml is being worked on by the user - msg = `${ctx.filename.prettyLocalString()}: ${ctx.underr.message}` - } else { - const attachment = `${ctx.project}: ${ctx.underr.message}` - msg = undent` - pantry entry invalid. please report this bug! - - https://github.com/teaxyz/pantry.core/issues/new - - ----------------------------------------------------->> attachment begin - ${logger.gray(attachment)} - <<------------------------------------------------------- attachment end - ` - } - } - - return msg -} \ No newline at end of file diff --git a/src/app.exec.ts b/src/app.exec.ts deleted file mode 100644 index 8c3b1757..00000000 --- a/src/app.exec.ts +++ /dev/null @@ -1,425 +0,0 @@ -import { useShellEnv, useExecutableMarkdown, useVirtualEnv, useDownload, usePackageYAMLFrontMatter, useRequirementsFile, usePantry } from "hooks" -import { run, undent, pkg as pkgutils, UsageError, panic, TeaError, RunError, async_flatmap } from "utils" -import { hydrate, resolve, install as base_install, link } from "prefab" -import { Installation, PackageSpecification } from "types" -import { VirtualEnv } from "./hooks/useVirtualEnv.ts" -import useFlags, { Args } from "hooks/useFlags.ts" -import { flatten } from "hooks/useShellEnv.ts" -import { gray, Logger, red } from "hooks/useLogger.ts" -import * as semver from "semver" -import Path from 
"path" -import { Interpreter } from "hooks/usePantry.ts"; - -//TODO specifying explicit pkgs or versions on the command line should replace anything deeper -// RATIONALE: so you can override if you are testing locally - - -export default async function(opts: Args) { - const { verbosity, ...flags } = useFlags() - const assessment = assess(opts.args) - - if (assessment.type == 'repl') { - if (!opts.pkgs.length && flags.sync) Deno.exit(0) // `tea -S` is not an error or a repl - if (!opts.pkgs.length && verbosity > 0) Deno.exit(0) // `tea -v` is not an error or a repl - if (!opts.pkgs.length) throw new UsageError() - - const { installed, env } = await install(opts.pkgs) - await repl(installed, env) - - } else try { - const refinement = await refine(assessment) - await exec(refinement, opts.pkgs, {env: opts.env ?? false}) - } catch (err) { - handler(err) - } -} - -// deno-lint-ignore no-explicit-any -export function handler(err: any) { - const { debug } = useFlags() - - if (err instanceof TeaError) { - throw err - } else if (debug) { - console.error(err) - } else if (err instanceof Deno.errors.NotFound) { - // deno-lint-ignore no-explicit-any - console.error("tea: command not found:", (err as any).cmd) - //NOTE ^^ we add cmd into the error ourselves in utils/ru - } else if (err instanceof RunError == false) { - const decapitalize = ([first, ...rest]: string) => first.toLowerCase() + rest.join("") - console.error(`${red("error")}:`, decapitalize(err.message)) - } - const code = err?.code ?? 1 - Deno.exit(isNumber(code) ? 
code : 1) -} - -async function refine(ass: RV2): Promise { - const { magic } = useFlags() - - switch (ass.type) { - case 'url': { - const path = await useDownload().download({ src: ass.url }) - ass = assess_file(path.chmod(0o700), ass.args) - } break - - case 'dir': - //FIXME `README.md` is not the only spelling we accept - ass = { type: 'md', path: ass.path.join("README.md"), args: ass.args } - break - - case 'cmd': { - if (!magic) break // without magic you cannot specify exe/md targets without a path parameter - const blueprint = await useVirtualEnv().swallow(/^not-found/) - if (!blueprint) break // without a blueprint just passthru - const filename = blueprint.file - const sh = await useExecutableMarkdown({ filename }).findScript(ass.args[0]).swallow(/^not-found/) - if (!sh) break // no exe/md target called this so just passthru - ass = { type: 'md', path: filename, sh, blueprint, name: ass.args[0], args: ass.args.slice(1) } - }} - - return ass -} - -async function exec(ass: RV1, pkgs: PackageSpecification[], opts: {env: boolean}) { - const { debug, magic } = useFlags() - - switch (ass.type) { - case 'md': { - //TODO we should probably infer magic as meaning: find the v-env and add it - - const blueprint = opts.env - ? ass.blueprint ?? await useVirtualEnv() - : magic - ? ass.blueprint ?? await useVirtualEnv({ cwd: ass.path.parent() }).swallow(/not-found/) - : undefined - // ^^ jeez we’ve overcomplicated this shit - - const name = ass.name ?? 'getting-started' - const sh = ass.sh ?? await useExecutableMarkdown({ filename: ass.path }).findScript(name) - const { pkgs } = await useRequirementsFile(ass.path) ?? 
panic() - const { env } = await install(pkgs) - const basename = ass.path.string.replaceAll("/", "_") //FIXME this is not sufficient escaping - - if (blueprint) { - if (blueprint.version) env['VERSION'] = blueprint.version.toString() - env['SRCROOT'] = blueprint.srcroot.string - pkgs.push(...blueprint.pkgs) - } - - const arg0 = Path.mktmp().join(`${basename}.bash`).write({ force: true, text: undent` - #!/bin/bash - set -e - ${debug ? "set -x" : ""} - ${sh} ${is_oneliner(sh) ? '"$@"' : ''} - ` }).chmod(0o500) - //FIXME ^^ putting "$@" at the end can be invalid, it really depends on the script TBH - //FIXME ^^ shouldn’t necessarily default to bash (or we should at least install it (duh)) - - const cmd = [arg0, ...ass.args] - - await run({cmd, env}) - - } break - - case 'sh': { - const blueprint = opts.env ? await useVirtualEnv({ cwd: ass.path.parent() }) : undefined - const yaml = await usePackageYAMLFrontMatter(ass.path, blueprint?.srcroot) - const cmd = [...yaml?.args ?? [], ass.path, ...ass.args] - - if (blueprint) pkgs.push(...blueprint.pkgs) - if (yaml) pkgs.push(...yaml.pkgs) - - if (magic) { - const found = await async_flatmap(extract_shebang(ass.path), which) - - if (found) { - pkgs.unshift(found) - - //TODO how do we make these tools behave exactly as they would - // during “shebang” executed mode? 
Because then we don’t need - // any special casing, we just run the shebang - - if (!isArray(yaml?.args)) switch (found.project) { - case "deno.land": - cmd.unshift("deno", "run"); break - case "gnu.org/bash": - cmd.unshift("bash", "-e"); break - case "go.dev": - throw new TeaError('#helpwanted', { details: undent` - go does not support shebangs, but there is a package called gorun - that we could use to do this, please package for us 🙏 - `}) - default: - cmd.unshift(found.shebang) - } - - } else { - const unshift = ({ project, args: new_args }: Interpreter) => { - if (!yaml?.pkgs.length) { - pkgs.unshift({ project, constraint: new semver.Range("*") }) - } - if (!yaml?.args.length) { - cmd.unshift(...new_args) - } - } - - const interpreter = await usePantry().getInterpreter(ass.path.extname()) - if (interpreter) unshift(interpreter) - } - } - - const { env } = await install(pkgs) - - supp(env, blueprint) - if (yaml?.env) Object.assign(env, yaml.env) // explicit YAML-FM env takes precedence - - await run({ cmd, env }) - - } break - - case 'cmd': { - const { env } = await prepare_exec_cmd(pkgs, opts) - await run({ cmd: ass.args, env }) - }} -} - -//// - -export async function prepare_exec_cmd(pkgs: PackageSpecification[], opts: {env: boolean}) { - const { magic } = useFlags() - let blueprint: VirtualEnv | undefined - if (opts.env) { - blueprint = await useVirtualEnv() - pkgs.push(...blueprint.pkgs) - } else if (magic && (blueprint = await useVirtualEnv().swallow(/^not-found/))) { - pkgs.push(...blueprint.pkgs) - } - const { env } = await install(pkgs) - supp(env, blueprint) - return { env, pkgs } -} - -import {readLines} from "deno/io/buffer.ts" - -async function extract_shebang(path: Path) { - const f = await Deno.open(path.string, { read: true }) - const line = (await readLines(f).next()).value - let shebang = line.match(/^\s*#!(\S+)$/)?.[1] - if (shebang) { - return Path.abs(shebang)?.basename() - } - shebang = line.match(/^\s*#!\/usr\/bin\/env (\S+)$/)?.[1] - if 
(shebang) { - return shebang[1] ?? panic() - } -} - -const subst = function(start: number, end: number, input: string, what: string) { - return input.substring(0, start) + what + input.substring(end) -}; - -export async function which(arg0: string) { - const pantry = usePantry() - let found: { project: string, constraint: semver.Range } | undefined - const promises: Promise[] = [] - - for await (const entry of pantry.ls()) { - if (found) break - const p = pantry.getProvides(entry).then(providers => { - for (const provider of providers) { - if (found) { - return - } else if (provider == arg0) { - const constraint = new semver.Range("*") - found = {...entry, constraint} - } else { - //TODO more efficient to check the prefix fits arg0 first - // eg. if python3 then check if the provides starts with python before - // doing all the regex shit. Matters because there's a *lot* of YAMLs - - let rx = /({{\s*version\.(marketing|major)\s*}})/ - let match = provider.match(rx) - if (!match?.index) continue - const regx = match[2] == 'major' ? 
'\\d+' : '\\d+\\.\\d+' - const foo = subst(match.index, match.index + match[1].length, provider, `(${regx})`) - rx = new RegExp(`^${foo}$`) - match = arg0.match(rx) - if (match) { - const constraint = new semver.Range(`~${match[1]}`) - found = {...entry, constraint} - } - } - } - }) - promises.push(p) - } - - if (!found) { - // if we didn’t find anything yet then we have to wait on the promises - // otherwise we can ignore them - await Promise.all(promises) - } - - if (found) { - return {...found, shebang: arg0} - } -} - -function is_oneliner(sh: string) { - const lines = sh.split("\n") - for (const line of lines.slice(0, -1)) { - if (!line.trim().endsWith("\\")) return false - } - return true -} - -function isMarkdown(path: Path) { - //ref: https://superuser.com/a/285878 - switch (path.extname()) { - case ".md": - case '.mkd': - case '.mdwn': - case '.mdown': - case '.mdtxt': - case '.mdtext': - case '.markdown': - case '.text': - case '.md.txt': - return true - } -} - -function urlify(arg0: string) { - try { - const url = new URL(arg0) - // we do some magic so GitHub URLs are immediately usable - switch (url.host) { - case "github.com": - url.host = "raw.githubusercontent.com" - url.pathname = url.pathname.replace("/blob/", "/") - break - case "gist.github.com": - url.host = "gist.githubusercontent.com" - //FIXME this is not good enough - // for multifile gists this just gives us a bad URL - //REF: https://gist.github.com/atenni/5604615 - url.pathname += "/raw" - break - } - return url - } catch { - //noop - } -} - -function supp(env: Record, blueprint?: VirtualEnv) { - if (blueprint) { - if (blueprint.version) env['VERSION'] = blueprint.version?.toString() - env['SRCROOT'] = blueprint.srcroot.string - } -} - -import { basename } from "deno/path/mod.ts" -import { isArray, isNumber } from "is_what" - -async function repl(installations: Installation[], env: Record) { - const pkgs_str = () => installations.map(({pkg}) => gray(pkgutils.str(pkg))).join(", ") - 
console.info('this is a temporary shell containing the following packages:') - console.info(pkgs_str()) - console.info("when done type: `exit'") - const shell = Deno.env.get("SHELL")?.trim() || "/bin/sh" - const cmd = [shell, '-i'] // interactive - - //TODO other shells pls #help-wanted - - switch (basename(shell)) { - case 'bash': - cmd.splice(1, 0, '--norc', '--noprofile') // longopts must precede shortopts - // fall through - case 'sh': - env['PS1'] = "\\[\\033[38;5;86m\\]tea\\[\\033[0m\\] %~ " - break - case 'zsh': - env['PS1'] = "%F{086}tea%F{reset} %~ " - cmd.push('--no-rcs', '--no-globalrcs') - break - case 'fish': - cmd.push( - '--no-config', - '--init-command', - 'function fish_prompt; set_color 5fffd7; echo -n "tea"; set_color grey; echo " %~ "; end' - ) - } - - try { - await run({ cmd, env }) - } catch (err) { - if (err instanceof RunError) { - Deno.exit(err.code) - } else { - throw err - } - } -} - -type RV0 = { type: 'sh', path: Path, args: string[] } | - { type: 'md', path: Path, name?: string, sh?: string, blueprint?: VirtualEnv, args: string[] } -type RV1 = RV0 | - { type: "cmd", args: string[] } -type RV2 = RV1 | - { type: "url", url: URL, args: string[] } | - { type: "dir", path: Path, args: string[] } -type RV3 = RV2 | - { type: 'repl' } - -function assess_file(path: Path, args: string[]): RV0 { - return isMarkdown(path) - ? 
{ type: 'md', path, name: args[0], args: args.slice(1) } - : { type: 'sh', path, args } -} - -function assess([arg0, ...args]: string[]): RV3 { - if (!arg0?.trim()) { - return { type: 'repl' } - } - const url = urlify(arg0) - if (url) { - return { type: 'url', url, args } - } else { - const path = Path.cwd().join(arg0) - if (path.isDirectory()) return { type: 'dir', path, args } - if (path.isFile()) return assess_file(path, args) - return { type: 'cmd', args: [arg0, ...args] } - } -} - -async function install(pkgs: PackageSpecification[]): Promise<{ env: Record, installed: Installation[] }> { - const flags = useFlags() - - const logger = new Logger() - logger.replace("resolving package graph") - - if (flags.magic) { - pkgs = [...pkgs] - const pantry = usePantry() - for (const pkg of pkgs) { - pkgs.push(...await pantry.getCompanions(pkg)) - } - } - - console.debug({hydrating: pkgs}) - - const { pkgs: wet } = await hydrate(pkgs) - const {installed, pending} = await resolve(wet, { update: flags.sync }) - logger.clear() - - for (const pkg of pending) { - - const install = await base_install(pkg) - await link(install) - installed.push(install) - } - const env = await useShellEnv({ installations: installed }) - return { env: flatten(env), installed } -} diff --git a/src/app.help.ts b/src/app.help.ts deleted file mode 100644 index 90e2392e..00000000 --- a/src/app.help.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { useFlags } from "hooks" -import { print, undent } from "utils" - -export default async function help() { - const { verbose } = useFlags() - - // tea -mx +deno.land^1.18 foo.ts -- bar - // tea -mx +deno.land^1.18 deno -- ./script-file foo bar baz - // tea build - // tea -mx ./README.md -- build - - //TODO make the stuff in brackets grayed out a bit - - if (!verbose) { - // 10| 20| 30| 40| 50| 60| 70| | 80| - await print(undent` - usage: - tea [-xdX] [flags] [+package~x.y] [file|URL|target|cmd|interpreter] -- [arg…] - - modes: magical? 
- 05 --exec,-x execute - --dump,-d dump - -X magic execute - 𝑜𝑚𝑖𝑡𝑡𝑒𝑑 infer operation ✨ - - flags: - 10 --env,-E inject virtual environment ✨ - --sync,-S sync pantries, etc. first ✨ - --magic=no,-m disable magic - --verbose,-v eg. tea -vv - --silent,-s no chat, no errors - --cd,-C change directory first - 15 - more: - tea -vh - 18 open github.com/teaxyz/cli - `) - //HEYU! did you exceed 22 lines? Don’t! That’s the limit! - } else { - // 10| 20| 30| 40| 50| 60| 70| | 80| - await print(undent` - usage: - tea [-xdX] [flags] [+package~x.y] [file|URL|target|cmd|interpreter] -- [arg…] - - modes: magical? env-aware - --exec,-x execute (omittable if ✨) ✨ 𐄂 - --dump,-d dump ✨ 𐄂 - -X infer pkg requirements 𐄂 - - aliases: - --help,-h --dump=usage - --version,-v --dump=version 𐄂 - --prefix --dump=prefix - - flags: - --env,-E inject virtual environment ✨ - --sync,-S sync pantries, etc. first ✨ - --json,-j output json - --disable-magic,-m disable magic - --verbose,-v short form accumulates, shows version first - --silent,-s no chat, no errors - --cd,-C,--chdir change directory first - - environment variables: - VERBOSE {-1: silent, 0: default, 1: verbose, 2: debug} - MAGIC [0,1] - DEBUG [0,1]: alias for \`VERBOSE=2\` - TEA_DIR \`--chdir \${directory}\` - - notes: - - explicit flags override any environment variables - - the results of magic can be observed if verbosity is > 0 - - ideology: - > A successful tool is one that was used to do something undreamed of - > by its author - —𝑠ℎ𝑎𝑑𝑜𝑤𝑦 𝑠𝑢𝑝𝑒𝑟 𝑐𝑜𝑑𝑒𝑟 - `) - } -} diff --git a/src/app.sync.ts b/src/app.sync.ts deleted file mode 100644 index b032aacf..00000000 --- a/src/app.sync.ts +++ /dev/null @@ -1,31 +0,0 @@ -import useFlags, { Args } from "hooks/useFlags.ts" -import { install, link, resolve } from "prefab" -import { useSync, useVirtualEnv } from "hooks" - -//TODO app.exec.ts and app.dump.ts should handle updating packages as part of their install logics - -export default async function sync(opts: Args) { - const { magic } = 
useFlags() - - // always sync pantry - await useSync() - - const pkgs = [...opts.pkgs] - - if (opts.env) { - pkgs.push(...(await useVirtualEnv()).pkgs) - } else if (magic) { - // TODO shouldn’t use magic if user has explicitly passed eg. a script path - const blueprint = await useVirtualEnv().swallow(/^not-found/) - if (blueprint) { - pkgs.push(...blueprint.pkgs) - } - } - - if (pkgs.length) { - for (const pkg of (await resolve(pkgs, { update: true })).pending) { - const installation = await install(pkg) - await link(installation) - } - } -} diff --git a/src/app.ts b/src/app.ts deleted file mode 100644 index c2dbb517..00000000 --- a/src/app.ts +++ /dev/null @@ -1,77 +0,0 @@ -import { usePrefix, useRequirementsFile } from "hooks" -import err_handler from "./app.err-handler.ts" -import * as logger from "hooks/useLogger.ts" -import useFlags, { useArgs } from "hooks/useFlags.ts" -import syncf from "./app.sync.ts" -import dump from "./app.dump.ts" -import exec from "./app.exec.ts" -import help from "./app.help.ts" -import { print } from "utils" -import X from "./app.X.ts" -import Path from "path" -import { Verbosity } from "./types.ts" - -const version = `${(await useRequirementsFile(new URL(import.meta.url).path().join("../../README.md")).swallow(/not-found/))?.version}+dev` -// ^^ this is statically replaced at deployment - -try { - const [args, {sync}] = useArgs(Deno.args, Deno.execPath()) - - if (args.cd) { - const chdir = args.cd - console.verbose({ chdir }) - Deno.chdir(chdir.string) - } - - if (args.mode == "exec" || args.mode == undefined || args.mode == "eXec" || !Deno.isatty(Deno.stdout.rid) || Deno.env.get('CI')) { - logger.set_global_prefix('tea:') - } - - if (sync) { - await syncf(args) - } - - if (args.mode == "exec" || args.mode == undefined) { - await announce() - await exec(args) - } else if (args.mode == "eXec") { - await announce() - await X(args) - } else switch (args.mode[1]) { - case "env": - await dump(args) - break - case "help": - await help() - 
break - case "version": - await print_version() - break - case "prefix": - await print(usePrefix().string) - } -} catch (err) { - await err_handler(err) - Deno.exit(1) -} - -async function announce() { - const self = new Path(Deno.execPath()) - const prefix = usePrefix().string - - switch (useFlags().verbosity) { - case Verbosity.debug: - if (self.basename() == "deno") { - console.debug({ deno: self.string, prefix, import: import.meta, tea: version }) - } else { - console.debug(`${prefix}/tea.xyz/v${version}/bin/tea`) - } - break - case Verbosity.loud: - await print_version() - } -} - -async function print_version() { - await print(`tea ${version}`) -} diff --git a/src/hooks/index.ts b/src/hooks/index.ts deleted file mode 100644 index 349fb08e..00000000 --- a/src/hooks/index.ts +++ /dev/null @@ -1,39 +0,0 @@ -// order is important to avoid circular dependencies and thus uncaught ReferenceErrors - -import usePrefix from "./usePrefix.ts" -import useOffLicense from "./useOffLicense.ts" -import useDownload from "./useDownload.ts" -import useCache from "./useCache.ts" -import useCellar from "./useCellar.ts" -import useExecutableMarkdown from "./useExecutableMarkdown.ts" -import useFlags from "./useFlags.ts" -import useGitHubAPI from "./useGitHubAPI.ts" -import useInventory from "./useInventory.ts" -import useShellEnv from "./useShellEnv.ts" -import useSourceUnarchiver from "./useSourceUnarchiver.ts" -import usePantry from "./usePantry.ts" -import useVirtualEnv from "./useVirtualEnv.ts" -import usePackageYAML, { usePackageYAMLFrontMatter } from "./usePackageYAML.ts" -import useSync from "./useSync.ts" -import useRequirementsFile from "./useRequirementsFile.ts" - -// but we can sort these alphabetically -export { - useCache, - useCellar, - useDownload, - useExecutableMarkdown, - useFlags, - useGitHubAPI, - useInventory, - useOffLicense, - usePackageYAML, - usePackageYAMLFrontMatter, - usePantry, - usePrefix, - useShellEnv, - useSourceUnarchiver, - useSync, - 
useVirtualEnv, - useRequirementsFile -} diff --git a/src/hooks/useCache.ts b/src/hooks/useCache.ts deleted file mode 100644 index 897aef5e..00000000 --- a/src/hooks/useCache.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { usePrefix } from "hooks" -import { Package, Stowed, SupportedArchitecture, SupportedPlatform, Stowage } from "types" -import * as utils from "utils" -import SemVer from "semver" -import Path from "path" - -export default function useCache() { - return { ls, path, decode } -} - -type DownloadOptions = { - type: 'bottle' - pkg: Package -} | { - type: 'src', - url: URL - pkg: Package -} - -const path = (stowage: Stowage) => { - const { pkg, type } = stowage - const stem = pkg.project.replaceAll("/", "∕") - - let filename = `${stem}-${pkg.version}` - if (type == 'bottle') { - const { platform, arch } = stowage.host ?? utils.host() - filename += `+${platform}+${arch}.tar.${stowage.compression}` - } else { - filename += stowage.extname - } - - return usePrefix().www.join(filename) -} - -function decode(path: Path): Stowed | undefined { - const match = path.basename().match(`^(.*)-(\\d+\\.\\d+\\.\\d+.*?)(\\+(.+?)\\+(.+?))?\\.tar\\.[gx]z$`) - if (!match) return - const [_, p, v, host, platform, arch] = match - // Gotta undo the package name manipulation to get the package from the bottle - const project = p.replaceAll("∕", "/") - const version = new SemVer(v) - if (!version) return - const pkg = { project, version } - if (host) { - const compression = path.extname() == '.tar.gz' ? 
'gz' : 'xz' - return { - pkg, - type: 'bottle', - host: { - platform: platform as SupportedPlatform, - arch: arch as SupportedArchitecture - }, - compression, - path - } - } else { - return { - pkg, type: 'src', path, - extname: path.extname(), - } - } -} - -const ls = async () => { - const rv: Stowed[] = [] - for await (const [path] of usePrefix().www.ls()) { - const stowed = decode(path) - if (stowed) rv.push(stowed) - } - return rv -} diff --git a/src/hooks/useCellar.ts b/src/hooks/useCellar.ts deleted file mode 100644 index 2d82d9d8..00000000 --- a/src/hooks/useCellar.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { Package, PackageRequirement, Installation } from "types" -import { pkg as pkgutils } from "utils" -import SemVer from "semver" -import Path from "path" -//ALERT!! do not usePantry() or you can softlock in usePantry.git.ts -import { usePrefix } from "hooks" -import useFlags from "./useFlags.ts" - - -export default function useCellar() { - return { - has, - ls, - keg, - resolve, - shelf, - } -} - -/// returns the `Installation` if the pkg is installed -const has = (pkg: Package | PackageRequirement | Path) => resolve(pkg).swallow(/^not-found:/) - -/// eg. ~/.tea/deno.land -const shelf = (project: string) => usePrefix().join(project) - -/// eg. 
~/.tea/deno.land/v1.2.3 -const keg = (pkg: Package) => shelf(pkg.project).join(`v${pkg.version}`) - -/// returns a project’s installations (sorted by version) -async function ls(project: string) { - const d = shelf(project) - const { verbose } = useFlags() - - if (!d.isDirectory()) return [] - - const rv: Installation[] = [] - for await (const [path, {name, isDirectory}] of d.ls()) { - try { - if (!isDirectory) continue - if (!name.startsWith("v") || name == 'var') continue - const version = new SemVer(name) - if (await vacant(path)) continue - rv.push({path, pkg: {project, version}}) - } catch { - // not console.warn as we allow other dirs as a design choice - if (verbose) { - console.warn(`warn: invalid version: ${name}`) - } - } - } - - return rv.sort((a, b) => pkgutils.compare(a.pkg, b.pkg)) -} - -/// if package is installed, returns its installation -async function resolve(pkg: Package | PackageRequirement | Path | Installation) { - const installation = await (async () => { - if ("pkg" in pkg) { return pkg } - // ^^ is `Installation` - - const prefix = usePrefix() - if (pkg instanceof Path) { - const path = pkg - const version = new SemVer(path.basename()) - const project = path.parent().relative({ to: prefix }) - return { - path, pkg: { project, version } - } - } else if ("version" in pkg) { - const path = keg(pkg) - return { path, pkg } - } else { - const installations = await ls(pkg.project) - const versions = installations.map(({ pkg: {version}}) => version) - const version = pkg.constraint.max(versions) - console.debug({ installations, versions, version }) - if (version) { - const path = installations.find(({pkg: {version: v}}) => v.eq(version))!.path - return { path, pkg: { project: pkg.project, version } } - } - } - throw new Error(`not-found:${pkgutils.str(pkg)}`) - })() - if (await vacant(installation.path)) { - throw new Error(`not-found:${pkgutils.str(installation.pkg)}`) - } - return installation -} - -/// if we ignore transient files, is there a 
package here? -async function vacant(path: Path): Promise { - if (!path.isDirectory()) { - return true - } else for await (const _ of path.ls()) { - return false - } - return true -} diff --git a/src/hooks/useDownload.ts b/src/hooks/useDownload.ts deleted file mode 100644 index 3f173563..00000000 --- a/src/hooks/useDownload.ts +++ /dev/null @@ -1,194 +0,0 @@ -import { readerFromStreamReader, copy } from "deno/streams/conversion.ts" -import { Logger, teal, gray } from "./useLogger.ts" -import { chuzzle, error, TeaError } from "utils" -import { Sha256 } from "deno/hash/sha256.ts" -import { useFlags, usePrefix } from "hooks" -import { isString } from "is_what" -import Path from "path" - - -interface DownloadOptions { - src: URL - dst?: Path /// default is our own unique cache path - headers?: Record - logger?: Logger | string -} - -interface RV { - path: Path - - // we only give you the sha if we download - // if we found the cache then you have to calculate the sha yourself - sha: string | undefined -} - -async function internal({ src, dst, headers, logger }: DownloadOptions, - body: (src: ReadableStream, dst: Deno.Writer, sz?: number) => Promise): Promise -{ - if (isString(logger)) { - logger = new Logger(logger) - } else if (!logger) { - logger = new Logger() - } - - console.verbose({src: src, dst}) - - const hash = (() => { - let memo: Path - return () => memo ?? 
(memo = hash_key(src)) - })() - const mtime_entry = () => hash().join("mtime") - const etag_entry = () => hash().join("etag") - - const { numpty } = useFlags() - dst ??= hash().join(src.path().basename()) - if (src.protocol === "file:") throw new Error() - - if (dst.isReadableFile()) { - headers ??= {} - if (etag_entry().isFile()) { - headers["If-None-Match"] = await etag_entry().read() - } else if (mtime_entry().isFile()) { - headers["If-Modified-Since"] = await mtime_entry().read() - } - logger.replace(teal('querying')) - } else { - logger.replace(teal('downloading')) - } - - // so the user can add private repos if they need to etc. - if (/(^|\.)github.com$/.test(src.host)) { - const token = Deno.env.get("GITHUB_TOKEN") - if (token) { - headers ??= {} - headers["Authorization"] = `bearer ${token}` - } - } - - const rsp = await fetch(src, {headers}) - - switch (rsp.status) { - case 200: { - const sz = chuzzle(parseInt(rsp.headers.get("Content-Length")!)) - - let txt = teal('downloading') - if (sz) txt += ` ${gray(pretty_size(sz))}` - logger.replace(txt) - - const reader = rsp.body ?? 
error.panic() - dst.parent().mkpath() - const f = await Deno.open(dst.string, {create: true, write: true, truncate: true}) - - try { - await body(reader, f, sz) - - const text = rsp.headers.get("Last-Modified") - const etag = rsp.headers.get("ETag") - - if (text) mtime_entry().write({text, force: true}) - if (etag) etag_entry().write({text: etag, force: true}) - - } finally { - f.close() - } - } break - case 304: - logger.replace(`cache: ${teal('hit')}`) - break - default: - if (!numpty || !dst.isFile()) { - throw new Error(`${rsp.status}: ${src}`) - } - } - - return dst -} - -async function download(opts: DownloadOptions): Promise { - try { - return await internal(opts, (src, dst) => copy(readerFromStreamReader(src.getReader()), dst)) - } catch (underr) { - throw new TeaError('http', {underr, ...opts}) - } -} - -async function download_with_sha({ logger, ...opts}: DownloadOptions): Promise<{path: Path, sha: string}> { - if (isString(logger)) { - logger = new Logger(logger) - } else if (!logger) { - logger = new Logger() - } - - const digest = new Sha256() - let run = false - - // don’t fill CI logs with dozens of download percentage lines - const ci = Deno.env.get("CI") - - const path = await internal({...opts, logger}, (src, dst, sz) => { - let n = 0 - - run = true - const tee = src.tee() - const p1 = copy(readerFromStreamReader(tee[0].getReader()), dst) - const p2 = copy(readerFromStreamReader(tee[1].getReader()), { write: buf => { - //TODO in separate thread would be likely be faster - digest.update(buf) - if (sz && !ci) { - n += buf.length - const pc = Math.round(n / sz * 100); - (logger as Logger).replace(`${teal('downloading')} ${pc}%`) - } else if (!ci) { - (logger as Logger).replace(`${teal('downloading')} ${pretty_size(n)}`) - } - return Promise.resolve(buf.length) - }}) - return Promise.all([p1, p2]) - }) - - if (!run) { - logger.replace(teal('verifying')) - const f = await Deno.open(path.string, { read: true }) - await copy(f, { write: buf => { - //TODO 
in separate thread would likely be faster - digest.update(buf) - return Promise.resolve(buf.length) - }}) - } - - return { path, sha: digest.hex() } -} - -function hash_key(url: URL): Path { - function hash(url: URL) { - const formatted = `${url.pathname}${url.search ? "?" + url.search : ""}` - return new Sha256().update(formatted).toString() - } - - const prefix = usePrefix().www - - return prefix - .join(url.protocol.slice(0, -1)) - .join(url.hostname) - .join(hash(url)) - .mkpath() -} - -export default function useDownload() { - return { - download, - hash_key, - download_with_sha: error.wrap(download_with_sha, 'http') - } -} - -function pretty_size(n: number) { - const units = ["B", "KiB", "MiB", "GiB", "TiB"] - let i = 0 - while (n > 1024 && i < units.length - 1) { - n /= 1024 - i++ - } - const precision = n < 10 ? 2 : n < 100 ? 1 : 0 - return `${n.toFixed(precision)} ${units[i]}` -} diff --git a/src/hooks/useExecutableMarkdown.ts b/src/hooks/useExecutableMarkdown.ts deleted file mode 100644 index 18b04474..00000000 --- a/src/hooks/useExecutableMarkdown.ts +++ /dev/null @@ -1,55 +0,0 @@ -import Path from "path" -import { TeaError } from "../utils/index.ts"; - -interface Return { - /// throws if not found - findScript(name: string): string -} - -type Parameters = { filename: Path } | { text: string } - -export default function useExecutableMarkdown(parameters: Parameters) { - const getLines = (() => (async () => { - if ("filename" in parameters) { - return await parameters.filename.read() - } else { - return parameters.text - } - })().then(x => x.split("\n")[Symbol.iterator]())) - - const findScript = async (name: string) => { - // firstly check if there is a target named args[0] - // since we don’t want to allow the security exploit where you can make a file - // and steal execution when a target was intended - // NOTE user can still specify eg. `tea ./foo` if they really want the file - name = name == '.' || !name?.trim() ? 
'getting-started' : name - - const lines = await getLines() - - const header_rx = new RegExp(`^#+\\s+(.*)\\s*$`) - for (const line of lines) { - const match = line.match(header_rx) - if (!match) continue - if (match[1].toLowerCase().replace(/\s+/, '-') == name) { - break - } - } - - do { - const {value: line, done} = lines.next() - if (done) throw new TeaError('not-found: exe/md: region', {script: name, ...parameters}) - if (!line.trim()) continue - if (line.match(/^```sh\s*$/)) break - } while (true) - - const sh: string[] = [] - for (const line of lines) { - if (line.match(/^```\s*$/)) return sh.join("\n") - sh.push(line.replace(/^\$\s*/, '')) - } - - throw { error: true, script: name, ...parameters, code: "exe/md:cannot-parse" } - } - - return { findScript } -} diff --git a/src/hooks/useFlags.ts b/src/hooks/useFlags.ts deleted file mode 100644 index f53d83ae..00000000 --- a/src/hooks/useFlags.ts +++ /dev/null @@ -1,268 +0,0 @@ -import { flatmap, chuzzle, pkg, validate_str, panic } from "utils" -import { Verbosity, PackageSpecification } from "types" -import { isNumber } from "is_what" -import { set_tmp } from "path" -import { usePrefix } from "hooks" -import Path from "path" -import {TeaError} from "../utils/index.ts"; - -// doing here as this is the only file all our scripts import -set_tmp(usePrefix().join('tea.xyz/tmp')) - - -export type Mode = 'exec' | 'eXec' | ['dump', 'env' | 'help' | 'version' | 'prefix'] - -interface Flags { - verbosity: Verbosity - magic: boolean - json: boolean - numpty: boolean - sync: boolean -} - -interface ConvenienceFlags { - verbose: boolean - debug: boolean - silent: boolean -} - -let flags: Flags - -export default function useFlags(): Flags & ConvenienceFlags { - if (!flags) { - //FIXME scripts/* need this to happen but its yucky - flags = { - sync: false, - verbosity: getVerbosity(0), - magic: getMagic(undefined), - json: !!Deno.env.get("JSON"), - numpty: !!Deno.env.get("NUMPTY") - } - applyVerbosity() - } - - return { - 
...flags, - verbose: flags.verbosity >= Verbosity.loud, - debug: flags.verbosity >= Verbosity.debug, - silent: flags.verbosity <= Verbosity.quiet - } -} - -interface Adjustments { - cd?: Path -} - -export type Args = { - mode?: Mode - cd?: Path - args: string[] - pkgs: PackageSpecification[] - env?: boolean -} - -export function useArgs(args: string[], arg0: string): [Args, Flags & ConvenienceFlags] { - if (flags) throw new Error("contract-violated"); - - (() => { - const base = new Path(arg0).isSymlink()?.basename() - if (base === undefined || base === "tea") return - const match = base.match(/^tea_([^\/]+)$/) - args = ["-X", match?.[1] ?? base, ...args] - })() - - const rv: Args = { - args: [], - pkgs: [] - } - - let magic: boolean | undefined - let v: number | undefined - let sync = false - const it = args[Symbol.iterator]() - - for (const arg of it) { - if (arg == '+' || arg == '-' || arg == '--') { - throw new TeaError('not-found: arg', {arg}) - } - - if (arg.startsWith('+')) { - rv.pkgs.push(pkg.parse(arg.slice(1))) - } else if (arg.startsWith('--')) { - const [,key, , value] = arg.match(/^--([\w-]+)(=(.+))?$/)! - - switch (key) { - case 'dump': - switch (value) { - case 'help': - rv.mode = ['dump', 'help'] - break - case 'version': - rv.mode = ['dump', 'version'] - break - case 'prefix': - rv.mode = ['dump', 'prefix'] - break - case 'env': - case undefined: - rv.mode = ['dump', 'env'] - break - default: - throw new Error("usage") - } - break - case 'verbose': - if (value) { - v = chuzzle(parseInt(value) + 1) ?? panic() - } else { - v = 1 - } - break - case 'debug': - v = 2 - break - case 'cd': - case 'chdir': - rv.cd = Path.cwd().join(validate_str(value ?? 
it.next().value)) - break - case 'help': - rv.mode = ['dump', 'help'] - break - case 'prefix': - rv.mode = ['dump', 'prefix'] - break - case 'version': - rv.mode = ['dump', 'version'] - break - case 'muggle': - case 'disable-magic': - magic = false - break - case 'magic': - magic = !!parseBool(value) - break - case 'silent': - v = -1 - break - case 'sync': - sync = true - break - case 'env': - rv.env = parseBool(value) ?? true - break - case 'disable-env': - rv.env = false - break - default: - throw new TeaError('not-found: arg', {arg}) - } - } else if (arg.startsWith('-')) { - for (const c of arg.slice(1)) { - switch (c) { - case 'x': - rv.mode = 'exec' - break - case 'X': - rv.mode = 'eXec' - break - case 'E': - rv.env = true - break - case 'd': - rv.mode = ['dump', 'env'] - break - case 'v': - v = (v ?? 0) + 1 - break - case 'C': - rv.cd = Path.cwd().join(validate_str(it.next().value)) - break - case 'm': - magic = false - break - case 'M': - magic = true - break - case 's': - v = -1; - break - case 'S': - sync = true - break - case 'h': - rv.mode = ['dump', 'help'] - break - default: - throw new TeaError('not-found: arg', {arg: `-${c}`}) - } - } - } else { - rv.args.push(arg) - for (const arg of it) { // empty the main loop iterator - rv.args.push(arg) - } - } - } - - flags = { - verbosity: getVerbosity(v), - magic: getMagic(magic), - json: !!Deno.env.get("JSON"), - numpty: !!Deno.env.get("NUMPTY"), - sync - } - - applyVerbosity() - - const full_flags = useFlags() - console.debug({ args: rv, flags: full_flags }) - - return [rv, full_flags] -} - -function getVerbosity(v: number | undefined): Verbosity { - if (isNumber(v)) return v - if (Deno.env.get("DEBUG") == '1') return Verbosity.debug - if (Deno.env.get("GITHUB_ACTIONS") == 'true' && Deno.env.get("RUNNER_DEBUG") == '1') return Verbosity.debug - const env = flatmap(Deno.env.get("VERBOSE"), parseInt) - return isNumber(env) ? 
env : Verbosity.normal -} - -function getMagic(magic: boolean | undefined): boolean { - if (magic !== undefined) return magic - const env = Deno.env.get("MAGIC") - //NOTE darwinsys.com/file uses `MAGIC` and has since 1995 so they have dibs - // however it’s basically ok since we provide the above hatch to disable - // magic and our default is on so if it is set to a Path then nothing is actually - // different from if it wasn't set at all. - return env !== "0" -} - -function applyVerbosity() { - function noop() {} - if (flags.verbosity > Verbosity.debug) flags.verbosity = Verbosity.debug - if (flags.verbosity < Verbosity.debug) console.debug = noop - if (flags.verbosity < Verbosity.loud) console.verbose = noop - if (flags.verbosity < Verbosity.normal) { - console.info = noop - console.log = noop - console.error = noop - } -} - -function parseBool(input: string) { - switch (input) { - case '1': - case 'true': - case 'yes': - case 'on': - case 'enable': - return true - case '0': - case 'false': - case 'no': - case 'off': - case 'disable': - return false - } -} diff --git a/src/hooks/useGitHubAPI.ts b/src/hooks/useGitHubAPI.ts deleted file mode 100644 index 6e04c2e4..00000000 --- a/src/hooks/useGitHubAPI.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { GET, undent, validate_arr, validate_str } from "utils" -import { isArray } from "is_what" - -//TODO pagination - -interface GetVersionsOptions { - user: string - repo: string - type: 'releases' | 'tags' | 'releases/tags' -} - -interface Response { - getVersions(opts: GetVersionsOptions): Promise -} - -interface GHRelease { - tag_name: string - name: string -} - -export default function useGitHubAPI(): Response { - return { getVersions } -} - -async function getVersions({ user, repo, type }: GetVersionsOptions): Promise { - //TODO set `Accept: application/vnd.github+json` - //TODO we can use ETags to check if the data we have cached is still valid - - if (type.startsWith("releases")) { - const json = await 
GET(`https://api.github.com/repos/${user}/${repo}/releases?per_page=100`) - if (!isArray(json)) throw new Error("unexpected json") - return json.map(({ tag_name, name }) => type == 'releases/tags' ? tag_name : name) - } else { - // GitHub tags API returns in reverse alphabetical order lol - // so we have to use their graphql endpoint - // sadly the graph ql endpoint requires auth :/ - - //NOTE realistically the bad sort order for the REST api only effects ~5% of projects - // so potentially could flag those projects (eg. go.dev) - - const headers: HeadersInit = {} - const token = Deno.env.get("GITHUB_TOKEN") - if (token) headers['Authorization'] = `bearer ${token}` - - const query = undent` - query { - repository(owner: "${user}", name: "${repo}") { - refs(last: 100, refPrefix: "refs/tags/", orderBy: {field: TAG_COMMIT_DATE, direction: ASC}) { - nodes { - name - } - } - } - }` - const rsp = await fetch('https://api.github.com/graphql', { - method: 'POST', - body: JSON.stringify({ query }), - headers - }) - const json = await rsp.json() - - if (!rsp.ok) { - console.error({ rsp, json }) - throw new Error() - } else { - console.debug(json) - } - - // deno-lint-ignore no-explicit-any - const foo = validate_arr(json?.data?.repository?.refs?.nodes).map((x: any) => validate_str(x?.name)) - - return foo - } -} diff --git a/src/hooks/useInventory.ts b/src/hooks/useInventory.ts deleted file mode 100644 index 494e5e00..00000000 --- a/src/hooks/useInventory.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { Package, PackageRequirement } from "types" -import { host, error, TeaError } from "utils" -import SemVer from "semver" -import Path from "../vendor/Path.ts" - -export interface Inventory { - [project: string]: { - [platform: string]: { - [arch: string]: string[] - } - } -} - -const select = async (rq: PackageRequirement | Package) => { - const { platform, arch } = host() - - const url = new URL('https://dist.tea.xyz') - url.pathname = Path.root.join(rq.project, platform, arch, 
'versions.txt').string - - const rsp = await fetch(url) - - if (!rsp.ok) throw new TeaError('http', {url}) //FIXME - - const releases = await rsp.text() - let versions = releases.split("\n").map(x => new SemVer(x)) - - if (versions.length < 1) throw new Error() - - if (rq.project == 'openssl.org') { - // workaround our previous sins - const v = new SemVer("1.1.118") - versions = versions.filter(x => x.neq(v)) - } - - console.debug({ project: rq.project, versions }) - - if ("constraint" in rq) { - return rq.constraint.max(versions) - } else if (versions.find(x => x.eq(rq.version))) { - return rq.version - } -} - -export default function useInventory() { - return { select: error.wrap(select, 'http') } -} diff --git a/src/hooks/useLogger.ts b/src/hooks/useLogger.ts deleted file mode 100644 index d882a930..00000000 --- a/src/hooks/useLogger.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { colors, tty } from "cliffy/ansi/mod.ts" -import { flatmap } from "../utils/index.ts"; -import useFlags from "./useFlags.ts" - -let global_prefix: string | undefined -export function set_global_prefix(prefix: string) { - if (global_prefix !== undefined) throw new Error() - global_prefix = prefix.trim() -} - -// ref https://github.com/chalk/ansi-regex/blob/main/index.js -const ansi_escapes_rx = new RegExp([ - '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)', - '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))' -].join('|'), 'g') - -function ln(s: string, prefix_length: number) { - try { - // remove ansi escapes to get actual length - const n = s.replace(ansi_escapes_rx, '').length + prefix_length - const { columns } = Deno.consoleSize() - return Math.ceil(n / columns) - } catch { - // consoleSize() throws if not a tty - // eg. 
in GitHub Actions - return 1 - } -} - -export default function useLogger(prefix?: string) { - return new Logger(prefix) -} - -function colorIfTTY(x: string, colorMethod: (x: string)=>string) { - if (Deno.env.get("CI")) { - // this is what charm’s lipgloss does, we copy their lead - return colorMethod(x) - } else if (Deno.isatty(Deno.stdout.rid) && Deno.isatty(Deno.stderr.rid)) { - return colorMethod(x) - } else { - return x - } -} - -export const teal = (x: string) => colorIfTTY(x, (x) => colors.rgb8(x, 86)) -export const red = (x: string) => colorIfTTY(x, colors.brightRed) -export const gray = (x: string) => colorIfTTY(x, (x) => colors.rgb8(x, 244)) -export const dark = (x: string) => colorIfTTY(x, (x) => colors.rgb8(x, 238)) -export const lite = (x: string) => colorIfTTY(x, (x) => colors.rgb8(x, 252)) - -export class Logger { - readonly prefix: string - lines = 0 - last_line = '' - tty = tty({ stdout: Deno.stderr }) - verbosity = useFlags().verbosity - prefix_length: number - - constructor(prefix?: string) { - prefix = prefix?.chuzzle() - this.prefix_length = prefix?.length ?? 0 - this.prefix = prefix ? `${gray(prefix)} ` : '' - if (global_prefix) { - this.prefix = `${dark(global_prefix)} ${this.prefix}` - this.prefix_length += global_prefix.length + 1 - } - } - - //TODO don’t erase whole lines, just erase the part that is different - replace(line: string, {prefix: wprefix}: {prefix: boolean} = {prefix: true}) { - if (this.verbosity < 0) return - - if (line == this.last_line) { - return //noop - } - - if (this.lines) { - const n = ln(this.last_line, this.prefix_length) - if (this.verbosity < 1) { - this.tty.cursorLeft.cursorUp(n).eraseDown() - } - this.lines -= n - if (this.lines < 0) throw new Error(`${n}`) //assertion error - } - - const prefix = wprefix - ? this.prefix - : flatmap(global_prefix?.chuzzle(), x => `${dark(x)} `) ?? '' - console.error(prefix + line) - this.lines += ln(line, wprefix ? this.prefix_length : global_prefix?.length ?? 
0) - this.last_line = line - } - - clear() { - this.tty.cursorLeft.cursorUp(this.lines).eraseDown(this.lines) - this.lines = 0 - } -} diff --git a/src/hooks/useMoustaches.ts b/src/hooks/useMoustaches.ts deleted file mode 100644 index b1d5d9dd..00000000 --- a/src/hooks/useMoustaches.ts +++ /dev/null @@ -1,45 +0,0 @@ -import SemVer from "semver" -import { host } from "utils" - -export default function useMoustaches() { - return { - apply, - tokenize: { - version: tokenizeVersion, - host: tokenizeHost - } - } -} - -function tokenizeVersion(version: SemVer, prefix = 'version') { - const rv = [ - { from: prefix, to: `${version}` }, - { from: `${prefix}.major`, to: `${version.major}` }, - { from: `${prefix}.minor`, to: `${version.minor}` }, - { from: `${prefix}.patch`, to: `${version.patch}` }, - { from: `${prefix}.marketing`, to: `${version.major}.${version.minor}` }, - { from: `${prefix}.build`, to: version.build.join('+') }, - { from: `${prefix}.raw`, to: version.raw }, - ] - if ('tag' in version) { - rv.push({from: `${prefix}.tag`, to: (version as unknown as {tag: string}).tag}) - } - return rv -} - -//TODO replace `hw` with `host` -function tokenizeHost() { - const { arch, target, platform } = host() - return [ - { from: "hw.arch", to: arch }, - { from: "hw.target", to: target }, - { from: "hw.platform", to: platform }, - { from: "hw.concurrency", to: navigator.hardwareConcurrency.toString() } - ] -} - -function apply(input: string, map: { from: string, to: string }[]) { - return map.reduce((acc, {from, to}) => - acc.replace(new RegExp(`(^\\$)?{{\\s*${from}\\s*}}`, "g"), to), - input) -} diff --git a/src/hooks/useOffLicense.ts b/src/hooks/useOffLicense.ts deleted file mode 100644 index 4eb38509..00000000 --- a/src/hooks/useOffLicense.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Stowage } from "types" -import { host } from "utils" -import Path from "path" - -type Type = 's3' - -export default function useOffLicense(_type: Type) { - return { url, key } -} - -function 
key(stowage: Stowage) { - let rv = Path.root.join(stowage.pkg.project) - if (stowage.type == 'bottle') { - const { platform, arch } = stowage.host ?? host() - rv = rv.join(`${platform}/${arch}`) - } - let fn = `v${stowage.pkg.version}` - if (stowage.type == 'bottle') { - fn += `.tar.${stowage.compression}` - } else { - fn += stowage.extname - } - return rv.join(fn).string.slice(1) -} - -function url(stowage: Stowage) { - return new URL(`https://dist.tea.xyz/${key(stowage)}`) -} diff --git a/src/hooks/usePackageYAML.ts b/src/hooks/usePackageYAML.ts deleted file mode 100644 index 0cade405..00000000 --- a/src/hooks/usePackageYAML.ts +++ /dev/null @@ -1,109 +0,0 @@ -import { PackageRequirement } from "types" -import { isPlainObject, isString, isArray, PlainObject } from "is_what" -import { validatePackageRequirement } from "utils/hacks.ts" -import { usePrefix } from "hooks" -import { validate_plain_obj } from "utils" -import Path from "path" -import useMoustaches from "./useMoustaches.ts"; - -interface Return1 { - getDeps: (wbuild: boolean) => PackageRequirement[] - yaml: PlainObject -} - -export default function usePackageYAML(yaml: unknown): Return1 { - //TODO do magic: if (err == "no-front-matter") - - if (!isPlainObject(yaml)) throw new Error("bad-yaml") - - const getDeps = (wbuild: boolean) => { - return [...go(yaml.dependencies), ...go(wbuild && yaml.build?.dependencies)] - // deno-lint-ignore no-explicit-any - function go(node: any) { - if (!node) return [] - return Object.entries(validate_plain_obj(node)) - .compact(([project, constraint]) => validatePackageRequirement({ project, constraint })) - } - } - - return { getDeps, yaml } -} - -interface Return2 extends Return1 { - getArgs: () => string[] -} - -interface FrontMatter { - args: string[] - pkgs: PackageRequirement[] - env: Record -} - -export async function usePackageYAMLFrontMatter(script: Path, srcroot?: Path): Promise { - const yaml = await readYAMLFrontMatter(script) - if (!yaml) return - const rv = 
usePackageYAML(yaml) - - const getArgs = () => { - const fn1 = () => { - if (rv.yaml.args === undefined) return [] - if (isString(rv.yaml.args)) return rv.yaml.args.split(/\s+/) - if (isArray(rv.yaml.args)) return rv.yaml.args.map(x => `${x}`) - throw new Error("bad-yaml") - } - if (srcroot) { - //TODO if no srcroot and args contain {{srcroot}} show warning - return fn1().map(fix) - } else { - return fn1() - } - } - - const env: Record = {} - if (isPlainObject(yaml.env)) { - for (const [k, v] of Object.entries(yaml.env)) { - if (!isString(v)) throw new Error() - env[k] = fix(v) - } - } - - return { - pkgs: rv.getDeps(false), - args: getArgs(), - env - } - - function fix(input: string): string { - const moustaches = useMoustaches() - - return moustaches.apply(input, [ - ...moustaches.tokenize.host(), - { from: "tea.prefix", to: usePrefix().string }, - { from: "srcroot", to: srcroot!.string}, - { from: "home", to: Path.home().string } - ]) - } -} - - -import { parse as parseYaml } from "deno/encoding/yaml.ts" - -async function readYAMLFrontMatter(path: Path): Promise { - //TODO be smart with knowing the comment types - // this parsing logic should be in the pantry ofc - - let yaml: string | undefined - for await (const line of path.readLines()) { - if (yaml !== undefined) { - if (line.trim().match(/^((#|\/\/)\s*)?---(\s*\*\/)?$/)) { - const rv = await parseYaml(yaml) - if (!isPlainObject(rv)) throw new Error("bad-yaml") - return rv - } - yaml += line?.replace(/^#/, '') - yaml += "\n" - } else if (line.trim().match(/^((\/\*|#|\/\/)\s*)?---/)) { - yaml = '' - } - } -} diff --git a/src/hooks/usePantry.ls.ts b/src/hooks/usePantry.ls.ts deleted file mode 100644 index 6499ed47..00000000 --- a/src/hooks/usePantry.ls.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { usePrefix } from "hooks" -import Path from "path" -import TeaError from "utils/error.ts" - -//TODO keeping this because some pantry scripts expect it -export const prefix = 
usePrefix().join('tea.xyz/var/pantry/projects') - -export function pantry_paths(): Path[] { - const rv: Path[] = [prefix] - const env = Deno.env.get("TEA_PANTRY_PATH") - if (env) for (const path of env.split(":").reverse()) { - rv.unshift(Path.cwd().join(path, "projects")) - } - return rv -} - -interface Entry { - project: string - path: Path -} - -export async function* ls(): AsyncGenerator { - for (const prefix of pantry_paths()) { - for await (const path of _ls_pantry(prefix)) { - yield { - project: path.parent().relative({ to: prefix }), - path - } - } - } -} - -async function* _ls_pantry(dir: Path): AsyncGenerator { - if (!dir.isDirectory()) throw new TeaError('not-found: pantry', { path: dir }) - - for await (const [path, { name, isDirectory }] of dir.ls()) { - if (isDirectory) { - for await (const x of _ls_pantry(path)) { - yield x - } - } else if (name === "package.yml") { - yield path - } - } -} diff --git a/src/hooks/usePantry.ts b/src/hooks/usePantry.ts deleted file mode 100644 index 3f6b741d..00000000 --- a/src/hooks/usePantry.ts +++ /dev/null @@ -1,481 +0,0 @@ -// deno-lint-ignore-file no-cond-assign -import { Package, PackageRequirement, Installation } from "types" -import { host, flatmap, undent, validate_plain_obj, validate_str, validate_arr, panic, pkg, TeaError } from "utils" -import { isNumber, isPlainObject, isString, isArray, isPrimitive, PlainObject, isBoolean } from "is_what" -import { validatePackageRequirement } from "utils/hacks.ts" -import { useCellar, useGitHubAPI, usePrefix } from "hooks" -import { ls, pantry_paths, prefix } from "./usePantry.ls.ts" -import SemVer, * as semver from "semver" -import Path from "path" - -interface Entry { - dir: Path - yml: () => Promise - versions: Path -} - -export interface Interpreter { - project: string // FIXME: should probably be a stronger type - args: string[] -} - -export default function usePantry() { - return { - getClosestPackageSuggestion, - getVersions, - getDeps, - getDistributable, - 
getCompanions, - getScript, - getProvides, - getYAML, - getInterpreter, - getRuntimeEnvironment, - resolve, - ls, - prefix - } -} - -async function resolve(spec: Package | PackageRequirement): Promise { - const constraint = "constraint" in spec ? spec.constraint : new semver.Range(`=${spec.version}`) - const versions = await getVersions(spec) - const version = constraint.max(versions) - if (!version) throw new Error(`no-version-found: ${pkg.str(spec)}`) - return { project: spec.project, version }; -} - -//TODO take `T` and then type check it -const getYAML = (pkg: Package | PackageRequirement): { path: Path, parse: () => PlainObject} => { - const foo = entry(pkg) - return { - path: foo.dir.join("package.yml"), - parse: foo.yml - } -} - -/// returns ONE LEVEL of deps, to recurse use `hydrate.ts` -const getDeps = async (pkg: Package | PackageRequirement) => { - const yml = await entry(pkg).yml() - return { - runtime: parse_pkgs_node(yml.dependencies), - build: parse_pkgs_node(yml.build?.dependencies), - test: parse_pkgs_node(yml.test?.dependencies) - } -} - -// deno-lint-ignore no-explicit-any -function parse_pkgs_node(node: any) { - if (!node) return [] - node = validate_plain_obj(node) - platform_reduce(node) - - const rv: PackageRequirement[] = [] - for (const [project, constraint] of Object.entries(node)) { - rv.compact_push(validatePackageRequirement({ project, constraint })) - } - return rv -} - -const getRawDistributableURL = (yml: PlainObject) => { - if (isPlainObject(yml.distributable)) { - return validate_str(yml.distributable.url) - } else if (isString(yml.distributable)) { - return yml.distributable - } else if (yml.distributable === null || yml.distributable === undefined) { - return - } else { - throw new Error(`invalid distributable node: ${yml.distributable}`) - } -} -const getDistributable = async (pkg: Package) => { - const moustaches = useMoustaches() - - const yml = await entry(pkg).yml() - let urlstr = getRawDistributableURL(yml) - if (!urlstr) 
return - let stripComponents: number | undefined - if (isPlainObject(yml.distributable)) { - stripComponents = flatmap(yml.distributable["strip-components"], coerceNumber) - } - - urlstr = moustaches.apply(urlstr, [ - ...moustaches.tokenize.version(pkg.version), - ...moustaches.tokenize.host() - ]) - - const url = new URL(urlstr) - - return { url, stripComponents } -} - -const getScript = async (pkg: Package, key: 'build' | 'test', deps: Installation[]) => { - const yml = await entry(pkg).yml() - const node = yml[key] - - const mm = useMoustaches() - const script = (input: string) => mm.apply(validate_str(input), mm.tokenize.all(pkg, deps)) - - if (isPlainObject(node)) { - let raw = script(node.script) - - let wd = node["working-directory"] - if (wd) { - wd = mm.apply(wd, [ - ...mm.tokenize.version(pkg.version), - ...mm.tokenize.host(), - ...tokenizePackage(pkg) - ]) - raw = undent` - mkdir -p ${wd} - cd ${wd} - - ${raw} - ` - } - - const env = node.env - if (isPlainObject(env)) { - raw = `${expand_env(env, pkg, deps)}\n\n${raw}` - } - return raw - } else { - return script(node) - } -} - -const getProvides = async (pkg: { project: string }) => { - const yml = await entry(pkg).yml() - const node = yml["provides"] - if (!node) return [] - if (!isArray(node)) throw new Error("bad-yaml") - - return node.compact(x => { - if (isPlainObject(x)) { - x = x["executable"] - } - if (isString(x)) { - return x.startsWith("bin/") && x.slice(4) - } - }) -} - -const getCompanions = async (pkg: {project: string}) => { - const yml = await entry(pkg).yml() - const node = yml["companions"] - return parse_pkgs_node(node) -} - -const getInterpreter = async (_extension: string): Promise => { - const extension = _extension.slice(1) - for await (const pkg of ls()) { - const yml = await entry(pkg).yml() - const node = yml["interprets"] - if (!isPlainObject(node)) continue - try { - const { extensions, args } = yml["interprets"] - if ((isString(extensions) && extensions === extension) || - 
(isArray(extensions) && extensions.includes(extension))) { - return { project: pkg.project, args: isArray(args) ? args : [args] } - } - } catch { - continue - } - } - return undefined -} - -const getRuntimeEnvironment = async (pkg: Package): Promise> => { - const yml = await entry(pkg).yml() - const obj = validate_plain_obj(yml["runtime"]?.["env"] ?? {}) - return expand_env_obj(obj, pkg, []) -} - -// deno-lint-ignore no-explicit-any -function coerceNumber(input: any) { - if (isNumber(input)) return input -} - -function entry({ project }: { project: string }): Entry { - for (const prefix of pantry_paths()) { - if (!prefix.exists()) throw new TeaError('not-found: pantry', { path: prefix.parent() }) - const dir = prefix.join(project) - const filename = dir.join("package.yml") - if (!filename.exists()) continue - const yml = async () => { - try { - const yml = await filename.readYAML() - if (!isPlainObject(yml)) throw null - return yml - } catch (underr) { - throw new TeaError('parser: pantry: package.yml', {underr, project, filename}) - } - } - const versions = dir.join("versions.txt") - return { dir, yml, versions } - } - - throw new TeaError('not-found: pantry: package.yml', {project}, ) -} - -async function getClosestPackageSuggestion(input: string) { - let choice: string | undefined - let min = Infinity - for await (const {project} of ls()) { - if (min == 0) break - - getProvides({ project }).then(provides => { - if (provides.includes(input)) { - choice = project - min = 0 - } - }) - - const dist = levenshteinDistance(project, input) - if (dist < min) { - min = dist - choice = project - } - } - return choice -} - -function levenshteinDistance (str1: string, str2:string):number{ - const track = Array(str2.length + 1).fill(null).map(() => - Array(str1.length + 1).fill(null)) - for (let i = 0; i <= str1.length; i += 1) { - track[0][i] = i - } - for (let j = 0; j <= str2.length; j += 1) { - track[j][0] = j - } - for (let j = 1; j <= str2.length; j += 1) { - for (let i 
= 1; i <= str1.length; i += 1) { - const indicator = str1[i - 1] === str2[j - 1] ? 0 : 1 - track[j][i] = Math.min( - track[j][i - 1] + 1, // deletion - track[j - 1][i] + 1, // insertion - track[j - 1][i - 1] + indicator, // substitution - ); - } - } - return track[str2.length][str1.length] -} - -/// returns sorted versions -async function getVersions(spec: Package | PackageRequirement): Promise { - const files = entry(spec) - const versions = await files.yml().then(x => x.versions) - - if (isArray(versions)) { - return versions.map(raw => - semver.parse(validate_str(raw)) ?? panic(`couldn’t parse \`${raw}' into a semantic version`) - ) - } else if (isPlainObject(versions)) { - return handleComplexVersions(versions) - } else { - throw new Error(`couldn’t parse versions: ${pkg.str(spec)}`) - } -} - -//SRC https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions -function escapeRegExp(string: string) { - return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') // $& means the whole matched string -} - -async function handleComplexVersions(versions: PlainObject): Promise { - const [user, repo, ...types] = validate_str(versions.github).split("/") - const type = types?.join("/").chuzzle() ?? 
'releases' - - const ignore = (() => { - const arr = (() => { - if (!versions.ignore) return [] - if (isString(versions.ignore)) return [versions.ignore] - return validate_arr(versions.ignore) - })() - return arr.map(input => { - let rx = validate_str(input) - if (!(rx.startsWith("/") && rx.endsWith("/"))) { - rx = escapeRegExp(rx) - rx = rx.replace(/(x|y|z)\b/g, '\\d+') - rx = `^${rx}$` - } else { - rx = rx.slice(1, -1) - } - return new RegExp(rx) - }) - })() - - const strip: (x: string) => string = (() => { - let rxs = versions.strip - if (!rxs) return x => x - if (!isArray(rxs)) rxs = [rxs] - // deno-lint-ignore no-explicit-any - rxs = rxs.map((rx: any) => { - if (!isString(rx)) throw new Error() - if (!(rx.startsWith("/") && rx.endsWith("/"))) throw new Error() - return new RegExp(rx.slice(1, -1)) - }) - return x => { - for (const rx of rxs) { - x = x.replace(rx, "") - } - return x - } - })() - - switch (type) { - case 'releases': - case 'releases/tags': - case 'tags': - break - default: - throw new Error() - } - - const rsp = await useGitHubAPI().getVersions({ user, repo, type }) - - const rv: SemVer[] = [] - for (const pre_strip_name of rsp) { - const name = strip(pre_strip_name) - - if (ignore.some(x => x.test(name))) { - console.debug({ignoring: pre_strip_name, reason: 'explicit'}) - } else { - const v = semver.parse(name) - if (!v) { - console.warn({ignoring: pre_strip_name, reason: 'unparsable'}) - } else if (v.prerelease.length <= 0) { - console.verbose({ found: v.toString(), from: name }); - // used by some packages - (v as unknown as {tag: string}).tag = pre_strip_name - rv.push(v) - } else { - console.debug({ignoring: pre_strip_name, reason: 'prerelease'}) - } - } - } - return rv -} - -/// expands platform specific keys into the object -/// expands inplace because JS is nuts and you have to suck it up -function platform_reduce(env: PlainObject) { - const sys = host() - for (const [key, value] of Object.entries(env)) { - const [os, arch] = (() => { - 
let match = key.match(/^(darwin|linux)\/(aarch64|x86-64)$/) - if (match) return [match[1], match[2]] - if (match = key.match(/^(darwin|linux)$/)) return [match[1]] - if (match = key.match(/^(aarch64|x86-64)$/)) return [,match[1]] - return [] - })() - - if (!os && !arch) continue - delete env[key] - if (os && os != sys.platform) continue - if (arch && arch != sys.arch) continue - - const dict = validate_plain_obj(value) - for (const [key, value] of Object.entries(dict)) { - // if user specifies an array then we assume we are supplementing - // otherwise we are replacing. If this is too magical let us know - if (isArray(value)) { - if (!env[key]) env[key] = [] - else if (!isArray(env[key])) env[key] = [env[key]] - //TODO if all-platforms version comes after the specific then order accordingly - env[key].push(...value) - } else { - env[key] = value - } - } - } -} - -function expand_env_obj(env_: PlainObject, pkg: Package, deps: Installation[]): Record { - const env = {...env_} - - platform_reduce(env) - - const rv: Record = {} - - for (let [key, value] of Object.entries(env)) { - if (isArray(value)) { - value = value.map(transform).join(" ") - } else { - value = transform(value) - } - - rv[key] = value - } - - return rv - - // deno-lint-ignore no-explicit-any - function transform(value: any): string { - if (!isPrimitive(value)) throw new Error(`invalid-env-value: ${JSON.stringify(value)}`) - - if (isBoolean(value)) { - return value ? "1" : "0" - } else if (value === undefined || value === null) { - return "0" - } else if (isString(value)) { - const mm = useMoustaches() - return mm.apply(value, mm.tokenize.all(pkg, deps)) - } else if (isNumber(value)) { - return value.toString() - } - throw new Error("unexpected-error") - } -} - -function expand_env(env: PlainObject, pkg: Package, deps: Installation[]): string { - return Object.entries(expand_env_obj(env, pkg, deps)).map(([key,value]) => { - // weird POSIX string escaping/concat stuff - // eg. 
export FOO="bar ""$baz"" bun" - value = `"${value.trim().replace(/"/g, '""')}"` - while (value.startsWith('""')) value = value.slice(1) //FIXME lol better pls - while (value.endsWith('""')) value = value.slice(0,-1) //FIXME lol better pls - - return `export ${key}=${value}` - }).join("\n") -} - - -//////////////////////////////////////////// useMoustaches() additions -import useMoustachesBase from "./useMoustaches.ts" - -function useMoustaches() { - const base = useMoustachesBase() - - const deps = (deps: Installation[]) => { - const map: {from: string, to: string}[] = [] - for (const dep of deps ?? []) { - map.push({ from: `deps.${dep.pkg.project}.prefix`, to: dep.path.string }) - map.push(...useMoustaches().tokenize.version(dep.pkg.version, `deps.${dep.pkg.project}.version`)) - } - return map - } - - const tea = () => [{ from: "tea.prefix", to: usePrefix().string }] - - const all = (pkg: Package, deps_: Installation[]) => [ - ...deps(deps_), - ...tokenizePackage(pkg), - ...tea(), - ...base.tokenize.version(pkg.version), - ...base.tokenize.host(), - ] - - return { - apply: base.apply, - tokenize: { - ...base.tokenize, - deps, pkg, tea, all - } - } -} - -function tokenizePackage(pkg: Package) { - return [{ from: "prefix", to: useCellar().keg(pkg).string }] -} diff --git a/src/hooks/usePrefix.ts b/src/hooks/usePrefix.ts deleted file mode 100644 index 366939e5..00000000 --- a/src/hooks/usePrefix.ts +++ /dev/null @@ -1,39 +0,0 @@ -import Path from "path" - -const prefix = (() => { - //NOTE doesn't work for scripts as Deno.run doesn't push through most env :/ - const env = Deno.env.get("TEA_PREFIX") - if (env) { - return new Path(env) - } else { - // we’re either deno.land/vx/bin/deno, tea.xyz/vx/bin/tea or some symlink to the latter - const shelf = new Path(Deno.execPath()) - .readlink() // resolves the leaf symlink (if any) - .parent() - .parent() - .parent() - - switch (shelf.basename()) { - case 'tea.xyz': - case 'deno.land': - return shelf.parent() - default: - // 
we’re being generous for users who just download `tea` by itself - // and execute it without installing it in a sanctioned structure - return Path.home().join(".tea") - } - } -})() - -class Prefix extends Path { - www: Path - - constructor(prefix: Path) { - super(prefix) - this.www = prefix.join("tea.xyz/var/www") - } -} - -export default function usePrefix() { - return new Prefix(prefix) -} diff --git a/src/hooks/useRequirementsFile.ts b/src/hooks/useRequirementsFile.ts deleted file mode 100644 index 24ac8d1a..00000000 --- a/src/hooks/useRequirementsFile.ts +++ /dev/null @@ -1,111 +0,0 @@ -import { isPlainObject, PlainObject } from "is_what" -import { PackageRequirement } from "../types.ts" -import { flatmap } from "utils" -import Path from "../vendor/Path.ts" -import SemVer, * as semver from "semver" - - -export interface RequirementsFile { - file: Path - pkgs: PackageRequirement[] - version?: SemVer -} - -/// undefined if this file contains nothing we use to consider it a “requirements file” -//FIXME well, not doing this for markdown yet as would complicate code a lot -export default function useRequirementsFile(file: Path): Promise { - if (file.basename() == "package.json") { - return package_json(file) - } else { - return markdown(file) - } -} - -async function package_json(path: Path): Promise { - const json = await path.readJSON() - if (!isPlainObject(json)) throw new Error("bad-json") - if (!json.tea) return - const requirements = (() => { - if (!json.tea.dependencies) return - if (!isPlainObject(json.tea?.dependencies)) throw new Error("bad-json") - return parsePackageRequirements(json.tea.dependencies) - })() - const version = flatmap(json.version, x => new SemVer(x)) - return { file: path, pkgs: requirements ?? 
[], version } -} - -function parsePackageRequirements(input: PlainObject): PackageRequirement[] { - const included = new Set() - const rv: PackageRequirement[] = [] - for (const [project, v] of Object.entries(input)) { - if (included.has(project)) throw new Error(`duplicate-constraint:${project}`) - const rq: PackageRequirement = { project, constraint: new semver.Range(`=${v}`) } - rv.push(rq) - console.verbose({ found: rq }) - } - return rv -} - -//TODO support windows newlines -//TODO use a markdown parser lol -async function markdown(path: Path): Promise { - const text = await path.read() - const lines = text.split("\n") - - const findTable = (header: string) => { - let rows: [string, string][] | undefined = undefined - let found: 'nope' | 'header' | 'table' = 'nope' - done: for (const line of lines) { - switch (found) { - case 'header': { - if (!line.trim()) continue - if (line.match(/^\|\s*-+\s*\|\s*-+\s*\|(\s*-+\s*\|)?\s*$/)) found = 'table' - } break - case 'table': { - const match = line.match(/^\|([^|]+)\|([^|]+)\|/) - if (!match) break done - if (!rows) rows = [] - rows.push([match[1].trim(), match[2].trim()]) - } break - case 'nope': - if (line.match(new RegExp(`^#+\\s*${header}\\s*$`))) { - found = 'header' - } - } - } - return rows - } - - const requirements = (() => { - return findTable("Dependencies")?.compact(([project, constraint]) => { - if (project.startsWith("tea.xyz")) return //FIXME - return { - project, - constraint: new semver.Range(constraint) - } - }) - })() - - const fromMetadataTable = () => flatmap( - findTable("Metadata")?.find(([key, value]) => key.toLowerCase() == "version" && value), - ([,x]) => new SemVer(x) - ) - - const fromFirstHeader = () => { - for (let line of lines) { - line = line.trim() - if (/^#+/.test(line)) { - const match = line.match(new RegExp(`v?(${semver.regex.source})$`)) - if (match) { - return new SemVer(match[1]) - } else { - return // we only check the first header - } - } - } - } - - const version = 
fromMetadataTable() ?? fromFirstHeader() - - return { file: path, pkgs: requirements ?? [], version } -} \ No newline at end of file diff --git a/src/hooks/useShellEnv.ts b/src/hooks/useShellEnv.ts deleted file mode 100644 index edbf8b31..00000000 --- a/src/hooks/useShellEnv.ts +++ /dev/null @@ -1,217 +0,0 @@ -import { Installation, PackageSpecification } from "types" -import { OrderedHashSet } from "rimbu/ordered/set/index.ts" -import { host } from "utils" -import { usePrefix, usePantry } from "hooks" -import Path from "path" - -// returns an environment that supports the provided packages -//TODO possibly should add the env for pending and not delegate via tea -//TODO like ideally we would provide shims for POSIX and not include the system PATHs at all - -export const EnvKeys = [ - 'PATH', - 'MANPATH', - 'PKG_CONFIG_PATH', - 'LIBRARY_PATH', - 'LD_LIBRARY_PATH', - 'CPATH', - 'XDG_DATA_DIRS', - 'CMAKE_PREFIX_PATH', - 'DYLD_FALLBACK_LIBRARY_PATH', - 'SSL_CERT_FILE', - 'LDFLAGS', - 'TEA_PREFIX', - 'PYTHONPATH', - 'npm_config_prefix', - 'ACLOCAL_PATH' -] as const -export type EnvKey = typeof EnvKeys[number] - -interface Options { - installations: Installation[] - pending?: PackageSpecification[], - pristine?: boolean -} - -export default async function useShellEnv({installations, pending, pristine}: Options): Promise> { - const {getRuntimeEnvironment} = usePantry() - - const vars: Partial>> = {} - const isMac = host().platform == 'darwin' - pending ??= [] - - const projects = new Set([...installations.map(x => x.pkg.project), ...pending.map(x=>x.project)]) - const has_cmake = projects.has('cmake.org') - const archaic = true - - const rv: Record = {} - const seen = new Set() - - for (const installation of installations) { - - if (!seen.insert(installation.pkg.project).inserted) { - console.warn("warning: env is being duped:", installation.pkg.project) - } - - for (const key of EnvKeys) { - for (const suffix of suffixes(key)!) 
{ - vars[key] = compact_add(vars[key], installation.path.join(suffix).compact()?.string) - } - } - - if (archaic) { - vars.LIBRARY_PATH = compact_add(vars.LIBRARY_PATH, installation.path.join("lib").compact()?.string) - vars.CPATH = compact_add(vars.CPATH, installation.path.join("include").compact()?.string) - } - - if (has_cmake) { - vars.CMAKE_PREFIX_PATH = compact_add(vars.CMAKE_PREFIX_PATH, installation.path.string) - } - - if (projects.has('gnu.org/autoconf')) { - vars.ACLOCAL_PATH = compact_add(vars.ACLOCAL_PATH, installation.path.join("share/aclocal").compact()?.string) - } - - if (installation.pkg.project === 'openssl.org') { - const certPath = installation.path.join("ssl/cert.pem").compact()?.string - // this is a single file, so we assume a - // valid entry is correct - if (certPath) vars.SSL_CERT_FILE = OrderedHashSet.of(certPath) - } - - // pip requires knowing where its root is - // otherwise it bases it off the location - // of python, which won't work for us - if (installation.pkg.project === 'pip.pypa.io') { - vars.PYTHONPATH = compact_add(vars.PYTHONPATH, installation.path.string) - } - - // npm requires knowing where its root is - // otherwise it bases it off the location - // of node, which won't work for us - if (installation.pkg.project === 'npmjs.com') { - vars.npm_config_prefix = OrderedHashSet.of(installation.path.string) - } - - // pantry configured runtime environment - const runtime = await getRuntimeEnvironment(installation.pkg) - for (const key in runtime) { - rv[key] ??= [] - rv[key].push(runtime[key]) - } - } - - // this is how we use precise versions of libraries - // for your virtual environment - //FIXME SIP on macOS prevents DYLD_FALLBACK_LIBRARY_PATH from propagating to grandchild processes - if (vars.LIBRARY_PATH) { - vars.LD_LIBRARY_PATH = vars.LIBRARY_PATH - if (isMac) { - // non FALLBACK variety causes strange issues in edge cases - // where our symbols somehow override symbols from the macOS system - 
vars.DYLD_FALLBACK_LIBRARY_PATH = vars.LIBRARY_PATH - } - } - - //FIXME refactor lol - for (const key of EnvKeys) { - //FIXME where is this `undefined` __happening__? - if (vars[key] === undefined || vars[key]!.isEmpty) continue - rv[key] = vars[key]!.toArray() - - if (!pristine && key == 'PATH') { - rv[key] ??= [] - - if (!projects.has('tea.xyz')) { - const tea = find_tea() - if (tea) { - const tea = find_tea()?.parent().string - if (tea && !rv["PATH"].includes(tea)) { - rv["PATH"].push(tea) - } - } - } - - // for std POSIX tools FIXME: we provide shims or pkgs for (at least some of) these - //NOTE we deliberately do not include /usr/local/bin - //NOTE though we add that back for `tea --dump` since users will want their tools ofc - for (const path of ["/usr/bin", "/bin", "/usr/sbin", "/sbin"]) { - if (!rv["PATH"].includes(path)) { - rv["PATH"].push(path) - } - } - } - } - - if (isMac) { - // required to link to our libs - // tea.xyz/gx/cc automatically adds this, but use of any other compilers will not - rv["LDFLAGS"] = [`-Wl,-rpath,${usePrefix()}`] - } - - rv["TEA_PREFIX"] = [usePrefix().string] - - // don’t break `man` lol - rv["MANPATH"]?.push("/usr/share/man") - - return rv -} - -function suffixes(key: EnvKey) { - switch (key) { - case 'PATH': - return ["bin", "sbin"] - case 'MANPATH': - return ["share/man"] - case 'PKG_CONFIG_PATH': - return ['share/pkgconfig', 'lib/pkgconfig'] - case 'XDG_DATA_DIRS': - return ['share'] - case 'LIBRARY_PATH': - case 'LD_LIBRARY_PATH': - case 'DYLD_FALLBACK_LIBRARY_PATH': - case 'CPATH': - case 'CMAKE_PREFIX_PATH': - case 'SSL_CERT_FILE': - case 'LDFLAGS': - case 'TEA_PREFIX': - case 'PYTHONPATH': - case 'npm_config_prefix': - case 'ACLOCAL_PATH': - return [] // we handle these specially - default: { - const exhaustiveness_check: never = key - throw new Error(`unhandled id: ${exhaustiveness_check}`) - }} -} - -export function expand(env: Record) { - let rv = '' - for (const [key, value] of Object.entries(env)) { - if 
(value.length == 0) continue - rv += `export ${key}='${value.join(":")}'\n` - } - return rv -} - -export function flatten(env: Record) { - const rv: Record = {} - for (const [key, value] of Object.entries(env)) { - rv[key] = value.join(":") - } - return rv -} - -function find_tea() { - for (const bindir of Deno.env.get("PATH")?.split(":") ?? []) { - const file = Path.abs(bindir)?.join("tea").isExecutableFile() - if (file) return file - } -} - -function compact_add(set: OrderedHashSet | undefined, item: T | null | undefined): OrderedHashSet { - if (!set) set = OrderedHashSet.empty() - if (item) set = set.add(item) - - return set -} \ No newline at end of file diff --git a/src/hooks/useSourceUnarchiver.ts b/src/hooks/useSourceUnarchiver.ts deleted file mode 100644 index 8fe47904..00000000 --- a/src/hooks/useSourceUnarchiver.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { run, Unarchiver, TarballUnarchiver, ZipUnarchiver } from "utils" -import { useFlags } from "hooks" -import { Verbosity } from "types" -import Path from "path" - -//FIXME assuming strip 1 on components is going to trip people up - -interface Options { - dstdir: Path /// must be empty - zipfile: Path - stripComponents?: number -} - -interface Response { - unarchive(opts: Options): Promise -} - -export default function useSourceUnarchiver(): Response { - const unarchive = async (opts: Options) => { - const { verbosity } = useFlags() - - let unarchiver: Unarchiver - if (ZipUnarchiver.supports(opts.zipfile)) { - const stripComponents = opts.stripComponents ?? 0 - const needsTmpdir = stripComponents > 0 - const dstdir = needsTmpdir ? 
Path.mktmp() : opts.dstdir - try { - unarchiver = new ZipUnarchiver({ verbosity, ...opts, dstdir }) - if (needsTmpdir) { - throw new Error("unimpl") - } - } finally { - if (needsTmpdir) { - if (verbosity >= Verbosity.debug) { - dstdir.rm() - } else { - console.debug({ leaving: dstdir }) - } - } - } - } else if (TarballUnarchiver.supports(opts.zipfile) || opts.stripComponents !== undefined) { - //FIXME we need to determine file type from the magic bytes - // rather than assume tarball if not zip - opts.dstdir.mkpath() - unarchiver = new TarballUnarchiver({ verbosity, ...opts }) - } else { - // the “tarball” is actually just a single file like beyondgrep.com - return opts.zipfile.cp({ into: opts.dstdir.mkpath() }) - } - - const cmd = unarchiver.args() - - await run({ cmd }) - - return opts.dstdir - } - return { unarchive } -} diff --git a/src/hooks/useSync.ts b/src/hooks/useSync.ts deleted file mode 100644 index 9b118c67..00000000 --- a/src/hooks/useSync.ts +++ /dev/null @@ -1,183 +0,0 @@ -import { useDownload, useCellar, usePantry, useFlags } from "hooks" -import { host, run as base_run, RunOptions } from "utils" -import useLogger, { Logger } from "./useLogger.ts" -import * as semver from "semver" -import Path from "path" - -async function find_git(): Promise { - for (const path_ of Deno.env.get('PATH')?.split(':') ?? []) { - const path = Path.root.join(path_, 'git') - if (path.string == '/usr/bin/git' && host().platform == 'darwin' && !await clt_installed()) { - // if the CLT or Xcode is installed then we can use the system git - // if neither is installed then git will actually immediately exit with an error - continue - } - if (path.isExecutableFile()) { - return Promise.resolve(path) - } - } - - const pkg = {project: 'git-scm.org', constraint: new semver.Range('*')} - const git = await useCellar().has(pkg) - return git?.path.join('bin/git') - - //ALERT! 
don’t install git with tea - // there's no pantry yet, so attempting to do so will infinitely recurse -} - -async function clt_installed() { - // returns either the CLT path or the Xcode path - const proc = Deno.run({ cmd: ["xcode-select", "--print-path"], stdout: "null", stderr: 'null' }) - const exit = await proc.status() - return exit.success -} - -const pantry_dir = usePantry().prefix.parent() -const pantries_dir = pantry_dir.parent().join("pantries") - -let avoid_softlock = false - -async function lock(body: () => Promise) { - if (avoid_softlock) throw new Error("aborting to prevent softlock") - avoid_softlock = true - - const { rid } = await Deno.open(pantry_dir.mkpath().string) - await Deno.flock(rid, true) - - try { - return await body() - } finally { - //TODO if this gets stuck then nothing will work so need a handler for that - await Deno.funlock(rid) - Deno.close(rid) // docs aren't clear if we need to do this or not - avoid_softlock = false - } -} - -//TODO we have a better system in mind than git -export async function install(logger: Logger): Promise { - if (usePantry().prefix.exists()) { - if (pantries_dir.exists()) return 'noop' - if (pantry_dir.join('.git').exists()) return 'deprecated' - - // FIXME in this case we have a downloaded pantry so we should install git - return 'not-git' - } - - logger.replace("fetching pantries…") - - try { - const git = await find_git() - const rv = await lock(async () => { - if (usePantry().prefix.exists()) return 'noop' - // ^^ another instance of tea did the install while we waited - - if (git) { - await clone(git) - return true - } else { - await unzip() - return 'not-git' - } - }) - logger.replace("pantries init’d ⎷") - return rv - } catch (e) { - pantries_dir.rm({ recursive: true }) // leave us in a blank state - pantry_dir.rm({ recursive: true }) // ^^ - throw e - } - - async function clone(git: Path) { - const pp: Promise[] = [] - for (const name of ["pantry.core", "pantry.extra"]) { - const p = run({ - cmd: [ 
- git, "clone", - "--bare", "--depth=1", - `https://github.com/teaxyz/${name}`, - pantries_dir.join("teaxyz").mkpath().join(name) - ] - }) - pp.push(p) - } - - await Promise.all(pp) - await co(git) - } - - async function unzip() { - //FIXME if we do this, we need to be able to convert it to a git installation later - //TODO use our tar if necessary - //TODO if we keep this then don’t store the files, just pipe to tar - for (const name of ["pantry.core", "pantry.extra"]) { - const src = new URL(`https://github.com/teaxyz/${name}/archive/refs/heads/main.tar.gz`) - const tgz = await useDownload().download({ src }) - const cwd = pantry_dir.mkpath() - await run({cmd: ["tar", "xzf", tgz, "--strip-components=1"], cwd }) - } - } -} - -async function *ls() { - for await (const [user] of pantries_dir.ls()) { - for await (const [repo] of user.ls()) { - yield repo - } - } -} - -export const update = async () => { - const logger = useLogger() - - logger.replace("inspecting pantries…") - - switch (await install(logger)) { - case 'deprecated': - console.warn("pantry is a clone, this is deprecated, please clean-install tea") - break - case 'not-git': - console.warn("pantry is not a git repository, cannot update") - break - case 'noop': { - logger.replace("syncing pantries…") - - const git = await find_git() - if (!git) return console.warn("cannot update pantry without git") - const pp: Promise[] = [] - for await (const cwd of ls()) { - const p = run({cmd: [git, "fetch", "origin", "main:main"], cwd }) - pp.push(p) - } - await Promise.all(pp) - - logger.replace("overlaying pantries…") - await co(git) - - logger.replace("pantries sync’d ⎷") - } break - default: - break // we don’t update if we only just cloned it - } -} - -//FIXME order matters -//NOTE well this overlay method is not permanent for sure -async function co(git: string | Path) { - for await (const git_dir of ls()) { - const cmd = [git, - "--git-dir", git_dir, - "--work-tree", pantry_dir, - "checkout", - "--force" - ] - 
await run({ cmd }) - } -} - -export default update - -function run(opts: RunOptions) { - const spin = useFlags().verbosity < 1 - return base_run({ ...opts, spin }) -} diff --git a/src/hooks/useVirtualEnv.ts b/src/hooks/useVirtualEnv.ts deleted file mode 100644 index e4285234..00000000 --- a/src/hooks/useVirtualEnv.ts +++ /dev/null @@ -1,130 +0,0 @@ -// deno-lint-ignore-file no-cond-assign -import useRequirementsFile, { RequirementsFile } from "./useRequirementsFile.ts" -import { PackageRequirement } from "types" -import SemVer, * as semver from "semver" -import { flatmap, TeaError } from "utils" -import { useFlags } from "hooks" -import Path from "path" - -//CONSIDER -// add requirementsFiles from CWD down to srcroot - -export interface VirtualEnv { - pkgs: PackageRequirement[] - file: Path - srcroot: Path - version?: SemVer -} - -const markdown_extensions = [ - "md", - 'mkd', - 'mdwn', - 'mdown', - 'mdtxt', - 'mdtext', - 'markdown', - 'text', - 'md.txt' -] - -function find({cwd}: {cwd?: Path} = {cwd: undefined}) { - const TEA_DIR = Deno.env.get("TEA_DIR") - if (TEA_DIR) return Path.cwd().join(TEA_DIR) - - let dir = cwd ?? Path.cwd() - const home = Path.home() - while (dir.neq(Path.root) && dir.neq(home)) { - for (const vcs of [".git", ".svn", ".hg"]) { - if (dir.join(vcs).isDirectory()) return dir - } - dir = dir.parent() - } -} - -export default async function useVirtualEnv(opts?: { cwd: Path }): Promise { - const ctx = { - cwd: opts?.cwd ?? 
Path.cwd(), - TEA_DIR: Deno.env.get("TEA_DIR") - } - - const srcroot = find(opts) - if (!srcroot) throw new TeaError("not-found: srcroot", ctx) - - const files: RequirementsFile[] = await (async () => { - const rv: RequirementsFile[] = [] - const basenames = ["package.json", ...markdown_extensions.map(x => `README.${x}`)] - for (const basename of basenames) { - const path = srcroot.join(basename).isFile() - if (!path) continue - const rf = await useRequirementsFile(path) - if (rf) rv.push(rf) - } - return rv - })() - - if (files.length < 1) throw new TeaError("not-found: virtual-env", ctx) - - const { file, version: version_README } = files.find(x => x.file.basename() == "README.md") ?? files[0] - - const version = flatmap(srcroot.join("VERSION").isFile(), x => semver.parse(x.string)) ?? version_README - - const pkgs = files.flatMap(x => x.pkgs) - - //TODO magic deps should not conflict with requirements files deps - if (useFlags().magic) { - pkgs.push(...await domagic(srcroot)) - } - - return { - pkgs, - file, - srcroot, - version - } -} - -//TODO get version too -async function domagic(srcroot: Path): Promise { - let path: Path | undefined - - //TODO don’t stop if we find something, keep adding all deps - - const requirements = await (async () => { - if (path = srcroot.join("action.yml").isReadableFile()) { - // deno-lint-ignore no-explicit-any - const yaml = await path.readYAML() as any - const using = yaml?.runs?.using - switch (using) { - case "node16": return [{ - project: "nodejs.org", - constraint: new semver.Range("^16") - }] - case "node12": return [{ - project: "nodejs.org", - constraint: new semver.Range("^12") - }] - } - } - if (path = srcroot.join(".node-version").isReadableFile()) { - const constraint = parse(await path.read()) - return [{ project: "nodejs.org", constraint }] - } - if (path = srcroot.join("package.json").isReadableFile()) { - return [{ - project: "nodejs.org", - constraint: new semver.Range("*") - }] - } - return [] - })() - - return 
requirements -} - -function parse(input: string): semver.Range { - if (/^\d/.test(input)) { - input = `^${input}` - } - return new semver.Range(input) -} diff --git a/src/prefab/README.md b/src/prefab/README.md deleted file mode 100644 index 48703d16..00000000 --- a/src/prefab/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Prefabs - -* **Don’t** offer informative messages - * Thus they should be units that represent something you would inform the - user about *before* you call them -* **Do** copious verbose messages -* **Don’t** do stdout, they return data you *may* want to stdout diff --git a/src/prefab/hydrate.ts b/src/prefab/hydrate.ts deleted file mode 100644 index 53f420d8..00000000 --- a/src/prefab/hydrate.ts +++ /dev/null @@ -1,137 +0,0 @@ -import { PackageRequirement, Package } from "types" -import { isArray } from "is_what" -import * as semver from "semver" -import { usePantry } from "hooks" -import "utils" - - -//TODO linktime cyclic dependencies cannot be allowed -//NOTE however if they aren’t link time it's presumably ok in some scenarios -// eg a tool that lists a directory may depend on a tool that identifies the -// mime types of files which could depend on the listing tool -//FIXME actually we are not refining the constraints currently -//TODO we are not actually restricting subsequent asks, eg. deno^1 but then deno^1.2 - - -interface ReturnValue { - /// full list topologically sorted (ie dry + wet) - pkgs: PackageRequirement[] - - /// your input, but version constraints refined based on the whole graph - /// eg. you hydrate the graph for a and b, but b depends on a tighter range of a than you input - dry: PackageRequirement[] - - /// packages that were not supplied to input or that require bootstrap - wet: PackageRequirement[] - - /// the graph cycles at these packages - /// this is only a problem if you need to build one of these, - // in which case TADA! here's the list! 
- bootstrap_required: Set -} - -const get = (x: PackageRequirement) => usePantry().getDeps(x).then(x => x.runtime) - -/// sorts a list of packages topologically based on their -/// dependencies. Throws if there is a cycle in the input. -/// ignores changes in dependencies based on versions -export default async function hydrate( - input: (PackageRequirement | Package)[] | (PackageRequirement | Package), - get_deps: (pkg: PackageRequirement, dry: boolean) => Promise = get, -): Promise -{ - if (!isArray(input)) input = [input] - - const dry = input.map(spec => { - if ("version" in spec) { - return {project: spec.project, constraint: new semver.Range(`=${spec.version}`)} - } else { - return spec - } - }) - - const graph: Record = {} - const bootstrap = new Set() - const initial_set = new Set(dry.map(x => x.project)) - - const go = async (target: Node) => { - /// we trace up a target pkg’s dependency graph - /// the target pkg is thus the youngest child and we are ascending up its parents - const ascend = async (node: Node, children: Set) => { - - for (const dep of await get_deps(node.pkg, initial_set.has(node.project))) { - if (children.has(dep.project)) { - if (!bootstrap.has(dep.project)) { - console.warn(`${dep.project} must be bootstrapped to build ${node.project}`) - - //TODO the bootstrap should keep the version constraint since it may be different - bootstrap.add(dep.project) - } - } else { - const found = graph[dep.project] - if (found) { - /// we already traced this graph - - if (found.count() < node.count()) { - found.parent = node - } - - //FIXME strictly we only have to constrain graphs that contain linkage - // ie. you cannot have a binary that links two separate versions of eg. openssl - // or (maybe) services, eg. 
you might suffer if there are two versions of postgres running (though tea mitigates this) - found.pkg.constraint = semver.intersect(found.pkg.constraint, dep.constraint) - - } else { - const new_node = new Node(dep, node) - graph[dep.project] = new_node - await ascend(new_node, new Set([...children, dep.project])) - } - } - } - } - await ascend(target, new Set([target.project])) - } - - for (const pkg of dry) { - if (pkg.project in graph) continue - const new_node = new Node(pkg) - graph[pkg.project] = new_node - await go(new_node) - } - - const pkgs = Object.values(graph) - .sort((a, b) => b.count() - a.count()) - .map(({pkg}) => pkg) - - //TODO strictly we need to record precisely the bootstrap version constraint - const bootstrap_required = new Set(pkgs.compact(({project}) => bootstrap.has(project) && project)) - - return { - pkgs, - dry: pkgs.filter(({project}) => initial_set.has(project)), - wet: pkgs.filter(({project}) => !initial_set.has(project) || bootstrap_required.has(project)), - bootstrap_required - } -} - - -/////////////////////////////////////////////////////////////////////////// lib -class Node { - parent: Node | undefined - readonly pkg: PackageRequirement - readonly project: string - - constructor(pkg: PackageRequirement, parent?: Node) { - this.parent = parent - this.pkg = pkg - this.project = pkg.project - } - - count(): number { - let n = 0 - let node = this as Node | undefined - // deno-lint-ignore no-cond-assign - while (node = node?.parent) n++ - return n - } -} diff --git a/src/prefab/index.ts b/src/prefab/index.ts deleted file mode 100644 index f6adb339..00000000 --- a/src/prefab/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import hydrate from "./hydrate.ts" -import install from "./install.ts" -import link from "./link.ts" -import resolve from "./resolve.ts" - -export { - hydrate, - install, - link, - resolve -} diff --git a/src/prefab/install.ts b/src/prefab/install.ts deleted file mode 100644 index b1c8333f..00000000 --- 
a/src/prefab/install.ts +++ /dev/null @@ -1,109 +0,0 @@ -import { usePrefix, useCache, useCellar, useFlags, useDownload, useOffLicense } from "hooks" -import { run, TarballUnarchiver, host, pkg as pkgutils } from "utils" -import { Installation, StowageNativeBottle } from "types" -import { Logger, red, teal, gray } from "hooks/useLogger.ts" -import { Package } from "types" - -// # NOTE -// *only installs binaries* -// > to install from source you must use `$SRCROOT../pantry/scripts/build.ts` - -// # contract -// - if already installed, will extract over the top -// - files not in the newer archive will not be deleted - -export default async function install(pkg: Package, logger?: Logger): Promise { - const { project, version } = pkg - logger ??= new Logger(pkgutils.str(pkg)) - - const { download_with_sha: download } = useDownload() - const cellar = useCellar() - const { verbosity } = useFlags() - const dstdir = usePrefix() - const compression = get_compression() - const stowage = StowageNativeBottle({ pkg: { project, version }, compression }) - const url = useOffLicense('s3').url(stowage) - const dst = useCache().path(stowage) - - logger.replace(teal("locking")) - - const { rid } = await Deno.open(dstdir.string) - await Deno.flock(rid, true) - - try { - await (async () => { - const installation = await cellar.has(pkg) - if (installation) { - logger.replace(teal("installed")) - return installation - } - })() - - logger.replace(teal("querying")) - - //FIXME if we already have the gz or xz versions don’t download the other version! - const { path: tarball, sha } = await download({ src: url, dst, logger }) - - try { - const url = useOffLicense('s3').url({pkg, compression, type: 'bottle'}) - logger.replace(teal("verifying")) - await sumcheck(sha, new URL(`${url}.sha256sum`)) - } catch (err) { - logger.replace(red('error')) - tarball.rm() - console.error("we deleted the invalid tarball. 
try again?") - throw err - } - - const cmd = new TarballUnarchiver({ - zipfile: tarball, dstdir, verbosity - }).args() - - logger.replace(teal('extracting')) - - await run({ cmd, clearEnv: true }) - - const install = await cellar.resolve(pkg) - - const str = [ - gray(usePrefix().prettyString()), - install.pkg.project, - `${gray('v')}${install.pkg.version}` - ].join(gray('/')) - logger.replace(`installed: ${str}`, { prefix: false }) - - return install - - } finally { - await Deno.funlock(rid) - Deno.close(rid) // docs aren't clear if we need to do this or not - } -} - -//TODO strictly the checksum file needs to be rewritten -//TODO so instead download to default cache path and write a checksum file for all bottles/srcs -//FIXME there is a potential attack here since download streams to a file -// and AFTER we read back out of the file, a malicious actor could rewrite the file -// in that gap. Also it’s less efficient. - -async function sumcheck(local_SHA: string, url: URL) { - const remote_SHA = await (async () => { - const rsp = await fetch(url) - if (!rsp.ok) throw rsp - const txt = await rsp.text() - return txt.split(' ')[0] - })() - - console.verbose({ remote_SHA, local_SHA }) - - if (remote_SHA != local_SHA) { - throw {expected: remote_SHA, got: local_SHA} - } -} - -function get_compression() { - if (Deno.env.get("CI")) return 'gz' // in CI CPU is more constrained than bandwidth - if (host().platform == 'darwin') return 'xz' // most users are richer in CPU than bandwidth - // TODO determine if `tar` can handle xz - return 'gz' -} diff --git a/src/prefab/link.ts b/src/prefab/link.ts deleted file mode 100644 index ff813287..00000000 --- a/src/prefab/link.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { Package, Installation } from "types" -import { useCellar } from "hooks" -import Path from "path" -import SemVer, * as semver from "semver" -import { panic } from "../utils/safe-utils.ts" - -export default async function link(pkg: Package | Installation) { - const 
installation = await useCellar().resolve(pkg) - pkg = installation.pkg - - const versions = (await useCellar() - .ls(installation.pkg.project)) - .map(({pkg: {version}, path}) => [version, path] as [SemVer, Path]) - .sort(([a],[b]) => a.compare(b)) - - if (versions.length <= 0) { - console.error(pkg, installation) - throw new Error(`no versions`) - } - - const shelf = installation.path.parent() - const newest = versions.slice(-1)[0] - const vMm = `${pkg.version.major}.${pkg.version.minor}` - const minorRange = new semver.Range(`^${vMm}`) - const mostMinor = versions.filter(v => minorRange.satisfies(v[0])).at(-1) ?? panic() - - if (mostMinor[0].neq(pkg.version)) return - // ^^ if we’re not the most minor we definitely not the most major - - await makeSymlink(`v${vMm}`) - - const majorRange = new semver.Range(`^${pkg.version.major.toString()}`) - const mostMajor = versions.filter(v => majorRange.satisfies(v[0])).at(-1) ?? panic() - - if (mostMajor[0].neq(pkg.version)) return - // ^^ if we’re not the most major we definitely aren’t the newest - - await makeSymlink(`v${pkg.version.major}`) - - if (pkg.version.eq(newest[0])) { - await makeSymlink('v*') - } - - async function makeSymlink(symname: string) { - const to = shelf.join(symname) - console.verbose({ "symlinking:": to }) - await shelf.symlink({ from: (await installation).path, to, force: true }) - } -} diff --git a/src/prefab/resolve.ts b/src/prefab/resolve.ts deleted file mode 100644 index dee5a19b..00000000 --- a/src/prefab/resolve.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { Package, PackageRequirement, Installation } from "types" -import { useCellar, useInventory } from "hooks" - -/// NOTE resolves to bottles -/// NOTE contract there are no duplicate projects in input - -interface RT { - /// fully resolved list (includes both installed and pending) - pkgs: Package[] - - /// already installed packages - installed: Installation[] - - /// these are the pkgs that aren’t yet installed - pending: Package[] -} - -/// 
resolves a list of package specifications based on what is available in -/// bottle storage if `update` is false we will return already installed pkgs -/// that resolve so if we are resolving `node>=12`, node 13 is installed, but -/// node 19 is the latest we return node 13. if `update` is true we return node -/// 19 and *you will need to install it*. -export default async function resolve(reqs: (Package | PackageRequirement)[], {update}: {update: boolean} = {update: false}): Promise { - const inventory = useInventory() - const cellar = useCellar() - const rv: RT = { pkgs: [], installed: [], pending: [] } - let installation: Installation | undefined - for (const req of reqs) { - if (!update && (installation = await cellar.has(req))) { - // if something is already installed that satisfies the constraint then use it - rv.installed.push(installation) - rv.pkgs.push(installation.pkg) - } else { - const version = await inventory.select(req) - if (!version) { - console.error({ ...req, version }) - throw new Error("no bottle available") - } - const pkg = { version, project: req.project } - rv.pkgs.push(pkg) - - if ((installation = await cellar.has(pkg))) { - rv.installed.push(installation) - } else { - rv.pending.push(pkg) - } - } - } - return rv -} diff --git a/src/types.ts b/src/types.ts deleted file mode 100644 index 4252a2be..00000000 --- a/src/types.ts +++ /dev/null @@ -1,55 +0,0 @@ -import SemVer, { Range as SemVerRange } from "semver" -import Path from "path" -import { host } from "./utils/index.ts" - -export interface Package { - project: string - version: SemVer -} - -export interface PackageRequirement { - project: string - constraint: SemVerRange -} - -export type PackageSpecification = Package | PackageRequirement - -export interface Installation { - path: Path - pkg: Package -} - -export enum Verbosity { - quiet = -1, - normal = 0, - loud = 1, - debug = 2, - trace = 3 -} - -// when we support more variants of these that require specification -// we will tuple 
a version in with each eg. 'darwin' | ['windows', 10 | 11 | '*'] -export const SupportedPlatforms = ["darwin", "linux", "windows"] as const -export type SupportedPlatform = typeof SupportedPlatforms[number] - -export const SupportedArchitectures = ["x86-64", "aarch64"] as const -export type SupportedArchitecture = typeof SupportedArchitectures[number] - -/// remotely available package content (bottles or source tarball) -export type Stowage = { - type: 'src' - pkg: Package - extname: string -} | { - type: 'bottle' - pkg: Package - compression: 'xz' | 'gz' - host?: { platform: SupportedPlatform, arch: SupportedArchitecture } -} - -/// once downloaded, `Stowage` becomes `Stowed` -export type Stowed = Stowage & { path: Path } - -export function StowageNativeBottle(opts: { pkg: Package, compression: 'xz' | 'gz' }): Stowage { - return { ...opts, host: host(), type: 'bottle' } -} diff --git a/src/utils/Unarchiver.ts b/src/utils/Unarchiver.ts deleted file mode 100644 index f4ad6b3c..00000000 --- a/src/utils/Unarchiver.ts +++ /dev/null @@ -1,118 +0,0 @@ -import { PackageRequirement, Verbosity } from "types" -import Path from "path" -import * as semver from "utils/semver.ts" - -interface Options { - dstdir: Path - zipfile: Path - verbosity: Verbosity -} - -export class Unarchiver { - protected opts: Options - - constructor(opts: Options) { - this.opts = opts - } - - args(): (string | Path)[] { - return [] - } - - supports(_filename: Path): boolean { - return false - } - - dependencies(): PackageRequirement[] { - return [] - } -} - -const constraint = new semver.Range("*") - -export class TarballUnarchiver extends Unarchiver { - private stripComponents?: number - - constructor(opts: Options & { stripComponents?: number }) { - super(opts) - this.stripComponents = opts.stripComponents - } - - args(): (string | Path)[] { - const args = [ - "tar", "xf", this.opts.zipfile, - "-C", this.opts.dstdir - ] - if (this.opts.verbosity > Verbosity.normal) args.push("--verbose") - if 
(this.stripComponents) args.push(`--strip-components=${this.stripComponents}`) - return args - } - - static supports(filename: Path): boolean { - switch (filename.extname()) { - case ".tar.gz": - case ".tar.bz2": - case ".tar.xz": - case ".tgz": - return true - default: - return false - } - } - - dependencies() { - const rv = [{ - project: "gnu.org/tar", - constraint - }] - switch (this.opts.zipfile.extname()) { - case ".tbz": - case ".tar.bz2": - rv.push({ - project: "sourceware.org/bzip2", - constraint - }) - break - case ".txz": - case ".tar.xz": - rv.push({ - project: "tukaani.org/xz", - constraint - }) - break - } - return rv - } -} - -export class ZipUnarchiver extends Unarchiver { - force: boolean - - constructor(opts: Options & { force?: boolean }) { - super(opts) - this.force = opts.force ?? false - } - - args(): (string | Path)[] { - const args = ["unzip"] - // if (this.opts.verbose) args.push("-v") seems to break it - if (this.force) args.push("-of") - - return [ - ...args, - this.opts.zipfile, - "-d", this.opts.dstdir - ] - } - - static supports(filename: Path): boolean { - return filename.extname() == ".zip" - } - - dependencies() { - return [{ - project: "info-zip.org/unzip", - constraint - }] - } -} diff --git a/src/utils/error.ts b/src/utils/error.ts deleted file mode 100644 index 4f249140..00000000 --- a/src/utils/error.ts +++ /dev/null @@ -1,158 +0,0 @@ -import { PlainObject } from "is_what" -import { undent } from "utils" - -type ID = - 'not-found: tea -X: arg0' | - 'not-found: exe/md: default target' | - 'not-found: exe/md: region' | - 'http' | - 'not-found: pantry' | - 'not-found: pantry: package.yml' | - 'parser: pantry: package.yml' | - 'not-found: virtual-env' | - 'not-found: srcroot' | - 'not-found: arg' | - '#helpwanted' - -export default class TeaError extends Error { - id: ID - ctx: PlainObject - - code() { - switch (this.id) { - case 'not-found: tea -X: arg0': return 'spilt-tea-001' - case 'not-found: exe/md: default target': return 
'spilt-tea-002' - case 'not-found: exe/md: region': return 'spilt-tea-003' - case 'not-found: srcroot': return 'spilt-tea-004' - case 'not-found: pantry: package.yml': return 'spilt-tea-005' - case 'not-found: virtual-env': return 'spilt-tea-006' - case 'not-found: pantry': return 'spilt-tea-007' - case 'not-found: arg': return 'spilt-tea-008' - case 'parser: pantry: package.yml': return 'spilt-tea-102' - case '#helpwanted': return 'spilt-tea-411' - case 'http': return 'spilt-tea-500' - default: { - const exhaustiveness_check: never = this.id - throw new Error(`unhandled id: ${exhaustiveness_check}`) - }} - } - - title() { - switch (this.id) { - case 'not-found: pantry: package.yml': - return `not found in pantry: ${this.ctx.project}` - default: - return this.id - } - } - - constructor(id: ID, ctx: PlainObject) { - let msg = '' - switch (id) { - case 'not-found: tea -X: arg0': - msg = undent` - couldn’t find a pkg to provide: \`${ctx.arg0}' - - https://github.com/teaxyz/pantry.zero#contributing - - ` - break - case 'not-found: exe/md: region': - msg = `markdown section for \`${ctx.script}\` has no \`\`\`sh code block` - break - case 'not-found: exe/md: default target': - if (ctx.requirementsFile) { - msg = `markdown section \`# Getting Started\` not found in \`${ctx.requirementsFile}\`` - } else { - msg = undent` - no \`README.md\` or \`package.json\` found. - ` - } - break - case 'not-found: pantry': - if (ctx.path) { - msg = `no pantry at path: ${ctx.path}, try \`tea --sync\`` - } else { - msg = 'no pantry: run `tea --sync`' - } - break - case 'http': - msg = ctx.underr?.message ?? "unknown error" - break - case 'not-found: pantry: package.yml': - msg = " https://github.com/teaxyz/pantry.zero#contributing\n" - break - case 'parser: pantry: package.yml': - msg = undent` - pantry entry invalid. please report this bug! 
- - https://github.com/teaxyz/pantry.core/issues/new - - ----------------------------------------------------->> attachment begin - ${ctx.project}: ${ctx.underr.message} - <<------------------------------------------------------- attachment end - ` - break - case 'not-found: virtual-env': - msg = undent` - \`${ctx.cwd}\` is not a tea virtual environment. - - currently, a virtual environment is defined by a \`README.md\` or \`package.json\` - existing alongside a source control directory (eg. \`.git\`). - ` - break - case 'not-found: srcroot': - msg = undent` - we couldn’t determine \`$SRCROOT\` when descending from \`${ctx.cwd}\`. - - currently, a virtual environment is defined by a \`README.md\` or \`package.json\` - existing alongside a source control directory (eg. \`.git\`). - ` - break - case 'not-found: arg': - msg = undent` - \`${ctx.arg}\` isn't a valid flag. - - see: \`tea --help\` - ` - break - case '#helpwanted': - msg = ctx.details - break - default: { - const exhaustiveness_check: never = id - throw new Error(`unhandled id: ${exhaustiveness_check}`) - }} - - super(msg) - this.id = id ?? 
msg - this.ctx = ctx - } -} - -export class UsageError extends Error -{} - -export { panic } from "./safe-utils.ts" - -export const wrap = , U>(fn: (...args: T) => U, id: ID) => { - return (...args: T): U => { - try { - let foo = fn(...args) - if (foo instanceof Promise) { - foo = foo.catch(converter) as U - } - return foo - } catch (underr) { - converter(underr) - } - - function converter(underr: unknown): never { - if (underr instanceof TeaError) { - throw underr - } else { - throw new TeaError(id, {...args, underr}) - } - } - } -} diff --git a/src/utils/hacks.ts b/src/utils/hacks.ts deleted file mode 100644 index 745f5db8..00000000 --- a/src/utils/hacks.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { PackageRequirement } from "types" -import { PlainObject } from "is_what" -import { host, validate_str } from "utils" -import { isString, isNumber } from "is_what" -import * as semver from "semver" - -export function validatePackageRequirement(input: PlainObject): PackageRequirement | undefined { - let { constraint, project } = input - - if (host().platform == 'darwin' && (project == "apple.com/xcode/clt" || project == "tea.xyz/gx/make")) { - // Apple will error out and prompt the user to install - //NOTE what we would really like is to error out when this dependency is *used* - // this is not the right place to error that. 
so FIXME - return // compact this dep away - } - if (host().platform == 'linux' && project == "tea.xyz/gx/make") { - project = "gnu.org/make" - constraint = '*' - } - - validate_str(project) - - //HACKS - if (constraint == 'c99' && project == 'tea.xyz/gx/cc') { - constraint = '^0.1' - } - - if (constraint === undefined) { - constraint = '*' - } else if (isNumber(constraint)) { - //FIXME change all pantry entries to use proper syntax - constraint = `^${constraint}` - } - if (!isString(constraint)) { - throw new Error(`invalid constraint: ${constraint}`) - } else if (/^\d/.test(constraint)) { - //FIXME change all pantry entries to use proper syntax - constraint = `^${constraint}` - } - - constraint = new semver.Range(constraint) - - return { - project, - constraint - } -} diff --git a/src/utils/index.ts b/src/utils/index.ts deleted file mode 100644 index 4ea2843b..00000000 --- a/src/utils/index.ts +++ /dev/null @@ -1,315 +0,0 @@ -//CONTRACT you can’t use anything from hooks - -import { isString, isPlainObject, isArray, isRegExp, PlainObject } from "is_what" - -// deno-lint-ignore no-explicit-any -export function validate_str(input: any): string { - if (typeof input == 'boolean') return input ? 
'true' : 'false' - if (typeof input == 'number') return input.toString() - if (typeof input != 'string') throw new Error(`not-string: ${input}`) - return input -} - -// deno-lint-ignore no-explicit-any -export function validate_plain_obj(input: any): PlainObject { - if (!isPlainObject(input)) throw new Error(`not-plain-obj: ${JSON.stringify(input)}`) - return input -} - -// deno-lint-ignore no-explicit-any -export function validate_arr(input: any): Array { - if (!isArray(input)) throw new Error(`not-array: ${JSON.stringify(input)}`) - return input -} - - -///////////////////////////////////////////////////////////////////////// HTTP -export async function GET(url: URL | string, headers?: Headers): Promise { - if (isString(url)) url = new URL(url) - if (url.host == "api.github.com") { - const token = Deno.env.get("GITHUB_TOKEN") - if (token) { - headers ??= new Headers() - headers.append("Authorization", `bearer ${token}`) - } - } - const rsp = await fetch(url, { headers }) - if (!rsp.ok) throw new TeaError('http', { url, headers }) - const json = await rsp.json() - return json as T -} - - -////////////////////////////////////////////////////////////// base extensions -import outdent from "outdent" -export { outdent as undent } - -declare global { - interface Array { - compact(body?: (t: T) => S | null | undefined | false, opts?: { rescue: boolean }): Array - compact_push(item: T | undefined | null): void - compact_unshift(item: T | undefined | null): void - chuzzle(): Array | undefined - uniq(): Array - } - - interface String { - chuzzle(): string | undefined - } - - interface Console { - // deno-lint-ignore no-explicit-any - verbose(...args: any[]): void - - /// prohibits standard logging unless verbosity is loud or above - silence(body: () => Promise): Promise - } - - interface Set { - insert(t: T): { inserted: boolean } - } -} - -String.prototype.chuzzle = function() { - return this.trim() || undefined -} - -export { chuzzle } from "./safe-utils.ts" - 
-Set.prototype.insert = function(t: T) { - if (this.has(t)) { - return {inserted: false} - } else { - this.add(t) - return {inserted: true} - } -} - -Array.prototype.uniq = function(): Array { - const set = new Set() - return this.compact(x => { - const s = x.toString() - if (set.has(s)) return - set.add(s) - return x - }) -} - -Array.prototype.compact = function(body?: (t: T) => S | null | undefined | false, opts?: { rescue: boolean }) { - const rv: Array = [] - for (const e of this) { - try { - const f = body ? body(e) : e - if (f) rv.push(f) - } catch (err) { - if (opts === undefined || opts.rescue === false) throw err - } - } - return rv -} - -//TODO would be nice to chuzzle contents to reduce first -Array.prototype.chuzzle = function() { - if (this.length <= 0) { - return undefined - } else { - return this - } -} - -console.verbose = console.log - -console.silence = async function(body: () => Promise) { - const originals = [console.log, console.info] - try { - console.log = () => {} - console.info = () => {} - return await body() - } finally { - console.log = originals[0] - console.info = originals[1] - } -} - -Array.prototype.compact_push = function(item: T | null | undefined) { - if (item) this.push(item) -} - -Array.prototype.compact_unshift = function(item: T | null | undefined) { - if (item) this.unshift(item) -} - -export function flatmap(t: T | undefined | null, body: (t: T) => S | undefined, opts?: {rescue?: boolean}): NonNullable | undefined { - try { - if (t) return body(t) ?? undefined - } catch (err) { - if (!opts?.rescue) throw err - } -} - -export async function async_flatmap(t: Promise, body: (t: T) => Promise | undefined, opts?: {rescue?: boolean}): Promise | undefined> { - try { - const tt = await t - if (tt) return await body(tt) ?? 
undefined - } catch (err) { - if (!opts?.rescue) throw err - } -} - -export function pivot(input: E[], body: (e: E) => ['L', L] | ['R', R]): [L[], R[]] { - const rv = { - 'L': [] as L[], - 'R': [] as R[] - } - for (const e of input) { - const [side, value] = body(e) - // deno-lint-ignore no-explicit-any - rv[side].push(value as any) - } - return [rv['L'], rv['R']] -} - -declare global { - interface Promise { - swallow(err?: unknown): Promise - } -} - -Promise.prototype.swallow = function(gristle?: unknown) { - return this.catch((err: unknown) => { - if (gristle === undefined) { - return - } - - if (err instanceof TeaError) { - err = err.id - } else if (err instanceof Error) { - err = err.message - } else if (isPlainObject(err) && isString(err.code)) { - err = err.code - } else if (isRegExp(gristle) && isString(err)) { - if (!err.match(gristle)) throw err - } else if (err !== gristle) { - throw err - } - return undefined - }) -} - -export async function attempt(body: () => Promise, opts: {swallow: unknown}): Promise { - try { - return await body() - } catch (err) { - if (err !== opts.swallow) throw err - } -} - -/////////////////////////////////////////////////////////////////// Unarchiver -import { Unarchiver, TarballUnarchiver, ZipUnarchiver } from "./Unarchiver.ts" -export { Unarchiver, TarballUnarchiver, ZipUnarchiver } - - -////////////////////////////////////////////////////////////////////////// run -import Path from "path" - -export interface RunOptions extends Omit { - cmd: (string | Path)[] | Path - cwd?: (string | Path) - clearEnv?: boolean //NOTE might not be cross platform! - spin?: boolean // hide output unless an error occurs -} - -export class RunError extends Error { - code: number - constructor(code: number, cmd: (Path | string)[]) { - super(`cmd failed: ${code}: ${cmd.join(' ')}`) - this.code = code - } -} - -export async function run({ spin, ...opts }: RunOptions) { - const cmd = isArray(opts.cmd) ? 
opts.cmd.map(x => `${x}`) : [opts.cmd.string] - const cwd = opts.cwd?.toString() - console.verbose({ cwd, ...opts, cmd }) - - const stdio = { stdout: 'inherit', stderr: 'inherit' } as Pick - if (spin) { - stdio.stderr = stdio.stdout = 'piped' - } - - let proc: Deno.Process | undefined - try { - proc = Deno.run({ ...opts, cmd, cwd, ...stdio }) - const exit = await proc.status() - console.verbose({ exit }) - if (!exit.success) throw new RunError(exit.code, cmd) - } catch (err) { - if (spin && proc) { - //FIXME this doesn’t result in the output being correctly interlaced - // ie. stderr and stdout may (probably) have been output interleaved rather than sequentially - const decode = (() => { const e = new TextDecoder(); return e.decode.bind(e) })() - console.error(decode(await proc.output())) - console.error(decode(await proc.stderrOutput())) - } - err.cmd = cmd // help us out since deno-devs don’t want to - throw err - } -} - -export async function backticks(opts: RunOptions): Promise { - const cmd = isArray(opts.cmd) ? 
opts.cmd.map(x => `${x}`) : [opts.cmd.string] - const cwd = opts.cwd?.toString() - console.verbose({ cwd, ...opts, cmd }) - const proc = Deno.run({ ...opts, cwd, cmd, stdout: "piped" }) - const out = await proc.output() - const txt = new TextDecoder().decode(out) - return txt -} - -/////////////////////////////////////////////////////////////////////////// io -// for output that the user requested, everything from console.* might be silenced -const encoder = new TextEncoder() -export const print = (x: string) => Deno.stdout.write(encoder.encode(`${x}\n`)) - - -///////////////////////////////////////////////////////////////////////// misc -import TeaError, { UsageError, panic } from "./error.ts" -export { TeaError, UsageError, panic } -export * as error from "./error.ts" - -// deno-lint-ignore no-explicit-any -export function tuplize(...elements: T) { - return elements -} - -///////////////////////////////////////////////////////////////////////// pkgs -export * as pkg from "./pkg.ts" - -///////////////////////////////////////////////////////////////////// platform -import { SupportedPlatform, SupportedArchitecture } from "types" - -interface HostReturnValue { - platform: SupportedPlatform - arch: SupportedArchitecture - target: string - build_ids: [SupportedPlatform, SupportedArchitecture] -} - -export function host(): HostReturnValue { - const arch = (() => { - switch (Deno.build.arch) { - case "aarch64": return "aarch64" - case "x86_64": return "x86-64" - // ^^ ∵ https://en.wikipedia.org/wiki/X86-64 and semver.org prohibits underscores - } - })() - - const { os: platform, target } = Deno.build - - return { - platform, - arch, - target, - build_ids: [platform, arch] - } -} diff --git a/src/utils/pkg.ts b/src/utils/pkg.ts deleted file mode 100644 index f6853f5b..00000000 --- a/src/utils/pkg.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { Package, PackageRequirement } from "types" -import * as semver from "semver" - -/// allows inputs `nodejs.org@16` when `semver.parse` 
would reject -export function parse(input: string): PackageRequirement | Package { - const match = input.match(/^(.+?)([\^=~<>@].+)?$/) - if (!match) throw new Error(`invalid pkgspec: ${input}`) - if (!match[2]) match[2] = "*" - - const project = match[1] - - if (match[2] =="@latest") { - console.warn(`@latest is deprecated, instead specify \`${project}*' or just \`${project}'`) - return { project, constraint: new semver.Range('*') } - } else { - // everyone expects `@` and for it to work this way - if (match[2].startsWith("@")) match[2] = `^${match[2].slice(1)}` - - const constraint = new semver.Range(match[2]) - const version = constraint.single() - - if (version) { - return { project, version } - } else { - return { project, constraint } - } - } -} - -export function compare(a: Package, b: Package): number { - return a.project === b.project - ? a.version.compare(b.version) - : a.project.localeCompare(b.project) -} - -export function str(pkg: Package | PackageRequirement): string { - if (!("constraint" in pkg)) { - return `${pkg.project}=${pkg.version}` - } else if (pkg.constraint.set === "*") { - return pkg.project - } else { - return `${pkg.project}${pkg.constraint}` - } -} diff --git a/src/utils/safe-utils.ts b/src/utils/safe-utils.ts deleted file mode 100644 index 548b1ac1..00000000 --- a/src/utils/safe-utils.ts +++ /dev/null @@ -1,17 +0,0 @@ -// utils safe enough “pure” stuff (eg. semver.ts, Path.ts) - -export function chuzzle(input: number) { - return Number.isNaN(input) ? undefined : input -} - -export function panic(message?: string): never { - throw new Error(message) -} - -export function flatmap(t: T | undefined | null, body: (t: T) => S | undefined, opts?: {rescue?: boolean}): NonNullable | undefined { - try { - if (t) return body(t) ?? 
undefined - } catch (err) { - if (!opts?.rescue) throw err - } -} diff --git a/src/utils/semver.ts b/src/utils/semver.ts deleted file mode 100644 index 3dc9db06..00000000 --- a/src/utils/semver.ts +++ /dev/null @@ -1,294 +0,0 @@ -import { isArray, isString } from "is_what" - -/** - * we have our own implementation because open source is full of weird - * but *almost* valid semver schemes, eg: - * openssl 1.1.1q - * ghc 5.64.3.2 - * it also allows us to implement semver_intersection without hating our lives - */ -export default class SemVer { - readonly components: number[] - - major: number - minor: number - patch: number - - //FIXME parse these - readonly prerelease: string[] = [] - readonly build: string[] = [] - - readonly raw: string - readonly pretty?: string - - constructor(input: string | number[] | Range | SemVer) { - if (typeof input == 'string') { - if (input.startsWith('v')) input = input.slice(1) - const parts = input.split('.') - let pretty_is_raw = false - this.components = parts.flatMap((x, index) => { - const match = x.match(/^(\d+)([a-z])$/) - if (match) { - if (index != parts.length - 1) throw new Error(`invalid version: ${input}`) - const n = parseInt(match[1]) - if (isNaN(n)) throw new Error(`invalid version: ${input}`) - pretty_is_raw = true - return [n, char_to_num(match[2])] - } else if (/^\d+$/.test(x)) { - const n = parseInt(x) // parseInt will parse eg. `5-start` to `5` - if (isNaN(n)) throw new Error(`invalid version: ${input}`) - return [n] - } else { - throw new Error(`invalid version: ${input}`) - } - }) - this.raw = input - if (pretty_is_raw) this.pretty = input - } else if (input instanceof Range || input instanceof SemVer) { - const v = input instanceof Range ? 
input.single() : input - if (!v) throw new Error(`range represents more than a single version: ${input}`) - this.components = v.components - this.raw = v.raw - this.pretty = v.pretty - } else { - this.components = input - this.raw = input.join('.') - } - - this.major = this.components[0] - this.minor = this.components[1] ?? 0 - this.patch = this.components[2] ?? 0 - - function char_to_num(c: string) { - return c.charCodeAt(0) - 'a'.charCodeAt(0) + 1 - } - } - - toString(): string { - return this.pretty ?? - (this.components.length <= 3 - ? `${this.major}.${this.minor}.${this.patch}` - : this.components.join('.')) - } - - eq(that: SemVer): boolean { - return this.compare(that) == 0 - } - - neq(that: SemVer): boolean { - return this.compare(that) != 0 - } - - gt(that: SemVer): boolean { - return this.compare(that) > 0 - } - - lt(that: SemVer): boolean { - return this.compare(that) < 0 - } - - compare(that: SemVer): number { - return _compare(this, that) - } - - [Symbol.for("Deno.customInspect")]() { - return this.toString() - } -} - -/// the same as the constructor but swallows the error returning undefined instead -export function parse(input: string) { - try { - return new SemVer(input) - } catch { - return undefined - } -} - -/// we don’t support as much as node-semver but we refuse to do so because it is badness -export class Range { - // contract [0, 1] where 0 != 1 and 0 < 1 - readonly set: ([SemVer, SemVer] | SemVer)[] | '*' - - constructor(input: string | ([SemVer, SemVer] | SemVer)[]) { - if (input === "*") { - this.set = '*' - } else if (!isString(input)) { - this.set = input - } else { - input = input.trim() - - const err = () => new Error(`invalid semver range: ${input}`) - - this.set = input.split(/(?:,|\s*\|\|\s*)/).map(input => { - let match = input.match(/^>=((\d+\.)*\d+)\s*(<((\d+\.)*\d+))?$/) - if (match) { - const v1 = new SemVer(match[1]) - const v2 = match[3] ? new SemVer(match[4])! 
: new SemVer([Infinity, Infinity, Infinity]) - return [v1, v2] - } else if ((match = input.match(/^([~=<^])(.+)$/))) { - let v1: SemVer | undefined, v2: SemVer | undefined - switch (match[1]) { - case "^": - v1 = new SemVer(match[2]) - v2 = new SemVer([v1.major + 1]) - return [v1, v2] - case "~": { - v1 = new SemVer(match[2]) - if (v1.components.length == 1) { - // yep this is the official policy - v2 = new SemVer([v1.major + 1]) - } else { - v2 = new SemVer([v1.major, v1.minor + 1]) - } - } return [v1, v2] - case "<": - v1 = new SemVer([0]) - v2 = new SemVer(match[2]) - return [v1, v2] - case "=": - return new SemVer(match[2]) - } - } - throw err() - }) - - if (this.set.length == 0) { - throw err() - } - - for (const i of this.set) { - if (isArray(i) && !i[0].lt(i[1])) throw err() - } - } - } - - toString(): string { - if (this.set === '*') { - return '*' - } else { - return this.set.map(v => { - if (!isArray(v)) return `=${v.toString()}` - const [v1, v2] = v - if (v2.major == v1.major + 1 && v2.minor == 0 && v2.patch == 0) { - const v = chomp(v1) - return `^${v}` - } else if (v2.major == v1.major && v2.minor == v1.minor + 1 && v2.patch == 0) { - const v = chomp(v1) - return `~${v}` - } else if (v2.major == Infinity) { - const v = chomp(v1) - return `>=${v}` - } else { - return `>=${chomp(v1)}<${chomp(v2)}` - } - }).join(",") - } - } - - // eq(that: Range): boolean { - // if (this.set.length !== that.set.length) return false - // for (let i = 0; i < this.set.length; i++) { - // const [a,b] = [this.set[i], that.set[i]] - // if (typeof a !== 'string' && typeof b !== 'string') { - // if (a[0].neq(b[0])) return false - // if (a[1].neq(b[1])) return false - // } else if (a != b) { - // return false - // } - // } - // return true - // } - - satisfies(version: SemVer): boolean { - if (this.set === '*') { - return true - } else { - return this.set.some(v => { - if (isArray(v)) { - const [v1, v2] = v - return version.compare(v1) >= 0 && version.compare(v2) < 0 - } else { - 
return version.eq(v) - } - }) - } - } - - max(versions: SemVer[]): SemVer | undefined { - return versions.filter(x => this.satisfies(x)).sort((a,b) => a.compare(b)).pop() - } - - single(): SemVer | undefined { - if (this.set === '*') return - if (this.set.length > 1) return - return isArray(this.set[0]) ? undefined : this.set[0] - } - - [Symbol.for("Deno.customInspect")]() { - return this.toString() - } -} - -function zip(a: T[], b: U[]) { - const N = Math.max(a.length, b.length) - const rv: [T | undefined, U | undefined][] = [] - for (let i = 0; i < N; ++i) { - rv.push([a[i], b[i]]) - } - return rv -} - -function _compare(a: SemVer, b: SemVer): number { - for (const [c,d] of zip(a.components, b.components)) { - if (c != d) return (c ?? 0) - (d ?? 0) - } - return 0 -} -export { _compare as compare } - - -export function intersect(a: Range, b: Range): Range { - if (b.set === '*') return a - if (a.set === '*') return b - - // calculate the intersection between two semver.Ranges - const set: ([SemVer, SemVer] | SemVer)[] = [] - - for (const aa of a.set) { - for (const bb of b.set) { - if (!isArray(aa) && !isArray(bb)) { - if (aa.eq(bb)) set.push(aa) - } else if (!isArray(aa)) { - const bbb = bb as [SemVer, SemVer] - if (aa.compare(bbb[0]) >= 0 && aa.lt(bbb[1])) set.push(aa) - } else if (!isArray(bb)) { - const aaa = aa as [SemVer, SemVer] - if (bb.compare(aaa[0]) >= 0 && bb.lt(aaa[1])) set.push(bb) - } else { - const a1 = aa[0] - const a2 = aa[1] - const b1 = bb[0] - const b2 = bb[1] - - if (a1.compare(b2) >= 0 || b1.compare(a2) >= 0) { - continue - } - - set.push([a1.compare(b1) > 0 ? a1 : b1, a2.compare(b2) < 0 ? 
a2 : b2]) - } - } - } - - if (set.length <= 0) throw new Error(`cannot intersect: ${a} && ${b}`) - - return new Range(set) -} - - -//FIXME yes yes this is not sufficient -export const regex = /\d+\.\d+\.\d+/ - -function chomp(v: SemVer) { - return v.toString().replace(/(\.0)+$/g, '') || '0' -} diff --git a/src/vendor/Path.ts b/src/vendor/Path.ts deleted file mode 100644 index f4f5e6ee..00000000 --- a/src/vendor/Path.ts +++ /dev/null @@ -1,447 +0,0 @@ -import { parse as parseYaml } from "deno/encoding/yaml.ts" -import * as sys from "deno/path/mod.ts" -import * as fs from "deno/fs/mod.ts" -import { PlainObject } from "is_what" -import {readLines} from "deno/io/buffer.ts" -import "utils" //FIXME for console.verbose - -// based on https://github.com/mxcl/Path.swift - -// everything is Sync because TypeScript will unfortunately not -// cascade `await`, meaing our chainable syntax would become: -// -// await (await foo).bar -// -// however we use async versions for “terminators”, eg. `ls()` - -export default class Path { - /// the normalized string representation of the underlying filesystem path - readonly string: string - - /// the filesystem root - static root = new Path("/") - - static cwd(): Path { - return new Path(Deno.cwd()) - } - - static home(): Path { - return new Path((() => { - switch (Deno.build.os) { - case "linux": - case "darwin": - return Deno.env.get("HOME")! - case "windows": - return Deno.env.get("USERPROFILE")! 
- } - })()) - } - - /// normalizes the path - /// throws if not an absolute path - constructor(input: string | Path) { - if (input instanceof Path) { - this.string = input.string - } else if (!input || input[0] != '/') { - throw new Error(`invalid absolute path: ${input}`) - } else { - this.string = sys.normalize(input) - } - } - - /// returns Path | undefined rather than throwing error if Path is not absolute - static abs(input: string | Path) { - try { - return new Path(input) - } catch { - return - } - } - - /** - If the path represents an actual entry that is a symlink, returns the symlink’s - absolute destination. - - - Important: This is not exhaustive, the resulting path may still contain a symlink. - - Important: The path will only be different if the last path component is a symlink, any symlinks in prior components are not resolved. - - Note: If file exists but isn’t a symlink, returns `self`. - - Note: If symlink destination does not exist, is **not** an error. - */ - readlink(): Path { - try { - const output = Deno.readLinkSync(this.string) - return this.parent().join(output) - } catch (err) { - const code = err.code - if (err instanceof TypeError) { - switch (code) { - case 'EINVAL': - return this // is file - case 'ENOENT': - throw err // there is no symlink at this path - } - } - throw err - } - } - /** - Returns the parent directory for this path. - Path is not aware of the nature of the underlying file, but this is - irrlevant since the operation is the same irrespective of this fact. - - Note: always returns a valid path, `Path.root.parent` *is* `Path.root`. 
- */ - parent(): Path { - return new Path(sys.dirname(this.string)) - } - - /// returns normalized absolute path string - toString(): string { - return this.string - } - - /// joins this path with the provided component and normalizes it - /// if you provide an absolute path that path is returned - /// rationale: usually if you are trying to join an absolute path it is a bug in your code - /// TODO should warn tho - join(...components: string[]): Path { - const joined = components.join("/") - if (joined[0] == '/') { - return new Path(joined) - } else if (joined) { - return new Path(`${this.string}/${joined}`) - } else { - return this - } - } - - /// Returns true if the path represents an actual filesystem entry that is *not* a directory. - /// NOTE we use `stat`, so if the file is a symlink it is resolved, usually this is what you want - isFile(): Path | undefined { - try { - return Deno.statSync(this.string).isFile ? this : undefined - } catch { - return //FIXME - // if (err instanceof Deno.errors.NotFound == false) { - // throw err - // } - } - } - - isSymlink(): Path | undefined { - try { - return Deno.lstatSync(this.string).isSymlink ? this : undefined - } catch { - return //FIXME - // if (err instanceof Deno.errors.NotFound) { - // return false - // } else { - // throw err - // } - } - } - - isExecutableFile(): Path | undefined { - try { - if (!this.isFile()) return - const info = Deno.statSync(this.string) - if (!info.mode) throw new Error() - const is_exe = (info.mode & 0o111) > 0 - if (is_exe) return this - } catch { - return //FIXME catch specific errors - } - } - - isReadableFile(): Path | undefined { - return this.isFile() /*FIXME*/ ? 
this : undefined - } - - exists(): Path | undefined { - //FIXME can be more efficient - try { - Deno.statSync(this.string) - return this - } catch { - return //FIXME - // if (err instanceof Deno.errors.NotFound) { - // return false - // } else { - // throw err - // } - } - } - - /// Returns true if the path represents an actual directory. - /// NOTE we use `stat`, so if the file is a symlink it is resolved, usually this is what you want - isDirectory(): Path | undefined { - try { - return Deno.statSync(this.string).isDirectory ? this : undefined - } catch { - return //FIXME catch specific errorrs - } - } - - async *ls(): AsyncIterable<[Path, Deno.DirEntry]> { - for await (const entry of Deno.readDir(this.string)) { - yield [this.join(entry.name), entry] - } - } - - //FIXME probs can be infinite - async *walk(): AsyncIterable<[Path, Deno.DirEntry]> { - const stack: Path[] = [this] - while (stack.length > 0) { - const dir = stack.pop()! - for await (const entry of Deno.readDir(dir.string)) { - const path = dir.join(entry.name) - yield [path, entry] - if (entry.isDirectory) { - stack.push(path) - } - } - } - } - - components(): string[] { - return this.string.split('/') - } - - static mktmp({ prefix }: { prefix: string } = { prefix: 'tea' }): Path { - const parts = tmp.join(prefix).split() - parts[0].mkpath() - const dir = parts[0].string - prefix = parts[1] - const rv = Deno.makeTempDirSync({prefix, dir}) - return new Path(rv) - } - - split(): [Path, string] { - const d = this.parent() - const b = this.basename() - return [d, b] - } - - /// the file extension with the leading period - extname(): string { - const match = this.string.match(/\.tar\.\w+$/) - if (match) { - return match[0] - } else { - return sys.extname(this.string) - } - } - - basename(): string { - return sys.basename(this.string) - } - - /** - Moves a file. - - Path.root.join("bar").mv({to: Path.home.join("foo")}) - // => Path("/Users/mxcl/foo") - - - Parameter to: Destination filename. 
- - Parameter into: Destination directory (you get `into/${this.basename()`) - - Parameter overwrite: If true overwrites any entry that already exists at the destination. - - Returns: `to` to allow chaining. - - Note: `force` will still throw if `to` is a directory. - - Note: Throws if `overwrite` is `false` yet `to` is *already* identical to - `self` because even though *our policy* is to noop if the desired - end result preexists, checking for this condition is too expensive a - trade-off. - */ - mv({force, ...opts}: {to: Path, force?: boolean} | {into: Path, force?: boolean}): Path { - if ("to" in opts) { - fs.moveSync(this.string, opts.to.string, { overwrite: force }) - return opts.to - } else { - const dst = opts.into.join(this.basename()) - fs.moveSync(this.string, dst.string, { overwrite: force }) - return dst - } - } - - ///FIXME operates in ”force” mode - cp({into}: {into: Path}): Path { - const dst = into.join(this.basename()) - Deno.copyFileSync(this.string, dst.string) - return dst - } - - rm({recursive} = {recursive: false}) { - if (this.exists()) { - Deno.removeSync(this.string, { recursive }) - } - } - - mkdir(): Path { - if (!this.isDirectory()) { - Deno.mkdirSync(this.string) - } - return this - } - - isEmpty(): Path | undefined { - for (const _ of Deno.readDirSync(this.string)) { - return - } - return this - } - - mkpath(): Path { - if (!(this.isSymlink() && this.isDirectory())) { - // if it's a symlink and a directory ensureDirSync fails - fs.ensureDirSync(this.string) - } - return this - } - - mkparent(): Path { - this.parent().mkpath() - return this - } - - eq(that: Path): boolean { - return this.string == that.string - } - - neq(that: Path): boolean { - return this.string != that.string - } - - /// creates a symlink of `from` aliased as a relative path `to`, relative to directory `this` - //TODO deprecate - async symlink({from, to, force}: { from: Path, to: Path, force?: boolean }): Promise { - // NOTE that we use Deno.run as there is no other 
way in Deno currently to create - // relative symlinks. Also Deno.symlink requires full write permissions for no reason that I understand. - - const src = from.relative({ to: this }) - const dst = to.relative({ to: this }) - - let opts = "-s" - if (force) opts += "fn" - const status = await Deno.run({ - cmd: ["/bin/ln", opts, src, dst], - cwd: this.string - }).status() - - if (status.code != 0) throw new Error(`failed: cd ${this} && ln -sf ${src} ${dst}`) - - return to - } - - /// creates a symlink from `this` to `to` - ln(_: 's', {to}: { to: Path }): Path { - Deno.symlinkSync(this.string, to.string) - return to - } - - read(): Promise { - return Deno.readTextFile(this.string) - } - - readLines(): AsyncIterableIterator { - const fd = Deno.openSync(this.string) - return readLines(fd) - } - - //FIXME like, we don’t want a hard dependency in the published library - //TODO would be nice to validate the output against a type - //TODO shouldn't be part of this module since we want to publish it - async readYAML(): Promise { - console.verbose({ readYAML: this.string }) - try { - const txt = await this.read() - return parseYaml(txt) - } catch (err) { - console.error(this) //because deno errors are shit - throw err - } - } - - readJSON(): Promise { - return this.read().then(x => JSON.parse(x)) - } - - write({ force, ...content }: ({text: string} | {json: PlainObject, space?: number}) & {force?: boolean}): Path { - if (this.exists()) { - if (!force) throw new Error(`file-exists:${this}`) - this.rm() - } - if ("text" in content) { - Deno.writeTextFileSync(this.string, content.text) - } else { - const text = JSON.stringify(content.json, null, content.space) - Deno.writeTextFileSync(this.string, text) - } - return this - } - - touch(): Path { - //FIXME work more as expected - return this.write({force: true, text: ""}) - } - - in(that: Path) { - //FIXME a bit naive - return this.string.startsWith(that.string) - } - - chmod(mode: number): Path { - Deno.chmodSync(this.string, 
mode) - return this - } - - compact(): Path | undefined { - if (this.exists()) return this - } - - relative({ to: base }: { to: Path }): string { - const pathComps = ['/'].concat(this.string.split("/").filter(x=>x)) - const baseComps = ['/'].concat(base.string.split("/").filter(x=>x)) - - if (this.string.startsWith(base.string)) { - return pathComps.slice(baseComps.length).join("/") - } else { - const newPathComps = [...pathComps] - const newBaseComps = [...baseComps] - - while (newPathComps[0] == newBaseComps[0]) { - newPathComps.shift() - newBaseComps.shift() - } - - const relComps = Array.from({ length: newBaseComps.length } , () => "..") - relComps.push(...newPathComps) - return relComps.join("/") - } - } - - prettyString(): string { - return this.string.replace(new RegExp(`^${Path.home()}`), '~') - } - - // if we’re inside the CWD we print that - prettyLocalString(): string { - const cwd = Path.cwd() - return this.in(cwd) ? `./${this.relative({ to: cwd })}` : this.prettyString() - } - - [Symbol.for("Deno.customInspect")]() { - return this.prettyString() - } -} - -let tmp = new Path('/tmp') -const set_tmp = (path: Path) => tmp = path - -export { set_tmp } - -declare global { - interface URL { - path(): Path - } -} - -URL.prototype.path = function() { return new Path(this.pathname) } diff --git a/src/vendor/README.md b/src/vendor/README.md deleted file mode 100644 index e7ce0fbc..00000000 --- a/src/vendor/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Path.ts - -Note not actually vendored yet as we haven’t yet published this package. - -Goals: - -1. Chainable -2. Robust -3. noop if the result of a requested operation has already been done. -4. Delightful -5. Conforms to principle of least surprise -6. 
Consistent - -Based on [mxcl/Path.swift](https://github.com/mxcl/Path.swift) diff --git a/tea.yaml b/tea.yaml new file mode 100644 index 00000000..c0002bcf --- /dev/null +++ b/tea.yaml @@ -0,0 +1,8 @@ +# https://tea.xyz/what-is-this-file +--- +version: 1.0.0 +codeOwners: + - '0x5E2DE4A68df811AAAD32d71fb065e6946fA5C8d9' #mxcl + - '0x2faad60792b0594fB37592BA25Bd7387Fbd35191' #thomas-borrel + - '0xAb9A89fA4Bbd04Fc37116F0d7766866D001EA704' #jhheider +quorum: 1 diff --git a/tests/integration/cli.test.ts b/tests/integration/cli.test.ts deleted file mode 100644 index e08bb4d9..00000000 --- a/tests/integration/cli.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { assert } from "deno/testing/asserts.ts" -import { sandbox } from '../utils.ts' - -Deno.test("usage", async () => { - const out = await sandbox(({ run }) => run({args: ["--help"]}).stdout()) - assert(out.split("\n").length > 0) -}) - -Deno.test("+zlib.net", async () => { - await sandbox(async tea => { - await tea.run({ - args: ["--sync", "+zlib.net", "true"], - net: true - }) - }) -}) diff --git a/tests/integration/tea-XX.test.ts b/tests/integration/tea-XX.test.ts deleted file mode 100644 index fcb4e1b2..00000000 --- a/tests/integration/tea-XX.test.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { assertEquals } from "deno/testing/asserts.ts" -import { sandbox } from '../utils.ts' - -//TODO verify that python version is actually what we request - -Deno.test("tea -X python", async () => { - await sandbox(async ({ run }) => { - const out = await run({args: ["-SX", "python", "-c", "print(1)"], net: true }).stdout() - assertEquals(out, "1\n") - }) -}) - -Deno.test("tea -SX python3", async () => { - await sandbox(async ({ run }) => { - const out = await run({args: ["-SX", "python3", "-c", "print(2)"], net: true }).stdout() - assertEquals(out, "2\n") - }) -}) - -Deno.test("tea -SX python3.11", async () => { - await sandbox(async ({ run }) => { - const out = await run({args: ["-SX", "python3.11", "-c", "print(3)"], net: true 
}).stdout() - assertEquals(out, "3\n") - }) -}) diff --git a/tests/integration/tea-x.test.ts b/tests/integration/tea-x.test.ts deleted file mode 100644 index 91d750eb..00000000 --- a/tests/integration/tea-x.test.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { assertEquals } from "deno/testing/asserts.ts" -import { undent } from "../../src/utils/index.ts"; -import { sandbox } from '../utils.ts' - -Deno.test("tea -x", async () => { - await sandbox(async ({ run, tmpdir }) => { - tmpdir.join("fixture.py").write({ text: "print('hello')" }) - const out = await run({args: ["--sync", "fixture.py"], net: true }).stdout() - assertEquals(out, "hello\n") - }) - - await sandbox(async ({ run, tmpdir }) => { - tmpdir.join("fixture.ts").write({ text: "console.log('hello')" }) - const out = await run({args: ["--sync", "fixture.ts"], net: true }).stdout() - assertEquals(out, "hello\n") - }) -}) - -Deno.test("shebangs", async harness => { - await harness.step("without args", async () => { - await sandbox(async ({ run, tmpdir }) => { - const fixture = tmpdir.join("fixture.py").write({ text: undent` - #!/usr/bin/env python3 - import platform - print(platform.python_version()) - ` - }).chmod(0o500) - const out = await run({args: ["--sync", fixture.string], net: true }).stdout() - assertEquals(out[0], "3") //TODO better - }) - }) - - // verifies that we run `sh fixture.sh` and not `bash sh fixture.sh` - await harness.step("with args", async () => { - await sandbox(async ({ run, tmpdir }) => { - const fuzz = "hi" - const fixture = tmpdir.join("fixture.sh").write({ text: undent` - #!/bin/bash - - #--- - # args: [sh] - #--- - - echo "${fuzz}" - ` - }).chmod(0o500) - const out = await run({args: ["--sync", fixture.string], net: true}).stdout() - assertEquals(out.trim(), fuzz) - }) - }) -}) diff --git a/tests/unit/cache.test.ts b/tests/unit/cache.test.ts deleted file mode 100644 index e7ba5949..00000000 --- a/tests/unit/cache.test.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { assert } from 
"deno/testing/asserts.ts" -import { useDownload } from "hooks" -import { sandbox } from "../utils.ts" - -Deno.test("etag-mtime-check",async () => { - await sandbox(async ({ tmpdir }) => { - const src = new URL("https://dist.tea.xyz/ijg.org/versions.txt") - await useDownload().download({src, dst: tmpdir.join("versions.txt")}) - - const mtimePath = await useDownload().hash_key(src).join("mtime") - const etagPath = await useDownload().hash_key(src).join("etag") - - const mtime = await mtimePath.read() - const etag = await etagPath.read() - - const rsp = await fetch(src, {}) - const mtimeA = rsp.headers.get("Last-Modified") - const etagA = rsp.headers.get("etag") - - assert(mtimeA === mtime) - assert(etagA === etag) - await rsp.body?.cancel() - }) -}) diff --git "a/tests/unit/exe\342\210\225md.test.ts" "b/tests/unit/exe\342\210\225md.test.ts" deleted file mode 100644 index e32fd7bb..00000000 --- "a/tests/unit/exe\342\210\225md.test.ts" +++ /dev/null @@ -1,86 +0,0 @@ -import { assert, assertEquals } from "deno/testing/asserts.ts" -import { useExecutableMarkdown } from "hooks" -import { undent } from "utils" -import { sandbox } from "../utils.ts" - -////////////////////////////////////////////////////////////////////////// unit -Deno.test("find-script-simple", async () => { - const {script, markdown } = fixture() - const text = undent` - # Title - ## Build - ${markdown} - ` - - const output = await useExecutableMarkdown({ text }) - .findScript("build") - - assert(output === script) -}) - -Deno.test("find-script-complex", async () => { - const {markdown: dummy} = fixture() - const {markdown, script} = fixture() - const text = undent` - # Title - - Lorem ipsum. - Lorem ipsum. - - ## Build - - Lorem ipsum. - Lorem ipsum. - - ${dummy} - - ## Deploy - - Lorem ipsum. - - ${markdown} - - Lorem ipsum. - - # Foo - - Bar. 
- ` - - const output = await useExecutableMarkdown({ text }) - .findScript("deploy") - - assert(output === script) -}) - -////////////////////////////////////////////////////////////////////////// impl -Deno.test("tea build", async () => { - const { markdown } = fixture() - const output = await sandbox(async ({ tmpdir, run }) => { - tmpdir.join(".git").mkdir() - tmpdir.join("README.md").write({ text: undent` - # Build - ${markdown} - - # Metadata - | Key | Value | - |---------|---------| - | Version | 1.2.3 | - `}) - //FIXME metadata table because depending on tea.xyz is silently ignored - return await run({args: ["build"]}).stdout() - }) - assertEquals(output, "foo bar\n") -}) - - -////////////////////////////////////////////////////////////////////////// util -function fixture() { - const script = 'echo foo bar' - const markdown = undent` - \`\`\`sh - ${script} - \`\`\` - ` - return { script, markdown } -} diff --git a/tests/unit/pkgutils.test.ts b/tests/unit/pkgutils.test.ts deleted file mode 100644 index 977b27a2..00000000 --- a/tests/unit/pkgutils.test.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { assert, assertEquals } from "deno/testing/asserts.ts" -import SemVer, { Range } from "utils/semver.ts" -import * as pkg from "utils/pkg.ts" - - -Deno.test("pkg.str", async test => { - let out: string - - await test.step("precise", () => { - out = pkg.str({ - project: "test", - version: new SemVer("1.2.3") - }) - assertEquals(out, "test=1.2.3") - }) - - for (const range of ["^1", "^1.2", "^1.2.3"]) { - await test.step(range, () => { - out = pkg.str({ - project: "test", - constraint: new Range(range) - }) - assertEquals(out, `test${range}`) - }) - } - - for (const [range, expected] of [[">=1 <2", "^1"], [">=1.2 <2", "^1.2"], [">=1.2.3 <2", "^1.2.3"]]) { - await test.step(`${range} == ${expected}`, () => { - out = pkg.str({ - project: "test", - constraint: new Range(range) - }) - assertEquals(out, `test${expected}`) - }) - } - - await test.step("range of one version", () 
=> { - const constraint = new Range("=1.2.3") - - out = pkg.str({ - project: "test", - constraint - }) - assert(constraint.single()) - assertEquals(out, `test=1.2.3`) - }) -}) diff --git a/tests/unit/semver.test.ts b/tests/unit/semver.test.ts deleted file mode 100644 index 10dd3c99..00000000 --- a/tests/unit/semver.test.ts +++ /dev/null @@ -1,162 +0,0 @@ -import { assert, assertEquals, assertFalse, assertThrows } from "deno/testing/asserts.ts" -import SemVer, * as semver from "utils/semver.ts" - - -Deno.test("semver", async test => { - await test.step("sort", () => { - const input = [new SemVer([1,2,3]), new SemVer("2.3.4"), new SemVer("1.2.4"), semver.parse("1.2.3.1")!] - const sorted1 = input.sort(semver.compare) - const sorted2 = input.sort() - - assertEquals(sorted1.join(","), "1.2.3,1.2.3.1,1.2.4,2.3.4") - assertEquals(sorted2.join(","), "1.2.3,1.2.3.1,1.2.4,2.3.4") - }) - - await test.step("parse", () => { - assertEquals(semver.parse("1.2.3.4.5")?.toString(), "1.2.3.4.5") - assertEquals(semver.parse("1.2.3.4")?.toString(), "1.2.3.4") - assertEquals(semver.parse("1.2.3")?.toString(), "1.2.3") - assertEquals(semver.parse("1.2")?.toString(), "1.2.0") - assertEquals(semver.parse("1")?.toString(), "1.0.0") - }) - - await test.step("constructor", () => { - assertEquals(new SemVer("1.2.3.4.5.6").toString(), "1.2.3.4.5.6") - assertEquals(new SemVer("1.2.3.4.5").toString(), "1.2.3.4.5") - assertEquals(new SemVer("1.2.3.4").toString(), "1.2.3.4") - assertEquals(new SemVer("1.2.3").toString(), "1.2.3") - assertEquals(new SemVer("v1.2.3").toString(), "1.2.3") - assertEquals(new SemVer("1.2").toString(), "1.2.0") - assertEquals(new SemVer("v1.2").toString(), "1.2.0") - - assertEquals(new SemVer("1.1.1q").toString(), "1.1.1q") - assertEquals(new SemVer("1.1.1q").components, [1,1,1,17]) - - // we refuse these as it is just too lenient in our opinion - assertEquals(new SemVer("1").toString(), "1.0.0") - assertEquals(new SemVer("v1").toString(), "1.0.0") - }) - - await 
test.step("ranges", () => { - const a = new semver.Range(">=1.2.3<2.3.4 || >=3") - assertEquals(a.toString(), ">=1.2.3<2.3.4,>=3") - - assert(a.satisfies(new SemVer("1.2.3"))) - assert(a.satisfies(new SemVer("1.4.1"))) - assert(a.satisfies(new SemVer("3.0.0"))) - assert(a.satisfies(new SemVer("90.0.0"))) - assertFalse(a.satisfies(new SemVer("2.3.4"))) - assertFalse(a.satisfies(new SemVer("2.5.0"))) - - const b = new semver.Range("^0.15") - assertEquals(b.toString(), "^0.15") - - const c = new semver.Range("~0.15") - assertEquals(c.toString(), "~0.15") - - assert(c.satisfies(new SemVer("0.15.0"))) - assert(c.satisfies(new SemVer("0.15.1"))) - assertFalse(c.satisfies(new SemVer("0.14.0"))) - assertFalse(c.satisfies(new SemVer("0.16.0"))) - - const d = new semver.Range("~0.15.1") - assertEquals(d.toString(), "~0.15.1") - assert(d.satisfies(new SemVer("0.15.1"))) - assert(d.satisfies(new SemVer("0.15.2"))) - assertFalse(d.satisfies(new SemVer("0.15.0"))) - assertFalse(d.satisfies(new SemVer("0.16.0"))) - assertFalse(d.satisfies(new SemVer("0.14.0"))) - - // `~` is weird - const e = new semver.Range("~1") - assertEquals(e.toString(), "^1") - assert(e.satisfies(new SemVer("1.0"))) - assert(e.satisfies(new SemVer("1.1"))) - assertFalse(e.satisfies(new SemVer("2"))) - - const f = new semver.Range("^14||^16||^18") - assert(f.satisfies(new SemVer("14.0.0"))) - assertFalse(f.satisfies(new SemVer("15.0.0"))) - assert(f.satisfies(new SemVer("16.0.0"))) - assertFalse(f.satisfies(new SemVer("17.0.0"))) - assert(f.satisfies(new SemVer("18.0.0"))) - - const g = new semver.Range("<15") - assert(g.satisfies(new SemVer("14.0.0"))) - assert(g.satisfies(new SemVer("0.0.1"))) - assertFalse(g.satisfies(new SemVer("15.0.0"))) - - const i = new semver.Range("^1.2.3.4") - assert(i.satisfies(new SemVer("1.2.3.4"))) - assert(i.satisfies(new SemVer("1.2.3.5"))) - assert(i.satisfies(new SemVer("1.2.4.2"))) - assert(i.satisfies(new SemVer("1.3.4.2"))) - assertFalse(i.satisfies(new 
SemVer("2.0.0"))) - - assertThrows(() => new semver.Range("1")) - assertThrows(() => new semver.Range("1.2")) - assertThrows(() => new semver.Range("1.2.3")) - assertThrows(() => new semver.Range("1.2.3.4")) - }) - - await test.step("intersection", async test => { - await test.step("^3.7…=3.11", () => { - const a = new semver.Range("^3.7") - const b = new semver.Range("=3.11") - - assertEquals(b.toString(), "=3.11.0") - - const c = semver.intersect(a, b) - assertEquals(c.toString(), "=3.11.0") - }) - - await test.step("^3.7…^3.9", () => { - const a = new semver.Range("^3.7") - const b = new semver.Range("^3.9") - - assertEquals(b.toString(), "^3.9") - - const c = semver.intersect(a, b) - assertEquals(c.toString(), "^3.9") - }) - - await test.step("^3.7…*", () => { - const a = new semver.Range("^3.7") - const b = new semver.Range("*") - - assertEquals(b.toString(), "*") - - const c = semver.intersect(a, b) - assertEquals(c.toString(), "^3.7") - }) - - await test.step("~3.7…~3.8", () => { - const a = new semver.Range("~3.7") - const b = new semver.Range("~3.8") - - assertThrows(() => semver.intersect(a, b)) - }) - - await test.step("^3.7…=3.8", () => { - const a = new semver.Range("^3.7") - const b = new semver.Range("=3.8") - const c = semver.intersect(a, b) - assertEquals(c.toString(), "=3.8.0") - }) - - await test.step("^11,^12…^11.3", () => { - const a = new semver.Range("^11,^12") - const b = new semver.Range("^11.3") - const c = semver.intersect(a, b) - assertEquals(c.toString(), "^11.3") - }) - - //FIXME this *should* work - // await test.step("^11,^12…^11.3,^12.2", () => { - // const a = new semver.Range("^11,^12") - // const b = new semver.Range("^11.3") - // const c = semver.intersect(a, b) - // assertEquals(c.toString(), "^11.3,^12.2") - // }) - }) -}) diff --git a/tests/unit/useCache.test.ts b/tests/unit/useCache.test.ts deleted file mode 100644 index cf471a00..00000000 --- a/tests/unit/useCache.test.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { 
assertEquals } from "deno/testing/asserts.ts" -import useCache from "hooks/useCache.ts" -import Path from "../../src/vendor/Path.ts" - -Deno.test("decode", async test => { - await test.step("std", () => { - const stow = useCache().decode(Path.root.join("gnome.org∕glib-2.72.4+darwin+aarch64.tar.xz")) - - assertEquals(stow?.type, "bottle") - if (stow?.type != 'bottle') throw new Error() // for type checker - - assertEquals(stow.pkg.project, "gnome.org/glib") - assertEquals(stow.pkg.version.toString(), "2.72.4") - assertEquals(stow.compression, 'xz') - assertEquals(stow.host!.arch, "aarch64") - assertEquals(stow.host!.platform, "darwin") - }) - - await test.step("openssl", () => { - const stow = useCache().decode(Path.root.join("openssl.org-1.1.1s+darwin+aarch64.tar.xz")) - - assertEquals(stow?.type, "bottle") - if (stow?.type != 'bottle') throw new Error() // for type checker - - assertEquals(stow.pkg.project, "openssl.org") - assertEquals(stow.pkg.version.toString(), "1.1.1s") - assertEquals(stow.compression, 'xz') - assertEquals(stow.host!.arch, "aarch64") - assertEquals(stow.host!.platform, "darwin") - }) - - await test.step("ghc", () => { - const stow = useCache().decode(Path.root.join("haskell.org-1.2.3.4+darwin+aarch64.tar.xz")) - - assertEquals(stow?.type, "bottle") - if (stow?.type != 'bottle') throw new Error() // for type checker - - assertEquals(stow.pkg.project, "haskell.org") - assertEquals(stow.pkg.version.toString(), "1.2.3.4") - assertEquals(stow.compression, 'xz') - assertEquals(stow.host!.arch, "aarch64") - assertEquals(stow.host!.platform, "darwin") - }) -}) diff --git a/tests/utils.ts b/tests/utils.ts deleted file mode 100644 index b4f60971..00000000 --- a/tests/utils.ts +++ /dev/null @@ -1,94 +0,0 @@ -import Path from "path" - -interface Parameters { args: string[], net?: boolean, env?: Record } - -interface Enhancements { - stdout(): Promise -} - -interface Tea { - run(opts: Parameters): Promise & Enhancements - tmpdir: Path -} - -export async 
function sandbox(body: (tea: Tea) => Promise, { throws }: { throws: boolean } = {throws: true}) { - const TEA_PREFIX = new Path(await Deno.makeTempDir({ prefix: "tea" })) - - const existing_www_cache = Path.home().join(".tea/tea.xyz/var/www") - if (existing_www_cache.isDirectory()) { - // we're not testing our ISP - const to = TEA_PREFIX.join("tea.xyz/var").mkpath().join("www") - const proc = Deno.run({cmd: [ - 'ln', '-s', existing_www_cache.string, to.string - ]}) - await proc.status() - proc.close() - } - - const run = ({args, net, env}: Parameters) => { - const srcroot = Deno.env.get("SRCROOT") - const cmd = [ - 'deno', - 'run', - '--allow-env', '--allow-run', - '--allow-read' // required for Deno.execPath() (sigh) - ] - - if (net) cmd.push('--allow-net') - - const PATH = Deno.env.get("PATH") - const HOME = Deno.env.get("HOME") - const CI = Deno.env.get("HOME") - if (!env) env = {} - Object.assign(env, { - PATH, HOME, CI, - TEA_PREFIX: TEA_PREFIX.string - }) - - cmd.push( - '--unstable', - `--allow-write=${TEA_PREFIX},${existing_www_cache}`, - `--import-map=${srcroot}/import-map.json`, - `${srcroot}/src/app.ts`, - ...args.map(x => `${x}`) - ) - - let stdout: "piped" | undefined - - // we delay instantiating the proc so we can set `stdout` if the user calls that function - // so the contract is the user must call `stdout` within this event loop iteration - const p = Promise.resolve().then(async () => { - const proc = Deno.run({ cmd, cwd: TEA_PREFIX.string, stdout, env, clearEnv: true}) - try { - const status = await proc.status() - if (throws && !status.success) { - throw status - } - if (stdout == 'piped') { - const out = await proc.output() - return new TextDecoder().decode(out) - } else { - return status - } - } finally { - proc.close() - } - }) as Promise & Enhancements - - p.stdout = () => { - stdout = "piped" - return p as unknown as Promise - } - - return p - } - - try { - return await body({ - tmpdir: TEA_PREFIX, - run - }) - } finally { - await 
Deno.remove(TEA_PREFIX.string, { recursive: true }) - } -}