diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 640f202..9e44e84 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,6 +1,7 @@
name: ci
on:
+ workflow_dispatch:
push:
branches:
- master
@@ -9,6 +10,27 @@ on:
- master
jobs:
+ minimal:
+ runs-on: ubuntu-latest
+ steps:
+ -
+ name: Checkout
+ uses: actions/checkout@v2.3.3
+ with:
+ path: action
+ -
+ name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v1
+ -
+ name: Build
+ uses: ./action
+ with:
+ file: ./test/Dockerfile
+ -
+ name: Dump context
+ if: always()
+ uses: crazy-max/ghaction-dump-context@v1
+
git-context:
runs-on: ubuntu-latest
services:
@@ -25,8 +47,6 @@ jobs:
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
- with:
- platforms: all
-
name: Set up Docker Buildx
id: buildx
@@ -53,6 +73,13 @@ jobs:
-
name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}
+ -
+ name: Check digest
+ run: |
+ if [ -z "${{ steps.docker_build.outputs.digest }}" ]; then
+ echo "::error::Digest should not be empty"
+ exit 1
+ fi
-
name: Dump context
if: always()
@@ -74,14 +101,11 @@ jobs:
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
- with:
- platforms: all
-
name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
with:
- version: ${{ matrix.buildx-version }}
driver-opts: network=host
-
name: Build and push
@@ -104,6 +128,13 @@ jobs:
-
name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}
+ -
+ name: Check digest
+ run: |
+ if [ -z "${{ steps.docker_build.outputs.digest }}" ]; then
+ echo "::error::Digest should not be empty"
+ exit 1
+ fi
-
name: Dump context
if: always()
@@ -129,8 +160,6 @@ jobs:
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
- with:
- platforms: all
-
name: Set up Docker Buildx
id: buildx
@@ -157,6 +186,115 @@ jobs:
-
name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}
+ -
+ name: Check digest
+ run: |
+ if [ -z "${{ steps.docker_build.outputs.digest }}" ]; then
+ echo "::error::Digest should not be empty"
+ exit 1
+ fi
+ -
+ name: Dump context
+ if: always()
+ uses: crazy-max/ghaction-dump-context@v1
+
+ error:
+ runs-on: ubuntu-latest
+ steps:
+ -
+ name: Checkout
+ uses: actions/checkout@v2.3.3
+ -
+ name: Set up QEMU
+ uses: docker/setup-qemu-action@v1
+ -
+ name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v1
+ -
+ name: Build
+ id: docker_build
+ continue-on-error: true
+ uses: ./
+ with:
+ context: ./test
+ file: ./test/Dockerfile
+ platforms: linux/386,linux/amd64,linux/arm/v6,linux/arm/v7,linux/arm64,linux/ppc64le,linux/s390x
+ push: true
+ tags: localhost:5000/name/app:latest
+ -
+ name: Check
+ run: |
+ echo "${{ toJson(steps.docker_build) }}"
+ if [ "${{ steps.docker_build.outcome }}" != "failure" ] || [ "${{ steps.docker_build.conclusion }}" != "success" ]; then
+ echo "::error::Should have failed"
+ exit 1
+ fi
+ -
+ name: Dump context
+ if: always()
+ uses: crazy-max/ghaction-dump-context@v1
+
+ docker-driver:
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ push:
+ - true
+ - false
+ services:
+ registry:
+ image: registry:2
+ ports:
+ - 5000:5000
+ steps:
+ -
+ name: Checkout
+ uses: actions/checkout@v2.3.3
+ -
+ name: Build
+ id: docker_build
+ continue-on-error: ${{ matrix.push }}
+ uses: ./
+ with:
+ context: ./test
+ file: ./test/Dockerfile
+ push: ${{ matrix.push }}
+ tags: localhost:5000/name/app:latest
+ -
+ name: Check
+ run: |
+ echo "${{ toJson(steps.docker_build) }}"
+ if [ "${{ matrix.push }}" = "false" ]; then
+ exit 0
+ fi
+ if [ "${{ steps.docker_build.outcome }}" != "failure" ] || [ "${{ steps.docker_build.conclusion }}" != "success" ]; then
+ echo "::error::Should have failed"
+ exit 1
+ fi
+ -
+ name: Dump context
+ if: always()
+ uses: crazy-max/ghaction-dump-context@v1
+
+ export-docker:
+ runs-on: ubuntu-latest
+ steps:
+ -
+ name: Checkout
+ uses: actions/checkout@v2.3.3
+ -
+ name: Build
+ uses: ./
+ with:
+ context: ./test
+ file: ./test/Dockerfile
+ load: true
+ tags: myimage:latest
+ -
+ name: Inspect
+ run: |
+ docker image inspect myimage:latest
-
name: Dump context
if: always()
@@ -185,8 +323,6 @@ jobs:
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
- with:
- platforms: all
-
name: Set up Docker Buildx
id: buildx
@@ -214,6 +350,13 @@ jobs:
-
name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}
+ -
+ name: Check digest
+ run: |
+ if [ -z "${{ steps.docker_build.outputs.digest }}" ]; then
+ echo "::error::Digest should not be empty"
+ exit 1
+ fi
-
name: Dump context
if: always()
@@ -233,13 +376,12 @@ jobs:
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
- with:
- platforms: all
-
name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
with:
+ # TODO: Remove image=moby/buildkit:buildx-stable-1 when moby/buildkit#1727 fixed
driver-opts: |
network=host
image=moby/buildkit:buildx-stable-1
@@ -265,6 +407,13 @@ jobs:
-
name: Image digest (1)
run: echo ${{ steps.docker_build.outputs.digest }}
+ -
+ name: Check digest (1)
+ run: |
+ if [ -z "${{ steps.docker_build.outputs.digest }}" ]; then
+ echo "::error::Digest should not be empty"
+ exit 1
+ fi
-
name: Prune
run: |
@@ -292,7 +441,14 @@ jobs:
name: Image digest (2)
run: echo ${{ steps.docker_build2.outputs.digest }}
-
- name: Check digests
+ name: Check digest (2)
+ run: |
+ if [ -z "${{ steps.docker_build2.outputs.digest }}" ]; then
+ echo "::error::Digest should not be empty"
+ exit 1
+ fi
+ -
+ name: Compare digests
run: |
echo Compare "${{ steps.docker_build.outputs.digest }}" with "${{ steps.docker_build2.outputs.digest }}"
if [ "${{ steps.docker_build.outputs.digest }}" != "${{ steps.docker_build2.outputs.digest }}" ]; then
@@ -320,13 +476,12 @@ jobs:
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
- with:
- platforms: all
-
name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
with:
+ # TODO: Remove image=moby/buildkit:buildx-stable-1 when moby/buildkit#1727 fixed
driver-opts: |
network=host
image=moby/buildkit:buildx-stable-1
@@ -348,7 +503,7 @@ jobs:
uses: ./
with:
context: ./test
- file: ./test/Dockerfile-multi-golang
+ file: ./test/Dockerfile-multi
builder: ${{ steps.buildx.outputs.name }}
platforms: linux/amd64,linux/arm64
push: true
@@ -364,6 +519,13 @@ jobs:
-
name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}
+ -
+ name: Check digest
+ run: |
+ if [ -z "${{ steps.docker_build.outputs.digest }}" ]; then
+ echo "::error::Digest should not be empty"
+ exit 1
+ fi
-
name: Dump context
if: always()
@@ -384,13 +546,12 @@ jobs:
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
- with:
- platforms: all
-
name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
with:
+ # TODO: Remove image=moby/buildkit:buildx-stable-1 when moby/buildkit#1727 fixed
driver-opts: |
network=host
image=moby/buildkit:buildx-stable-1
@@ -409,7 +570,7 @@ jobs:
uses: ./
with:
context: ./test
- file: ./test/Dockerfile-multi-golang
+ file: ./test/Dockerfile-multi
builder: ${{ steps.buildx.outputs.name }}
platforms: linux/amd64,linux/arm64
push: true
@@ -426,7 +587,14 @@ jobs:
name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}
-
- name: Check digests
+ name: Check digest
+ run: |
+ if [ -z "${{ steps.docker_build.outputs.digest }}" ]; then
+ echo "::error::Digest should not be empty"
+ exit 1
+ fi
+ -
+ name: Compare digests
run: |
echo Compare "${{ needs.github-cache-first.outputs.digest }}" with "${{ steps.docker_build.outputs.digest }}"
if [ "${{ needs.github-cache-first.outputs.digest }}" != "${{ steps.docker_build.outputs.digest }}" ]; then
diff --git a/README.md b/README.md
index ac68fe6..3e92f9a 100644
--- a/README.md
+++ b/README.md
@@ -7,10 +7,10 @@
## Upgrade from v1
`v2` of this action includes significant updates and now uses Docker [Buildx](https://github.com/docker/buildx). It
-works with 3 new optional actions ([login](https://github.com/docker/login-action), [setup-buildx](https://github.com/docker/setup-buildx-action)
+works with 3 new actions ([login](https://github.com/docker/login-action), [setup-buildx](https://github.com/docker/setup-buildx-action)
and [setup-qemu](https://github.com/docker/setup-qemu-action)) that we have created. It's also rewritten as a
-[typescript-action](https://github.com/actions/typescript-action/) to be as close as possible of the
-[GitHub Runner](https://github.com/actions/virtual-environments) during its execution (#71 #92).
+[typescript-action](https://github.com/actions/typescript-action/) to be as close as possible to the
+[GitHub Runner](https://github.com/actions/virtual-environments) during its execution.
[Upgrade notes](UPGRADE.md) and many [usage examples](#usage) have been added to handle most use cases but `v1` is
still available through [`releases/v1` branch](https://github.com/docker/build-push-action/tree/releases/v1).
@@ -37,6 +37,7 @@ ___
* [Push to multi-registries](#push-to-multi-registries)
* [Cache to registry](#push-to-multi-registries)
* [Local registry](#local-registry)
+ * [Export image to Docker](#export-image-to-docker)
* [Leverage GitHub cache](#leverage-github-cache)
* [Complete workflow](#complete-workflow)
* [Update DockerHub repo description](#update-dockerhub-repo-description)
@@ -56,7 +57,8 @@ build-secrets, remote cache, etc. and different builder deployment/namespacing o
### Git context
-The default behavior of this action is to use the [Git context invoked by your workflow](https://github.com/docker/build-push-action/blob/master/src/context.ts#L10-L12).
+The default behavior of this action is to use the [Git context invoked](https://github.com/docker/build-push-action/blob/master/src/context.ts#L31-L35)
+by your workflow.
```yaml
name: ci
@@ -88,6 +90,9 @@ jobs:
with:
push: true
tags: user/app:latest
+ build-args: |
+ arg1=value1
+ arg2=value2
-
name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}
@@ -377,6 +382,46 @@ For testing purposes you may need to create a [local registry](https://hub.docke
```
+### Export image to Docker
+
+You may want your build result to be available in the Docker client through `docker images` so that you
+can use it in another step of your workflow:
+
+
+ Show workflow
+
+ ```yaml
+ name: ci
+
+ on:
+ push:
+ branches: master
+
+ jobs:
+ export-docker:
+ runs-on: ubuntu-latest
+ steps:
+ -
+ name: Checkout
+ uses: actions/checkout@v2
+ -
+ name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v1
+ -
+ name: Build
+ uses: docker/build-push-action@v2
+ with:
+ context: .
+ file: ./Dockerfile
+ load: true
+ tags: myimage:latest
+ -
+ name: Inspect
+ run: |
+ docker image inspect myimage:latest
+ ```
+
+
### Leverage GitHub cache
You can leverage [GitHub cache](https://docs.github.com/en/actions/configuring-and-managing-workflows/caching-dependencies-to-speed-up-workflows)
@@ -424,15 +469,20 @@ using [actions/cache](https://github.com/actions/cache) with this action:
```
+> If you want to [export layers for all stages](https://github.com/docker/buildx#--cache-tonametypetypekeyvalue),
+> you have to specify the `mode=max` attribute in `cache-to`.
+
### Complete workflow
-If you come from [`v1`](https://github.com/docker/build-push-action/tree/releases/v1#readme) and you want an
+If you come from [`v1`](https://github.com/docker/build-push-action/tree/releases/v1#readme) and want an
"automatic" tag management through Git reference and [OCI Image Format Specification](https://github.com/opencontainers/image-spec/blob/master/annotations.md)
-for labels, you will have to do it in a dedicated step [for now](https://github.com/docker/build-push-action/issues/116).
+for labels, you will have to do it in a dedicated step.
The following workflow with the `Prepare` step will generate some [outputs](https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjobs_idoutputs)
-to handle tags and labels based on GitHub actions events. This is just an example to show many cases that you
-might want to use:
+to handle tags and labels based on GitHub actions events.
+
+This is just an example showing many of the cases you might encounter; you will have to adapt it
+to your needs:
Show workflow
@@ -540,8 +590,8 @@ might want to use:
### Update DockerHub repo description
-You can update the [Docker Hub repository description](https://docs.docker.com/docker-hub/repos/) using
-a third-party action called [Docker Hub Description](https://github.com/peter-evans/dockerhub-description)
+You can update the [DockerHub repository description](https://docs.docker.com/docker-hub/repos/) using
+a third-party action called [DockerHub Description](https://github.com/peter-evans/dockerhub-description)
with this action:
@@ -592,46 +642,38 @@ with this action:
Following inputs can be used as `step.with` keys
-| Name | Type | Description |
-|---------------------|---------|------------------------------------|
-| `builder` | String | Builder instance (see [setup-buildx](https://github.com/docker/setup-buildx-action) action) |
-| `context` | String | Build's context is the set of files located in the specified [`PATH` or `URL`](https://docs.docker.com/engine/reference/commandline/build/) (default [Git context](#git-context)) |
-| `file` | String | Path to the Dockerfile (default `Dockerfile`) |
-| `build-args` | List | List of build-time variables |
-| `labels` | List | List of metadata for an image |
-| `tags` | List | List of tags |
-| `pull` | Bool | Always attempt to pull a newer version of the image (default `false`) |
-| `target` | String | Sets the target stage to build |
-| `allow` | List | List of [extra privileged entitlement](https://github.com/docker/buildx#--allowentitlement) (eg. `network.host,security.insecure`) |
-| `no-cache` | Bool | Do not use cache when building the image (default `false`) |
-| `platforms` | List | List of [target platforms](https://github.com/docker/buildx#---platformvaluevalue) for build |
-| `load` | Bool | [Load](https://github.com/docker/buildx#--load) is a shorthand for `--output=type=docker` (default `false`) |
-| `push` | Bool | [Push](https://github.com/docker/buildx#--push) is a shorthand for `--output=type=registry` (default `false`) |
-| `outputs` | CSV | List of [output destinations](https://github.com/docker/buildx#-o---outputpath-typetypekeyvalue) (format: `type=local,dest=path`) |
-| `cache-from` | CSV | List of [external cache sources](https://github.com/docker/buildx#--cache-fromnametypetypekeyvalue) (eg. `type=local,src=path/to/dir`) |
-| `cache-to` | CSV | List of [cache export destinations](https://github.com/docker/buildx#--cache-tonametypetypekeyvalue) (eg. `type=local,dest=path/to/dir`) |
-| `secrets` | CSV | List of secrets to expose to the build (eg. `key=value`, `GIT_AUTH_TOKEN=mytoken`) |
-
-> `List` type can be a comma or newline-delimited string
-> ```yaml
-> tags: name/app:latest,name/app:1.0.0
-> ```
-> ```yaml
-> tags: |
-> name/app:latest
-> name/app:1.0.0
-> ```
-
-> `CSV` type must be a newline-delimited string
-> ```yaml
-> cache-from: user/app:cache
-> ```
+> `List` type is a newline-delimited string
> ```yaml
> cache-from: |
> user/app:cache
> type=local,src=path/to/dir
> ```
+> `CSV` type is a comma-delimited string
+> ```yaml
+> tags: name/app:latest,name/app:1.0.0
+> ```
+
+| Name | Type | Description |
+|---------------------|----------|------------------------------------|
+| `builder` | String | Builder instance (see [setup-buildx](https://github.com/docker/setup-buildx-action) action) |
+| `context` | String | Build's context is the set of files located in the specified [`PATH` or `URL`](https://docs.docker.com/engine/reference/commandline/build/) (default [Git context](#git-context)) |
+| `file` | String | Path to the Dockerfile (default `Dockerfile`) |
+| `build-args` | List | List of build-time variables |
+| `labels` | List | List of metadata for an image |
+| `tags` | List/CSV | List of tags |
+| `pull` | Bool | Always attempt to pull a newer version of the image (default `false`) |
+| `target` | String | Sets the target stage to build |
+| `allow` | List/CSV | List of [extra privileged entitlement](https://github.com/docker/buildx#--allowentitlement) (eg. `network.host,security.insecure`) |
+| `no-cache` | Bool | Do not use cache when building the image (default `false`) |
+| `platforms` | List/CSV | List of [target platforms](https://github.com/docker/buildx#---platformvaluevalue) for build |
+| `load` | Bool | [Load](https://github.com/docker/buildx#--load) is a shorthand for `--output=type=docker` (default `false`) |
+| `push` | Bool | [Push](https://github.com/docker/buildx#--push) is a shorthand for `--output=type=registry` (default `false`) |
+| `outputs` | List | List of [output destinations](https://github.com/docker/buildx#-o---outputpath-typetypekeyvalue) (format: `type=local,dest=path`) |
+| `cache-from` | List | List of [external cache sources](https://github.com/docker/buildx#--cache-fromnametypetypekeyvalue) (eg. `type=local,src=path/to/dir`) |
+| `cache-to` | List | List of [cache export destinations](https://github.com/docker/buildx#--cache-tonametypetypekeyvalue) (eg. `type=local,dest=path/to/dir`) |
+| `secrets` | List | List of secrets to expose to the build (eg. `key=value`, `GIT_AUTH_TOKEN=mytoken`) |
+
### outputs
Following outputs are available
diff --git a/UPGRADE.md b/UPGRADE.md
index bda8509..95d9c6b 100644
--- a/UPGRADE.md
+++ b/UPGRADE.md
@@ -65,7 +65,9 @@ steps:
file: ./Dockerfile
pull: true
push: true
- build-args: arg1=value1,arg2=value2
+ build-args: |
+ arg1=value1
+ arg2=value2
cache-from: type=registry,ref=myorg/myrepository:latest
cache-to: type=inline
tags: myorg/myrepository:latest
diff --git a/__tests__/buildx.test.ts b/__tests__/buildx.test.ts
index 85f8276..0c9ef5f 100644
--- a/__tests__/buildx.test.ts
+++ b/__tests__/buildx.test.ts
@@ -1,10 +1,24 @@
-import fs from 'fs';
+import * as fs from 'fs';
+import * as path from 'path';
import * as semver from 'semver';
import * as buildx from '../src/buildx';
import * as exec from '@actions/exec';
+import * as context from '../src/context';
const digest = 'sha256:bfb45ab72e46908183546477a08f8867fc40cebadd00af54b071b097aed127a9';
+jest.spyOn(context, 'tmpDir').mockImplementation((): string => {
+ const tmpDir = path.join('/tmp/.docker-build-push-jest').split(path.sep).join(path.posix.sep);
+ if (!fs.existsSync(tmpDir)) {
+ fs.mkdirSync(tmpDir, {recursive: true});
+ }
+ return tmpDir;
+});
+
+jest.spyOn(context, 'tmpNameSync').mockImplementation((): string => {
+ return path.join('/tmp/.docker-build-push-jest', '.tmpname-jest').split(path.sep).join(path.posix.sep);
+});
+
describe('getImageID', () => {
it('matches', async () => {
const imageIDFile = await buildx.getImageIDFile();
@@ -16,9 +30,68 @@ describe('getImageID', () => {
});
});
+describe('isLocalOrTarExporter', () => {
+ // prettier-ignore
+ test.each([
+ [
+ [
+ 'type=registry,ref=user/app',
+ ],
+ false
+ ],
+ [
+ [
+ 'type=docker',
+ ],
+ false
+ ],
+ [
+ [
+ 'type=local,dest=./release-out'
+ ],
+ true
+ ],
+ [
+ [
+ 'type=tar,dest=/tmp/image.tar'
+ ],
+ true
+ ],
+ [
+ [
+ 'type=docker',
+ 'type=tar,dest=/tmp/image.tar'
+ ],
+ true
+ ],
+ [
+ [
+ '"type=tar","dest=/tmp/image.tar"'
+ ],
+ true
+ ],
+ [
+ [
+ '" type= local" , dest=./release-out'
+ ],
+ true
+ ],
+ [
+ [
+ '.'
+ ],
+ true
+ ],
+ ])(
+ 'given %p returns %p',
+ async (outputs: Array, expected: boolean) => {
+ expect(buildx.isLocalOrTarExporter(outputs)).toEqual(expected);
+ }
+ );
+});
+
describe('getVersion', () => {
it('valid', async () => {
- await exec.exec('docker', ['buildx', 'version']);
const version = await buildx.getVersion();
console.log(`version: ${version}`);
expect(semver.valid(version)).not.toBeNull();
diff --git a/__tests__/context.test.ts b/__tests__/context.test.ts
index ce53efc..4c87e1b 100644
--- a/__tests__/context.test.ts
+++ b/__tests__/context.test.ts
@@ -1,5 +1,177 @@
+import * as fs from 'fs';
+import * as path from 'path';
+import * as buildx from '../src/buildx';
import * as context from '../src/context';
+jest.spyOn(context, 'defaultContext').mockImplementation((): string => {
+ return 'https://github.com/docker/build-push-action.git#test-jest';
+});
+
+jest.spyOn(context, 'tmpDir').mockImplementation((): string => {
+ const tmpDir = path.join('/tmp/.docker-build-push-jest').split(path.sep).join(path.posix.sep);
+ if (!fs.existsSync(tmpDir)) {
+ fs.mkdirSync(tmpDir, {recursive: true});
+ }
+ return tmpDir;
+});
+
+jest.spyOn(context, 'tmpNameSync').mockImplementation((): string => {
+ return path.join('/tmp/.docker-build-push-jest', '.tmpname-jest').split(path.sep).join(path.posix.sep);
+});
+
+describe('getArgs', () => {
+ beforeEach(() => {
+ process.env = Object.keys(process.env).reduce((object, key) => {
+ if (!key.startsWith('INPUT_')) {
+ object[key] = process.env[key];
+ }
+ return object;
+ }, {});
+ });
+
+ // prettier-ignore
+ test.each([
+ [
+ '0.4.1',
+ new Map([
+ ['context', '.'],
+ ]),
+ [
+ 'buildx',
+ 'build',
+ '--iidfile', '/tmp/.docker-build-push-jest/iidfile',
+ '--file', 'Dockerfile',
+ '.'
+ ]
+ ],
+ [
+ '0.4.2',
+ new Map([
+ ['build-args', 'MY_ARG=val1,val2,val3\nARG=val'],
+ ]),
+ [
+ 'buildx',
+ 'build',
+ '--build-arg', 'MY_ARG=val1,val2,val3',
+ '--build-arg', 'ARG=val',
+ '--iidfile', '/tmp/.docker-build-push-jest/iidfile',
+ '--file', 'Dockerfile',
+ 'https://github.com/docker/build-push-action.git#test-jest'
+ ]
+ ],
+ [
+ '0.4.2',
+ new Map([
+ ['context', '.'],
+ ['labels', 'org.opencontainers.image.title=buildkit\norg.opencontainers.image.description=concurrent, cache-efficient, and Dockerfile-agnostic builder toolkit'],
+ ['outputs', 'type=local,dest=./release-out']
+ ]),
+ [
+ 'buildx',
+ 'build',
+ '--label', 'org.opencontainers.image.title=buildkit',
+ '--label', 'org.opencontainers.image.description=concurrent, cache-efficient, and Dockerfile-agnostic builder toolkit',
+ '--output', 'type=local,dest=./release-out',
+ '--file', 'Dockerfile',
+ '.'
+ ]
+ ],
+ [
+ '0.4.1',
+ new Map([
+ ['context', '.'],
+ ['platforms', 'linux/amd64,linux/arm64']
+ ]),
+ [
+ 'buildx',
+ 'build',
+ '--platform', 'linux/amd64,linux/arm64',
+ '--file', 'Dockerfile',
+ '.'
+ ]
+ ],
+ [
+ '0.4.1',
+ new Map([
+ ['context', '.']
+ ]),
+ [
+ 'buildx',
+ 'build',
+ '--iidfile', '/tmp/.docker-build-push-jest/iidfile',
+ '--file', 'Dockerfile',
+ '.'
+ ]
+ ],
+ [
+ '0.4.2',
+ new Map([
+ ['context', '.'],
+ ['secrets', 'GIT_AUTH_TOKEN=abcdefghijklmno0123456789'],
+ ]),
+ [
+ 'buildx',
+ 'build',
+ '--iidfile', '/tmp/.docker-build-push-jest/iidfile',
+ '--secret', 'id=GIT_AUTH_TOKEN,src=/tmp/.docker-build-push-jest/.tmpname-jest',
+ '--file', 'Dockerfile',
+ '.'
+ ]
+ ],
+ [
+ '0.4.2',
+ new Map([
+ ['github-token', 'abcdefghijklmno0123456789'],
+ ['outputs', '.']
+ ]),
+ [
+ 'buildx',
+ 'build',
+ '--output', '.',
+ '--secret', 'id=GIT_AUTH_TOKEN,src=/tmp/.docker-build-push-jest/.tmpname-jest',
+ '--file', 'Dockerfile',
+ 'https://github.com/docker/build-push-action.git#test-jest'
+ ]
+ ],
+ [
+ '0.4.2',
+ new Map([
+ ['context', 'https://github.com/docker/build-push-action.git#heads/master'],
+ ['tag', 'localhost:5000/name/app:latest'],
+ ['platforms', 'linux/amd64,linux/arm64'],
+ ['secrets', 'GIT_AUTH_TOKEN=abcdefghijklmno0123456789'],
+ ['file', './test/Dockerfile'],
+ ['builder', 'builder-git-context-2'],
+ ['push', 'true']
+ ]),
+ [
+ 'buildx',
+ 'build',
+ '--platform', 'linux/amd64,linux/arm64',
+ '--iidfile', '/tmp/.docker-build-push-jest/iidfile',
+ '--secret', 'id=GIT_AUTH_TOKEN,src=/tmp/.docker-build-push-jest/.tmpname-jest',
+ '--file', './test/Dockerfile',
+ '--builder', 'builder-git-context-2',
+ '--push',
+ 'https://github.com/docker/build-push-action.git#heads/master'
+ ]
+ ]
+ ])(
+ 'given %p with %p as inputs, returns %p',
+ async (buildxVersion: string, inputs: Map, expected: Array) => {
+ await inputs.forEach((value: string, name: string) => {
+ setInput(name, value);
+ });
+ const defContext = context.defaultContext();
+ const inp = await context.getInputs(defContext);
+ console.log(inp);
+ const res = await context.getArgs(inp, defContext, buildxVersion);
+ console.log(res);
+ expect(res).toEqual(expected);
+ }
+ );
+});
+
describe('getInputList', () => {
it('handles single line correctly', async () => {
await setInput('foo', 'bar');
diff --git a/dist/index.js b/dist/index.js
index b230ccf..aec73e3 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -70,7 +70,7 @@ module.exports = globSync
globSync.GlobSync = GlobSync
var fs = __webpack_require__(747)
-var rp = __webpack_require__(863)
+var rp = __webpack_require__(290)
var minimatch = __webpack_require__(973)
var Minimatch = minimatch.Minimatch
var Glob = __webpack_require__(957).Glob
@@ -2377,27 +2377,30 @@ const fs = __importStar(__webpack_require__(747));
const os = __importStar(__webpack_require__(87));
const buildx = __importStar(__webpack_require__(295));
const context = __importStar(__webpack_require__(842));
+const exec = __importStar(__webpack_require__(757));
const stateHelper = __importStar(__webpack_require__(647));
const core = __importStar(__webpack_require__(186));
-const exec = __importStar(__webpack_require__(514));
function run() {
return __awaiter(this, void 0, void 0, function* () {
try {
if (os.platform() !== 'linux') {
- core.setFailed('Only supported on linux platform');
- return;
+ throw new Error(`Only supported on linux platform`);
}
if (!(yield buildx.isAvailable())) {
- core.setFailed(`Buildx is required. See https://github.com/docker/setup-buildx-action to set up buildx.`);
- return;
+ throw new Error(`Buildx is required. See https://github.com/docker/setup-buildx-action to set up buildx.`);
}
- stateHelper.setTmpDir(context.tmpDir);
+ stateHelper.setTmpDir(context.tmpDir());
const buildxVersion = yield buildx.getVersion();
core.info(`📣 Buildx version: ${buildxVersion}`);
- let inputs = yield context.getInputs();
+ const defContext = context.defaultContext();
+ let inputs = yield context.getInputs(defContext);
core.info(`🏃 Starting build...`);
- const args = yield context.getArgs(inputs, buildxVersion);
- yield exec.exec('docker', args);
+ const args = yield context.getArgs(inputs, defContext, buildxVersion);
+ yield exec.exec('docker', args).then(res => {
+ if (res.stderr != '' && !res.success) {
+ throw new Error(`buildx call failed with: ${res.stderr.match(/(.*)\s*$/)[0]}`);
+ }
+ });
const imageID = yield buildx.getImageID();
if (imageID) {
core.info('🛒 Extracting digest...');
@@ -5182,6 +5185,79 @@ function toCommandValue(input) {
exports.toCommandValue = toCommandValue;
//# sourceMappingURL=utils.js.map
+/***/ }),
+
+/***/ 290:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = realpath
+realpath.realpath = realpath
+realpath.sync = realpathSync
+realpath.realpathSync = realpathSync
+realpath.monkeypatch = monkeypatch
+realpath.unmonkeypatch = unmonkeypatch
+
+var fs = __webpack_require__(747)
+var origRealpath = fs.realpath
+var origRealpathSync = fs.realpathSync
+
+var version = process.version
+var ok = /^v[0-5]\./.test(version)
+var old = __webpack_require__(734)
+
+function newError (er) {
+ return er && er.syscall === 'realpath' && (
+ er.code === 'ELOOP' ||
+ er.code === 'ENOMEM' ||
+ er.code === 'ENAMETOOLONG'
+ )
+}
+
+function realpath (p, cache, cb) {
+ if (ok) {
+ return origRealpath(p, cache, cb)
+ }
+
+ if (typeof cache === 'function') {
+ cb = cache
+ cache = null
+ }
+ origRealpath(p, cache, function (er, result) {
+ if (newError(er)) {
+ old.realpath(p, cache, cb)
+ } else {
+ cb(er, result)
+ }
+ })
+}
+
+function realpathSync (p, cache) {
+ if (ok) {
+ return origRealpathSync(p, cache)
+ }
+
+ try {
+ return origRealpathSync(p, cache)
+ } catch (er) {
+ if (newError(er)) {
+ return old.realpathSync(p, cache)
+ } else {
+ throw er
+ }
+ }
+}
+
+function monkeypatch () {
+ fs.realpath = realpath
+ fs.realpathSync = realpathSync
+}
+
+function unmonkeypatch () {
+ fs.realpath = origRealpath
+ fs.realpathSync = origRealpathSync
+}
+
+
/***/ }),
/***/ 293:
@@ -5253,16 +5329,16 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseVersion = exports.getVersion = exports.isAvailable = exports.getSecret = exports.getImageID = exports.getImageIDFile = void 0;
+exports.parseVersion = exports.getVersion = exports.isAvailable = exports.hasGitAuthToken = exports.isLocalOrTarExporter = exports.getSecret = exports.getImageID = exports.getImageIDFile = void 0;
const fs_1 = __importDefault(__webpack_require__(747));
const path_1 = __importDefault(__webpack_require__(622));
-const tmp_1 = __importDefault(__webpack_require__(517));
+const sync_1 = __importDefault(__webpack_require__(750));
const semver = __importStar(__webpack_require__(383));
const context = __importStar(__webpack_require__(842));
const exec = __importStar(__webpack_require__(757));
function getImageIDFile() {
return __awaiter(this, void 0, void 0, function* () {
- return path_1.default.join(context.tmpDir, 'iidfile');
+ return path_1.default.join(context.tmpDir(), 'iidfile').split(path_1.default.sep).join(path_1.default.posix.sep);
});
}
exports.getImageIDFile = getImageIDFile;
@@ -5279,14 +5355,44 @@ exports.getImageID = getImageID;
function getSecret(kvp) {
return __awaiter(this, void 0, void 0, function* () {
const [key, value] = kvp.split('=');
- const secretFile = tmp_1.default.tmpNameSync({
- tmpdir: context.tmpDir
+ const secretFile = context.tmpNameSync({
+ tmpdir: context.tmpDir()
});
yield fs_1.default.writeFileSync(secretFile, value);
return `id=${key},src=${secretFile}`;
});
}
exports.getSecret = getSecret;
+function isLocalOrTarExporter(outputs) {
+ for (let output of sync_1.default(outputs.join(`\n`), {
+ delimiter: ',',
+ trim: true,
+ columns: false,
+ relax_column_count: true
+ })) {
+ // Local if no type is defined
+ // https://github.com/docker/buildx/blob/d2bf42f8b4784d83fde17acb3ed84703ddc2156b/build/output.go#L29-L43
+ if (output.length == 1 && !output[0].startsWith('type=')) {
+ return true;
+ }
+ for (let [key, value] of output.map(chunk => chunk.split('=').map(item => item.trim()))) {
+ if (key == 'type' && (value == 'local' || value == 'tar')) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+exports.isLocalOrTarExporter = isLocalOrTarExporter;
+function hasGitAuthToken(secrets) {
+ for (let secret of secrets) {
+ if (secret.startsWith('GIT_AUTH_TOKEN=')) {
+ return true;
+ }
+ }
+ return false;
+}
+exports.hasGitAuthToken = hasGitAuthToken;
function isAvailable() {
return __awaiter(this, void 0, void 0, function* () {
return yield exec.exec(`docker`, ['buildx'], true).then(res => {
@@ -5909,6 +6015,13 @@ module.exports = require("assert");
/***/ }),
+/***/ 373:
+/***/ (function(module) {
+
+module.exports = require("crypto");
+
+/***/ }),
+
/***/ 380:
/***/ (function(module, __unusedexports, __webpack_require__) {
@@ -5943,7 +6056,7 @@ module.exports = {
patch: __webpack_require__(866),
prerelease: __webpack_require__(16),
compare: __webpack_require__(309),
- rcompare: __webpack_require__(499),
+ rcompare: __webpack_require__(417),
compareLoose: __webpack_require__(804),
compareBuild: __webpack_require__(156),
sort: __webpack_require__(426),
@@ -5969,7 +6082,7 @@ module.exports = {
ltr: __webpack_require__(323),
intersects: __webpack_require__(8),
simplifyRange: __webpack_require__(561),
- subset: __webpack_require__(807),
+ subset: __webpack_require__(863),
}
@@ -6172,9 +6285,12 @@ module.exports = require("stream");
/***/ }),
/***/ 417:
-/***/ (function(module) {
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const compare = __webpack_require__(309)
+const rcompare = (a, b, loose) => compare(b, a, loose)
+module.exports = rcompare
-module.exports = require("crypto");
/***/ }),
@@ -9272,16 +9388,6 @@ function slice (args) {
module.exports = [["8740","䏰䰲䘃䖦䕸𧉧䵷䖳𧲱䳢𧳅㮕䜶䝄䱇䱀𤊿𣘗𧍒𦺋𧃒䱗𪍑䝏䗚䲅𧱬䴇䪤䚡𦬣爥𥩔𡩣𣸆𣽡晍囻"],["8767","綕夝𨮹㷴霴𧯯寛𡵞媤㘥𩺰嫑宷峼杮薓𩥅瑡璝㡵𡵓𣚞𦀡㻬"],["87a1","𥣞㫵竼龗𤅡𨤍𣇪𠪊𣉞䌊蒄龖鐯䤰蘓墖靊鈘秐稲晠権袝瑌篅枂稬剏遆㓦珄𥶹瓆鿇垳䤯呌䄱𣚎堘穲𧭥讏䚮𦺈䆁𥶙箮𢒼鿈𢓁𢓉𢓌鿉蔄𣖻䂴鿊䓡𪷿拁灮鿋"],["8840","㇀",4,"𠄌㇅𠃑𠃍㇆㇇𠃋𡿨㇈𠃊㇉㇊㇋㇌𠄎㇍㇎ĀÁǍÀĒÉĚÈŌÓǑÒÊ̄ẾÊ̌ỀÊāáǎàɑēéěèīíǐìōóǒòūúǔùǖǘǚ"],["88a1","ǜüê̄ếê̌ềêɡ⏚⏛"],["8940","𪎩𡅅"],["8943","攊"],["8946","丽滝鵎釟"],["894c","𧜵撑会伨侨兖兴农凤务动医华发变团声处备夲头学实実岚庆总斉柾栄桥济炼电纤纬纺织经统缆缷艺苏药视设询车轧轮"],["89a1","琑糼緍楆竉刧"],["89ab","醌碸酞肼"],["89b0","贋胶𠧧"],["89b5","肟黇䳍鷉鸌䰾𩷶𧀎鸊𪄳㗁"],["89c1","溚舾甙"],["89c5","䤑马骏龙禇𨑬𡷊𠗐𢫦两亁亀亇亿仫伷㑌侽㹈倃傈㑽㒓㒥円夅凛凼刅争剹劐匧㗇厩㕑厰㕓参吣㕭㕲㚁咓咣咴咹哐哯唘唣唨㖘唿㖥㖿嗗㗅"],["8a40","𧶄唥"],["8a43","𠱂𠴕𥄫喐𢳆㧬𠍁蹆𤶸𩓥䁓𨂾睺𢰸㨴䟕𨅝𦧲𤷪擝𠵼𠾴𠳕𡃴撍蹾𠺖𠰋𠽤𢲩𨉖𤓓"],["8a64","𠵆𩩍𨃩䟴𤺧𢳂骲㩧𩗴㿭㔆𥋇𩟔𧣈𢵄鵮頕"],["8a76","䏙𦂥撴哣𢵌𢯊𡁷㧻𡁯"],["8aa1","𦛚𦜖𧦠擪𥁒𠱃蹨𢆡𨭌𠜱"],["8aac","䠋𠆩㿺塳𢶍"],["8ab2","𤗈𠓼𦂗𠽌𠶖啹䂻䎺"],["8abb","䪴𢩦𡂝膪飵𠶜捹㧾𢝵跀嚡摼㹃"],["8ac9","𪘁𠸉𢫏𢳉"],["8ace","𡃈𣧂㦒㨆𨊛㕸𥹉𢃇噒𠼱𢲲𩜠㒼氽𤸻"],["8adf","𧕴𢺋𢈈𪙛𨳍𠹺𠰴𦠜羓𡃏𢠃𢤹㗻𥇣𠺌𠾍𠺪㾓𠼰𠵇𡅏𠹌"],["8af6","𠺫𠮩𠵈𡃀𡄽㿹𢚖搲𠾭"],["8b40","𣏴𧘹𢯎𠵾𠵿𢱑𢱕㨘𠺘𡃇𠼮𪘲𦭐𨳒𨶙𨳊閪哌苄喹"],["8b55","𩻃鰦骶𧝞𢷮煀腭胬尜𦕲脴㞗卟𨂽醶𠻺𠸏𠹷𠻻㗝𤷫㘉𠳖嚯𢞵𡃉𠸐𠹸𡁸𡅈𨈇𡑕𠹹𤹐𢶤婔𡀝𡀞𡃵𡃶垜𠸑"],["8ba1","𧚔𨋍𠾵𠹻𥅾㜃𠾶𡆀𥋘𪊽𤧚𡠺𤅷𨉼墙剨㘚𥜽箲孨䠀䬬鼧䧧鰟鮍𥭴𣄽嗻㗲嚉丨夂𡯁屮靑𠂆乛亻㔾尣彑忄㣺扌攵歺氵氺灬爫丬犭𤣩罒礻糹罓𦉪㓁"],["8bde","𦍋耂肀𦘒𦥑卝衤见𧢲讠贝钅镸长门𨸏韦页风飞饣𩠐鱼鸟黄歯龜丷𠂇阝户钢"],["8c40","倻淾𩱳龦㷉袏𤅎灷峵䬠𥇍㕙𥴰愢𨨲辧釶熑朙玺𣊁𪄇㲋𡦀䬐磤琂冮𨜏䀉橣𪊺䈣蘏𠩯稪𩥇𨫪靕灍匤𢁾鏴盙𨧣龧矝亣俰傼丯众龨吴綋墒壐𡶶庒庙忂𢜒斋"],["8ca1","𣏹椙橃𣱣泿"],["8ca7","爀𤔅玌㻛𤨓嬕璹讃𥲤𥚕窓篬糃繬苸薗龩袐龪躹龫迏蕟駠鈡龬𨶹𡐿䁱䊢娚"],["8cc9","顨杫䉶圽"],["8cce","藖𤥻芿𧄍䲁𦵴嵻𦬕𦾾龭龮宖龯曧繛湗秊㶈䓃𣉖𢞖䎚䔶"],["8ce6","峕𣬚諹屸㴒𣕑嵸龲煗䕘𤃬𡸣䱷㥸㑊𠆤𦱁諌侴𠈹妿腬顖𩣺弻"],["8d40","𠮟"],["8d42","𢇁𨥭䄂䚻𩁹㼇龳𪆵䃸㟖䛷𦱆䅼𨚲𧏿䕭㣔𥒚䕡䔛䶉䱻䵶䗪㿈𤬏㙡䓞䒽䇭崾嵈嵖㷼㠏嶤嶹㠠㠸幂庽弥徃㤈㤔㤿㥍惗愽峥㦉憷憹懏㦸戬抐拥挘㧸嚱"],["8da1","㨃揢揻搇摚㩋擀崕嘡龟㪗斆㪽旿晓㫲暒㬢朖㭂枤栀㭘桊梄㭲㭱㭻椉楃牜楤榟榅㮼槖㯝橥橴橱檂㯬檙㯲檫檵櫔櫶殁毁毪汵沪㳋洂洆洦涁㳯涤涱渕渘温溆𨧀溻滢滚齿滨滩漤漴㵆𣽁澁澾㵪㵵熷岙㶊瀬㶑灐灔灯灿炉𠌥䏁㗱𠻘"],["8e40","𣻗垾𦻓焾𥟠㙎榢𨯩孴穉𥣡𩓙穥穽𥦬窻窰竂竃燑𦒍䇊竚竝竪䇯咲𥰁笋筕笩𥌎𥳾箢筯莜𥮴𦱿篐萡箒箸𥴠㶭𥱥蒒篺簆簵𥳁籄粃𤢂粦晽𤕸糉糇糦籴糳糵糎"],["8ea1","繧䔝𦹄絝𦻖璍綉綫焵綳緒𤁗𦀩緤㴓緵𡟹緥𨍭縝𦄡𦅚繮纒䌫鑬縧罀罁罇礶𦋐駡羗𦍑羣𡙡𠁨䕜𣝦䔃𨌺翺𦒉者耈耝耨耯𪂇𦳃耻耼聡𢜔䦉𦘦𣷣𦛨朥肧𨩈脇脚墰𢛶汿𦒘𤾸擧𡒊舘𡡞橓𤩥𤪕䑺舩𠬍𦩒𣵾俹𡓽蓢荢𦬊𤦧𣔰𡝳𣷸芪椛芳䇛"],["8f40","蕋苐茚𠸖𡞴㛁𣅽𣕚艻苢茘𣺋𦶣𦬅𦮗𣗎㶿茝嗬莅䔋𦶥莬菁菓㑾𦻔橗蕚㒖𦹂𢻯葘𥯤葱㷓䓤檧葊𣲵祘蒨𦮖𦹷𦹃蓞萏莑䒠蒓蓤𥲑䉀𥳀䕃蔴嫲𦺙䔧蕳䔖枿蘖"],["8fa1","𨘥𨘻藁𧂈蘂𡖂𧃍䕫䕪蘨㙈𡢢号𧎚虾蝱𪃸蟮𢰧螱蟚蠏噡虬桖䘏衅衆𧗠𣶹𧗤衞袜䙛袴袵揁装睷𧜏覇覊覦覩覧覼𨨥觧𧤤𧪽誜瞓釾誐𧩙竩𧬺𣾏䜓𧬸煼謌謟𥐰𥕥謿譌譍誩𤩺讐讛誯𡛟䘕衏貛𧵔𧶏貫㜥𧵓賖𧶘𧶽贒贃𡤐賛灜贑𤳉㻐起"],["9040","趩𨀂𡀔𤦊㭼𨆼𧄌竧躭躶軃鋔輙輭𨍥𨐒辥錃𪊟𠩐辳䤪𨧞𨔽𣶻廸𣉢迹𪀔𨚼𨔁𢌥㦀𦻗逷𨔼𧪾遡𨕬𨘋邨𨜓郄𨛦邮都酧㫰醩釄粬𨤳𡺉鈎沟鉁鉢𥖹銹𨫆𣲛𨬌𥗛"],["90a1","𠴱錬鍫𨫡𨯫炏嫃𨫢𨫥䥥鉄𨯬𨰹𨯿鍳鑛躼閅閦鐦閠濶䊹𢙺𨛘𡉼𣸮䧟氜陻隖䅬隣𦻕懚隶磵𨫠隽双䦡𦲸𠉴𦐐𩂯𩃥𤫑𡤕𣌊霱虂霶䨏䔽䖅𤫩灵孁霛靜𩇕靗孊𩇫靟鐥僐𣂷𣂼鞉鞟鞱鞾韀韒韠𥑬韮琜𩐳響韵𩐝𧥺䫑頴頳顋顦㬎𧅵㵑𠘰𤅜"],["9140
","𥜆飊颷飈飇䫿𦴧𡛓喰飡飦飬鍸餹𤨩䭲𩡗𩤅駵騌騻騐驘𥜥㛄𩂱𩯕髠髢𩬅髴䰎鬔鬭𨘀倴鬴𦦨㣃𣁽魐魀𩴾婅𡡣鮎𤉋鰂鯿鰌𩹨鷔𩾷𪆒𪆫𪃡𪄣𪇟鵾鶃𪄴鸎梈"],["91a1","鷄𢅛𪆓𪈠𡤻𪈳鴹𪂹𪊴麐麕麞麢䴴麪麯𤍤黁㭠㧥㴝伲㞾𨰫鼂鼈䮖鐤𦶢鼗鼖鼹嚟嚊齅馸𩂋韲葿齢齩竜龎爖䮾𤥵𤦻煷𤧸𤍈𤩑玞𨯚𡣺禟𨥾𨸶鍩鏳𨩄鋬鎁鏋𨥬𤒹爗㻫睲穃烐𤑳𤏸煾𡟯炣𡢾𣖙㻇𡢅𥐯𡟸㜢𡛻𡠹㛡𡝴𡣑𥽋㜣𡛀坛𤨥𡏾𡊨"],["9240","𡏆𡒶蔃𣚦蔃葕𤦔𧅥𣸱𥕜𣻻𧁒䓴𣛮𩦝𦼦柹㜳㰕㷧塬𡤢栐䁗𣜿𤃡𤂋𤄏𦰡哋嚞𦚱嚒𠿟𠮨𠸍鏆𨬓鎜仸儫㠙𤐶亼𠑥𠍿佋侊𥙑婨𠆫𠏋㦙𠌊𠐔㐵伩𠋀𨺳𠉵諚𠈌亘"],["92a1","働儍侢伃𤨎𣺊佂倮偬傁俌俥偘僼兙兛兝兞湶𣖕𣸹𣺿浲𡢄𣺉冨凃𠗠䓝𠒣𠒒𠒑赺𨪜𠜎剙劤𠡳勡鍮䙺熌𤎌𠰠𤦬𡃤槑𠸝瑹㻞璙琔瑖玘䮎𤪼𤂍叐㖄爏𤃉喴𠍅响𠯆圝鉝雴鍦埝垍坿㘾壋媙𨩆𡛺𡝯𡜐娬妸銏婾嫏娒𥥆𡧳𡡡𤊕㛵洅瑃娡𥺃"],["9340","媁𨯗𠐓鏠璌𡌃焅䥲鐈𨧻鎽㞠尞岞幞幈𡦖𡥼𣫮廍孏𡤃𡤄㜁𡢠㛝𡛾㛓脪𨩇𡶺𣑲𨦨弌弎𡤧𡞫婫𡜻孄蘔𧗽衠恾𢡠𢘫忛㺸𢖯𢖾𩂈𦽳懀𠀾𠁆𢘛憙憘恵𢲛𢴇𤛔𩅍"],["93a1","摱𤙥𢭪㨩𢬢𣑐𩣪𢹸挷𪑛撶挱揑𤧣𢵧护𢲡搻敫楲㯴𣂎𣊭𤦉𣊫唍𣋠𡣙𩐿曎𣊉𣆳㫠䆐𥖄𨬢𥖏𡛼𥕛𥐥磮𣄃𡠪𣈴㑤𣈏𣆂𤋉暎𦴤晫䮓昰𧡰𡷫晣𣋒𣋡昞𥡲㣑𣠺𣞼㮙𣞢𣏾瓐㮖枏𤘪梶栞㯄檾㡣𣟕𤒇樳橒櫉欅𡤒攑梘橌㯗橺歗𣿀𣲚鎠鋲𨯪𨫋"],["9440","銉𨀞𨧜鑧涥漋𤧬浧𣽿㶏渄𤀼娽渊塇洤硂焻𤌚𤉶烱牐犇犔𤞏𤜥兹𤪤𠗫瑺𣻸𣙟𤩊𤤗𥿡㼆㺱𤫟𨰣𣼵悧㻳瓌琼鎇琷䒟𦷪䕑疃㽣𤳙𤴆㽘畕癳𪗆㬙瑨𨫌𤦫𤦎㫻"],["94a1","㷍𤩎㻿𤧅𤣳釺圲鍂𨫣𡡤僟𥈡𥇧睸𣈲眎眏睻𤚗𣞁㩞𤣰琸璛㺿𤪺𤫇䃈𤪖𦆮錇𥖁砞碍碈磒珐祙𧝁𥛣䄎禛蒖禥樭𣻺稺秴䅮𡛦䄲鈵秱𠵌𤦌𠊙𣶺𡝮㖗啫㕰㚪𠇔𠰍竢婙𢛵𥪯𥪜娍𠉛磰娪𥯆竾䇹籝籭䈑𥮳𥺼𥺦糍𤧹𡞰粎籼粮檲緜縇緓罎𦉡"],["9540","𦅜𧭈綗𥺂䉪𦭵𠤖柖𠁎𣗏埄𦐒𦏸𤥢翝笧𠠬𥫩𥵃笌𥸎駦虅驣樜𣐿㧢𤧷𦖭騟𦖠蒀𧄧𦳑䓪脷䐂胆脉腂𦞴飃𦩂艢艥𦩑葓𦶧蘐𧈛媆䅿𡡀嬫𡢡嫤𡣘蚠蜨𣶏蠭𧐢娂"],["95a1","衮佅袇袿裦襥襍𥚃襔𧞅𧞄𨯵𨯙𨮜𨧹㺭蒣䛵䛏㟲訽訜𩑈彍鈫𤊄旔焩烄𡡅鵭貟賩𧷜妚矃姰䍮㛔踪躧𤰉輰轊䋴汘澻𢌡䢛潹溋𡟚鯩㚵𤤯邻邗啱䤆醻鐄𨩋䁢𨫼鐧𨰝𨰻蓥訫閙閧閗閖𨴴瑅㻂𤣿𤩂𤏪㻧𣈥随𨻧𨹦𨹥㻌𤧭𤩸𣿮琒瑫㻼靁𩂰"],["9640","桇䨝𩂓𥟟靝鍨𨦉𨰦𨬯𦎾銺嬑譩䤼珹𤈛鞛靱餸𠼦巁𨯅𤪲頟𩓚鋶𩗗釥䓀𨭐𤩧𨭤飜𨩅㼀鈪䤥萔餻饍𧬆㷽馛䭯馪驜𨭥𥣈檏騡嫾騯𩣱䮐𩥈馼䮽䮗鍽塲𡌂堢𤦸"],["96a1","𡓨硄𢜟𣶸棅㵽鑘㤧慐𢞁𢥫愇鱏鱓鱻鰵鰐魿鯏𩸭鮟𪇵𪃾鴡䲮𤄄鸘䲰鴌𪆴𪃭𪃳𩤯鶥蒽𦸒𦿟𦮂藼䔳𦶤𦺄𦷰萠藮𦸀𣟗𦁤秢𣖜𣙀䤭𤧞㵢鏛銾鍈𠊿碹鉷鑍俤㑀遤𥕝砽硔碶硋𡝗𣇉𤥁㚚佲濚濙瀞瀞吔𤆵垻壳垊鴖埗焴㒯𤆬燫𦱀𤾗嬨𡞵𨩉"],["9740","愌嫎娋䊼𤒈㜬䭻𨧼鎻鎸𡣖𠼝葲𦳀𡐓𤋺𢰦𤏁妔𣶷𦝁綨𦅛𦂤𤦹𤦋𨧺鋥珢㻩璴𨭣𡢟㻡𤪳櫘珳珻㻖𤨾𤪔𡟙𤩦𠎧𡐤𤧥瑈𤤖炥𤥶銄珦鍟𠓾錱𨫎𨨖鎆𨯧𥗕䤵𨪂煫"],["97a1","𤥃𠳿嚤𠘚𠯫𠲸唂秄𡟺緾𡛂𤩐𡡒䔮鐁㜊𨫀𤦭妰𡢿𡢃𧒄媡㛢𣵛㚰鉟婹𨪁𡡢鍴㳍𠪴䪖㦊僴㵩㵌𡎜煵䋻𨈘渏𩃤䓫浗𧹏灧沯㳖𣿭𣸭渂漌㵯𠏵畑㚼㓈䚀㻚䡱姄鉮䤾轁𨰜𦯀堒埈㛖𡑒烾𤍢𤩱𢿣𡊰𢎽梹楧𡎘𣓥𧯴𣛟𨪃𣟖𣏺𤲟樚𣚭𦲷萾䓟䓎"],["9840","𦴦𦵑𦲂𦿞漗𧄉茽𡜺菭𦲀𧁓𡟛妉媂𡞳婡婱𡤅𤇼㜭姯𡜼㛇熎鎐暚𤊥婮娫𤊓樫𣻹𧜶𤑛𤋊焝𤉙𨧡侰𦴨峂𤓎𧹍𤎽樌𤉖𡌄炦焳𤏩㶥泟勇𤩏繥姫崯㷳彜𤩝𡟟綤萦"],["98a1","咅𣫺𣌀𠈔坾𠣕𠘙㿥𡾞𪊶瀃𩅛嵰玏糓𨩙𩐠俈翧狍猐𧫴猸猹𥛶獁獈㺩𧬘遬燵𤣲珡臶㻊県㻑沢国琙琞琟㻢㻰㻴㻺瓓㼎㽓畂畭畲疍㽼痈痜㿀癍㿗癴㿜発𤽜熈嘣覀塩䀝睃䀹条䁅㗛瞘䁪䁯属瞾矋売砘点砜䂨砹硇硑硦葈𥔵礳栃礲䄃"],["9940","䄉禑禙辻稆込䅧窑䆲窼艹䇄竏竛䇏両筢筬筻簒簛䉠䉺类粜䊌粸䊔糭输烀𠳏総緔緐緽羮羴犟䎗耠耥笹耮耱联㷌垴炠肷胩䏭脌猪脎脒畠脔䐁㬹腖腙腚"],["99a1","䐓堺腼膄䐥膓䐭膥埯臁臤艔䒏芦艶苊苘苿䒰荗险榊萅烵葤惣蒈䔄蒾蓡蓸蔐蔸蕒䔻蕯蕰藠䕷虲蚒蚲蛯际螋䘆䘗袮裿褤襇覑𧥧訩訸誔誴豑賔賲贜䞘塟跃䟭仮踺嗘坔蹱嗵躰䠷軎転軤軭軲辷迁迊迌逳駄䢭飠鈓䤞鈨鉘鉫銱銮銿"],["9a40","鋣鋫鋳鋴鋽鍃鎄鎭䥅䥑麿鐗匁鐝鐭鐾䥪鑔鑹锭関䦧间阳䧥枠䨤靀䨵鞲韂噔䫤惨颹䬙飱塄餎餙冴餜餷饂饝饢䭰駅䮝騼鬏窃魩鮁鯝鯱鯴䱭鰠㝯𡯂鵉鰺"],["9aa1","黾噐鶓鶽鷀鷼银辶鹻麬麱麽黆铜黢黱黸竈齄𠂔𠊷𠎠椚铃妬𠓗塀铁㞹𠗕𠘕𠙶𡚺块煳𠫂𠫍𠮿呪吆𠯋咞𠯻𠰻𠱓𠱥𠱼惧𠲍噺𠲵𠳝𠳭𠵯𠶲𠷈楕鰯螥𠸄𠸎𠻗𠾐𠼭𠹳尠𠾼帋𡁜𡁏𡁶朞𡁻𡂈𡂖㙇𡂿𡃓𡄯𡄻卤蒭𡋣𡍵𡌶讁𡕷𡘙𡟃𡟇乸炻𡠭𡥪"],["9b40","𡨭𡩅𡰪𡱰𡲬𡻈拃𡻕𡼕熘桕𢁅槩㛈𢉼𢏗𢏺𢜪𢡱𢥏苽𢥧𢦓𢫕覥𢫨辠𢬎鞸𢬿顇骽𢱌"],["9b62","𢲈𢲷𥯨𢴈𢴒𢶷𢶕𢹂𢽴𢿌𣀳𣁦𣌟𣏞徱晈暿𧩹𣕧𣗳爁𤦺矗𣘚𣜖纇𠍆墵朎"],["9ba1","椘𣪧𧙗𥿢𣸑𣺹𧗾𢂚䣐䪸𤄙𨪚𤋮𤌍𤀻𤌴𤎖𤩅𠗊凒𠘑妟𡺨㮾𣳿𤐄𤓖垈𤙴㦛𤜯𨗨𩧉㝢𢇃譞𨭎駖𤠒𤣻𤨕爉𤫀𠱸奥𤺥𤾆𠝹軚𥀬劏圿煱𥊙𥐙𣽊𤪧喼𥑆𥑮𦭒釔㑳𥔿𧘲𥕞䜘𥕢𥕦𥟇𤤿𥡝偦㓻𣏌惞𥤃䝼𨥈𥪮𥮉𥰆𡶐垡煑澶𦄂𧰒遖𦆲𤾚譢𦐂𦑊"],["9c
40","嵛𦯷輶𦒄𡤜諪𤧶𦒈𣿯𦔒䯀𦖿𦚵𢜛鑥𥟡憕娧晉侻嚹𤔡𦛼乪𤤴陖涏𦲽㘘襷𦞙𦡮𦐑𦡞營𦣇筂𩃀𠨑𦤦鄄𦤹穅鷰𦧺騦𦨭㙟𦑩𠀡禃𦨴𦭛崬𣔙菏𦮝䛐𦲤画补𦶮墶"],["9ca1","㜜𢖍𧁋𧇍㱔𧊀𧊅銁𢅺𧊋錰𧋦𤧐氹钟𧑐𠻸蠧裵𢤦𨑳𡞱溸𤨪𡠠㦤㚹尐秣䔿暶𩲭𩢤襃𧟌𧡘囖䃟𡘊㦡𣜯𨃨𡏅熭荦𧧝𩆨婧䲷𧂯𨦫𧧽𧨊𧬋𧵦𤅺筃祾𨀉澵𪋟樃𨌘厢𦸇鎿栶靝𨅯𨀣𦦵𡏭𣈯𨁈嶅𨰰𨂃圕頣𨥉嶫𤦈斾槕叒𤪥𣾁㰑朶𨂐𨃴𨄮𡾡𨅏"],["9d40","𨆉𨆯𨈚𨌆𨌯𨎊㗊𨑨𨚪䣺揦𨥖砈鉕𨦸䏲𨧧䏟𨧨𨭆𨯔姸𨰉輋𨿅𩃬筑𩄐𩄼㷷𩅞𤫊运犏嚋𩓧𩗩𩖰𩖸𩜲𩣑𩥉𩥪𩧃𩨨𩬎𩵚𩶛纟𩻸𩼣䲤镇𪊓熢𪋿䶑递𪗋䶜𠲜达嗁"],["9da1","辺𢒰边𤪓䔉繿潖檱仪㓤𨬬𧢝㜺躀𡟵𨀤𨭬𨮙𧨾𦚯㷫𧙕𣲷𥘵𥥖亚𥺁𦉘嚿𠹭踎孭𣺈𤲞揞拐𡟶𡡻攰嘭𥱊吚𥌑㷆𩶘䱽嘢嘞罉𥻘奵𣵀蝰东𠿪𠵉𣚺脗鵞贘瘻鱅癎瞹鍅吲腈苷嘥脲萘肽嗪祢噃吖𠺝㗎嘅嗱曱𨋢㘭甴嗰喺咗啲𠱁𠲖廐𥅈𠹶𢱢"],["9e40","𠺢麫絚嗞𡁵抝靭咔賍燶酶揼掹揾啩𢭃鱲𢺳冚㓟𠶧冧呍唞唓癦踭𦢊疱肶蠄螆裇膶萜𡃁䓬猄𤜆宐茋𦢓噻𢛴𧴯𤆣𧵳𦻐𧊶酰𡇙鈈𣳼𪚩𠺬𠻹牦𡲢䝎𤿂𧿹𠿫䃺"],["9ea1","鱝攟𢶠䣳𤟠𩵼𠿬𠸊恢𧖣𠿭"],["9ead","𦁈𡆇熣纎鵐业丄㕷嬍沲卧㚬㧜卽㚥𤘘墚𤭮舭呋垪𥪕𠥹"],["9ec5","㩒𢑥獴𩺬䴉鯭𣳾𩼰䱛𤾩𩖞𩿞葜𣶶𧊲𦞳𣜠挮紥𣻷𣸬㨪逈勌㹴㙺䗩𠒎癀嫰𠺶硺𧼮墧䂿噼鮋嵴癔𪐴麅䳡痹㟻愙𣃚𤏲"],["9ef5","噝𡊩垧𤥣𩸆刴𧂮㖭汊鵼"],["9f40","籖鬹埞𡝬屓擓𩓐𦌵𧅤蚭𠴨𦴢𤫢𠵱"],["9f4f","凾𡼏嶎霃𡷑麁遌笟鬂峑箣扨挵髿篏鬪籾鬮籂粆鰕篼鬉鼗鰛𤤾齚啳寃俽麘俲剠㸆勑坧偖妷帒韈鶫轜呩鞴饀鞺匬愰"],["9fa1","椬叚鰊鴂䰻陁榀傦畆𡝭駚剳"],["9fae","酙隁酜"],["9fb2","酑𨺗捿𦴣櫊嘑醎畺抅𠏼獏籰𥰡𣳽"],["9fc1","𤤙盖鮝个𠳔莾衂"],["9fc9","届槀僭坺刟巵从氱𠇲伹咜哚劚趂㗾弌㗳"],["9fdb","歒酼龥鮗頮颴骺麨麄煺笔"],["9fe7","毺蠘罸"],["9feb","嘠𪙊蹷齓"],["9ff0","跔蹏鸜踁抂𨍽踨蹵竓𤩷稾磘泪詧瘇"],["a040","𨩚鼦泎蟖痃𪊲硓咢贌狢獱謭猂瓱賫𤪻蘯徺袠䒷"],["a055","𡠻𦸅"],["a058","詾𢔛"],["a05b","惽癧髗鵄鍮鮏蟵"],["a063","蠏賷猬霡鮰㗖犲䰇籑饊𦅙慙䰄麖慽"],["a073","坟慯抦戹拎㩜懢厪𣏵捤栂㗒"],["a0a1","嵗𨯂迚𨸹"],["a0a6","僙𡵆礆匲阸𠼻䁥"],["a0ae","矾"],["a0b0","糂𥼚糚稭聦聣絍甅瓲覔舚朌聢𧒆聛瓰脃眤覉𦟌畓𦻑螩蟎臈螌詉貭譃眫瓸蓚㘵榲趦"],["a0d4","覩瑨涹蟁𤀑瓧㷛煶悤憜㳑煢恷"],["a0e2","罱𨬭牐惩䭾删㰘𣳇𥻗𧙖𥔱𡥄𡋾𩤃𦷜𧂭峁𦆭𨨏𣙷𠃮𦡆𤼎䕢嬟𦍌齐麦𦉫"],["a3c0","␀",31,"␡"],["c6a1","①",9,"⑴",9,"ⅰ",9,"丶丿亅亠冂冖冫勹匸卩厶夊宀巛⼳广廴彐彡攴无疒癶辵隶¨ˆヽヾゝゞ〃仝々〆〇ー[]✽ぁ",23],["c740","す",58,"ァアィイ"],["c7a1","ゥ",81,"А",5,"ЁЖ",4],["c840","Л",26,"ёж",25,"⇧↸↹㇏𠃌乚𠂊刂䒑"],["c8a1","龰冈龱𧘇"],["c8cd","¬¦'"㈱№℡゛゜⺀⺄⺆⺇⺈⺊⺌⺍⺕⺜⺝⺥⺧⺪⺬⺮⺶⺼⺾⻆⻊⻌⻍⻏⻖⻗⻞⻣"],["c8f5","ʃɐɛɔɵœøŋʊɪ"],["f9fe","■"],["fa40","𠕇鋛𠗟𣿅蕌䊵珯况㙉𤥂𨧤鍄𡧛苮𣳈砼杄拟𤤳𨦪𠊠𦮳𡌅侫𢓭倈𦴩𧪄𣘀𤪱𢔓倩𠍾徤𠎀𠍇滛𠐟偽儁㑺儎顬㝃萖𤦤𠒇兠𣎴兪𠯿𢃼𠋥𢔰𠖎𣈳𡦃宂蝽𠖳𣲙冲冸"],["faa1","鴴凉减凑㳜凓𤪦决凢卂凭菍椾𣜭彻刋刦刼劵剗劔効勅簕蕂勠蘍𦬓包𨫞啉滙𣾀𠥔𣿬匳卄𠯢泋𡜦栛珕恊㺪㣌𡛨燝䒢卭却𨚫卾卿𡖖𡘓矦厓𨪛厠厫厮玧𥝲㽙玜叁叅汉义埾叙㪫𠮏叠𣿫𢶣叶𠱷吓灹唫晗浛呭𦭓𠵴啝咏咤䞦𡜍𠻝㶴𠵍"],["fb40","𨦼𢚘啇䳭启琗喆喩嘅𡣗𤀺䕒𤐵暳𡂴嘷曍𣊊暤暭噍噏磱囱鞇叾圀囯园𨭦㘣𡉏坆𤆥汮炋坂㚱𦱾埦𡐖堃𡑔𤍣堦𤯵塜墪㕡壠壜𡈼壻寿坃𪅐𤉸鏓㖡够梦㛃湙"],["fba1","𡘾娤啓𡚒蔅姉𠵎𦲁𦴪𡟜姙𡟻𡞲𦶦浱𡠨𡛕姹𦹅媫婣㛦𤦩婷㜈媖瑥嫓𦾡𢕔㶅𡤑㜲𡚸広勐孶斈孼𧨎䀄䡝𠈄寕慠𡨴𥧌𠖥寳宝䴐尅𡭄尓珎尔𡲥𦬨屉䣝岅峩峯嶋𡷹𡸷崐崘嵆𡺤岺巗苼㠭𤤁𢁉𢅳芇㠶㯂帮檊幵幺𤒼𠳓厦亷廐厨𡝱帉廴𨒂"],["fc40","廹廻㢠廼栾鐛弍𠇁弢㫞䢮𡌺强𦢈𢏐彘𢑱彣鞽𦹮彲鍀𨨶徧嶶㵟𥉐𡽪𧃸𢙨釖𠊞𨨩怱暅𡡷㥣㷇㘹垐𢞴祱㹀悞悤悳𤦂𤦏𧩓璤僡媠慤萤慂慈𦻒憁凴𠙖憇宪𣾷"],["fca1","𢡟懓𨮝𩥝懐㤲𢦀𢣁怣慜攞掋𠄘担𡝰拕𢸍捬𤧟㨗搸揸𡎎𡟼撐澊𢸶頔𤂌𥜝擡擥鑻㩦携㩗敍漖𤨨𤨣斅敭敟𣁾斵𤥀䬷旑䃘𡠩无旣忟𣐀昘𣇷𣇸晄𣆤𣆥晋𠹵晧𥇦晳晴𡸽𣈱𨗴𣇈𥌓矅𢣷馤朂𤎜𤨡㬫槺𣟂杞杧杢𤇍𩃭柗䓩栢湐鈼栁𣏦𦶠桝"],["fd40","𣑯槡樋𨫟楳棃𣗍椁椀㴲㨁𣘼㮀枬楡𨩊䋼椶榘㮡𠏉荣傐
槹𣙙𢄪橅𣜃檝㯳枱櫈𩆜㰍欝𠤣惞欵歴𢟍溵𣫛𠎵𡥘㝀吡𣭚毡𣻼毜氷𢒋𤣱𦭑汚舦汹𣶼䓅𣶽𤆤𤤌𤤀"],["fda1","𣳉㛥㳫𠴲鮃𣇹𢒑羏样𦴥𦶡𦷫涖浜湼漄𤥿𤂅𦹲蔳𦽴凇沜渝萮𨬡港𣸯瑓𣾂秌湏媑𣁋濸㜍澝𣸰滺𡒗𤀽䕕鏰潄潜㵎潴𩅰㴻澟𤅄濓𤂑𤅕𤀹𣿰𣾴𤄿凟𤅖𤅗𤅀𦇝灋灾炧炁烌烕烖烟䄄㷨熴熖𤉷焫煅媈煊煮岜𤍥煏鍢𤋁焬𤑚𤨧𤨢熺𨯨炽爎"],["fe40","鑂爕夑鑃爤鍁𥘅爮牀𤥴梽牕牗㹕𣁄栍漽犂猪猫𤠣𨠫䣭𨠄猨献珏玪𠰺𦨮珉瑉𤇢𡛧𤨤昣㛅𤦷𤦍𤧻珷琕椃𤨦琹𠗃㻗瑜𢢭瑠𨺲瑇珤瑶莹瑬㜰瑴鏱樬璂䥓𤪌"],["fea1","𤅟𤩹𨮏孆𨰃𡢞瓈𡦈甎瓩甞𨻙𡩋寗𨺬鎅畍畊畧畮𤾂㼄𤴓疎瑝疞疴瘂瘬癑癏癯癶𦏵皐臯㟸𦤑𦤎皡皥皷盌𦾟葢𥂝𥅽𡸜眞眦着撯𥈠睘𣊬瞯𨥤𨥨𡛁矴砉𡍶𤨒棊碯磇磓隥礮𥗠磗礴碱𧘌辸袄𨬫𦂃𢘜禆褀椂禀𥡗禝𧬹礼禩渪𧄦㺨秆𩄍秔"]];
-/***/ }),
-
-/***/ 499:
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-const compare = __webpack_require__(309)
-const rcompare = (a, b, loose) => compare(b, a, loose)
-module.exports = rcompare
-
-
/***/ }),
/***/ 508:
@@ -9374,7 +9480,7 @@ exports.exec = exec;
const fs = __webpack_require__(747);
const os = __webpack_require__(87);
const path = __webpack_require__(622);
-const crypto = __webpack_require__(417);
+const crypto = __webpack_require__(373);
const _c = { fs: fs.constants, os: os.constants };
const rimraf = __webpack_require__(959);
@@ -12381,6 +12487,38 @@ module.exports = validRange
module.exports = require("fs");
+/***/ }),
+
+/***/ 750:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+
+const parse = __webpack_require__(830)
+
+module.exports = function(data, options={}){
+ if(typeof data === 'string'){
+ data = Buffer.from(data)
+ }
+ const records = options && options.objname ? {} : []
+ const parser = new parse.Parser(options)
+ parser.push = function(record){
+ if(record === null){
+ return
+ }
+ if(options.objname === undefined)
+ records.push(record)
+ else{
+ records[record[0]] = record[1]
+ }
+ }
+ const err1 = parser.__parse(data, false)
+ if(err1 !== undefined) throw err1
+ const err2 = parser.__parse(undefined, true)
+ if(err2 !== undefined) throw err2
+ return records
+}
+
+
/***/ }),
/***/ 757:
@@ -12741,168 +12879,6 @@ const compareLoose = (a, b) => compare(a, b, true)
module.exports = compareLoose
-/***/ }),
-
-/***/ 807:
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-const Range = __webpack_require__(828)
-const { ANY } = __webpack_require__(532)
-const satisfies = __webpack_require__(55)
-const compare = __webpack_require__(309)
-
-// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff:
-// - Every simple range `r1, r2, ...` is a subset of some `R1, R2, ...`
-//
-// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff:
-// - If c is only the ANY comparator
-// - If C is only the ANY comparator, return true
-// - Else return false
-// - Let EQ be the set of = comparators in c
-// - If EQ is more than one, return true (null set)
-// - Let GT be the highest > or >= comparator in c
-// - Let LT be the lowest < or <= comparator in c
-// - If GT and LT, and GT.semver > LT.semver, return true (null set)
-// - If EQ
-// - If GT, and EQ does not satisfy GT, return true (null set)
-// - If LT, and EQ does not satisfy LT, return true (null set)
-// - If EQ satisfies every C, return true
-// - Else return false
-// - If GT
-// - If GT is lower than any > or >= comp in C, return false
-// - If GT is >=, and GT.semver does not satisfy every C, return false
-// - If LT
-// - If LT.semver is greater than that of any > comp in C, return false
-// - If LT is <=, and LT.semver does not satisfy every C, return false
-// - If any C is a = range, and GT or LT are set, return false
-// - Else return true
-
-const subset = (sub, dom, options) => {
- sub = new Range(sub, options)
- dom = new Range(dom, options)
- let sawNonNull = false
-
- OUTER: for (const simpleSub of sub.set) {
- for (const simpleDom of dom.set) {
- const isSub = simpleSubset(simpleSub, simpleDom, options)
- sawNonNull = sawNonNull || isSub !== null
- if (isSub)
- continue OUTER
- }
- // the null set is a subset of everything, but null simple ranges in
- // a complex range should be ignored. so if we saw a non-null range,
- // then we know this isn't a subset, but if EVERY simple range was null,
- // then it is a subset.
- if (sawNonNull)
- return false
- }
- return true
-}
-
-const simpleSubset = (sub, dom, options) => {
- if (sub.length === 1 && sub[0].semver === ANY)
- return dom.length === 1 && dom[0].semver === ANY
-
- const eqSet = new Set()
- let gt, lt
- for (const c of sub) {
- if (c.operator === '>' || c.operator === '>=')
- gt = higherGT(gt, c, options)
- else if (c.operator === '<' || c.operator === '<=')
- lt = lowerLT(lt, c, options)
- else
- eqSet.add(c.semver)
- }
-
- if (eqSet.size > 1)
- return null
-
- let gtltComp
- if (gt && lt) {
- gtltComp = compare(gt.semver, lt.semver, options)
- if (gtltComp > 0)
- return null
- else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<='))
- return null
- }
-
- // will iterate one or zero times
- for (const eq of eqSet) {
- if (gt && !satisfies(eq, String(gt), options))
- return null
-
- if (lt && !satisfies(eq, String(lt), options))
- return null
-
- for (const c of dom) {
- if (!satisfies(eq, String(c), options))
- return false
- }
- return true
- }
-
- let higher, lower
- let hasDomLT, hasDomGT
- for (const c of dom) {
- hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>='
- hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<='
- if (gt) {
- if (c.operator === '>' || c.operator === '>=') {
- higher = higherGT(gt, c, options)
- if (higher === c)
- return false
- } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options))
- return false
- }
- if (lt) {
- if (c.operator === '<' || c.operator === '<=') {
- lower = lowerLT(lt, c, options)
- if (lower === c)
- return false
- } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options))
- return false
- }
- if (!c.operator && (lt || gt) && gtltComp !== 0)
- return false
- }
-
- // if there was a < or >, and nothing in the dom, then must be false
- // UNLESS it was limited by another range in the other direction.
- // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0
- if (gt && hasDomLT && !lt && gtltComp !== 0)
- return false
-
- if (lt && hasDomGT && !gt && gtltComp !== 0)
- return false
-
- return true
-}
-
-// >=1.2.3 is lower than >1.2.3
-const higherGT = (a, b, options) => {
- if (!a)
- return b
- const comp = compare(a.semver, b.semver, options)
- return comp > 0 ? a
- : comp < 0 ? b
- : b.operator === '>' && a.operator === '>=' ? b
- : a
-}
-
-// <=1.2.3 is higher than <1.2.3
-const lowerLT = (a, b, options) => {
- if (!a)
- return b
- const comp = compare(a.semver, b.semver, options)
- return comp < 0 ? a
- : comp > 0 ? b
- : b.operator === '<' && a.operator === '<=' ? b
- : a
-}
-
-module.exports = subset
-
-
/***/ }),
/***/ 818:
@@ -13404,6 +13380,1169 @@ const testSet = (set, version, options) => {
}
+/***/ }),
+
+/***/ 830:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+
+/*
+CSV Parse
+
+Please look at the [project documentation](https://csv.js.org/parse/) for
+additional information.
+*/
+
+const { Transform } = __webpack_require__(413)
+const ResizeableBuffer = __webpack_require__(942)
+
+const tab = 9
+const nl = 10
+const np = 12
+const cr = 13
+const space = 32
+const bom_utf8 = Buffer.from([239, 187, 191])
+
+class Parser extends Transform {
+ constructor(opts = {}){
+ super({...{readableObjectMode: true}, ...opts})
+ const options = {}
+ // Merge with user options
+ for(let opt in opts){
+ options[underscore(opt)] = opts[opt]
+ }
+ // Normalize option `bom`
+ if(options.bom === undefined || options.bom === null || options.bom === false){
+ options.bom = false
+ }else if(options.bom !== true){
+ throw new CsvError('CSV_INVALID_OPTION_BOM', [
+ 'Invalid option bom:', 'bom must be true,',
+ `got ${JSON.stringify(options.bom)}`
+ ])
+ }
+ // Normalize option `cast`
+ let fnCastField = null
+ if(options.cast === undefined || options.cast === null || options.cast === false || options.cast === ''){
+ options.cast = undefined
+ }else if(typeof options.cast === 'function'){
+ fnCastField = options.cast
+ options.cast = true
+ }else if(options.cast !== true){
+ throw new CsvError('CSV_INVALID_OPTION_CAST', [
+ 'Invalid option cast:', 'cast must be true or a function,',
+ `got ${JSON.stringify(options.cast)}`
+ ])
+ }
+ // Normalize option `cast_date`
+ if(options.cast_date === undefined || options.cast_date === null || options.cast_date === false || options.cast_date === ''){
+ options.cast_date = false
+ }else if(options.cast_date === true){
+ options.cast_date = function(value){
+ const date = Date.parse(value)
+ return !isNaN(date) ? new Date(date) : value
+ }
+ }else if(typeof options.cast_date !== 'function'){
+ throw new CsvError('CSV_INVALID_OPTION_CAST_DATE', [
+ 'Invalid option cast_date:', 'cast_date must be true or a function,',
+ `got ${JSON.stringify(options.cast_date)}`
+ ])
+ }
+ // Normalize option `columns`
+ let fnFirstLineToHeaders = null
+ if(options.columns === true){
+ // Fields in the first line are converted as-is to columns
+ fnFirstLineToHeaders = undefined
+ }else if(typeof options.columns === 'function'){
+ fnFirstLineToHeaders = options.columns
+ options.columns = true
+ }else if(Array.isArray(options.columns)){
+ options.columns = normalizeColumnsArray(options.columns)
+ }else if(options.columns === undefined || options.columns === null || options.columns === false){
+ options.columns = false
+ }else{
+ throw new CsvError('CSV_INVALID_OPTION_COLUMNS', [
+ 'Invalid option columns:',
+ 'expect an object, a function or true,',
+ `got ${JSON.stringify(options.columns)}`
+ ])
+ }
+ // Normalize option `columns_duplicates_to_array`
+ if(options.columns_duplicates_to_array === undefined || options.columns_duplicates_to_array === null || options.columns_duplicates_to_array === false){
+ options.columns_duplicates_to_array = false
+ }else if(options.columns_duplicates_to_array !== true){
+ throw new CsvError('CSV_INVALID_OPTION_COLUMNS_DUPLICATES_TO_ARRAY', [
+ 'Invalid option columns_duplicates_to_array:',
+ 'expect an boolean,',
+ `got ${JSON.stringify(options.columns_duplicates_to_array)}`
+ ])
+ }
+ // Normalize option `comment`
+ if(options.comment === undefined || options.comment === null || options.comment === false || options.comment === ''){
+ options.comment = null
+ }else{
+ if(typeof options.comment === 'string'){
+ options.comment = Buffer.from(options.comment)
+ }
+ if(!Buffer.isBuffer(options.comment)){
+ throw new CsvError('CSV_INVALID_OPTION_COMMENT', [
+ 'Invalid option comment:',
+ 'comment must be a buffer or a string,',
+ `got ${JSON.stringify(options.comment)}`
+ ])
+ }
+ }
+ // Normalize option `delimiter`
+ const delimiter_json = JSON.stringify(options.delimiter)
+ if(!Array.isArray(options.delimiter)) options.delimiter = [options.delimiter]
+ if(options.delimiter.length === 0){
+ throw new CsvError('CSV_INVALID_OPTION_DELIMITER', [
+ 'Invalid option delimiter:',
+ 'delimiter must be a non empty string or buffer or array of string|buffer,',
+ `got ${delimiter_json}`
+ ])
+ }
+ options.delimiter = options.delimiter.map(function(delimiter){
+ if(delimiter === undefined || delimiter === null || delimiter === false){
+ return Buffer.from(',')
+ }
+ if(typeof delimiter === 'string'){
+ delimiter = Buffer.from(delimiter)
+ }
+ if( !Buffer.isBuffer(delimiter) || delimiter.length === 0){
+ throw new CsvError('CSV_INVALID_OPTION_DELIMITER', [
+ 'Invalid option delimiter:',
+ 'delimiter must be a non empty string or buffer or array of string|buffer,',
+ `got ${delimiter_json}`
+ ])
+ }
+ return delimiter
+ })
+ // Normalize option `escape`
+ if(options.escape === undefined || options.escape === true){
+ options.escape = Buffer.from('"')
+ }else if(typeof options.escape === 'string'){
+ options.escape = Buffer.from(options.escape)
+ }else if (options.escape === null || options.escape === false){
+ options.escape = null
+ }
+ if(options.escape !== null){
+ if(!Buffer.isBuffer(options.escape)){
+ throw new Error(`Invalid Option: escape must be a buffer, a string or a boolean, got ${JSON.stringify(options.escape)}`)
+ }else if(options.escape.length !== 1){
+ throw new Error(`Invalid Option Length: escape must be one character, got ${options.escape.length}`)
+ }else{
+ options.escape = options.escape[0]
+ }
+ }
+ // Normalize option `from`
+ if(options.from === undefined || options.from === null){
+ options.from = 1
+ }else{
+ if(typeof options.from === 'string' && /\d+/.test(options.from)){
+ options.from = parseInt(options.from)
+ }
+ if(Number.isInteger(options.from)){
+ if(options.from < 0){
+ throw new Error(`Invalid Option: from must be a positive integer, got ${JSON.stringify(opts.from)}`)
+ }
+ }else{
+ throw new Error(`Invalid Option: from must be an integer, got ${JSON.stringify(options.from)}`)
+ }
+ }
+ // Normalize option `from_line`
+ if(options.from_line === undefined || options.from_line === null){
+ options.from_line = 1
+ }else{
+ if(typeof options.from_line === 'string' && /\d+/.test(options.from_line)){
+ options.from_line = parseInt(options.from_line)
+ }
+ if(Number.isInteger(options.from_line)){
+ if(options.from_line <= 0){
+ throw new Error(`Invalid Option: from_line must be a positive integer greater than 0, got ${JSON.stringify(opts.from_line)}`)
+ }
+ }else{
+ throw new Error(`Invalid Option: from_line must be an integer, got ${JSON.stringify(opts.from_line)}`)
+ }
+ }
+ // Normalize option `info`
+ if(options.info === undefined || options.info === null || options.info === false){
+ options.info = false
+ }else if(options.info !== true){
+ throw new Error(`Invalid Option: info must be true, got ${JSON.stringify(options.info)}`)
+ }
+ // Normalize option `max_record_size`
+ if(options.max_record_size === undefined || options.max_record_size === null || options.max_record_size === false){
+ options.max_record_size = 0
+ }else if(Number.isInteger(options.max_record_size) && options.max_record_size >= 0){
+ // Great, nothing to do
+ }else if(typeof options.max_record_size === 'string' && /\d+/.test(options.max_record_size)){
+ options.max_record_size = parseInt(options.max_record_size)
+ }else{
+ throw new Error(`Invalid Option: max_record_size must be a positive integer, got ${JSON.stringify(options.max_record_size)}`)
+ }
+ // Normalize option `objname`
+ if(options.objname === undefined || options.objname === null || options.objname === false){
+ options.objname = undefined
+ }else if(Buffer.isBuffer(options.objname)){
+ if(options.objname.length === 0){
+ throw new Error(`Invalid Option: objname must be a non empty buffer`)
+ }
+ options.objname = options.objname.toString()
+ }else if(typeof options.objname === 'string'){
+ if(options.objname.length === 0){
+ throw new Error(`Invalid Option: objname must be a non empty string`)
+ }
+ // Great, nothing to do
+ }else{
+ throw new Error(`Invalid Option: objname must be a string or a buffer, got ${options.objname}`)
+ }
+ // Normalize option `on_record`
+ if(options.on_record === undefined || options.on_record === null){
+ options.on_record = undefined
+ }else if(typeof options.on_record !== 'function'){
+ throw new CsvError('CSV_INVALID_OPTION_ON_RECORD', [
+ 'Invalid option `on_record`:',
+ 'expect a function,',
+ `got ${JSON.stringify(options.on_record)}`
+ ])
+ }
+ // Normalize option `quote`
+ if(options.quote === null || options.quote === false || options.quote === ''){
+ options.quote = null
+ }else{
+ if(options.quote === undefined || options.quote === true){
+ options.quote = Buffer.from('"')
+ }else if(typeof options.quote === 'string'){
+ options.quote = Buffer.from(options.quote)
+ }
+ if(!Buffer.isBuffer(options.quote)){
+ throw new Error(`Invalid Option: quote must be a buffer or a string, got ${JSON.stringify(options.quote)}`)
+ }else if(options.quote.length !== 1){
+ throw new Error(`Invalid Option Length: quote must be one character, got ${options.quote.length}`)
+ }else{
+ options.quote = options.quote[0]
+ }
+ }
+ // Normalize option `raw`
+ if(options.raw === undefined || options.raw === null || options.raw === false){
+ options.raw = false
+ }else if(options.raw !== true){
+ throw new Error(`Invalid Option: raw must be true, got ${JSON.stringify(options.raw)}`)
+ }
+ // Normalize option `record_delimiter`
+ if(!options.record_delimiter){
+ options.record_delimiter = []
+ }else if(!Array.isArray(options.record_delimiter)){
+ options.record_delimiter = [options.record_delimiter]
+ }
+ options.record_delimiter = options.record_delimiter.map( function(rd){
+ if(typeof rd === 'string'){
+ rd = Buffer.from(rd)
+ }
+ return rd
+ })
+ // Normalize option `relax`
+ if(typeof options.relax === 'boolean'){
+ // Great, nothing to do
+ }else if(options.relax === undefined || options.relax === null){
+ options.relax = false
+ }else{
+ throw new Error(`Invalid Option: relax must be a boolean, got ${JSON.stringify(options.relax)}`)
+ }
+ // Normalize option `relax_column_count`
+ if(typeof options.relax_column_count === 'boolean'){
+ // Great, nothing to do
+ }else if(options.relax_column_count === undefined || options.relax_column_count === null){
+ options.relax_column_count = false
+ }else{
+ throw new Error(`Invalid Option: relax_column_count must be a boolean, got ${JSON.stringify(options.relax_column_count)}`)
+ }
+ if(typeof options.relax_column_count_less === 'boolean'){
+ // Great, nothing to do
+ }else if(options.relax_column_count_less === undefined || options.relax_column_count_less === null){
+ options.relax_column_count_less = false
+ }else{
+ throw new Error(`Invalid Option: relax_column_count_less must be a boolean, got ${JSON.stringify(options.relax_column_count_less)}`)
+ }
+ if(typeof options.relax_column_count_more === 'boolean'){
+ // Great, nothing to do
+ }else if(options.relax_column_count_more === undefined || options.relax_column_count_more === null){
+ options.relax_column_count_more = false
+ }else{
+ throw new Error(`Invalid Option: relax_column_count_more must be a boolean, got ${JSON.stringify(options.relax_column_count_more)}`)
+ }
+ // Normalize option `skip_empty_lines`
+ if(typeof options.skip_empty_lines === 'boolean'){
+ // Great, nothing to do
+ }else if(options.skip_empty_lines === undefined || options.skip_empty_lines === null){
+ options.skip_empty_lines = false
+ }else{
+ throw new Error(`Invalid Option: skip_empty_lines must be a boolean, got ${JSON.stringify(options.skip_empty_lines)}`)
+ }
+ // Normalize option `skip_lines_with_empty_values`
+ if(typeof options.skip_lines_with_empty_values === 'boolean'){
+ // Great, nothing to do
+ }else if(options.skip_lines_with_empty_values === undefined || options.skip_lines_with_empty_values === null){
+ options.skip_lines_with_empty_values = false
+ }else{
+ throw new Error(`Invalid Option: skip_lines_with_empty_values must be a boolean, got ${JSON.stringify(options.skip_lines_with_empty_values)}`)
+ }
+ // Normalize option `skip_lines_with_error`
+ if(typeof options.skip_lines_with_error === 'boolean'){
+ // Great, nothing to do
+ }else if(options.skip_lines_with_error === undefined || options.skip_lines_with_error === null){
+ options.skip_lines_with_error = false
+ }else{
+ throw new Error(`Invalid Option: skip_lines_with_error must be a boolean, got ${JSON.stringify(options.skip_lines_with_error)}`)
+ }
+ // Normalize option `rtrim`
+ if(options.rtrim === undefined || options.rtrim === null || options.rtrim === false){
+ options.rtrim = false
+ }else if(options.rtrim !== true){
+ throw new Error(`Invalid Option: rtrim must be a boolean, got ${JSON.stringify(options.rtrim)}`)
+ }
+ // Normalize option `ltrim`
+ if(options.ltrim === undefined || options.ltrim === null || options.ltrim === false){
+ options.ltrim = false
+ }else if(options.ltrim !== true){
+ throw new Error(`Invalid Option: ltrim must be a boolean, got ${JSON.stringify(options.ltrim)}`)
+ }
+ // Normalize option `trim`
+ if(options.trim === undefined || options.trim === null || options.trim === false){
+ options.trim = false
+ }else if(options.trim !== true){
+ throw new Error(`Invalid Option: trim must be a boolean, got ${JSON.stringify(options.trim)}`)
+ }
+ // Normalize options `trim`, `ltrim` and `rtrim`
+ if(options.trim === true && opts.ltrim !== false){
+ options.ltrim = true
+ }else if(options.ltrim !== true){
+ options.ltrim = false
+ }
+ if(options.trim === true && opts.rtrim !== false){
+ options.rtrim = true
+ }else if(options.rtrim !== true){
+ options.rtrim = false
+ }
+ // Normalize option `to`
+ if(options.to === undefined || options.to === null){
+ options.to = -1
+ }else{
+ if(typeof options.to === 'string' && /\d+/.test(options.to)){
+ options.to = parseInt(options.to)
+ }
+ if(Number.isInteger(options.to)){
+ if(options.to <= 0){
+ throw new Error(`Invalid Option: to must be a positive integer greater than 0, got ${JSON.stringify(opts.to)}`)
+ }
+ }else{
+ throw new Error(`Invalid Option: to must be an integer, got ${JSON.stringify(opts.to)}`)
+ }
+ }
+ // Normalize option `to_line`
+ if(options.to_line === undefined || options.to_line === null){
+ options.to_line = -1
+ }else{
+ if(typeof options.to_line === 'string' && /\d+/.test(options.to_line)){
+ options.to_line = parseInt(options.to_line)
+ }
+ if(Number.isInteger(options.to_line)){
+ if(options.to_line <= 0){
+ throw new Error(`Invalid Option: to_line must be a positive integer greater than 0, got ${JSON.stringify(opts.to_line)}`)
+ }
+ }else{
+ throw new Error(`Invalid Option: to_line must be an integer, got ${JSON.stringify(opts.to_line)}`)
+ }
+ }
+ this.info = {
+ comment_lines: 0,
+ empty_lines: 0,
+ invalid_field_length: 0,
+ lines: 1,
+ records: 0
+ }
+ this.options = options
+ this.state = {
+ bomSkipped: false,
+ castField: fnCastField,
+ commenting: false,
+ enabled: options.from_line === 1,
+ escaping: false,
+ escapeIsQuote: options.escape === options.quote,
+ expectedRecordLength: options.columns === null ? 0 : options.columns.length,
+ field: new ResizeableBuffer(20),
+ firstLineToHeaders: fnFirstLineToHeaders,
+ info: Object.assign({}, this.info),
+ previousBuf: undefined,
+ quoting: false,
+ stop: false,
+ rawBuffer: new ResizeableBuffer(100),
+ record: [],
+ recordHasError: false,
+ record_length: 0,
+ recordDelimiterMaxLength: options.record_delimiter.length === 0 ? 2 : Math.max(...options.record_delimiter.map( (v) => v.length)),
+ trimChars: [Buffer.from(' ')[0], Buffer.from('\t')[0]],
+ wasQuoting: false,
+ wasRowDelimiter: false
+ }
+ }
+ // Implementation of `Transform._transform`
+ _transform(buf, encoding, callback){
+ if(this.state.stop === true){
+ return
+ }
+ const err = this.__parse(buf, false)
+ if(err !== undefined){
+ this.state.stop = true
+ }
+ callback(err)
+ }
+ // Implementation of `Transform._flush`
+ _flush(callback){
+ if(this.state.stop === true){
+ return
+ }
+ const err = this.__parse(undefined, true)
+ callback(err)
+ }
+ // Central parser implementation
+ __parse(nextBuf, end){
+ const {bom, comment, escape, from_line, info, ltrim, max_record_size, quote, raw, relax, rtrim, skip_empty_lines, to, to_line} = this.options
+ let {record_delimiter} = this.options
+ const {bomSkipped, previousBuf, rawBuffer, escapeIsQuote} = this.state
+ let buf
+ if(previousBuf === undefined){
+ if(nextBuf === undefined){
+ // Handle empty string
+ this.push(null)
+ return
+ }else{
+ buf = nextBuf
+ }
+ }else if(previousBuf !== undefined && nextBuf === undefined){
+ buf = previousBuf
+ }else{
+ buf = Buffer.concat([previousBuf, nextBuf])
+ }
+ // Handle UTF BOM
+ if(bomSkipped === false){
+ if(bom === false){
+ this.state.bomSkipped = true
+ }else if(buf.length < 3){
+ // No enough data
+ if(end === false){
+ // Wait for more data
+ this.state.previousBuf = buf
+ return
+ }
+ // skip BOM detect because data length < 3
+ }else{
+ if(bom_utf8.compare(buf, 0, 3) === 0){
+ // Skip BOM
+ buf = buf.slice(3)
+ }
+ this.state.bomSkipped = true
+ }
+ }
+ const bufLen = buf.length
+ let pos
+ for(pos = 0; pos < bufLen; pos++){
+ // Ensure we get enough space to look ahead
+ // There should be a way to move this out of the loop
+ if(this.__needMoreData(pos, bufLen, end)){
+ break
+ }
+ if(this.state.wasRowDelimiter === true){
+ this.info.lines++
+ if(info === true && this.state.record.length === 0 && this.state.field.length === 0 && this.state.wasQuoting === false){
+ this.state.info = Object.assign({}, this.info)
+ }
+ this.state.wasRowDelimiter = false
+ }
+ if(to_line !== -1 && this.info.lines > to_line){
+ this.state.stop = true
+ this.push(null)
+ return
+ }
+ // Auto discovery of record_delimiter, unix, mac and windows supported
+ if(this.state.quoting === false && record_delimiter.length === 0){
+ const record_delimiterCount = this.__autoDiscoverRowDelimiter(buf, pos)
+ if(record_delimiterCount){
+ record_delimiter = this.options.record_delimiter
+ }
+ }
+ const chr = buf[pos]
+ if(raw === true){
+ rawBuffer.append(chr)
+ }
+ if((chr === cr || chr === nl) && this.state.wasRowDelimiter === false ){
+ this.state.wasRowDelimiter = true
+ }
+ // Previous char was a valid escape char
+ // treat the current char as a regular char
+ if(this.state.escaping === true){
+ this.state.escaping = false
+ }else{
+ // Escape is only active inside quoted fields
+ // We are quoting, the char is an escape chr and there is a chr to escape
+ if(escape !== null && this.state.quoting === true && chr === escape && pos + 1 < bufLen){
+ if(escapeIsQuote){
+ if(buf[pos+1] === quote){
+ this.state.escaping = true
+ continue
+ }
+ }else{
+ this.state.escaping = true
+ continue
+ }
+ }
+ // Not currently escaping and chr is a quote
+ // TODO: need to compare bytes instead of single char
+ if(this.state.commenting === false && chr === quote){
+ if(this.state.quoting === true){
+ const nextChr = buf[pos+1]
+ const isNextChrTrimable = rtrim && this.__isCharTrimable(nextChr)
+ // const isNextChrComment = nextChr === comment
+ const isNextChrComment = comment !== null && this.__compareBytes(comment, buf, pos+1, nextChr)
+ const isNextChrDelimiter = this.__isDelimiter(nextChr, buf, pos+1)
+ const isNextChrRowDelimiter = record_delimiter.length === 0 ? this.__autoDiscoverRowDelimiter(buf, pos+1) : this.__isRecordDelimiter(nextChr, buf, pos+1)
+ // Escape a quote
+ // Treat next char as a regular character
+ // TODO: need to compare bytes instead of single char
+ if(escape !== null && chr === escape && nextChr === quote){
+ pos++
+ }else if(!nextChr || isNextChrDelimiter || isNextChrRowDelimiter || isNextChrComment || isNextChrTrimable){
+ this.state.quoting = false
+ this.state.wasQuoting = true
+ continue
+ }else if(relax === false){
+ const err = this.__error(
+ new CsvError('CSV_INVALID_CLOSING_QUOTE', [
+ 'Invalid Closing Quote:',
+ `got "${String.fromCharCode(nextChr)}"`,
+ `at line ${this.info.lines}`,
+ 'instead of delimiter, row delimiter, trimable character',
+ '(if activated) or comment',
+ ], this.__context())
+ )
+ if(err !== undefined) return err
+ }else{
+ this.state.quoting = false
+ this.state.wasQuoting = true
+ // continue
+ this.state.field.prepend(quote)
+ }
+ }else{
+ if(this.state.field.length !== 0){
+ // In relax mode, treat opening quote preceded by chrs as regular
+ if( relax === false ){
+ const err = this.__error(
+ new CsvError('INVALID_OPENING_QUOTE', [
+ 'Invalid Opening Quote:',
+ `a quote is found inside a field at line ${this.info.lines}`,
+ ], this.__context(), {
+ field: this.state.field,
+ })
+ )
+ if(err !== undefined) return err
+ }
+ }else{
+ this.state.quoting = true
+ continue
+ }
+ }
+ }
+ if(this.state.quoting === false){
+ let recordDelimiterLength = this.__isRecordDelimiter(chr, buf, pos)
+ if(recordDelimiterLength !== 0){
+ // Do not emit comments which take a full line
+ const skipCommentLine = this.state.commenting && (this.state.wasQuoting === false && this.state.record.length === 0 && this.state.field.length === 0)
+ if(skipCommentLine){
+ this.info.comment_lines++
+ // Skip full comment line
+ }else{
+ // Skip if line is empty and skip_empty_lines activated
+ if(skip_empty_lines === true && this.state.wasQuoting === false && this.state.record.length === 0 && this.state.field.length === 0){
+ this.info.empty_lines++
+ pos += recordDelimiterLength - 1
+ continue
+ }
+          // Activate records emission if above from_line
+ if(this.state.enabled === false && this.info.lines + (this.state.wasRowDelimiter === true ? 1: 0 ) >= from_line){
+ this.state.enabled = true
+ this.__resetField()
+ this.__resetRow()
+ pos += recordDelimiterLength - 1
+ continue
+ }else{
+ const errField = this.__onField()
+ if(errField !== undefined) return errField
+ const errRecord = this.__onRow()
+ if(errRecord !== undefined) return errRecord
+ }
+ if(to !== -1 && this.info.records >= to){
+ this.state.stop = true
+ this.push(null)
+ return
+ }
+ }
+ this.state.commenting = false
+ pos += recordDelimiterLength - 1
+ continue
+ }
+ if(this.state.commenting){
+ continue
+ }
+ const commentCount = comment === null ? 0 : this.__compareBytes(comment, buf, pos, chr)
+ if(commentCount !== 0){
+ this.state.commenting = true
+ continue
+ }
+ let delimiterLength = this.__isDelimiter(chr, buf, pos)
+ if(delimiterLength !== 0){
+ const errField = this.__onField()
+ if(errField !== undefined) return errField
+ pos += delimiterLength - 1
+ continue
+ }
+ }
+ }
+ if(this.state.commenting === false){
+ if(max_record_size !== 0 && this.state.record_length + this.state.field.length > max_record_size){
+ const err = this.__error(
+ new CsvError('CSV_MAX_RECORD_SIZE', [
+ 'Max Record Size:',
+ 'record exceed the maximum number of tolerated bytes',
+ `of ${max_record_size}`,
+ `at line ${this.info.lines}`,
+ ], this.__context())
+ )
+ if(err !== undefined) return err
+ }
+ }
+
+ const lappend = ltrim === false || this.state.quoting === true || this.state.field.length !== 0 || !this.__isCharTrimable(chr)
+      // rtrim in non quoting is handled in __onField
+ const rappend = rtrim === false || this.state.wasQuoting === false
+ if( lappend === true && rappend === true ){
+ this.state.field.append(chr)
+ }else if(rtrim === true && !this.__isCharTrimable(chr)){
+ const err = this.__error(
+ new CsvError('CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE', [
+ 'Invalid Closing Quote:',
+ 'found non trimable byte after quote',
+ `at line ${this.info.lines}`,
+ ], this.__context())
+ )
+ if(err !== undefined) return err
+ }
+ }
+ if(end === true){
+ // Ensure we are not ending in a quoting state
+ if(this.state.quoting === true){
+ const err = this.__error(
+ new CsvError('CSV_QUOTE_NOT_CLOSED', [
+ 'Quote Not Closed:',
+ `the parsing is finished with an opening quote at line ${this.info.lines}`,
+ ], this.__context())
+ )
+ if(err !== undefined) return err
+ }else{
+ // Skip last line if it has no characters
+ if(this.state.wasQuoting === true || this.state.record.length !== 0 || this.state.field.length !== 0){
+ const errField = this.__onField()
+ if(errField !== undefined) return errField
+ const errRecord = this.__onRow()
+ if(errRecord !== undefined) return errRecord
+ }else if(this.state.wasRowDelimiter === true){
+ this.info.empty_lines++
+ }else if(this.state.commenting === true){
+ this.info.comment_lines++
+ }
+ }
+ }else{
+ this.state.previousBuf = buf.slice(pos)
+ }
+ if(this.state.wasRowDelimiter === true){
+ this.info.lines++
+ this.state.wasRowDelimiter = false
+ }
+ }
+ // Helper to test if a character is a space or a line delimiter
+ __isCharTrimable(chr){
+ return chr === space || chr === tab || chr === cr || chr === nl || chr === np
+ }
+ __onRow(){
+ const {columns, columns_duplicates_to_array, info, from, relax_column_count, relax_column_count_less, relax_column_count_more, raw, skip_lines_with_empty_values} = this.options
+ const {enabled, record} = this.state
+ if(enabled === false){
+ return this.__resetRow()
+ }
+ // Convert the first line into column names
+ const recordLength = record.length
+ if(columns === true){
+ if(isRecordEmpty(record)){
+ this.__resetRow()
+ return
+ }
+ return this.__firstLineToColumns(record)
+ }
+ if(columns === false && this.info.records === 0){
+ this.state.expectedRecordLength = recordLength
+ }
+ if(recordLength !== this.state.expectedRecordLength){
+ if(relax_column_count === true ||
+ (relax_column_count_less === true && recordLength < this.state.expectedRecordLength) ||
+ (relax_column_count_more === true && recordLength > this.state.expectedRecordLength) ){
+ this.info.invalid_field_length++
+ }else{
+ if(columns === false){
+ const err = this.__error(
+ new CsvError('CSV_INCONSISTENT_RECORD_LENGTH', [
+ 'Invalid Record Length:',
+ `expect ${this.state.expectedRecordLength},`,
+ `got ${recordLength} on line ${this.info.lines}`,
+ ], this.__context(), {
+ record: record,
+ })
+ )
+ if(err !== undefined) return err
+ }else{
+ const err = this.__error(
+ // CSV_INVALID_RECORD_LENGTH_DONT_MATCH_COLUMNS
+ new CsvError('CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH', [
+ 'Invalid Record Length:',
+ `columns length is ${columns.length},`, // rename columns
+ `got ${recordLength} on line ${this.info.lines}`,
+ ], this.__context(), {
+ record: record,
+ })
+ )
+ if(err !== undefined) return err
+ }
+ }
+ }
+ if(skip_lines_with_empty_values === true){
+ if(isRecordEmpty(record)){
+ this.__resetRow()
+ return
+ }
+ }
+ if(this.state.recordHasError === true){
+ this.__resetRow()
+ this.state.recordHasError = false
+ return
+ }
+ this.info.records++
+ if(from === 1 || this.info.records >= from){
+ if(columns !== false){
+ const obj = {}
+ // Transform record array to an object
+ for(let i = 0, l = record.length; i < l; i++){
+ if(columns[i] === undefined || columns[i].disabled) continue
+ // obj[columns[i].name] = record[i]
+ // Turn duplicate columns into an array
+ if (columns_duplicates_to_array === true && obj[columns[i].name]) {
+ if (Array.isArray(obj[columns[i].name])) {
+ obj[columns[i].name] = obj[columns[i].name].concat(record[i])
+ } else {
+ obj[columns[i].name] = [obj[columns[i].name], record[i]]
+ }
+ } else {
+ obj[columns[i].name] = record[i]
+ }
+ }
+ const {objname} = this.options
+ if(objname === undefined){
+ if(raw === true || info === true){
+ const err = this.__push(Object.assign(
+ {record: obj},
+ (raw === true ? {raw: this.state.rawBuffer.toString()}: {}),
+ (info === true ? {info: this.state.info}: {})
+ ))
+ if(err){
+ return err
+ }
+ }else{
+ const err = this.__push(obj)
+ if(err){
+ return err
+ }
+ }
+ }else{
+ if(raw === true || info === true){
+ const err = this.__push(Object.assign(
+ {record: [obj[objname], obj]},
+ raw === true ? {raw: this.state.rawBuffer.toString()}: {},
+ info === true ? {info: this.state.info}: {}
+ ))
+ if(err){
+ return err
+ }
+ }else{
+ const err = this.__push([obj[objname], obj])
+ if(err){
+ return err
+ }
+ }
+ }
+ }else{
+ if(raw === true || info === true){
+ const err = this.__push(Object.assign(
+ {record: record},
+ raw === true ? {raw: this.state.rawBuffer.toString()}: {},
+ info === true ? {info: this.state.info}: {}
+ ))
+ if(err){
+ return err
+ }
+ }else{
+ const err = this.__push(record)
+ if(err){
+ return err
+ }
+ }
+ }
+ }
+ this.__resetRow()
+ }
+ __firstLineToColumns(record){
+ const {firstLineToHeaders} = this.state
+ try{
+ const headers = firstLineToHeaders === undefined ? record : firstLineToHeaders.call(null, record)
+ if(!Array.isArray(headers)){
+ return this.__error(
+ new CsvError('CSV_INVALID_COLUMN_MAPPING', [
+ 'Invalid Column Mapping:',
+ 'expect an array from column function,',
+ `got ${JSON.stringify(headers)}`
+ ], this.__context(), {
+ headers: headers,
+ })
+ )
+ }
+ const normalizedHeaders = normalizeColumnsArray(headers)
+ this.state.expectedRecordLength = normalizedHeaders.length
+ this.options.columns = normalizedHeaders
+ this.__resetRow()
+ return
+ }catch(err){
+ return err
+ }
+ }
+ __resetRow(){
+ if(this.options.raw === true){
+ this.state.rawBuffer.reset()
+ }
+ this.state.record = []
+ this.state.record_length = 0
+ }
+ __onField(){
+ const {cast, rtrim, max_record_size} = this.options
+ const {enabled, wasQuoting} = this.state
+ // Short circuit for the from_line options
+ if(enabled === false){ /* this.options.columns !== true && */
+ return this.__resetField()
+ }
+ let field = this.state.field.toString()
+ if(rtrim === true && wasQuoting === false){
+ field = field.trimRight()
+ }
+ if(cast === true){
+ const [err, f] = this.__cast(field)
+ if(err !== undefined) return err
+ field = f
+ }
+ this.state.record.push(field)
+ // Increment record length if record size must not exceed a limit
+ if(max_record_size !== 0 && typeof field === 'string'){
+ this.state.record_length += field.length
+ }
+ this.__resetField()
+ }
+ __resetField(){
+ this.state.field.reset()
+ this.state.wasQuoting = false
+ }
+ __push(record){
+ const {on_record} = this.options
+ if(on_record !== undefined){
+ const context = this.__context()
+ try{
+ record = on_record.call(null, record, context)
+ }catch(err){
+ return err
+ }
+ if(record === undefined || record === null){ return }
+ }
+ this.push(record)
+ }
+ // Return a tuple with the error and the casted value
+ __cast(field){
+ const {columns, relax_column_count} = this.options
+ const isColumns = Array.isArray(columns)
+    // Don't lose time calling cast
+ // because the final record is an object
+ // and this field can't be associated to a key present in columns
+ if( isColumns === true && relax_column_count && this.options.columns.length <= this.state.record.length ){
+ return [undefined, undefined]
+ }
+ const context = this.__context()
+ if(this.state.castField !== null){
+ try{
+ return [undefined, this.state.castField.call(null, field, context)]
+ }catch(err){
+ return [err]
+ }
+ }
+ if(this.__isFloat(field)){
+ return [undefined, parseFloat(field)]
+ }else if(this.options.cast_date !== false){
+ return [undefined, this.options.cast_date.call(null, field, context)]
+ }
+ return [undefined, field]
+ }
+ // Keep it in case we implement the `cast_int` option
+ // __isInt(value){
+ // // return Number.isInteger(parseInt(value))
+ // // return !isNaN( parseInt( obj ) );
+ // return /^(\-|\+)?[1-9][0-9]*$/.test(value)
+ // }
+ __isFloat(value){
+ return (value - parseFloat( value ) + 1) >= 0 // Borrowed from jquery
+ }
+ __compareBytes(sourceBuf, targetBuf, pos, firtByte){
+ if(sourceBuf[0] !== firtByte) return 0
+ const sourceLength = sourceBuf.length
+ for(let i = 1; i < sourceLength; i++){
+ if(sourceBuf[i] !== targetBuf[pos+i]) return 0
+ }
+ return sourceLength
+ }
+ __needMoreData(i, bufLen, end){
+ if(end){
+ return false
+ }
+ const {comment, delimiter} = this.options
+ const {quoting, recordDelimiterMaxLength} = this.state
+ const numOfCharLeft = bufLen - i - 1
+ const requiredLength = Math.max(
+      // Skip if the remaining buffer is smaller than comment
+ comment ? comment.length : 0,
+      // Skip if the remaining buffer is smaller than row delimiter
+ recordDelimiterMaxLength,
+ // Skip if the remaining buffer can be row delimiter following the closing quote
+ // 1 is for quote.length
+ quoting ? (1 + recordDelimiterMaxLength) : 0,
+ // Skip if the remaining buffer can be delimiter
+ delimiter.length,
+ // Skip if the remaining buffer can be escape sequence
+ // 1 is for escape.length
+ 1
+ )
+ return numOfCharLeft < requiredLength
+ }
+ __isDelimiter(chr, buf, pos){
+ const {delimiter} = this.options
+ loop1: for(let i = 0; i < delimiter.length; i++){
+ const del = delimiter[i]
+ if(del[0] === chr){
+ for(let j = 1; j < del.length; j++){
+ if(del[j] !== buf[pos+j]) continue loop1
+ }
+ return del.length
+ }
+ }
+ return 0
+ }
+ __isRecordDelimiter(chr, buf, pos){
+ const {record_delimiter} = this.options
+ const recordDelimiterLength = record_delimiter.length
+ loop1: for(let i = 0; i < recordDelimiterLength; i++){
+ const rd = record_delimiter[i]
+ const rdLength = rd.length
+ if(rd[0] !== chr){
+ continue
+ }
+ for(let j = 1; j < rdLength; j++){
+ if(rd[j] !== buf[pos+j]){
+ continue loop1
+ }
+ }
+ return rd.length
+ }
+ return 0
+ }
+ __autoDiscoverRowDelimiter(buf, pos){
+ const chr = buf[pos]
+ if(chr === cr){
+ if(buf[pos+1] === nl){
+ this.options.record_delimiter.push(Buffer.from('\r\n'))
+ this.state.recordDelimiterMaxLength = 2
+ return 2
+ }else{
+ this.options.record_delimiter.push(Buffer.from('\r'))
+ this.state.recordDelimiterMaxLength = 1
+ return 1
+ }
+ }else if(chr === nl){
+ this.options.record_delimiter.push(Buffer.from('\n'))
+ this.state.recordDelimiterMaxLength = 1
+ return 1
+ }
+ return 0
+ }
+ __error(msg){
+ const {skip_lines_with_error} = this.options
+ const err = typeof msg === 'string' ? new Error(msg) : msg
+ if(skip_lines_with_error){
+ this.state.recordHasError = true
+ this.emit('skip', err)
+ return undefined
+ }else{
+ return err
+ }
+ }
+ __context(){
+ const {columns} = this.options
+ const isColumns = Array.isArray(columns)
+ return {
+ column: isColumns === true ?
+ ( columns.length > this.state.record.length ?
+ columns[this.state.record.length].name :
+ null
+ ) :
+ this.state.record.length,
+ empty_lines: this.info.empty_lines,
+ header: columns === true,
+ index: this.state.record.length,
+ invalid_field_length: this.info.invalid_field_length,
+ quoting: this.state.wasQuoting,
+ lines: this.info.lines,
+ records: this.info.records
+ }
+ }
+}
+
+const parse = function(){
+ let data, options, callback
+ for(let i in arguments){
+ const argument = arguments[i]
+ const type = typeof argument
+ if(data === undefined && (typeof argument === 'string' || Buffer.isBuffer(argument))){
+ data = argument
+ }else if(options === undefined && isObject(argument)){
+ options = argument
+ }else if(callback === undefined && type === 'function'){
+ callback = argument
+ }else{
+ throw new CsvError('CSV_INVALID_ARGUMENT', [
+ 'Invalid argument:',
+ `got ${JSON.stringify(argument)} at index ${i}`
+ ])
+ }
+ }
+ const parser = new Parser(options)
+ if(callback){
+ const records = options === undefined || options.objname === undefined ? [] : {}
+ parser.on('readable', function(){
+ let record
+ while((record = this.read()) !== null){
+ if(options === undefined || options.objname === undefined){
+ records.push(record)
+ }else{
+ records[record[0]] = record[1]
+ }
+ }
+ })
+ parser.on('error', function(err){
+ callback(err, undefined, parser.info)
+ })
+ parser.on('end', function(){
+ callback(undefined, records, parser.info)
+ })
+ }
+ if(data !== undefined){
+ // Give a chance for events to be registered later
+ if(typeof setImmediate === 'function'){
+ setImmediate(function(){
+ parser.write(data)
+ parser.end()
+ })
+ }else{
+ parser.write(data)
+ parser.end()
+ }
+ }
+ return parser
+}
+
+class CsvError extends Error {
+ constructor(code, message, ...contexts) {
+ if(Array.isArray(message)) message = message.join(' ')
+ super(message)
+ if(Error.captureStackTrace !== undefined){
+ Error.captureStackTrace(this, CsvError)
+ }
+ this.code = code
+ for(const context of contexts){
+ for(const key in context){
+ const value = context[key]
+ this[key] = Buffer.isBuffer(value) ? value.toString() : value == null ? value : JSON.parse(JSON.stringify(value))
+ }
+ }
+ }
+}
+
+parse.Parser = Parser
+
+parse.CsvError = CsvError
+
+module.exports = parse
+
+const underscore = function(str){
+ return str.replace(/([A-Z])/g, function(_, match){
+ return '_' + match.toLowerCase()
+ })
+}
+
+const isObject = function(obj){
+ return (typeof obj === 'object' && obj !== null && !Array.isArray(obj))
+}
+
+const isRecordEmpty = function(record){
+ return record.every( (field) => field == null || field.toString && field.toString().trim() === '' )
+}
+
+const normalizeColumnsArray = function(columns){
+ const normalizedColumns = [];
+ for(let i = 0, l = columns.length; i < l; i++){
+ const column = columns[i]
+ if(column === undefined || column === null || column === false){
+ normalizedColumns[i] = { disabled: true }
+ }else if(typeof column === 'string'){
+ normalizedColumns[i] = { name: column }
+ }else if(isObject(column)){
+ if(typeof column.name !== 'string'){
+ throw new CsvError('CSV_OPTION_COLUMNS_MISSING_NAME', [
+ 'Option columns missing name:',
+ `property "name" is required at position ${i}`,
+ 'when column is an object literal'
+ ])
+ }
+ normalizedColumns[i] = column
+ }else{
+ throw new CsvError('CSV_INVALID_COLUMN_DEFINITION', [
+ 'Invalid column definition:',
+ 'expect a string or a literal object,',
+ `got ${JSON.stringify(column)} at position ${i}`
+ ])
+ }
+ }
+ return normalizedColumns;
+}
+
+
/***/ }),
/***/ 832:
@@ -13703,23 +14842,42 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.asyncForEach = exports.getInputList = exports.getArgs = exports.getInputs = exports.tmpDir = void 0;
+exports.asyncForEach = exports.getInputList = exports.getArgs = exports.getInputs = exports.tmpNameSync = exports.tmpDir = exports.defaultContext = void 0;
const fs = __importStar(__webpack_require__(747));
const os = __importStar(__webpack_require__(87));
const path = __importStar(__webpack_require__(622));
const semver = __importStar(__webpack_require__(383));
+const tmp = __importStar(__webpack_require__(517));
const buildx = __importStar(__webpack_require__(295));
const core = __importStar(__webpack_require__(186));
const github = __importStar(__webpack_require__(438));
-exports.tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'docker-build-push-'));
-const defaultContext = `https://github.com/${github.context.repo.owner}/${github.context.repo.repo}.git#${github.context.ref.replace(/^refs\//, '')}`;
-function getInputs() {
+let _defaultContext, _tmpDir;
+function defaultContext() {
+ var _a, _b;
+ if (!_defaultContext) {
+ _defaultContext = `https://github.com/${github.context.repo.owner}/${github.context.repo.repo}.git#${(_b = (_a = github.context) === null || _a === void 0 ? void 0 : _a.ref) === null || _b === void 0 ? void 0 : _b.replace(/^refs\//, '')}`;
+ }
+ return _defaultContext;
+}
+exports.defaultContext = defaultContext;
+function tmpDir() {
+ if (!_tmpDir) {
+ _tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'docker-build-push-')).split(path.sep).join(path.posix.sep);
+ }
+ return _tmpDir;
+}
+exports.tmpDir = tmpDir;
+function tmpNameSync(options) {
+ return tmp.tmpNameSync(options);
+}
+exports.tmpNameSync = tmpNameSync;
+function getInputs(defaultContext) {
return __awaiter(this, void 0, void 0, function* () {
return {
context: core.getInput('context') || defaultContext,
file: core.getInput('file') || 'Dockerfile',
- buildArgs: yield getInputList('build-args'),
- labels: yield getInputList('labels'),
+ buildArgs: yield getInputList('build-args', true),
+ labels: yield getInputList('labels', true),
tags: yield getInputList('tags'),
pull: /true/i.test(core.getInput('pull')),
target: core.getInput('target'),
@@ -13738,17 +14896,17 @@ function getInputs() {
});
}
exports.getInputs = getInputs;
-function getArgs(inputs, buildxVersion) {
+function getArgs(inputs, defaultContext, buildxVersion) {
return __awaiter(this, void 0, void 0, function* () {
let args = ['buildx'];
- args.push.apply(args, yield getBuildArgs(inputs, buildxVersion));
+ args.push.apply(args, yield getBuildArgs(inputs, defaultContext, buildxVersion));
args.push.apply(args, yield getCommonArgs(inputs));
args.push(inputs.context);
return args;
});
}
exports.getArgs = getArgs;
-function getBuildArgs(inputs, buildxVersion) {
+function getBuildArgs(inputs, defaultContext, buildxVersion) {
return __awaiter(this, void 0, void 0, function* () {
let args = ['build'];
yield exports.asyncForEach(inputs.buildArgs, (buildArg) => __awaiter(this, void 0, void 0, function* () {
@@ -13769,26 +14927,23 @@ function getBuildArgs(inputs, buildxVersion) {
if (inputs.platforms.length > 0) {
args.push('--platform', inputs.platforms.join(','));
}
- if (inputs.platforms.length == 0 || semver.satisfies(buildxVersion, '>=0.4.2')) {
- args.push('--iidfile', yield buildx.getImageIDFile());
- }
yield exports.asyncForEach(inputs.outputs, (output) => __awaiter(this, void 0, void 0, function* () {
args.push('--output', output);
}));
+ if (!buildx.isLocalOrTarExporter(inputs.outputs) &&
+ (inputs.platforms.length == 0 || semver.satisfies(buildxVersion, '>=0.4.2'))) {
+ args.push('--iidfile', yield buildx.getImageIDFile());
+ }
yield exports.asyncForEach(inputs.cacheFrom, (cacheFrom) => __awaiter(this, void 0, void 0, function* () {
args.push('--cache-from', cacheFrom);
}));
yield exports.asyncForEach(inputs.cacheTo, (cacheTo) => __awaiter(this, void 0, void 0, function* () {
args.push('--cache-to', cacheTo);
}));
- let hasGitAuthToken = false;
yield exports.asyncForEach(inputs.secrets, (secret) => __awaiter(this, void 0, void 0, function* () {
- if (secret.startsWith('GIT_AUTH_TOKEN=')) {
- hasGitAuthToken = true;
- }
args.push('--secret', yield buildx.getSecret(secret));
}));
- if (inputs.githubToken && !hasGitAuthToken && inputs.context == defaultContext) {
+ if (inputs.githubToken && !buildx.hasGitAuthToken(inputs.secrets) && inputs.context == defaultContext) {
args.push('--secret', yield buildx.getSecret(`GIT_AUTH_TOKEN=${inputs.githubToken}`));
}
if (inputs.file) {
@@ -13856,73 +15011,162 @@ module.exports = clean
/***/ 863:
/***/ (function(module, __unusedexports, __webpack_require__) {
-module.exports = realpath
-realpath.realpath = realpath
-realpath.sync = realpathSync
-realpath.realpathSync = realpathSync
-realpath.monkeypatch = monkeypatch
-realpath.unmonkeypatch = unmonkeypatch
+const Range = __webpack_require__(828)
+const { ANY } = __webpack_require__(532)
+const satisfies = __webpack_require__(55)
+const compare = __webpack_require__(309)
-var fs = __webpack_require__(747)
-var origRealpath = fs.realpath
-var origRealpathSync = fs.realpathSync
+// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff:
+// - Every simple range `r1, r2, ...` is a subset of some `R1, R2, ...`
+//
+// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff:
+// - If c is only the ANY comparator
+// - If C is only the ANY comparator, return true
+// - Else return false
+// - Let EQ be the set of = comparators in c
+// - If EQ is more than one, return true (null set)
+// - Let GT be the highest > or >= comparator in c
+// - Let LT be the lowest < or <= comparator in c
+// - If GT and LT, and GT.semver > LT.semver, return true (null set)
+// - If EQ
+// - If GT, and EQ does not satisfy GT, return true (null set)
+// - If LT, and EQ does not satisfy LT, return true (null set)
+// - If EQ satisfies every C, return true
+// - Else return false
+// - If GT
+// - If GT is lower than any > or >= comp in C, return false
+// - If GT is >=, and GT.semver does not satisfy every C, return false
+// - If LT
+// - If LT.semver is greater than that of any > comp in C, return false
+// - If LT is <=, and LT.semver does not satisfy every C, return false
+// - If any C is a = range, and GT or LT are set, return false
+// - Else return true
-var version = process.version
-var ok = /^v[0-5]\./.test(version)
-var old = __webpack_require__(734)
+const subset = (sub, dom, options) => {
+ sub = new Range(sub, options)
+ dom = new Range(dom, options)
+ let sawNonNull = false
-function newError (er) {
- return er && er.syscall === 'realpath' && (
- er.code === 'ELOOP' ||
- er.code === 'ENOMEM' ||
- er.code === 'ENAMETOOLONG'
- )
-}
-
-function realpath (p, cache, cb) {
- if (ok) {
- return origRealpath(p, cache, cb)
- }
-
- if (typeof cache === 'function') {
- cb = cache
- cache = null
- }
- origRealpath(p, cache, function (er, result) {
- if (newError(er)) {
- old.realpath(p, cache, cb)
- } else {
- cb(er, result)
+ OUTER: for (const simpleSub of sub.set) {
+ for (const simpleDom of dom.set) {
+ const isSub = simpleSubset(simpleSub, simpleDom, options)
+ sawNonNull = sawNonNull || isSub !== null
+ if (isSub)
+ continue OUTER
}
- })
+ // the null set is a subset of everything, but null simple ranges in
+ // a complex range should be ignored. so if we saw a non-null range,
+ // then we know this isn't a subset, but if EVERY simple range was null,
+ // then it is a subset.
+ if (sawNonNull)
+ return false
+ }
+ return true
}
-function realpathSync (p, cache) {
- if (ok) {
- return origRealpathSync(p, cache)
+const simpleSubset = (sub, dom, options) => {
+ if (sub.length === 1 && sub[0].semver === ANY)
+ return dom.length === 1 && dom[0].semver === ANY
+
+ const eqSet = new Set()
+ let gt, lt
+ for (const c of sub) {
+ if (c.operator === '>' || c.operator === '>=')
+ gt = higherGT(gt, c, options)
+ else if (c.operator === '<' || c.operator === '<=')
+ lt = lowerLT(lt, c, options)
+ else
+ eqSet.add(c.semver)
}
- try {
- return origRealpathSync(p, cache)
- } catch (er) {
- if (newError(er)) {
- return old.realpathSync(p, cache)
- } else {
- throw er
+ if (eqSet.size > 1)
+ return null
+
+ let gtltComp
+ if (gt && lt) {
+ gtltComp = compare(gt.semver, lt.semver, options)
+ if (gtltComp > 0)
+ return null
+ else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<='))
+ return null
+ }
+
+ // will iterate one or zero times
+ for (const eq of eqSet) {
+ if (gt && !satisfies(eq, String(gt), options))
+ return null
+
+ if (lt && !satisfies(eq, String(lt), options))
+ return null
+
+ for (const c of dom) {
+ if (!satisfies(eq, String(c), options))
+ return false
}
+ return true
}
+
+ let higher, lower
+ let hasDomLT, hasDomGT
+ for (const c of dom) {
+ hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>='
+ hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<='
+ if (gt) {
+ if (c.operator === '>' || c.operator === '>=') {
+ higher = higherGT(gt, c, options)
+ if (higher === c)
+ return false
+ } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options))
+ return false
+ }
+ if (lt) {
+ if (c.operator === '<' || c.operator === '<=') {
+ lower = lowerLT(lt, c, options)
+ if (lower === c)
+ return false
+ } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options))
+ return false
+ }
+ if (!c.operator && (lt || gt) && gtltComp !== 0)
+ return false
+ }
+
+ // if there was a < or >, and nothing in the dom, then must be false
+ // UNLESS it was limited by another range in the other direction.
+ // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0
+ if (gt && hasDomLT && !lt && gtltComp !== 0)
+ return false
+
+ if (lt && hasDomGT && !gt && gtltComp !== 0)
+ return false
+
+ return true
}
-function monkeypatch () {
- fs.realpath = realpath
- fs.realpathSync = realpathSync
+// >=1.2.3 is lower than >1.2.3
+const higherGT = (a, b, options) => {
+ if (!a)
+ return b
+ const comp = compare(a.semver, b.semver, options)
+ return comp > 0 ? a
+ : comp < 0 ? b
+ : b.operator === '>' && a.operator === '>=' ? b
+ : a
}
-function unmonkeypatch () {
- fs.realpath = origRealpath
- fs.realpathSync = origRealpathSync
+// <=1.2.3 is higher than <1.2.3
+const lowerLT = (a, b, options) => {
+ if (!a)
+ return b
+ const comp = compare(a.semver, b.semver, options)
+ return comp < 0 ? a
+ : comp > 0 ? b
+ : b.operator === '<' && a.operator === '<=' ? b
+ : a
}
+module.exports = subset
+
/***/ }),
@@ -14816,6 +16060,59 @@ function wrappy (fn, cb) {
}
+/***/ }),
+
+/***/ 942:
+/***/ (function(module) {
+
+
+
+class ResizeableBuffer{
+ constructor(size=100){
+ this.size = size
+ this.length = 0
+ this.buf = Buffer.alloc(size)
+ }
+ prepend(val){
+ const length = this.length++
+ if(length === this.size){
+ this.resize()
+ }
+ const buf = this.clone()
+ this.buf[0] = val
+ buf.copy(this.buf,1, 0, length)
+ }
+ append(val){
+ const length = this.length++
+ if(length === this.size){
+ this.resize()
+ }
+ this.buf[length] = val
+ }
+ clone(){
+ return Buffer.from(this.buf.slice(0, this.length))
+ }
+ resize(){
+ const length = this.length
+ this.size = this.size * 2
+ const buf = Buffer.alloc(this.size)
+ this.buf.copy(buf,0, 0, length)
+ this.buf = buf
+ }
+ toString(){
+ return this.buf.slice(0, this.length).toString()
+ }
+ toJSON(){
+ return this.toString()
+ }
+ reset(){
+ this.length = 0
+ }
+}
+
+module.exports = ResizeableBuffer
+
+
/***/ }),
/***/ 957:
@@ -14864,7 +16161,7 @@ function wrappy (fn, cb) {
module.exports = glob
var fs = __webpack_require__(747)
-var rp = __webpack_require__(863)
+var rp = __webpack_require__(290)
var minimatch = __webpack_require__(973)
var Minimatch = minimatch.Minimatch
var inherits = __webpack_require__(124)
diff --git a/jest.config.js b/jest.config.js
index 1673d42..70177b3 100644
--- a/jest.config.js
+++ b/jest.config.js
@@ -1,5 +1,5 @@
module.exports = {
- clearMocks: true,
+ clearMocks: false,
moduleFileExtensions: ['js', 'ts'],
setupFiles: ["dotenv/config"],
testEnvironment: 'node',
diff --git a/package.json b/package.json
index 9851f37..d6b6f51 100644
--- a/package.json
+++ b/package.json
@@ -31,10 +31,12 @@
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.4",
"@actions/github": "^4.0.0",
+ "csv-parse": "^4.12.0",
"semver": "^7.3.2",
"tmp": "^0.2.1"
},
"devDependencies": {
+ "@types/csv-parse": "^1.2.2",
"@types/jest": "^26.0.3",
"@types/node": "^14.0.14",
"@types/tmp": "^0.2.0",
diff --git a/src/buildx.ts b/src/buildx.ts
index 34da1c0..32f5fc3 100644
--- a/src/buildx.ts
+++ b/src/buildx.ts
@@ -1,12 +1,12 @@
import fs from 'fs';
import path from 'path';
-import tmp from 'tmp';
+import csvparse from 'csv-parse/lib/sync';
import * as semver from 'semver';
import * as context from './context';
import * as exec from './exec';
export async function getImageIDFile(): Promise {
- return path.join(context.tmpDir, 'iidfile');
+ return path.join(context.tmpDir(), 'iidfile').split(path.sep).join(path.posix.sep);
}
export async function getImageID(): Promise {
@@ -19,13 +19,43 @@ export async function getImageID(): Promise {
export async function getSecret(kvp: string): Promise {
const [key, value] = kvp.split('=');
- const secretFile = tmp.tmpNameSync({
- tmpdir: context.tmpDir
+ const secretFile = context.tmpNameSync({
+ tmpdir: context.tmpDir()
});
await fs.writeFileSync(secretFile, value);
return `id=${key},src=${secretFile}`;
}
+export function isLocalOrTarExporter(outputs: string[]): Boolean {
+ for (let output of csvparse(outputs.join(`\n`), {
+ delimiter: ',',
+ trim: true,
+ columns: false,
+ relax_column_count: true
+ })) {
+ // Local if no type is defined
+ // https://github.com/docker/buildx/blob/d2bf42f8b4784d83fde17acb3ed84703ddc2156b/build/output.go#L29-L43
+ if (output.length == 1 && !output[0].startsWith('type=')) {
+ return true;
+ }
+ for (let [key, value] of output.map(chunk => chunk.split('=').map(item => item.trim()))) {
+ if (key == 'type' && (value == 'local' || value == 'tar')) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+export function hasGitAuthToken(secrets: string[]): Boolean {
+ for (let secret of secrets) {
+ if (secret.startsWith('GIT_AUTH_TOKEN=')) {
+ return true;
+ }
+ }
+ return false;
+}
+
export async function isAvailable(): Promise {
return await exec.exec(`docker`, ['buildx'], true).then(res => {
if (res.stderr != '' && !res.success) {
diff --git a/src/context.ts b/src/context.ts
index b090ff4..94dcdae 100644
--- a/src/context.ts
+++ b/src/context.ts
@@ -2,14 +2,12 @@ import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import * as semver from 'semver';
+import * as tmp from 'tmp';
import * as buildx from './buildx';
import * as core from '@actions/core';
import * as github from '@actions/github';
-export const tmpDir: string = fs.mkdtempSync(path.join(os.tmpdir(), 'docker-build-push-'));
-const defaultContext: string = `https://github.com/${github.context.repo.owner}/${
- github.context.repo.repo
-}.git#${github.context.ref.replace(/^refs\//, '')}`;
+let _defaultContext, _tmpDir: string;
export interface Inputs {
context: string;
@@ -32,12 +30,32 @@ export interface Inputs {
githubToken: string;
}
-export async function getInputs(): Promise {
+export function defaultContext(): string {
+ if (!_defaultContext) {
+ _defaultContext = `https://github.com/${github.context.repo.owner}/${
+ github.context.repo.repo
+ }.git#${github.context?.ref?.replace(/^refs\//, '')}`;
+ }
+ return _defaultContext;
+}
+
+export function tmpDir(): string {
+ if (!_tmpDir) {
+ _tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'docker-build-push-')).split(path.sep).join(path.posix.sep);
+ }
+ return _tmpDir;
+}
+
+export function tmpNameSync(options?: tmp.TmpNameOptions): string {
+ return tmp.tmpNameSync(options);
+}
+
+export async function getInputs(defaultContext: string): Promise {
return {
context: core.getInput('context') || defaultContext,
file: core.getInput('file') || 'Dockerfile',
- buildArgs: await getInputList('build-args'),
- labels: await getInputList('labels'),
+ buildArgs: await getInputList('build-args', true),
+ labels: await getInputList('labels', true),
tags: await getInputList('tags'),
pull: /true/i.test(core.getInput('pull')),
target: core.getInput('target'),
@@ -55,15 +73,15 @@ export async function getInputs(): Promise {
};
}
-export async function getArgs(inputs: Inputs, buildxVersion: string): Promise> {
+export async function getArgs(inputs: Inputs, defaultContext: string, buildxVersion: string): Promise> {
let args: Array = ['buildx'];
- args.push.apply(args, await getBuildArgs(inputs, buildxVersion));
+ args.push.apply(args, await getBuildArgs(inputs, defaultContext, buildxVersion));
args.push.apply(args, await getCommonArgs(inputs));
args.push(inputs.context);
return args;
}
-async function getBuildArgs(inputs: Inputs, buildxVersion: string): Promise> {
+async function getBuildArgs(inputs: Inputs, defaultContext: string, buildxVersion: string): Promise> {
let args: Array = ['build'];
await asyncForEach(inputs.buildArgs, async buildArg => {
args.push('--build-arg', buildArg);
@@ -83,26 +101,25 @@ async function getBuildArgs(inputs: Inputs, buildxVersion: string): Promise 0) {
args.push('--platform', inputs.platforms.join(','));
}
- if (inputs.platforms.length == 0 || semver.satisfies(buildxVersion, '>=0.4.2')) {
- args.push('--iidfile', await buildx.getImageIDFile());
- }
await asyncForEach(inputs.outputs, async output => {
args.push('--output', output);
});
+ if (
+ !buildx.isLocalOrTarExporter(inputs.outputs) &&
+ (inputs.platforms.length == 0 || semver.satisfies(buildxVersion, '>=0.4.2'))
+ ) {
+ args.push('--iidfile', await buildx.getImageIDFile());
+ }
await asyncForEach(inputs.cacheFrom, async cacheFrom => {
args.push('--cache-from', cacheFrom);
});
await asyncForEach(inputs.cacheTo, async cacheTo => {
args.push('--cache-to', cacheTo);
});
- let hasGitAuthToken: boolean = false;
await asyncForEach(inputs.secrets, async secret => {
- if (secret.startsWith('GIT_AUTH_TOKEN=')) {
- hasGitAuthToken = true;
- }
args.push('--secret', await buildx.getSecret(secret));
});
- if (inputs.githubToken && !hasGitAuthToken && inputs.context == defaultContext) {
+ if (inputs.githubToken && !buildx.hasGitAuthToken(inputs.secrets) && inputs.context == defaultContext) {
args.push('--secret', await buildx.getSecret(`GIT_AUTH_TOKEN=${inputs.githubToken}`));
}
if (inputs.file) {
diff --git a/src/exec.ts b/src/exec.ts
index 3d0c4ce..00257e1 100644
--- a/src/exec.ts
+++ b/src/exec.ts
@@ -7,7 +7,7 @@ export interface ExecResult {
stderr: string;
}
-export const exec = async (command: string, args: string[] = [], silent: boolean): Promise => {
+export const exec = async (command: string, args: string[] = [], silent?: boolean): Promise => {
let stdout: string = '';
let stderr: string = '';
diff --git a/src/main.ts b/src/main.ts
index 0b51543..4a0fae7 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -2,31 +2,34 @@ import * as fs from 'fs';
import * as os from 'os';
import * as buildx from './buildx';
import * as context from './context';
+import * as exec from './exec';
import * as stateHelper from './state-helper';
import * as core from '@actions/core';
-import * as exec from '@actions/exec';
async function run(): Promise {
try {
if (os.platform() !== 'linux') {
- core.setFailed('Only supported on linux platform');
- return;
+ throw new Error(`Only supported on linux platform`);
}
if (!(await buildx.isAvailable())) {
- core.setFailed(`Buildx is required. See https://github.com/docker/setup-buildx-action to set up buildx.`);
- return;
+ throw new Error(`Buildx is required. See https://github.com/docker/setup-buildx-action to set up buildx.`);
}
- stateHelper.setTmpDir(context.tmpDir);
+ stateHelper.setTmpDir(context.tmpDir());
const buildxVersion = await buildx.getVersion();
core.info(`📣 Buildx version: ${buildxVersion}`);
- let inputs: context.Inputs = await context.getInputs();
+ const defContext = context.defaultContext();
+ let inputs: context.Inputs = await context.getInputs(defContext);
core.info(`🏃 Starting build...`);
- const args: string[] = await context.getArgs(inputs, buildxVersion);
- await exec.exec('docker', args);
+ const args: string[] = await context.getArgs(inputs, defContext, buildxVersion);
+ await exec.exec('docker', args).then(res => {
+ if (res.stderr != '' && !res.success) {
+ throw new Error(`buildx call failed with: ${res.stderr.match(/(.*)\s*$/)![0]}`);
+ }
+ });
const imageID = await buildx.getImageID();
if (imageID) {
diff --git a/test/Dockerfile-multi-golang b/test/Dockerfile-multi-golang
deleted file mode 100644
index c673b57..0000000
--- a/test/Dockerfile-multi-golang
+++ /dev/null
@@ -1,30 +0,0 @@
-FROM --platform=${BUILDPLATFORM:-linux/amd64} tonistiigi/xx:golang AS xgo
-FROM --platform=${BUILDPLATFORM:-linux/amd64} golang:1.13-alpine AS builder
-
-ENV CGO_ENABLED 0
-ENV GO111MODULE on
-ENV GOPROXY https://goproxy.io
-COPY --from=xgo / /
-
-ARG TARGETPLATFORM
-RUN go env
-
-RUN apk --update --no-cache add \
- build-base \
- gcc \
- git \
- && rm -rf /tmp/* /var/cache/apk/*
-
-WORKDIR /app
-
-ENV DIUN_VERSION="v4.4.0"
-
-RUN git clone --branch ${DIUN_VERSION} https://github.com/crazy-max/diun .
-RUN go mod download
-RUN go build -ldflags "-w -s -X 'main.version=test'" -v -o diun cmd/main.go
-
-FROM --platform=${TARGETPLATFORM:-linux/amd64} alpine:latest
-
-COPY --from=builder /app/diun /usr/local/bin/diun
-COPY --from=builder /usr/local/go/lib/time/zoneinfo.zip /usr/local/go/lib/time/zoneinfo.zip
-RUN diun --version
diff --git a/yarn.lock b/yarn.lock
index 89de20e..b4fce76 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -636,6 +636,13 @@
resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0"
integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==
+"@types/csv-parse@^1.2.2":
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/@types/csv-parse/-/csv-parse-1.2.2.tgz#713486235759d615dc8e6a6a979170ada76701d5"
+ integrity sha512-k33tLtRKTQxf7hQfMlkWoS2TQYsnpk1ibZN+rzbuCkeBs8m23nHTeDTF1wb/e7/MSLdtgCzqu3oM1I101kd6yw==
+ dependencies:
+ csv-parse "*"
+
"@types/graceful-fs@^4.1.2":
version "4.1.3"
resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.3.tgz#039af35fe26bec35003e8d86d2ee9c586354348f"
@@ -1229,6 +1236,11 @@ cssstyle@^2.2.0:
dependencies:
cssom "~0.3.6"
+csv-parse@*, csv-parse@^4.12.0:
+ version "4.12.0"
+ resolved "https://registry.yarnpkg.com/csv-parse/-/csv-parse-4.12.0.tgz#fd42d6291bbaadd51d3009f6cadbb3e53b4ce026"
+ integrity sha512-wPQl3H79vWLPI8cgKFcQXl0NBgYYEqVnT1i6/So7OjMpsI540oD7p93r3w6fDSyPvwkTepG05F69/7AViX2lXg==
+
dashdash@^1.12.0:
version "1.14.1"
resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0"