
Merge branch 'master' into AddDockerfilePathToImageLabel

Jyotsna 2020-11-27 13:54:17 +05:30 committed by GitHub
commit 021466127d
14 changed files with 698 additions and 183 deletions

View file

@ -15,7 +15,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
with:
path: action
-
@ -41,7 +41,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
with:
path: action
-
@ -95,7 +95,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
with:
path: action
-
@ -121,6 +121,14 @@ jobs:
localhost:5000/name/app:1.0.0
secrets: |
GIT_AUTH_TOKEN=${{ github.token }}
"MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc"
FOO=bar
"EMPTYLINE=aaaa
bbbb
ccc"
-
name: Inspect
run: |
@ -156,7 +164,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
@ -203,7 +211,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
@ -250,7 +258,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
-
name: Build
id: docker_build
@ -282,7 +290,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
-
name: Build
uses: ./
@ -319,7 +327,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
@ -372,7 +380,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
@ -470,7 +478,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
@ -538,7 +546,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1

83
.github/workflows/e2e.yml vendored Normal file
View file

@ -0,0 +1,83 @@
name: e2e
on:
workflow_dispatch:
schedule:
- cron: '0 10 * * *' # everyday at 10am
push:
branches:
- master
tags:
- v*
jobs:
docker:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
include:
-
registry: ''
slug: ghactionstest/ghactionstest
username_secret: DOCKERHUB_USERNAME
password_secret: DOCKERHUB_TOKEN
-
registry: ghcr.io
slug: ghcr.io/docker-ghactiontest/test
username_secret: GHCR_USERNAME
password_secret: GHCR_PAT
-
registry: registry.gitlab.com
slug: registry.gitlab.com/test1716/test
username_secret: GITLAB_USERNAME
password_secret: GITLAB_TOKEN
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Docker meta
id: docker_meta
uses: crazy-max/ghaction-docker-meta@v1
with:
images: ${{ matrix.slug }}
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
-
name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
-
name: Login to Registry
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
registry: ${{ matrix.registry }}
username: ${{ secrets[matrix.username_secret] }}
password: ${{ secrets[matrix.password_secret] }}
-
name: Build and push
uses: ./
with:
context: ./test
file: ./test/Dockerfile-multi
platforms: linux/386,linux/amd64,linux/arm/v6,linux/arm/v7,linux/arm64,linux/ppc64le,linux/s390x
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}
-
name: Inspect image
if: github.event_name != 'pull_request'
run: |
docker pull ${{ matrix.slug }}:${{ steps.docker_meta.outputs.version }}
docker image inspect ${{ matrix.slug }}:${{ steps.docker_meta.outputs.version }}
-
name: Check manifest
if: github.event_name != 'pull_request'
run: |
docker buildx imagetools inspect ${{ matrix.slug }}:${{ steps.docker_meta.outputs.version }}
-
name: Dump context
if: always()
uses: crazy-max/ghaction-dump-context@v1

View file

@ -25,7 +25,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
-
name: Docker meta
id: docker_meta

View file

@ -14,7 +14,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
-
name: Run Labeler
uses: crazy-max/ghaction-github-labeler@v3.1.0
uses: crazy-max/ghaction-github-labeler@v3

View file

@ -14,7 +14,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
-
name: Validate
run: docker buildx bake validate
@ -27,7 +27,7 @@ jobs:
steps:
-
name: Checkout
uses: actions/checkout@v2.3.3
uses: actions/checkout@v2
-
name: Install
run: yarn install
@ -36,7 +36,7 @@ jobs:
run: yarn run test
-
name: Upload coverage
uses: codecov/codecov-action@v1.0.14
uses: codecov/codecov-action@v1
if: success()
with:
token: ${{ secrets.CODECOV_TOKEN }}

View file

@ -44,6 +44,8 @@ ___
* [Customizing](#customizing)
* [inputs](#inputs)
* [outputs](#outputs)
* [Notes](#notes)
* [Multi-line secret value](#multi-line-secret-value)
* [Troubleshooting](#troubleshooting)
* [Keep up-to-date with GitHub Dependabot](#keep-up-to-date-with-github-dependabot)
* [Limitation](#limitation)
@ -606,7 +608,7 @@ Following inputs can be used as `step.with` keys
|---------------------|----------|------------------------------------|
| `builder` | String | Builder instance (see [setup-buildx](https://github.com/docker/setup-buildx-action) action) |
| `context` | String | Build's context is the set of files located in the specified [`PATH` or `URL`](https://docs.docker.com/engine/reference/commandline/build/) (default [Git context](#git-context)) |
| `file` | String | Path to the Dockerfile (default `Dockerfile`) |
| `file` | String | Path to the Dockerfile (default `./Dockerfile`) |
| `build-args` | List | List of build-time variables |
| `labels` | List | List of metadata for an image |
| `tags` | List/CSV | List of tags |
@ -631,6 +633,36 @@ Following outputs are available
|---------------|---------|---------------------------------------|
| `digest` | String | Image content-addressable identifier also called a digest |
## Notes
### Multi-line secret value
To handle a multi-line value for a secret, place the key-value pair between quotes:
```yaml
secrets: |
"MYSECRET=${{ secrets.GPG_KEY }}"
GIT_AUTH_TOKEN=abcdefghi,jklmno=0123456789
"MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc"
FOO=bar
"EMPTYLINE=aaaa
bbbb
ccc"
```
| Key | Value |
|--------------------|--------------------------------------------------|
| `MYSECRET` | `***********************` |
| `GIT_AUTH_TOKEN` | `abcdefghi,jklmno=0123456789` |
| `MYSECRET` | `aaaaaaaa\nbbbbbbb\nccccccccc` |
| `FOO` | `bar` |
| `EMPTYLINE` | `aaaa\n\nbbbb\nccc` |
> Note: all quote characters inside a value must be doubled to escape them.
## Troubleshooting
See [TROUBLESHOOTING.md](TROUBLESHOOTING.md)
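
The quoting rules documented in the multi-line secret section above follow from this change parsing the `secrets` input with `csv-parse` instead of splitting on newlines. A minimal sketch of that parsing behavior, assuming `csv-parse` ^4.14 (as pinned in `package.json` below); the sample value mirrors the README example:

```ts
import csvparse from 'csv-parse/lib/sync';

const secretsInput = `GIT_AUTH_TOKEN=abcdefghi,jklmno=0123456789
"MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc"
FOO=bar`;

// Each unquoted line is one record; a quoted entry may span several lines,
// and a literal quote inside it is written as "" (doubled).
const records = csvparse(secretsInput, {
  columns: false,
  relaxColumnCount: true,
  skipLinesWithEmptyValues: true
});

console.log(records);
// [ [ 'GIT_AUTH_TOKEN=abcdefghi', 'jklmno=0123456789' ],
//   [ 'MYSECRET=aaaaaaaa\nbbbbbbb\nccccccccc' ],
//   [ 'FOO=bar' ] ]
// getInputList() re-joins the comma-separated fields of a record when
// ignoreComma is set, so GIT_AUTH_TOKEN keeps its embedded comma.
```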

View file

@ -13,6 +13,7 @@
* Add [`outputs`](https://github.com/docker/buildx#-o---outputpath-typetypekeyvalue) input
* Add [`cache-from`](https://github.com/docker/buildx#--cache-fromnametypetypekeyvalue) input (`cache_froms` removed)
* Add [`cache-to`](https://github.com/docker/buildx#--cache-tonametypetypekeyvalue) input
* Rename `build_args` input to `build-args` for consistency with other Docker build tools
* Add `secrets` input
* Review `tags` input
* Remove `repository` input. See [Simple workflow](#simple-workflow) for migration
@ -139,7 +140,7 @@ steps:
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.prep.outputs.tags }}
labels: |
org.opencontainers.image.source=${{ github.event.repository.clone_url }}
org.opencontainers.image.source=${{ github.event.repository.html_url }}
org.opencontainers.image.created=${{ steps.prep.outputs.created }}
org.opencontainers.image.revision=${{ github.sha }}
```

View file

@ -1,9 +1,10 @@
import * as fs from 'fs';
import * as path from 'path';
import * as semver from 'semver';
import * as buildx from '../src/buildx';
import * as docker from '../src/docker';
import * as context from '../src/context';
import * as docker from '../src/docker';
const tmpNameSync = path.join('/tmp/.docker-build-push-jest', '.tmpname-jest').split(path.sep).join(path.posix.sep);
const digest = 'sha256:bfb45ab72e46908183546477a08f8867fc40cebadd00af54b071b097aed127a9';
@ -118,15 +119,23 @@ describe('parseVersion', () => {
describe('getSecret', () => {
test.each([
['A_SECRET', 'abcdef0123456789'],
['GIT_AUTH_TOKEN', 'abcdefghijklmno=0123456789'],
['MY_KEY', 'c3RyaW5nLXdpdGgtZXF1YWxzCg==']
])('given %p key and %p secret', async (key, secret) => {
const secretArgs = await buildx.getSecret(`${key}=${secret}`);
console.log(`secretArgs: ${secretArgs}`);
expect(secretArgs).toEqual(`id=${key},src=${tmpNameSync}`);
const secretContent = await fs.readFileSync(tmpNameSync, 'utf-8');
console.log(`secretValue: ${secretContent}`);
expect(secretContent).toEqual(secret);
['A_SECRET=abcdef0123456789', 'A_SECRET', 'abcdef0123456789', false],
['GIT_AUTH_TOKEN=abcdefghijklmno=0123456789', 'GIT_AUTH_TOKEN', 'abcdefghijklmno=0123456789', false],
['MY_KEY=c3RyaW5nLXdpdGgtZXF1YWxzCg==', 'MY_KEY', 'c3RyaW5nLXdpdGgtZXF1YWxzCg==', false],
['aaaaaaaa', '', '', true],
['aaaaaaaa=', '', '', true],
['=bbbbbbb', '', '', true]
])('given %p key and %p secret', async (kvp, key, secret, invalid) => {
try {
const secretArgs = await buildx.getSecret(kvp);
expect(true).toBe(!invalid);
console.log(`secretArgs: ${secretArgs}`);
expect(secretArgs).toEqual(`id=${key},src=${tmpNameSync}`);
const secretContent = await fs.readFileSync(tmpNameSync, 'utf-8');
console.log(`secretValue: ${secretContent}`);
expect(secretContent).toEqual(secret);
} catch (err) {
expect(true).toBe(invalid);
}
});
});

View file

@ -1,7 +1,115 @@
import * as fs from 'fs';
import * as path from 'path';
import * as context from '../src/context';
const pgp = `-----BEGIN PGP PRIVATE KEY BLOCK-----
lQdGBF6tzaABEACjFbX7PFEG6vDPN2MPyxYW7/3o/sonORj4HXUFjFxxJxktJ3x3
N1ayHPJ1lqIeoiY7jVbq0ZdEVGkd3YsKG9ZMdZkzGzY6PQPC/+M8OnzOiOPwUdWc
+Tdhh115LvVz0MMKYiab6Sn9cgxj9On3LCQKpjvMDpPo9Ttf6v2GQIw8h2ACvdzQ
71LtIELS/I+dLbfZiwpUu2fhQT13EJkEnYMOYwM5jNUd66P9itUc7MrOWjkicrKP
oF1dQaCM+tuKuxvD8WLdiwU5x60NoGkJHHUehKQXl2dVzjpqEqHKEBJt9tfJ9lpE
YIisgwB8o3pes0fgCehjW2zI95/o9+ayJ6nl4g5+mSvWRXEu66h71nwM0Yuvquk8
3me7qhYfDrDdCwcxS5BS1hwakTgUQLD99FZjbx1j8sq96I65O0GRdyU2PR8KIjwu
JrkTH4ZlKxK3FQghUhFoA5GkiDb+eClmRMSni5qg+81T4XChmUkEprA3eWCHL+Ma
xRNNxLS+r6hH9HG5JBxpV3iaTI9HHpnQKhEeaLXqsUTDZliN9hP7Ywo8bpUB8j2d
oWYwDV4dPyMKr6Fb8RDCh2q5gJGbVp8w/NmmBTeL+IP2fFggJkRfyumv3Ul7x66L
tBFQ4rYo4JUUrGweSTneG6REIgxH66hIrNl6Vo/D1ZyknTe1dMOu/BTkkQARAQAB
/gcDAqra8KO+h3bfyu90vxTL1ro4x/x9il7VBcWlIR4cBP7Imgxv+T4hwPIu8P1x
lOlxLNWegFOV0idoTy1o3VLLBev/F+IlspX4A+2XEIddR6nZnKFi0Lv2L4TKgE9E
VJJTszmviDIRLMLN9dWzDfA8hj5tR5Inot92CHRF414AS22JHvlhbFSLQnjqsN+C
n1cQpNOJhkxsSfZsxjnFa/70y/u8v0o8mzyLZmk9HpzRHGzoz8IfpLp8OTqBR9u6
zzoKLy16zZO55OKbj7h8uVZvDUq9l8iDICpqWMdZqBJIl56MBexYKgYxh3YO/8v2
oXli+8Xuaq5QLiCN3yT7IbKoYzplnFfaJwFiMh7R1iPLXaYAZ0qdRijlbtseTK1m
oHNkwUbxVzjkh4LfE8UpmMwZn5ZjWni3230SoiXuKy0OHkGvwGvWWAL1mEuoYuUI
mFMcH5MnixP8oQYZKDj2IR/yEeOpdU6B/tr3Tk1NidLf7pUMqG7Ff1NU6dAUeBpa
9xahITMjHvrhgMISY4IYZep5cEnVw8lQTpUJtW/ePMzrFhu3sA7oNdj9joW/VMfz
H7MHwwavtICsYqoqV3lnjX4EC9dW6o8PTUg2u956dmtK7KAyUK/+w2aLNGT28ChN
jhRYHvHzB9Kw5asqI/lTM49eqslBqYQMTTjdBphkYuSZQzNMf291j/ZmoLhD1A1a
S8tUnNygKV4D1cJYgSXfzhFoU8ib/0SPo+KqQ+CzGS+wxXg6WNBA6wepTjpnVVx3
4JADP8IJcDC3P0iwAreWjSy15F1cvemFFB0SLNUkyZGzsxtKzbM1+8khl68+eazC
LzRj0rxfIF5znWjX1QFhKxCk6eF0IWDY0+b3DBkmChME9YDXJ3TthcqA7JgcX4JI
M4/wdqhgerJYOmj+i2Q0M+Bu02icOJYMwTMMsDVl7XGHkaCuRgZ54eZAUH7JFwUm
1Ct3tcaqiTMmz0ngHVqBTauzgqKDvzwdVqdfg05H364nJMay/3omR6GayIb5CwSo
xdNVwG3myPPradT9MP09mDr4ys2zcnQmCkvTVBF6cMZ1Eh6PQQ8CyQWv0zkaBnqj
JrM1hRpgW4ZlRosSIjCaaJjolN5QDcXBM9TbW9ww+ZYstazN2bV1ZQ7BEjlHQPa1
BhzMsvqkbETHsIpDNF52gZKn3Q9eIX05BeadzpHUb5/XOheIHVIdhSaTlgl/qQW5
hQgPGSzSV6KhXEY7aevTdvOgq++WiELkjfz2f2lQFesTjFoQWEvxVDUmLxHtEhaN
DOuh4H3mX5Opn3pLQmqWVhJTbFdx+g5qQd0NCW4mDaTFWTRLFLZQsSJxDSeg9xrY
gmaii8NhMZRwquADW+6iU6KfraBhngi7HRz4TfqPr9ma/KUY464cqim1fnwXejyx
jsb5YHR9R66i+F6P/ysF5w+QuVdDt1fnf9GLay0r6qxpA8ft2vGPcDs4806Huj+7
Aq5VeJaNkCuh3GR3xVnCFAz/7AtkO6xKuZm8B3q904UuMdSmkhWbaobIuF/B2B6S
eawIXQHEOplK3ic26d8Ckf4gbjeORfELcMAEi5nGXpTThCdmxQApCLxAYYnTfQT1
xhlDwT9xPEabo98mIwJJsAU5VsTDYW+qfo4qIx8gYoSKc9Xu3yVh3n+9k43Gcm5V
9lvK1slijf+TzODZt/jsmkF8mPjXyP5KOI+xQp/m4PxW3pp57YrYj/Rnwga+8DKX
jMsW7mLAAZ/e+PY6z/s3x1Krfk+Bb5Ph4mI0zjw5weQdtyEToRgveda0GEpvZSBU
ZXN0ZXIgPGpvZUBmb28uYmFyPokCNgQQAQgAIAUCXq3NoAYLCQcIAwIEFQgKAgQW
AgEAAhkBAhsDAh4BAAoJEH2FHrctc72gxtQP/AulaClIcn/kDt43mhYnyLglPfbo
AqPlU26chXolBg0Wo0frFY3aIs5SrcWEf8aR4XLwCFGyi3vya0CUxjghN5tZBYqo
vswbT00zP3ohxxlJFCRRR9bc7OZXCgTddtfVf6EKrUAzIkbWyAhaJnwJy/1UGpSw
SEO/KpastrVKf3sv1wqOeFQ4DFyjaNda+xv3dVWS8db7KogqJiPFZXrQK3FKVIxS
fxRSmKaYN7//d+xwVAEY++RrnL/o8B2kV6N68cCpQWJELyYnJzis9LBcWd/3wiYh
efTyY+ePKUjcB+kEZnyJfLc7C2hll2e7UJ0fxv+k8vHReRhrNWmGRXsjNRxiw3U0
hfvxD/C8nyqAbeTHp4XDX78Tc3XCysAqIYboIL+RyewDMjjLj5vzUYAdUdtyNaD7
C6M2R6pN1GAt52CJmC/Z6F7W7GFGoYOdEkVdMQDsjCwScyEUNlGj9Zagw5M2EgSe
6gaHgMgTzsMzCc4W6WV5RcS55cfDNOXtxPsMJTt4FmXrjl11prBzpMfpU5a9zxDZ
oi54ZZ8VPE6jsT4Lzw3sni3c83wm28ArM20AzZ1vh7fk3Sfd0u4Yaz7s9JlEm5+D
34tEyli28+QjCQc18EfQUiJqiYEJRxJXJ3esvMHfYi45pV/Eh5DgRW1305fUJV/6
+rGpg0NejsHoZdZPnQdGBF6tzaABEAC4mVXTkVk6Kdfa4r5zlzsoIrR27laUlMkb
OBMt+aokqS+BEbmTnMg6xIAmcUT5uvGAc8S/WhrPoYfc15fTUyHIz8ZbDoAg0LO6
0Io4VkAvNJNEnsSV9VdLBh/XYlc4K49JqKyWTL4/FJFAGbsmHY3b+QU90AS6FYRv
KeBAoiyebrjx0vmzb8E8h3xthVLN+AfMlR1ickY62zvnpkbncSMY/skur1D2KfbF
3sFprty2pEtjFcyB5+18l2IyyHGOlEUw1PZdOAV4/Myh1EZRgYBPs80lYTJALCVF
IdOakH33WJCImtNZB0AbDTABG+JtMjQGscOa0qzf1Y/7tlhgCrynBBdaIJTx95TD
21BUHcHOu5yTIS6Ulysxfkv611+BiOKHgdq7DVGP78VuzA7bCjlP1+vHqIt3cnIa
t2tEyuZ/XF4uc3/i4g0uP9r7AmtET7Z6SKECWjpVv+UEgLx5Cv+ql+LSKYQMvU9a
i3B1F9fatn3FSLVYrL4aRxu4TSw9POb0/lgDNmN3lGQOsjGCZPibkHjgPEVxKuiq
9Oi38/VTQ0ZKAmHwBTq1WTZIrPrCW0/YMQ6yIJZulwQ9Yx1cgzYzEfg04fPXlXMi
vkvNpKbYIICzqj0/DVztz9wgpW6mnd0A2VX2dqbMM0fJUCHA6pj8AvXY4R+9Q4rj
eWRK9ycInQARAQAB/gcDApjt7biRO0PEyrrAiUwDMsJL4/CVMu11qUWEPjKe2Grh
ZTW3N+m3neKPRULu+LUtndUcEdVWUCoDzAJ7MwihZtV5vKST/5Scd2inonOaJqoA
nS3wnEMN/Sc93HAZiZnFx3NKjQVNCwbuEs45mXkkcjLm2iadrTL8fL4acsu5IsvD
LbDwVOPeNnHKl6Hr20e39fK0FuJEyH49JM6U3B1/8385sJB8+E24+hvSF81aMddh
Ne4Bc3ZYiYaKxe1quPNKC0CQhAZiT7LsMfkInXr0hY1I+kISNXEJ1dPYOEWiv0Ze
jD5Pupn34okKNEeBCx+dK8BmUCi6Jgs7McUA7hN0D/YUS++5fuR55UQq2j8Ui0tS
P8GDr86upH3PgEL0STh9fYfJ7TesxurwonWjlmmT62Myl4Pr+RmpS6PXOnhtcADm
eGLpzhTveFj4JBLMpyYHgBTqcs12zfprATOpsI/89kmQoGCZpG6+AbfSHqNNPdy2
eqUCBhOZlIIda1z/cexmU3f/gBqyflFf8fkvmlO4AvI8aMH3OpgHdWnzh+AB51xj
kmdD/oWel9v7Dz4HoZUfwFaLZ0fE3P9voD8e+sCwqQwVqRY4L/BOYPD5noVOKgOj
ABNKu5uKrobj6rFUi6DTUCjFGcmoF1Sc06xFNaagUNggRbmlC/dz22RWdDUYv5ra
N6TxIDkGC0cK6ujyK0nes3DN0aHjgwWuMXDYkN3UckiebI4Cv/eF9jvUKOSiIcy1
RtxdazZS4dYg2LBMeJKVkPi5elsNyw2812nEY3du/nEkQYXfYgWOF27OR+g4Y9Yw
1BiqJ1TTjbQnd/khOCrrbzDH1mw00+1XVsT6wjObuYqqxPPS87UrqmMf6OdoYfPm
zEOnNLBnsJ5VQM3A3pcT40RfdBrZRO8LjGhzKTreyq3C+jz0RLa5HNE8GgOhGyck
ME4h+RhXlE8KGM+tTo6PA1NJSrEt+8kZzxjP4rIEn0aVthCkNXK12inuXtnHm0ao
iLUlQOsfPFEnzl0TUPd7+z7j/wB+XiKU/AyEUuB0mvdxdKtqXvajahOyhLjzHQhz
ZnNlgANGtiqcSoJmkJ8yAvhrtQX51fQLftxbArRW1RYk/5l+Gy3azR+gUC17M6JN
jrUYxn0zlAxDGFH7gACHUONwVekcuEffHzgu2lk7MyO1Y+lPnwabqjG0eWWHuU00
hskJlXyhj7DeR12bwjYkyyjG62GvOH02g3OMvUgNGH+K321Dz539csCh/xwtg7Wt
U3YAphU7htQ1dPDfk1IRs7DQo2L+ZTE57vmL5m0l6fTataEWBPUXkygfQFUJOM6Q
yY76UEZww1OSDujNeY171NSTzXCVkUeAdAMXgjaHXWLK2QUQUoXbYX/Kr7Vvt9Fu
Jh6eGjjp7dSjQ9+DW8CAB8vxd93gsQQGWYjmGu8khkEmx6OdZhmSbDbe915LQTb9
sPhk2s5/Szsvr5W2JJ2321JI6KXBJMZvPC5jEBWmRzOYkRd2vloft+CSMfXF+Zfd
nYtc6R3dvb9vcjo+a9wFtfcoDsO0MaPSM+9GB25MamdatmGX6iLOy9Re1UABwUi/
VhTWNkP5uzqx0sDwHEIa2rYOwxpIZDwwjM3oOASCW1DDBQ0BI9KNjfIeL3ubx2mS
2x8hFU9qSK4umoDNbzOqGPSlkdbiPcNjF2ZcSN1qQZiYdwLL5dw6APNyBVjxTN1J
gkCdJ/HwAY+r93Lbl5g8gz8d0vJEyfn//34sn9u+toSTw55GcG9Ks1kSKIeDNh0h
MiPm3HmJAh8EGAEIAAkFAl6tzaACGwwACgkQfYUety1zvaBV9hAAgliX36pXJ59g
3I9/4R68e/fGg0FMM6D+01yCeiKApOYRrJ0cYKn7ITDYmHhlGGpBAie90UsqX12h
hdLP7LoQx7sjTyzQt6JmpA8krIwi2ON7FKBkdYb8IYx4mE/5vKnYT4/SFnwTmnZY
+m+NzK2U/qmhq8JyO8gozdAKJUcgz49IVv2Ij0tQ4qaPbyPwQxIDyKnT758nJhB1
jTqo+oWtER8q3okzIlqcArqn5rDaNJx+DRYL4E/IddyHQAiUWUka8usIUqeW5reu
zoPUE2CCfOJSGArkqHQQqMx0WEzjQTwAPaHrQbera4SbiV/o4CLCV/u5p1Qnig+Q
iUsakmlD299t//125LIQEa5qzd9hRC7u1uJS7VdW8eGIEcZ0/XT/sr+z23z0kpZH
D3dXPX0BwM4IP9xu31CNg10x0rKwjbxy8VaskFEelpqpu+gpAnxqMd1evpeUHcOd
r5RgPgkNFfba9Nbxf7uEX+HOmsOM+kdtSmdGIvsBZjVnW31nnoDMp49jG4OynjrH
cRuoM9sxdr6UDqb22CZ3/e0YN4UaZM3YDWMVaP/QBVgvIFcdByqNWezpd9T4ZUII
MZlaV1uRnHg6B/zTzhIdMM80AXz6Uv6kw4S+Lt7HlbrnMT7uKLuvzH7cle0hcIUa
PejgXO0uIRolYQ3sz2tMGhx1MfBqH64=
=WbwB
-----END PGP PRIVATE KEY BLOCK-----`;
jest.spyOn(context, 'defaultContext').mockImplementation((): string => {
return 'https://github.com/docker/build-push-action.git#test-jest';
});
@ -162,7 +270,15 @@ describe('getArgs', () => {
['context', 'https://github.com/docker/build-push-action.git#heads/master'],
['tag', 'localhost:5000/name/app:latest'],
['platforms', 'linux/amd64,linux/arm64'],
['secrets', 'GIT_AUTH_TOKEN=abcdefghijklmno=0123456789'],
['secrets', `GIT_AUTH_TOKEN=abcdefghi,jklmno=0123456789
"MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc"
FOO=bar
"EMPTYLINE=aaaa
bbbb
ccc"`],
['file', './test/Dockerfile'],
['builder', 'builder-git-context-2'],
['push', 'true'],
@ -175,6 +291,9 @@ describe('getArgs', () => {
'--platform', 'linux/amd64,linux/arm64',
'--iidfile', '/tmp/.docker-build-push-jest/iidfile',
'--secret', 'id=GIT_AUTH_TOKEN,src=/tmp/.docker-build-push-jest/.tmpname-jest',
'--secret', 'id=MYSECRET,src=/tmp/.docker-build-push-jest/.tmpname-jest',
'--secret', 'id=FOO,src=/tmp/.docker-build-push-jest/.tmpname-jest',
'--secret', 'id=EMPTYLINE,src=/tmp/.docker-build-push-jest/.tmpname-jest',
'--file', './test/Dockerfile',
'--builder', 'builder-git-context-2',
'--push',
@ -221,7 +340,7 @@ describe('getArgs', () => {
['context', '.'],
['load', 'true'],
['trace-data', 'true']
]),
]),
[
'buildx',
'build',
@ -230,6 +349,41 @@ describe('getArgs', () => {
'--file', 'Dockerfile',
'--load',
'.'
]
]
,
[
'0.4.2',
new Map<string, string>([
['context', 'https://github.com/docker/build-push-action.git#heads/master'],
['tag', 'localhost:5000/name/app:latest'],
['platforms', 'linux/amd64,linux/arm64'],
['secrets', `GIT_AUTH_TOKEN=abcdefghi,jklmno=0123456789
MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc
FOO=bar
EMPTYLINE=aaaa
bbbb
ccc`],
['file', './test/Dockerfile'],
['builder', 'builder-git-context-2'],
['push', 'true']
]),
[
'buildx',
'build',
'--platform', 'linux/amd64,linux/arm64',
'--iidfile', '/tmp/.docker-build-push-jest/iidfile',
'--secret', 'id=GIT_AUTH_TOKEN,src=/tmp/.docker-build-push-jest/.tmpname-jest',
'--secret', 'id=MYSECRET,src=/tmp/.docker-build-push-jest/.tmpname-jest',
'--secret', 'id=FOO,src=/tmp/.docker-build-push-jest/.tmpname-jest',
'--secret', 'id=EMPTYLINE,src=/tmp/.docker-build-push-jest/.tmpname-jest',
'--file', './test/Dockerfile',
'--builder', 'builder-git-context-2',
'--push',
'https://github.com/docker/build-push-action.git#heads/master'
]
]
])(
@ -249,68 +403,167 @@ describe('getArgs', () => {
});
describe('getInputList', () => {
it('handles single line correctly', async () => {
it('single line correctly', async () => {
await setInput('foo', 'bar');
const res = await context.getInputList('foo');
console.log(res);
expect(res).toEqual(['bar']);
});
it('handles multiple lines correctly', async () => {
it('multiline correctly', async () => {
setInput('foo', 'bar\nbaz');
const res = await context.getInputList('foo');
console.log(res);
expect(res).toEqual(['bar', 'baz']);
});
it('remove empty lines correctly', async () => {
it('empty lines correctly', async () => {
setInput('foo', 'bar\n\nbaz');
const res = await context.getInputList('foo');
console.log(res);
expect(res).toEqual(['bar', 'baz']);
});
it('handles comma correctly', async () => {
it('comma correctly', async () => {
setInput('foo', 'bar,baz');
const res = await context.getInputList('foo');
console.log(res);
expect(res).toEqual(['bar', 'baz']);
});
it('remove empty result correctly', async () => {
it('empty result correctly', async () => {
setInput('foo', 'bar,baz,');
const res = await context.getInputList('foo');
console.log(res);
expect(res).toEqual(['bar', 'baz']);
});
it('handles different new lines correctly', async () => {
it('different new lines correctly', async () => {
setInput('foo', 'bar\r\nbaz');
const res = await context.getInputList('foo');
console.log(res);
expect(res).toEqual(['bar', 'baz']);
});
it('handles different new lines and comma correctly', async () => {
it('different new lines and comma correctly', async () => {
setInput('foo', 'bar\r\nbaz,bat');
const res = await context.getInputList('foo');
console.log(res);
expect(res).toEqual(['bar', 'baz', 'bat']);
});
it('handles multiple lines and ignoring comma correctly', async () => {
it('multiline and ignoring comma correctly', async () => {
setInput('cache-from', 'user/app:cache\ntype=local,src=path/to/dir');
const res = await context.getInputList('cache-from', true);
console.log(res);
expect(res).toEqual(['user/app:cache', 'type=local,src=path/to/dir']);
});
it('handles different new lines and ignoring comma correctly', async () => {
it('different new lines and ignoring comma correctly', async () => {
setInput('cache-from', 'user/app:cache\r\ntype=local,src=path/to/dir');
const res = await context.getInputList('cache-from', true);
console.log(res);
expect(res).toEqual(['user/app:cache', 'type=local,src=path/to/dir']);
});
it('multiline values', async () => {
setInput(
'secrets',
`GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789
"MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc"
FOO=bar`
);
const res = await context.getInputList('secrets', true);
console.log(res);
expect(res).toEqual([
'GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789',
`MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc`,
'FOO=bar'
]);
});
it('multiline values with empty lines', async () => {
setInput(
'secrets',
`GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789
"MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc"
FOO=bar
"EMPTYLINE=aaaa
bbbb
ccc"`
);
const res = await context.getInputList('secrets', true);
console.log(res);
expect(res).toEqual([
'GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789',
`MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc`,
'FOO=bar',
`EMPTYLINE=aaaa
bbbb
ccc`
]);
});
it('multiline values without quotes', async () => {
setInput(
'secrets',
`GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789
MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc
FOO=bar`
);
const res = await context.getInputList('secrets', true);
console.log(res);
expect(res).toEqual([
'GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789',
'MYSECRET=aaaaaaaa',
'bbbbbbb',
'ccccccccc',
'FOO=bar'
]);
});
it('large multiline values', async () => {
setInput(
'secrets',
`"GPG_KEY=${pgp}"
FOO=bar`
);
const res = await context.getInputList('secrets', true);
console.log(res);
expect(res).toEqual([`GPG_KEY=${pgp}`, 'FOO=bar']);
});
it('multiline values escape quotes', async () => {
setInput(
'secrets',
`GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789
"MYSECRET=aaaaaaaa
bbbb""bbb
ccccccccc"
FOO=bar`
);
const res = await context.getInputList('secrets', true);
console.log(res);
expect(res).toEqual([
'GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789',
`MYSECRET=aaaaaaaa
bbbb\"bbb
ccccccccc`,
'FOO=bar'
]);
});
});
describe('asyncForEach', () => {

360
dist/index.js generated vendored
View file

@ -4224,9 +4224,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseVersion = exports.getVersion = exports.isAvailable = exports.hasGitAuthToken = exports.isLocalOrTarExporter = exports.getSecret = exports.getImageID = exports.getImageIDFile = void 0;
const sync_1 = __importDefault(__webpack_require__(750));
const fs_1 = __importDefault(__webpack_require__(747));
const path_1 = __importDefault(__webpack_require__(622));
const sync_1 = __importDefault(__webpack_require__(750));
const semver = __importStar(__webpack_require__(383));
const context = __importStar(__webpack_require__(842));
const exec = __importStar(__webpack_require__(757));
@ -4251,6 +4251,9 @@ function getSecret(kvp) {
const delimiterIndex = kvp.indexOf('=');
const key = kvp.substring(0, delimiterIndex);
const value = kvp.substring(delimiterIndex + 1);
if (key.length == 0 || value.length == 0) {
throw new Error(`${kvp} is not a valid secret`);
}
const secretFile = context.tmpNameSync({
tmpdir: context.tmpDir()
});
@ -4264,7 +4267,7 @@ function isLocalOrTarExporter(outputs) {
delimiter: ',',
trim: true,
columns: false,
relax_column_count: true
relaxColumnCount: true
})) {
// Local if no type is defined
// https://github.com/docker/buildx/blob/d2bf42f8b4784d83fde17acb3ed84703ddc2156b/build/output.go#L29-L43
@ -10817,16 +10820,44 @@ const nl = 10
const np = 12
const cr = 13
const space = 32
const bom_utf8 = Buffer.from([239, 187, 191])
const boms = {
// Note, the following are equals:
// Buffer.from("\ufeff")
// Buffer.from([239, 187, 191])
// Buffer.from('EFBBBF', 'hex')
'utf8': Buffer.from([239, 187, 191]),
// Note, the following are equals:
// Buffer.from "\ufeff", 'utf16le
// Buffer.from([255, 254])
'utf16le': Buffer.from([255, 254])
}
class Parser extends Transform {
constructor(opts = {}){
super({...{readableObjectMode: true}, ...opts})
super({...{readableObjectMode: true}, ...opts, encoding: null})
this.__originalOptions = opts
this.__normalizeOptions(opts)
}
__normalizeOptions(opts){
const options = {}
// Merge with user options
for(let opt in opts){
options[underscore(opt)] = opts[opt]
}
// Normalize option `encoding`
// Note: defined first because other options depends on it
// to convert chars/strings into buffers.
if(options.encoding === undefined || options.encoding === true){
options.encoding = 'utf8'
}else if(options.encoding === null || options.encoding === false){
options.encoding = null
}else if(typeof options.encoding !== 'string' && options.encoding !== null){
throw new CsvError('CSV_INVALID_OPTION_ENCODING', [
'Invalid option encoding:',
'encoding must be a string or null to return a buffer,',
`got ${JSON.stringify(options.encoding)}`
], options)
}
// Normalize option `bom`
if(options.bom === undefined || options.bom === null || options.bom === false){
options.bom = false
@ -10834,7 +10865,7 @@ class Parser extends Transform {
throw new CsvError('CSV_INVALID_OPTION_BOM', [
'Invalid option bom:', 'bom must be true,',
`got ${JSON.stringify(options.bom)}`
])
], options)
}
// Normalize option `cast`
let fnCastField = null
@ -10847,7 +10878,7 @@ class Parser extends Transform {
throw new CsvError('CSV_INVALID_OPTION_CAST', [
'Invalid option cast:', 'cast must be true or a function,',
`got ${JSON.stringify(options.cast)}`
])
], options)
}
// Normalize option `cast_date`
if(options.cast_date === undefined || options.cast_date === null || options.cast_date === false || options.cast_date === ''){
@ -10861,7 +10892,7 @@ class Parser extends Transform {
throw new CsvError('CSV_INVALID_OPTION_CAST_DATE', [
'Invalid option cast_date:', 'cast_date must be true or a function,',
`got ${JSON.stringify(options.cast_date)}`
])
], options)
}
// Normalize option `columns`
let fnFirstLineToHeaders = null
@ -10880,7 +10911,7 @@ class Parser extends Transform {
'Invalid option columns:',
'expect an object, a function or true,',
`got ${JSON.stringify(options.columns)}`
])
], options)
}
// Normalize option `columns_duplicates_to_array`
if(options.columns_duplicates_to_array === undefined || options.columns_duplicates_to_array === null || options.columns_duplicates_to_array === false){
@ -10890,21 +10921,21 @@ class Parser extends Transform {
'Invalid option columns_duplicates_to_array:',
'expect an boolean,',
`got ${JSON.stringify(options.columns_duplicates_to_array)}`
])
], options)
}
// Normalize option `comment`
if(options.comment === undefined || options.comment === null || options.comment === false || options.comment === ''){
options.comment = null
}else{
if(typeof options.comment === 'string'){
options.comment = Buffer.from(options.comment)
options.comment = Buffer.from(options.comment, options.encoding)
}
if(!Buffer.isBuffer(options.comment)){
throw new CsvError('CSV_INVALID_OPTION_COMMENT', [
'Invalid option comment:',
'comment must be a buffer or a string,',
`got ${JSON.stringify(options.comment)}`
])
], options)
}
}
// Normalize option `delimiter`
@ -10915,39 +10946,35 @@ class Parser extends Transform {
'Invalid option delimiter:',
'delimiter must be a non empty string or buffer or array of string|buffer,',
`got ${delimiter_json}`
])
], options)
}
options.delimiter = options.delimiter.map(function(delimiter){
if(delimiter === undefined || delimiter === null || delimiter === false){
return Buffer.from(',')
return Buffer.from(',', options.encoding)
}
if(typeof delimiter === 'string'){
delimiter = Buffer.from(delimiter)
delimiter = Buffer.from(delimiter, options.encoding)
}
if( !Buffer.isBuffer(delimiter) || delimiter.length === 0){
throw new CsvError('CSV_INVALID_OPTION_DELIMITER', [
'Invalid option delimiter:',
'delimiter must be a non empty string or buffer or array of string|buffer,',
`got ${delimiter_json}`
])
], options)
}
return delimiter
})
// Normalize option `escape`
if(options.escape === undefined || options.escape === true){
options.escape = Buffer.from('"')
options.escape = Buffer.from('"', options.encoding)
}else if(typeof options.escape === 'string'){
options.escape = Buffer.from(options.escape)
options.escape = Buffer.from(options.escape, options.encoding)
}else if (options.escape === null || options.escape === false){
options.escape = null
}
if(options.escape !== null){
if(!Buffer.isBuffer(options.escape)){
throw new Error(`Invalid Option: escape must be a buffer, a string or a boolean, got ${JSON.stringify(options.escape)}`)
}else if(options.escape.length !== 1){
throw new Error(`Invalid Option Length: escape must be one character, got ${options.escape.length}`)
}else{
options.escape = options.escape[0]
}
}
// Normalize option `from`
@ -11003,7 +11030,11 @@ class Parser extends Transform {
if(options.objname.length === 0){
throw new Error(`Invalid Option: objname must be a non empty buffer`)
}
options.objname = options.objname.toString()
if(options.encoding === null){
// Don't call `toString`, leave objname as a buffer
}else{
options.objname = options.objname.toString(options.encoding)
}
}else if(typeof options.objname === 'string'){
if(options.objname.length === 0){
throw new Error(`Invalid Option: objname must be a non empty string`)
@ -11020,23 +11051,19 @@ class Parser extends Transform {
'Invalid option `on_record`:',
'expect a function,',
`got ${JSON.stringify(options.on_record)}`
])
], options)
}
// Normalize option `quote`
if(options.quote === null || options.quote === false || options.quote === ''){
options.quote = null
}else{
if(options.quote === undefined || options.quote === true){
options.quote = Buffer.from('"')
options.quote = Buffer.from('"', options.encoding)
}else if(typeof options.quote === 'string'){
options.quote = Buffer.from(options.quote)
options.quote = Buffer.from(options.quote, options.encoding)
}
if(!Buffer.isBuffer(options.quote)){
throw new Error(`Invalid Option: quote must be a buffer or a string, got ${JSON.stringify(options.quote)}`)
}else if(options.quote.length !== 1){
throw new Error(`Invalid Option Length: quote must be one character, got ${options.quote.length}`)
}else{
options.quote = options.quote[0]
}
}
// Normalize option `raw`
@ -11053,7 +11080,7 @@ class Parser extends Transform {
}
options.record_delimiter = options.record_delimiter.map( function(rd){
if(typeof rd === 'string'){
rd = Buffer.from(rd)
rd = Buffer.from(rd, options.encoding)
}
return rd
})
@ -11182,13 +11209,24 @@ class Parser extends Transform {
bomSkipped: false,
castField: fnCastField,
commenting: false,
// Current error encountered by a record
error: undefined,
enabled: options.from_line === 1,
escaping: false,
escapeIsQuote: options.escape === options.quote,
// escapeIsQuote: options.escape === options.quote,
escapeIsQuote: Buffer.isBuffer(options.escape) && Buffer.isBuffer(options.quote) && Buffer.compare(options.escape, options.quote) === 0,
expectedRecordLength: options.columns === null ? 0 : options.columns.length,
field: new ResizeableBuffer(20),
firstLineToHeaders: fnFirstLineToHeaders,
info: Object.assign({}, this.info),
needMoreDataSize: Math.max(
// Skip if the remaining buffer smaller than comment
options.comment !== null ? options.comment.length : 0,
// Skip if the remaining buffer can be delimiter
...options.delimiter.map( (delimiter) => delimiter.length),
// Skip if the remaining buffer can be escape sequence
options.quote !== null ? options.quote.length : 0,
),
previousBuf: undefined,
quoting: false,
stop: false,
@ -11197,7 +11235,7 @@ class Parser extends Transform {
recordHasError: false,
record_length: 0,
recordDelimiterMaxLength: options.record_delimiter.length === 0 ? 2 : Math.max(...options.record_delimiter.map( (v) => v.length)),
trimChars: [Buffer.from(' ')[0], Buffer.from('\t')[0]],
trimChars: [Buffer.from(' ', options.encoding)[0], Buffer.from('\t', options.encoding)[0]],
wasQuoting: false,
wasRowDelimiter: false
}
@ -11251,11 +11289,15 @@ class Parser extends Transform {
this.state.previousBuf = buf
return
}
// skip BOM detect because data length < 3
}else{
if(bom_utf8.compare(buf, 0, 3) === 0){
// Skip BOM
buf = buf.slice(3)
for(let encoding in boms){
if(boms[encoding].compare(buf, 0, boms[encoding].length) === 0){
// Skip BOM
buf = buf.slice(boms[encoding].length)
// Renormalize original options with the new encoding
this.__normalizeOptions({...this.__originalOptions, encoding: encoding})
break
}
}
this.state.bomSkipped = true
}
@ -11301,35 +11343,37 @@ class Parser extends Transform {
}else{
// Escape is only active inside quoted fields
// We are quoting, the char is an escape chr and there is a chr to escape
if(escape !== null && this.state.quoting === true && chr === escape && pos + 1 < bufLen){
// if(escape !== null && this.state.quoting === true && chr === escape && pos + 1 < bufLen){
if(escape !== null && this.state.quoting === true && this.__isEscape(buf, pos, chr) && pos + escape.length < bufLen){
if(escapeIsQuote){
if(buf[pos+1] === quote){
if(this.__isQuote(buf, pos+escape.length)){
this.state.escaping = true
pos += escape.length - 1
continue
}
}else{
this.state.escaping = true
pos += escape.length - 1
continue
}
}
// Not currently escaping and chr is a quote
// TODO: need to compare bytes instead of single char
if(this.state.commenting === false && chr === quote){
if(this.state.commenting === false && this.__isQuote(buf, pos)){
if(this.state.quoting === true){
const nextChr = buf[pos+1]
const nextChr = buf[pos+quote.length]
const isNextChrTrimable = rtrim && this.__isCharTrimable(nextChr)
// const isNextChrComment = nextChr === comment
const isNextChrComment = comment !== null && this.__compareBytes(comment, buf, pos+1, nextChr)
const isNextChrDelimiter = this.__isDelimiter(nextChr, buf, pos+1)
const isNextChrRowDelimiter = record_delimiter.length === 0 ? this.__autoDiscoverRowDelimiter(buf, pos+1) : this.__isRecordDelimiter(nextChr, buf, pos+1)
const isNextChrComment = comment !== null && this.__compareBytes(comment, buf, pos+quote.length, nextChr)
const isNextChrDelimiter = this.__isDelimiter(buf, pos+quote.length, nextChr)
const isNextChrRowDelimiter = record_delimiter.length === 0 ? this.__autoDiscoverRowDelimiter(buf, pos+quote.length) : this.__isRecordDelimiter(nextChr, buf, pos+quote.length)
// Escape a quote
// Treat next char as a regular character
// TODO: need to compare bytes instead of single char
if(escape !== null && chr === escape && nextChr === quote){
pos++
if(escape !== null && this.__isEscape(buf, pos, chr) && this.__isQuote(buf, pos + escape.length)){
pos += escape.length - 1
}else if(!nextChr || isNextChrDelimiter || isNextChrRowDelimiter || isNextChrComment || isNextChrTrimable){
this.state.quoting = false
this.state.wasQuoting = true
pos += quote.length - 1
continue
}else if(relax === false){
const err = this.__error(
@ -11339,14 +11383,14 @@ class Parser extends Transform {
`at line ${this.info.lines}`,
'instead of delimiter, row delimiter, trimable character',
'(if activated) or comment',
], this.__context())
], this.options, this.__context())
)
if(err !== undefined) return err
}else{
this.state.quoting = false
this.state.wasQuoting = true
// continue
this.state.field.prepend(quote)
pos += quote.length - 1
}
}else{
if(this.state.field.length !== 0){
@ -11356,7 +11400,7 @@ class Parser extends Transform {
new CsvError('INVALID_OPENING_QUOTE', [
'Invalid Opening Quote:',
`a quote is found inside a field at line ${this.info.lines}`,
], this.__context(), {
], this.options, this.__context(), {
field: this.state.field,
})
)
@ -11364,6 +11408,7 @@ class Parser extends Transform {
}
}else{
this.state.quoting = true
pos += quote.length - 1
continue
}
}
@ -11414,7 +11459,7 @@ class Parser extends Transform {
this.state.commenting = true
continue
}
let delimiterLength = this.__isDelimiter(chr, buf, pos)
let delimiterLength = this.__isDelimiter(buf, pos, chr)
if(delimiterLength !== 0){
const errField = this.__onField()
if(errField !== undefined) return errField
@ -11431,7 +11476,7 @@ class Parser extends Transform {
'record exceed the maximum number of tolerated bytes',
`of ${max_record_size}`,
`at line ${this.info.lines}`,
], this.__context())
], this.options, this.__context())
)
if(err !== undefined) return err
}
@ -11448,7 +11493,7 @@ class Parser extends Transform {
'Invalid Closing Quote:',
'found non trimable byte after quote',
`at line ${this.info.lines}`,
], this.__context())
], this.options, this.__context())
)
if(err !== undefined) return err
}
@ -11460,7 +11505,7 @@ class Parser extends Transform {
new CsvError('CSV_QUOTE_NOT_CLOSED', [
'Quote Not Closed:',
`the parsing is finished with an opening quote at line ${this.info.lines}`,
], this.__context())
], this.options, this.__context())
)
if(err !== undefined) return err
}else{
@ -11489,7 +11534,7 @@ class Parser extends Transform {
return chr === space || chr === tab || chr === cr || chr === nl || chr === np
}
__onRow(){
const {columns, columns_duplicates_to_array, info, from, relax_column_count, relax_column_count_less, relax_column_count_more, raw, skip_lines_with_empty_values} = this.options
const {columns, columns_duplicates_to_array, encoding, info, from, relax_column_count, relax_column_count_less, relax_column_count_more, raw, skip_lines_with_empty_values} = this.options
const {enabled, record} = this.state
if(enabled === false){
return this.__resetRow()
@ -11507,35 +11552,38 @@ class Parser extends Transform {
this.state.expectedRecordLength = recordLength
}
if(recordLength !== this.state.expectedRecordLength){
const err = columns === false ?
this.__error(
// Todo: rename CSV_INCONSISTENT_RECORD_LENGTH to
// CSV_RECORD_INCONSISTENT_FIELDS_LENGTH
new CsvError('CSV_INCONSISTENT_RECORD_LENGTH', [
'Invalid Record Length:',
`expect ${this.state.expectedRecordLength},`,
`got ${recordLength} on line ${this.info.lines}`,
], this.options, this.__context(), {
record: record,
})
)
:
this.__error(
// Todo: rename CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH to
// CSV_RECORD_INCONSISTENT_COLUMNS
new CsvError('CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH', [
'Invalid Record Length:',
`columns length is ${columns.length},`, // rename columns
`got ${recordLength} on line ${this.info.lines}`,
], this.options, this.__context(), {
record: record,
})
)
if(relax_column_count === true ||
(relax_column_count_less === true && recordLength < this.state.expectedRecordLength) ||
(relax_column_count_more === true && recordLength > this.state.expectedRecordLength) ){
this.info.invalid_field_length++
}else{
if(columns === false){
const err = this.__error(
new CsvError('CSV_INCONSISTENT_RECORD_LENGTH', [
'Invalid Record Length:',
`expect ${this.state.expectedRecordLength},`,
`got ${recordLength} on line ${this.info.lines}`,
], this.__context(), {
record: record,
})
)
if(err !== undefined) return err
}else{
const err = this.__error(
// CSV_INVALID_RECORD_LENGTH_DONT_MATCH_COLUMNS
new CsvError('CSV_RECORD_DONT_MATCH_COLUMNS_LENGTH', [
'Invalid Record Length:',
`columns length is ${columns.length},`, // rename columns
`got ${recordLength} on line ${this.info.lines}`,
], this.__context(), {
record: record,
})
)
if(err !== undefined) return err
}
this.state.error = err
// Error is undefined with skip_lines_with_error
}else if(err !== undefined){
return err
}
}
if(skip_lines_with_empty_values === true){
@ -11556,7 +11604,6 @@ class Parser extends Transform {
// Transform record array to an object
for(let i = 0, l = record.length; i < l; i++){
if(columns[i] === undefined || columns[i].disabled) continue
// obj[columns[i].name] = record[i]
// Turn duplicate columns into an array
if (columns_duplicates_to_array === true && obj[columns[i].name]) {
if (Array.isArray(obj[columns[i].name])) {
@ -11573,7 +11620,7 @@ class Parser extends Transform {
if(raw === true || info === true){
const err = this.__push(Object.assign(
{record: obj},
(raw === true ? {raw: this.state.rawBuffer.toString()}: {}),
(raw === true ? {raw: this.state.rawBuffer.toString(encoding)}: {}),
(info === true ? {info: this.state.info}: {})
))
if(err){
@ -11589,7 +11636,7 @@ class Parser extends Transform {
if(raw === true || info === true){
const err = this.__push(Object.assign(
{record: [obj[objname], obj]},
raw === true ? {raw: this.state.rawBuffer.toString()}: {},
raw === true ? {raw: this.state.rawBuffer.toString(encoding)}: {},
info === true ? {info: this.state.info}: {}
))
if(err){
@ -11606,7 +11653,7 @@ class Parser extends Transform {
if(raw === true || info === true){
const err = this.__push(Object.assign(
{record: record},
raw === true ? {raw: this.state.rawBuffer.toString()}: {},
raw === true ? {raw: this.state.rawBuffer.toString(encoding)}: {},
info === true ? {info: this.state.info}: {}
))
if(err){
@ -11632,7 +11679,7 @@ class Parser extends Transform {
'Invalid Column Mapping:',
'expect an array from column function,',
`got ${JSON.stringify(headers)}`
], this.__context(), {
], this.options, this.__context(), {
headers: headers,
})
)
@ -11650,17 +11697,18 @@ class Parser extends Transform {
if(this.options.raw === true){
this.state.rawBuffer.reset()
}
this.state.error = undefined
this.state.record = []
this.state.record_length = 0
}
__onField(){
const {cast, rtrim, max_record_size} = this.options
const {cast, encoding, rtrim, max_record_size} = this.options
const {enabled, wasQuoting} = this.state
// Short circuit for the from_line options
if(enabled === false){ /* this.options.columns !== true && */
return this.__resetField()
}
let field = this.state.field.toString()
let field = this.state.field.toString(encoding)
if(rtrim === true && wasQuoting === false){
field = field.trimRight()
}
@ -11727,38 +11775,30 @@ class Parser extends Transform {
__isFloat(value){
return (value - parseFloat( value ) + 1) >= 0 // Borrowed from jquery
}
__compareBytes(sourceBuf, targetBuf, pos, firtByte){
if(sourceBuf[0] !== firtByte) return 0
__compareBytes(sourceBuf, targetBuf, targetPos, firstByte){
if(sourceBuf[0] !== firstByte) return 0
const sourceLength = sourceBuf.length
for(let i = 1; i < sourceLength; i++){
if(sourceBuf[i] !== targetBuf[pos+i]) return 0
if(sourceBuf[i] !== targetBuf[targetPos+i]) return 0
}
return sourceLength
}
__needMoreData(i, bufLen, end){
if(end){
return false
}
const {comment, delimiter} = this.options
const {quoting, recordDelimiterMaxLength} = this.state
if(end) return false
const {quote} = this.options
const {quoting, needMoreDataSize, recordDelimiterMaxLength} = this.state
const numOfCharLeft = bufLen - i - 1
const requiredLength = Math.max(
// Skip if the remaining buffer smaller than comment
comment ? comment.length : 0,
// Skip if the remaining buffer smaller than row delimiter
needMoreDataSize,
// Skip if the remaining buffer smaller than record delimiter
recordDelimiterMaxLength,
// Skip if the remaining buffer can be row delimiter following the closing quote
// 1 is for quote.length
quoting ? (1 + recordDelimiterMaxLength) : 0,
// Skip if the remaining buffer can be delimiter
delimiter.length,
// Skip if the remaining buffer can be escape sequence
// 1 is for escape.length
1
quoting ? (quote.length + recordDelimiterMaxLength) : 0,
)
return numOfCharLeft < requiredLength
}
__isDelimiter(chr, buf, pos){
__isDelimiter(buf, pos, chr){
const {delimiter} = this.options
loop1: for(let i = 0; i < delimiter.length; i++){
const del = delimiter[i]
@ -11789,20 +11829,46 @@ class Parser extends Transform {
}
return 0
}
__isEscape(buf, pos, chr){
const {escape} = this.options
if(escape === null) return false
const l = escape.length
if(escape[0] === chr){
for(let i = 0; i < l; i++){
if(escape[i] !== buf[pos+i]){
return false
}
}
return true
}
return false
}
__isQuote(buf, pos){
const {quote} = this.options
if(quote === null) return false
const l = quote.length
for(let i = 0; i < l; i++){
if(quote[i] !== buf[pos+i]){
return false
}
}
return true
}
__autoDiscoverRowDelimiter(buf, pos){
const {encoding} = this.options
const chr = buf[pos]
if(chr === cr){
if(buf[pos+1] === nl){
this.options.record_delimiter.push(Buffer.from('\r\n'))
this.options.record_delimiter.push(Buffer.from('\r\n', encoding))
this.state.recordDelimiterMaxLength = 2
return 2
}else{
this.options.record_delimiter.push(Buffer.from('\r'))
this.options.record_delimiter.push(Buffer.from('\r', encoding))
this.state.recordDelimiterMaxLength = 1
return 1
}
}else if(chr === nl){
this.options.record_delimiter.push(Buffer.from('\n'))
this.options.record_delimiter.push(Buffer.from('\n', encoding))
this.state.recordDelimiterMaxLength = 1
return 1
}
@ -11830,6 +11896,7 @@ class Parser extends Transform {
) :
this.state.record.length,
empty_lines: this.info.empty_lines,
error: this.state.error,
header: columns === true,
index: this.state.record.length,
invalid_field_length: this.info.invalid_field_length,
@ -11855,7 +11922,7 @@ const parse = function(){
throw new CsvError('CSV_INVALID_ARGUMENT', [
'Invalid argument:',
`got ${JSON.stringify(argument)} at index ${i}`
])
], this.options)
}
}
const parser = new Parser(options)
@ -11894,7 +11961,7 @@ const parse = function(){
}
class CsvError extends Error {
constructor(code, message, ...contexts) {
constructor(code, message, options, ...contexts) {
if(Array.isArray(message)) message = message.join(' ')
super(message)
if(Error.captureStackTrace !== undefined){
@ -11904,7 +11971,7 @@ class CsvError extends Error {
for(const context of contexts){
for(const key in context){
const value = context[key]
this[key] = Buffer.isBuffer(value) ? value.toString() : value == null ? value : JSON.parse(JSON.stringify(value))
this[key] = Buffer.isBuffer(value) ? value.toString(options.encoding) : value == null ? value : JSON.parse(JSON.stringify(value))
}
}
}
@ -12032,8 +12099,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.asyncForEach = exports.getInputList = exports.getArgs = exports.getInputs = exports.tmpNameSync = exports.tmpDir = exports.defaultContext = void 0;
const sync_1 = __importDefault(__webpack_require__(750));
const fs = __importStar(__webpack_require__(747));
const os = __importStar(__webpack_require__(87));
const path = __importStar(__webpack_require__(622));
@ -12151,7 +12222,12 @@ function getBuildArgs(inputs, defaultContext, buildxVersion) {
args.push('--cache-to', cacheTo);
}));
yield exports.asyncForEach(inputs.secrets, (secret) => __awaiter(this, void 0, void 0, function* () {
args.push('--secret', yield buildx.getSecret(secret));
try {
args.push('--secret', yield buildx.getSecret(secret));
}
catch (err) {
core.warning(err.message);
}
}));
if (inputs.githubToken && !buildx.hasGitAuthToken(inputs.secrets) && inputs.context == defaultContext) {
args.push('--secret', yield buildx.getSecret(`GIT_AUTH_TOKEN=${inputs.githubToken}`));
@ -12188,14 +12264,27 @@ function getCommonArgs(inputs) {
}
function getInputList(name, ignoreComma) {
return __awaiter(this, void 0, void 0, function* () {
let res = [];
const items = core.getInput(name);
if (items == '') {
return [];
return res;
}
return items
.split(/\r?\n/)
.filter(x => x)
.reduce((acc, line) => acc.concat(!ignoreComma ? line.split(',').filter(x => x) : line).map(pat => pat.trim()), []);
for (let output of (yield sync_1.default(items, {
columns: false,
relaxColumnCount: true,
skipLinesWithEmptyValues: true
}))) {
if (output.length == 1) {
res.push(output[0]);
continue;
}
else if (!ignoreComma) {
res.push(...output);
continue;
}
res.push(output.join(','));
}
return res.filter(item => item);
});
}
exports.getInputList = getInputList;
@ -13210,13 +13299,28 @@ class ResizeableBuffer{
this.buf = Buffer.alloc(size)
}
prepend(val){
const length = this.length++
if(length === this.size){
this.resize()
if(Buffer.isBuffer(val)){
const length = this.length + val.length
if(length >= this.size){
this.resize()
if(length >= this.size){
throw Error('INVALID_BUFFER_STATE')
}
}
const buf = this.buf
this.buf = Buffer.alloc(this.size)
val.copy(this.buf, 0)
buf.copy(this.buf, val.length)
this.length += val.length
}else{
const length = this.length++
if(length === this.size){
this.resize()
}
const buf = this.clone()
this.buf[0] = val
buf.copy(this.buf,1, 0, length)
}
const buf = this.clone()
this.buf[0] = val
buf.copy(this.buf,1, 0, length)
}
append(val){
const length = this.length++
@ -13235,11 +13339,15 @@ class ResizeableBuffer{
this.buf.copy(buf,0, 0, length)
this.buf = buf
}
toString(){
return this.buf.slice(0, this.length).toString()
toString(encoding){
if(encoding){
return this.buf.slice(0, this.length).toString(encoding)
}else{
return Uint8Array.prototype.slice.call(this.buf.slice(0, this.length))
}
}
toJSON(){
return this.toString()
return this.toString('utf8')
}
reset(){
this.length = 0

View file

@ -31,7 +31,7 @@
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.4",
"@actions/github": "^4.0.0",
"csv-parse": "^4.12.0",
"csv-parse": "^4.14.1",
"semver": "^7.3.2",
"tmp": "^0.2.1"
},

View file

@ -1,7 +1,8 @@
import csvparse from 'csv-parse/lib/sync';
import fs from 'fs';
import path from 'path';
import csvparse from 'csv-parse/lib/sync';
import * as semver from 'semver';
import * as context from './context';
import * as exec from './exec';
@ -21,6 +22,9 @@ export async function getSecret(kvp: string): Promise<string> {
const delimiterIndex = kvp.indexOf('=');
const key = kvp.substring(0, delimiterIndex);
const value = kvp.substring(delimiterIndex + 1);
if (key.length == 0 || value.length == 0) {
throw new Error(`${kvp} is not a valid secret`);
}
const secretFile = context.tmpNameSync({
tmpdir: context.tmpDir()
});
@ -33,7 +37,7 @@ export function isLocalOrTarExporter(outputs: string[]): Boolean {
delimiter: ',',
trim: true,
columns: false,
relax_column_count: true
relaxColumnCount: true
})) {
// Local if no type is defined
// https://github.com/docker/buildx/blob/d2bf42f8b4784d83fde17acb3ed84703ddc2156b/build/output.go#L29-L43
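
For reference, here is how the full `getSecret` helper reads with the validation added above, as a self-contained sketch rather than the exact implementation: the action writes the value to a temp file via its `context.tmpNameSync()`/`context.tmpDir()` helpers, which are replaced here with a plain `os.tmpdir()` path for illustration.

```ts
import * as crypto from 'crypto';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

// Sketch of getSecret: split the entry on the first '=', reject entries with
// an empty key or value, write the value to a temp file and return the value
// expected by buildx's --secret flag.
export async function getSecret(kvp: string): Promise<string> {
  const delimiterIndex = kvp.indexOf('=');
  const key = kvp.substring(0, delimiterIndex);
  const value = kvp.substring(delimiterIndex + 1);
  if (key.length == 0 || value.length == 0) {
    throw new Error(`${kvp} is not a valid secret`);
  }
  // Stand-in for context.tmpNameSync({tmpdir: context.tmpDir()})
  const secretFile = path.join(os.tmpdir(), `docker-build-push-${crypto.randomBytes(6).toString('hex')}`);
  await fs.promises.writeFile(secretFile, value, 'utf-8');
  return `id=${key},src=${secretFile}`;
}

// getSecret('aaaaaaaa') now rejects with "aaaaaaaa is not a valid secret";
// getBuildArgs() catches this and logs a warning instead of failing the build.
```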

View file

@ -1,3 +1,4 @@
import csvparse from 'csv-parse/lib/sync';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
@ -143,7 +144,11 @@ async function getBuildArgs(inputs: Inputs, defaultContext: string, buildxVersio
args.push('--cache-to', cacheTo);
});
await asyncForEach(inputs.secrets, async secret => {
args.push('--secret', await buildx.getSecret(secret));
try {
args.push('--secret', await buildx.getSecret(secret));
} catch (err) {
core.warning(err.message);
}
});
if (inputs.githubToken && !buildx.hasGitAuthToken(inputs.secrets) && inputs.context == defaultContext) {
args.push('--secret', await buildx.getSecret(`GIT_AUTH_TOKEN=${inputs.githubToken}`));
@ -178,17 +183,29 @@ async function getCommonArgs(inputs: Inputs): Promise<Array<string>> {
}
export async function getInputList(name: string, ignoreComma?: boolean): Promise<string[]> {
let res: Array<string> = [];
const items = core.getInput(name);
if (items == '') {
return [];
return res;
}
return items
.split(/\r?\n/)
.filter(x => x)
.reduce<string[]>(
(acc, line) => acc.concat(!ignoreComma ? line.split(',').filter(x => x) : line).map(pat => pat.trim()),
[]
);
for (let output of (await csvparse(items, {
columns: false,
relaxColumnCount: true,
skipLinesWithEmptyValues: true
})) as Array<string[]>) {
if (output.length == 1) {
res.push(output[0]);
continue;
} else if (!ignoreComma) {
res.push(...output);
continue;
}
res.push(output.join(','));
}
return res.filter(item => item);
}
export const asyncForEach = async (array, callback) => {
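
A quick, hypothetical illustration of the reworked `getInputList`: the `secrets` value below (matching the new tests) is read through `@actions/core`, which takes inputs from `INPUT_*` environment variables, and each quoted block comes back as a single entry instead of one entry per line.

```ts
import * as context from './src/context';

// Hypothetical driver for illustration only; the import path is an assumption.
process.env['INPUT_SECRETS'] = [
  'GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789',
  '"MYSECRET=aaaaaaaa',
  'bbbbbbb',
  'ccccccccc"',
  'FOO=bar'
].join('\n');

context.getInputList('secrets', true).then(res => {
  console.log(res);
  // [ 'GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789',
  //   'MYSECRET=aaaaaaaa\nbbbbbbb\nccccccccc',
  //   'FOO=bar' ]
});
```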

View file

@ -1236,10 +1236,10 @@ cssstyle@^2.2.0:
dependencies:
cssom "~0.3.6"
csv-parse@*, csv-parse@^4.12.0:
version "4.12.0"
resolved "https://registry.yarnpkg.com/csv-parse/-/csv-parse-4.12.0.tgz#fd42d6291bbaadd51d3009f6cadbb3e53b4ce026"
integrity sha512-wPQl3H79vWLPI8cgKFcQXl0NBgYYEqVnT1i6/So7OjMpsI540oD7p93r3w6fDSyPvwkTepG05F69/7AViX2lXg==
csv-parse@*, csv-parse@^4.14.1:
version "4.14.1"
resolved "https://registry.yarnpkg.com/csv-parse/-/csv-parse-4.14.1.tgz#b6b3736508fb94682fa6d450fe1755237221d291"
integrity sha512-4wmcO7QbWtDAncGFaBwlWFPhEN4Akr64IbM4zvDwEOFekI8blLc04Nw7XjQjtSNy+3AUAgBgtUa9nWo5Cq89Xg==
dashdash@^1.12.0:
version "1.14.1"