mirror of
https://gitea.com/gitea/act_runner.git
synced 2026-05-08 16:23:23 +02:00
Compare commits
21 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3ea7d39690 | ||
|
|
861d351845 | ||
|
|
cce8543d06 | ||
|
|
75643645f0 | ||
|
|
dff63b3ecc | ||
|
|
a5d9fe9651 | ||
|
|
d607f3b342 | ||
|
|
5e59402fb2 | ||
|
|
dfeb463904 | ||
|
|
594c9ade7c | ||
|
|
2a4d56c650 | ||
|
|
a22119cf88 | ||
|
|
b68ecf2580 | ||
|
|
d1434237c2 | ||
|
|
35c65e2b14 | ||
|
|
c45a4e6d32 | ||
|
|
68d9fc45c9 | ||
|
|
b1c873a66b | ||
|
|
1d6e7879c8 | ||
|
|
13dc9386fe | ||
|
|
8e6b3be96a |
@@ -40,7 +40,7 @@ cpu.out
|
|||||||
*.db
|
*.db
|
||||||
*.log
|
*.log
|
||||||
|
|
||||||
/act_runner
|
/gitea-runner
|
||||||
/debug
|
/debug
|
||||||
|
|
||||||
/bin
|
/bin
|
||||||
|
|||||||
@@ -69,7 +69,7 @@ jobs:
|
|||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||||
|
|
||||||
- name: Echo the tag
|
- name: Echo the tag
|
||||||
run: echo "${{ env.DOCKER_ORG }}/act_runner:nightly${{ matrix.variant.tag_suffix }}"
|
run: echo "${{ env.DOCKER_ORG }}/runner:nightly${{ matrix.variant.tag_suffix }}"
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
uses: docker/build-push-action@v6
|
uses: docker/build-push-action@v6
|
||||||
@@ -82,4 +82,4 @@ jobs:
|
|||||||
linux/arm64
|
linux/arm64
|
||||||
push: true
|
push: true
|
||||||
tags: |
|
tags: |
|
||||||
${{ env.DOCKER_ORG }}/act_runner:nightly${{ matrix.variant.tag_suffix }}
|
${{ env.DOCKER_ORG }}/runner:nightly${{ matrix.variant.tag_suffix }}
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ jobs:
|
|||||||
go-version-file: "go.mod"
|
go-version-file: "go.mod"
|
||||||
- name: Import GPG key
|
- name: Import GPG key
|
||||||
id: import_gpg
|
id: import_gpg
|
||||||
uses: crazy-max/ghaction-import-gpg@v6
|
uses: crazy-max/ghaction-import-gpg@v7
|
||||||
with:
|
with:
|
||||||
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||||
passphrase: ${{ secrets.PASSPHRASE }}
|
passphrase: ${{ secrets.PASSPHRASE }}
|
||||||
@@ -71,17 +71,12 @@ jobs:
|
|||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
username: ${{ secrets.DOCKER_USERNAME }}
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||||
|
|
||||||
- name: Repo Meta
|
|
||||||
id: repo_meta
|
|
||||||
run: |
|
|
||||||
echo REPO_NAME=$(echo ${GITHUB_REPOSITORY} | awk -F"/" '{print $2}') >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: "Docker meta"
|
- name: "Docker meta"
|
||||||
id: docker_meta
|
id: docker_meta
|
||||||
uses: https://github.com/docker/metadata-action@v5
|
uses: https://github.com/docker/metadata-action@v5
|
||||||
with:
|
with:
|
||||||
images: |
|
images: |
|
||||||
${{ env.DOCKER_ORG }}/${{ steps.repo_meta.outputs.REPO_NAME }}
|
${{ env.DOCKER_ORG }}/runner
|
||||||
tags: |
|
tags: |
|
||||||
type=semver,pattern={{major}}.{{minor}}.{{patch}}
|
type=semver,pattern={{major}}.{{minor}}.{{patch}}
|
||||||
type=semver,pattern={{major}}.{{minor}}
|
type=semver,pattern={{major}}.{{minor}}
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
name: checks
|
name: checks
|
||||||
on:
|
on:
|
||||||
- push
|
push:
|
||||||
- pull_request
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
lint:
|
lint:
|
||||||
@@ -17,4 +19,4 @@ jobs:
|
|||||||
- name: build
|
- name: build
|
||||||
run: make build
|
run: make build
|
||||||
- name: test
|
- name: test
|
||||||
run: make test
|
run: make test
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,4 +1,4 @@
|
|||||||
/act_runner
|
/gitea-runner
|
||||||
.env
|
.env
|
||||||
.runner
|
.runner
|
||||||
coverage.txt
|
coverage.txt
|
||||||
|
|||||||
@@ -114,7 +114,7 @@ formatters:
|
|||||||
custom-order: true
|
custom-order: true
|
||||||
sections:
|
sections:
|
||||||
- standard
|
- standard
|
||||||
- prefix(gitea.com/gitea/act_runner)
|
- prefix(gitea.com/gitea/runner)
|
||||||
- blank
|
- blank
|
||||||
- default
|
- default
|
||||||
gofumpt:
|
gofumpt:
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
version: 2
|
version: 2
|
||||||
|
|
||||||
|
project_name: gitea-runner
|
||||||
|
|
||||||
before:
|
before:
|
||||||
hooks:
|
hooks:
|
||||||
- go mod tidy
|
- go mod tidy
|
||||||
@@ -63,7 +65,7 @@ builds:
|
|||||||
flags:
|
flags:
|
||||||
- -trimpath
|
- -trimpath
|
||||||
ldflags:
|
ldflags:
|
||||||
- -s -w -X gitea.com/gitea/act_runner/internal/pkg/ver.version={{ .Summary }}
|
- -s -w -X gitea.com/gitea/runner/internal/pkg/ver.version={{ .Summary }}
|
||||||
binary: >-
|
binary: >-
|
||||||
{{ .ProjectName }}-
|
{{ .ProjectName }}-
|
||||||
{{- .Version }}-
|
{{- .Version }}-
|
||||||
@@ -86,7 +88,7 @@ blobs:
|
|||||||
provider: s3
|
provider: s3
|
||||||
bucket: "{{ .Env.S3_BUCKET }}"
|
bucket: "{{ .Env.S3_BUCKET }}"
|
||||||
region: "{{ .Env.S3_REGION }}"
|
region: "{{ .Env.S3_REGION }}"
|
||||||
directory: "act_runner/{{.Version}}"
|
directory: "gitea-runner/{{.Version}}"
|
||||||
extra_files:
|
extra_files:
|
||||||
- glob: ./**.xz
|
- glob: ./**.xz
|
||||||
- glob: ./**.sha256
|
- glob: ./**.sha256
|
||||||
|
|||||||
14
Dockerfile
14
Dockerfile
@@ -9,19 +9,19 @@ RUN apk add --no-cache make git
|
|||||||
ARG GOPROXY
|
ARG GOPROXY
|
||||||
ENV GOPROXY=${GOPROXY:-}
|
ENV GOPROXY=${GOPROXY:-}
|
||||||
|
|
||||||
COPY . /opt/src/act_runner
|
COPY . /opt/src/runner
|
||||||
WORKDIR /opt/src/act_runner
|
WORKDIR /opt/src/runner
|
||||||
|
|
||||||
RUN make clean && make build
|
RUN make clean && make build
|
||||||
|
|
||||||
### DIND VARIANT
|
### DIND VARIANT
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
FROM docker:28-dind AS dind
|
FROM docker:29-dind AS dind
|
||||||
|
|
||||||
RUN apk add --no-cache s6 bash git tzdata
|
RUN apk add --no-cache s6 bash git tzdata
|
||||||
|
|
||||||
COPY --from=builder /opt/src/act_runner/act_runner /usr/local/bin/act_runner
|
COPY --from=builder /opt/src/runner/gitea-runner /usr/local/bin/gitea-runner
|
||||||
COPY scripts/run.sh /usr/local/bin/run.sh
|
COPY scripts/run.sh /usr/local/bin/run.sh
|
||||||
COPY scripts/s6 /etc/s6
|
COPY scripts/s6 /etc/s6
|
||||||
|
|
||||||
@@ -32,12 +32,12 @@ ENTRYPOINT ["s6-svscan","/etc/s6"]
|
|||||||
### DIND-ROOTLESS VARIANT
|
### DIND-ROOTLESS VARIANT
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
FROM docker:28-dind-rootless AS dind-rootless
|
FROM docker:29-dind-rootless AS dind-rootless
|
||||||
|
|
||||||
USER root
|
USER root
|
||||||
RUN apk add --no-cache s6 bash git tzdata
|
RUN apk add --no-cache s6 bash git tzdata
|
||||||
|
|
||||||
COPY --from=builder /opt/src/act_runner/act_runner /usr/local/bin/act_runner
|
COPY --from=builder /opt/src/runner/gitea-runner /usr/local/bin/gitea-runner
|
||||||
COPY scripts/run.sh /usr/local/bin/run.sh
|
COPY scripts/run.sh /usr/local/bin/run.sh
|
||||||
COPY scripts/s6 /etc/s6
|
COPY scripts/s6 /etc/s6
|
||||||
|
|
||||||
@@ -56,7 +56,7 @@ ENTRYPOINT ["s6-svscan","/etc/s6"]
|
|||||||
FROM alpine AS basic
|
FROM alpine AS basic
|
||||||
RUN apk add --no-cache tini bash git tzdata
|
RUN apk add --no-cache tini bash git tzdata
|
||||||
|
|
||||||
COPY --from=builder /opt/src/act_runner/act_runner /usr/local/bin/act_runner
|
COPY --from=builder /opt/src/runner/gitea-runner /usr/local/bin/gitea-runner
|
||||||
COPY scripts/run.sh /usr/local/bin/run.sh
|
COPY scripts/run.sh /usr/local/bin/run.sh
|
||||||
|
|
||||||
VOLUME /data
|
VOLUME /data
|
||||||
|
|||||||
12
Makefile
12
Makefile
@@ -1,5 +1,5 @@
|
|||||||
DIST := dist
|
DIST := dist
|
||||||
EXECUTABLE := act_runner
|
EXECUTABLE := gitea-runner
|
||||||
DIST_DIRS := $(DIST)/binaries $(DIST)/release
|
DIST_DIRS := $(DIST)/binaries $(DIST)/release
|
||||||
GO ?= go
|
GO ?= go
|
||||||
SHASUM ?= shasum -a 256
|
SHASUM ?= shasum -a 256
|
||||||
@@ -13,7 +13,7 @@ DARWIN_ARCHS ?= darwin-12/amd64,darwin-12/arm64
|
|||||||
WINDOWS_ARCHS ?= windows/amd64
|
WINDOWS_ARCHS ?= windows/amd64
|
||||||
GOFILES := $(shell find . -type f -name "*.go" -o -name "go.mod" ! -name "generated.*")
|
GOFILES := $(shell find . -type f -name "*.go" -o -name "go.mod" ! -name "generated.*")
|
||||||
|
|
||||||
DOCKER_IMAGE ?= gitea/act_runner
|
DOCKER_IMAGE ?= gitea/runner
|
||||||
DOCKER_TAG ?= nightly
|
DOCKER_TAG ?= nightly
|
||||||
DOCKER_REF := $(DOCKER_IMAGE):$(DOCKER_TAG)
|
DOCKER_REF := $(DOCKER_IMAGE):$(DOCKER_TAG)
|
||||||
DOCKER_ROOTLESS_REF := $(DOCKER_IMAGE):$(DOCKER_TAG)-dind-rootless
|
DOCKER_ROOTLESS_REF := $(DOCKER_IMAGE):$(DOCKER_TAG)-dind-rootless
|
||||||
@@ -67,7 +67,7 @@ else
|
|||||||
endif
|
endif
|
||||||
|
|
||||||
TAGS ?=
|
TAGS ?=
|
||||||
LDFLAGS ?= -X "gitea.com/gitea/act_runner/internal/pkg/ver.version=v$(RELASE_VERSION)"
|
LDFLAGS ?= -X "gitea.com/gitea/runner/internal/pkg/ver.version=v$(RELASE_VERSION)"
|
||||||
|
|
||||||
.PHONY: all
|
.PHONY: all
|
||||||
all: build
|
all: build
|
||||||
@@ -86,7 +86,7 @@ go-check:
|
|||||||
$(eval MIN_GO_VERSION := $(shell printf "%03d%03d" $(shell echo '$(MIN_GO_VERSION_STR)' | tr '.' ' ')))
|
$(eval MIN_GO_VERSION := $(shell printf "%03d%03d" $(shell echo '$(MIN_GO_VERSION_STR)' | tr '.' ' ')))
|
||||||
$(eval GO_VERSION := $(shell printf "%03d%03d" $(shell $(GO) version | grep -Eo '[0-9]+\.[0-9]+' | tr '.' ' ');))
|
$(eval GO_VERSION := $(shell printf "%03d%03d" $(shell $(GO) version | grep -Eo '[0-9]+\.[0-9]+' | tr '.' ' ');))
|
||||||
@if [ "$(GO_VERSION)" -lt "$(MIN_GO_VERSION)" ]; then \
|
@if [ "$(GO_VERSION)" -lt "$(MIN_GO_VERSION)" ]; then \
|
||||||
echo "Act Runner requires Go $(MIN_GO_VERSION_STR) or greater to build. You can get it at https://go.dev/dl/"; \
|
echo "Gitea Runner requires Go $(MIN_GO_VERSION_STR) or greater to build. You can get it at https://go.dev/dl/"; \
|
||||||
exit 1; \
|
exit 1; \
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -140,11 +140,11 @@ test: fmt-check security-check ## test everything
|
|||||||
@$(GO) test -race -short -v -cover -coverprofile coverage.txt ./... && echo "\n==>\033[32m Ok\033[m\n" || exit 1
|
@$(GO) test -race -short -v -cover -coverprofile coverage.txt ./... && echo "\n==>\033[32m Ok\033[m\n" || exit 1
|
||||||
|
|
||||||
.PHONY: install
|
.PHONY: install
|
||||||
install: $(GOFILES) ## install the act_runner binary via `go install`
|
install: $(GOFILES) ## install the runner binary via `go install`
|
||||||
$(GO) install -v -tags '$(TAGS)' -ldflags '-s -w $(EXTLDFLAGS) $(LDFLAGS)'
|
$(GO) install -v -tags '$(TAGS)' -ldflags '-s -w $(EXTLDFLAGS) $(LDFLAGS)'
|
||||||
|
|
||||||
.PHONY: build
|
.PHONY: build
|
||||||
build: go-check $(EXECUTABLE) ## build the act_runner binary
|
build: go-check $(EXECUTABLE) ## build the runner binary
|
||||||
|
|
||||||
$(EXECUTABLE): $(GOFILES)
|
$(EXECUTABLE): $(GOFILES)
|
||||||
$(GO) build -v -tags '$(TAGS)' -ldflags '-s -w $(EXTLDFLAGS) $(LDFLAGS)' -o $@
|
$(GO) build -v -tags '$(TAGS)' -ldflags '-s -w $(EXTLDFLAGS) $(LDFLAGS)' -o $@
|
||||||
|
|||||||
69
README.md
69
README.md
@@ -1,6 +1,4 @@
|
|||||||
# act runner
|
# Gitea Runner
|
||||||
|
|
||||||
Act runner is a runner for Gitea.
|
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
@@ -10,7 +8,7 @@ Docker Engine Community version is required for docker mode. To install Docker C
|
|||||||
|
|
||||||
### Download pre-built binary
|
### Download pre-built binary
|
||||||
|
|
||||||
Visit [here](https://dl.gitea.com/act_runner/) and download the right version for your platform.
|
Visit [here](https://dl.gitea.com/gitea-runner/) and download the right version for your platform.
|
||||||
|
|
||||||
### Build from source
|
### Build from source
|
||||||
|
|
||||||
@@ -26,8 +24,8 @@ make docker
|
|||||||
|
|
||||||
## Quickstart
|
## Quickstart
|
||||||
|
|
||||||
Actions are disabled by default, so you need to add the following to the configuration file of your Gitea instance to enable it:
|
Actions are disabled by default, so you need to add the following to the configuration file of your Gitea instance to enable it:
|
||||||
|
|
||||||
```ini
|
```ini
|
||||||
[actions]
|
[actions]
|
||||||
ENABLED=true
|
ENABLED=true
|
||||||
@@ -36,7 +34,7 @@ ENABLED=true
|
|||||||
### Register
|
### Register
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
./act_runner register
|
./gitea-runner register
|
||||||
```
|
```
|
||||||
|
|
||||||
And you will be asked to input:
|
And you will be asked to input:
|
||||||
@@ -68,7 +66,7 @@ INFO Runner registered successfully.
|
|||||||
You can also register with command line arguments.
|
You can also register with command line arguments.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
./act_runner register --instance http://192.168.8.8:3000 --token <my_runner_token> --no-interactive
|
./gitea-runner register --instance http://192.168.8.8:3000 --token <my_runner_token> --no-interactive
|
||||||
```
|
```
|
||||||
|
|
||||||
If the registry succeed, it will run immediately. Next time, you could run the runner directly.
|
If the registry succeed, it will run immediately. Next time, you could run the runner directly.
|
||||||
@@ -76,32 +74,69 @@ If the registry succeed, it will run immediately. Next time, you could run the r
|
|||||||
### Run
|
### Run
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
./act_runner daemon
|
./gitea-runner daemon
|
||||||
```
|
```
|
||||||
|
|
||||||
### Run with docker
|
### Run with docker
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker run -e GITEA_INSTANCE_URL=https://your_gitea.com -e GITEA_RUNNER_REGISTRATION_TOKEN=<your_token> -v /var/run/docker.sock:/var/run/docker.sock --name my_runner gitea/act_runner:nightly
|
docker run -e GITEA_INSTANCE_URL=https://your_gitea.com -e GITEA_RUNNER_REGISTRATION_TOKEN=<your_token> -v /var/run/docker.sock:/var/run/docker.sock --name my_runner gitea/runner:nightly
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Mount a volume on `/data` if you want the registration file and optional config to survive container recreation (see [scripts/run.sh](scripts/run.sh)).
|
||||||
|
|
||||||
### Configuration
|
### Configuration
|
||||||
|
|
||||||
You can also configure the runner with a configuration file.
|
The runner is configured with a YAML file. Generate a starting point (this matches what ships in the tree):
|
||||||
The configuration file is a YAML file, you can generate a sample configuration file with `./act_runner generate-config`.
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
./act_runner generate-config > config.yaml
|
./gitea-runner generate-config > config.yaml
|
||||||
```
|
```
|
||||||
|
|
||||||
You can specify the configuration file path with `-c`/`--config` argument.
|
Pass it with `-c` / `--config` on any command that loads configuration (`register`, `daemon`, `cache-server`):
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
./act_runner -c config.yaml register # register with config file
|
./gitea-runner -c config.yaml register
|
||||||
./act_runner -c config.yaml daemon # run with config file
|
./gitea-runner -c config.yaml daemon
|
||||||
|
./gitea-runner -c config.yaml cache-server
|
||||||
```
|
```
|
||||||
|
|
||||||
You can read the latest version of the configuration file online at [config.example.yaml](internal/pkg/config/config.example.yaml).
|
Every option is described in [config.example.yaml](internal/pkg/config/config.example.yaml) (the same content `generate-config` prints).
|
||||||
|
|
||||||
|
#### Without a config file
|
||||||
|
|
||||||
|
If you omit `-c`, built-in defaults apply (same as an empty YAML document). A small set of **deprecated** environment variables can still override parts of that default config, but **only when no `-c` path was given**; they are ignored if you use a config file:
|
||||||
|
|
||||||
|
| Variable | Effect |
|
||||||
|
| --- | --- |
|
||||||
|
| `GITEA_DEBUG` | If true, sets log level to `debug` |
|
||||||
|
| `GITEA_TRACE` | If true, sets log level to `trace` |
|
||||||
|
| `GITEA_RUNNER_CAPACITY` | Concurrent jobs (integer) |
|
||||||
|
| `GITEA_RUNNER_FILE` | Registration state file path (default `.runner`) |
|
||||||
|
| `GITEA_RUNNER_ENVIRON` | Extra job env vars as comma-separated `KEY:VALUE` pairs |
|
||||||
|
| `GITEA_RUNNER_ENV_FILE` | Path to an env file merged into job env (same idea as `runner.env_file` in YAML) |
|
||||||
|
|
||||||
|
Prefer a YAML file for all settings.
|
||||||
|
|
||||||
|
#### Registration vs config labels
|
||||||
|
|
||||||
|
If `runner.labels` is set in the YAML file, those labels are used during `register` and the `--labels` CLI flag is ignored.
|
||||||
|
|
||||||
|
#### External cache (`actions/cache`)
|
||||||
|
|
||||||
|
If `cache.external_server` is set, you must set `cache.external_secret` to the same value on this runner and on the standalone cache server. Run the server with `gitea-runner cache-server` using a config that defines `cache.external_secret` (and matching `cache.dir` / host / port as needed). Flags `--dir`, `--host`, and `--port` on `cache-server` override the file.
|
||||||
|
|
||||||
|
#### Official Docker image
|
||||||
|
|
||||||
|
Besides `GITEA_INSTANCE_URL` and `GITEA_RUNNER_REGISTRATION_TOKEN`, the image entrypoint supports optional variables such as `CONFIG_FILE` (passed through as `-c`), `GITEA_RUNNER_LABELS`, `GITEA_RUNNER_EPHEMERAL`, `GITEA_RUNNER_ONCE`, `GITEA_RUNNER_NAME`, `GITEA_MAX_REG_ATTEMPTS`, `RUNNER_STATE_FILE`, and `GITEA_RUNNER_REGISTRATION_TOKEN_FILE`. See [scripts/run.sh](scripts/run.sh) for exact behavior.
|
||||||
|
|
||||||
|
For a fuller container-oriented walkthrough, see [examples/docker](examples/docker/README.md).
|
||||||
|
|
||||||
|
When `container.bind_workdir` is enabled, stale task workspace directories can be cleaned while the runner is idle:
|
||||||
|
- directories older than `runner.workdir_cleanup_age` are removed (default: `24h`; set `0` to disable)
|
||||||
|
- cleanup runs every `runner.idle_cleanup_interval` (default: `10m`; set `0` to disable)
|
||||||
|
- only purely numeric subdirectories under `container.workdir_parent` are treated as task workspaces and may be removed
|
||||||
|
- cleanup assumes `container.workdir_parent` is not shared across multiple runners
|
||||||
|
|
||||||
### Example Deployments
|
### Example Deployments
|
||||||
|
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ import (
|
|||||||
"sync/atomic"
|
"sync/atomic"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
"github.com/julienschmidt/httprouter"
|
"github.com/julienschmidt/httprouter"
|
||||||
"github.com/sirupsen/logrus"
|
"github.com/sirupsen/logrus"
|
||||||
|
|||||||
@@ -969,7 +969,7 @@ func TestHandler_ArtifactSignature(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TestHandler_SecretPersistsAcrossRestarts is the property that lets
|
// TestHandler_SecretPersistsAcrossRestarts is the property that lets
|
||||||
// act_runner cache-server be pointed at via cfg.Cache.ExternalServer: a
|
// gitea-runner cache-server be pointed at via cfg.Cache.ExternalServer: a
|
||||||
// restart must not invalidate signed URLs the handler has already issued
|
// restart must not invalidate signed URLs the handler has already issued
|
||||||
// (within their expiry window).
|
// (within their expiry window).
|
||||||
func TestHandler_SecretPersistsAcrossRestarts(t *testing.T) {
|
func TestHandler_SecretPersistsAcrossRestarts(t *testing.T) {
|
||||||
|
|||||||
30
act/artifactcache/testdata/example/example.yaml
vendored
30
act/artifactcache/testdata/example/example.yaml
vendored
@@ -1,30 +0,0 @@
|
|||||||
# Copied from https://github.com/actions/cache#example-cache-workflow
|
|
||||||
name: Caching Primes
|
|
||||||
|
|
||||||
on: push
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- run: env
|
|
||||||
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Cache Primes
|
|
||||||
id: cache-primes
|
|
||||||
uses: actions/cache@v3
|
|
||||||
with:
|
|
||||||
path: prime-numbers
|
|
||||||
key: ${{ runner.os }}-primes-${{ github.run_id }}
|
|
||||||
restore-keys: |
|
|
||||||
${{ runner.os }}-primes
|
|
||||||
${{ runner.os }}
|
|
||||||
|
|
||||||
- name: Generate Prime Numbers
|
|
||||||
if: steps.cache-primes.outputs.cache-hit != 'true'
|
|
||||||
run: cat /proc/sys/kernel/random/uuid > prime-numbers
|
|
||||||
|
|
||||||
- name: Use Prime Numbers
|
|
||||||
run: cat prime-numbers
|
|
||||||
@@ -17,7 +17,7 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
"github.com/julienschmidt/httprouter"
|
"github.com/julienschmidt/httprouter"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -17,8 +17,8 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
"testing/fstest"
|
"testing/fstest"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
"gitea.com/gitea/act_runner/act/runner"
|
"gitea.com/gitea/runner/act/runner"
|
||||||
|
|
||||||
"github.com/julienschmidt/httprouter"
|
"github.com/julienschmidt/httprouter"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
@@ -260,7 +260,7 @@ func TestArtifactFlow(t *testing.T) {
|
|||||||
defer cancel()
|
defer cancel()
|
||||||
|
|
||||||
platforms := map[string]string{
|
platforms := map[string]string{
|
||||||
"ubuntu-latest": "node:16-buster", // Don't use node:16-buster-slim because it doesn't have curl command, which is used in the tests
|
"ubuntu-latest": "node:24-bookworm", // Don't use node:24-bookworm-slim because it doesn't have curl command, which is used in the tests
|
||||||
}
|
}
|
||||||
|
|
||||||
tables := []TestJobFileInfo{
|
tables := []TestJobFileInfo{
|
||||||
|
|||||||
@@ -117,7 +117,7 @@ func NewParallelExecutor(parallel int, executors ...Executor) Executor {
|
|||||||
log.Debugf("Worker %d executing task %d", workerID, taskCount)
|
log.Debugf("Worker %d executing task %d", workerID, taskCount)
|
||||||
// Recover from panics in executors to avoid crashing the worker
|
// Recover from panics in executors to avoid crashing the worker
|
||||||
// goroutine which would leave the runner process hung.
|
// goroutine which would leave the runner process hung.
|
||||||
// https://gitea.com/gitea/act_runner/issues/371
|
// https://gitea.com/gitea/runner/issues/371
|
||||||
errs <- func() (err error) {
|
errs <- func() (err error) {
|
||||||
defer func() {
|
defer func() {
|
||||||
if r := recover(); r != nil {
|
if r := recover(); r != nil {
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
"github.com/go-git/go-git/v5"
|
"github.com/go-git/go-git/v5"
|
||||||
"github.com/go-git/go-git/v5/config"
|
"github.com/go-git/go-git/v5/config"
|
||||||
@@ -38,9 +38,11 @@ var (
|
|||||||
ErrNoRepo = errors.New("unable to find git repo")
|
ErrNoRepo = errors.New("unable to find git repo")
|
||||||
)
|
)
|
||||||
|
|
||||||
// acquireCloneLock returns an unlock function after locking the per-directory mutex for dir.
|
// AcquireCloneLock returns an unlock function after locking the per-directory mutex for dir.
|
||||||
// Only concurrent operations targeting the same directory are erialized; clones into different directories run in parallel.
|
// Only concurrent operations targeting the same directory are serialized; clones into different directories run in parallel.
|
||||||
func acquireCloneLock(dir string) func() {
|
// Callers reading files inside dir (e.g. tarring a checked-out action into a job container) must hold this lock too,
|
||||||
|
// otherwise a concurrent NewGitCloneExecutor on the same dir can mutate the worktree mid-read.
|
||||||
|
func AcquireCloneLock(dir string) func() {
|
||||||
v, _ := cloneLocks.LoadOrStore(dir, &sync.Mutex{})
|
v, _ := cloneLocks.LoadOrStore(dir, &sync.Mutex{})
|
||||||
mu := v.(*sync.Mutex)
|
mu := v.(*sync.Mutex)
|
||||||
mu.Lock()
|
mu.Lock()
|
||||||
@@ -305,10 +307,10 @@ func gitOptions(token string) (fetchOptions git.FetchOptions, pullOptions git.Pu
|
|||||||
func NewGitCloneExecutor(input NewGitCloneExecutorInput) common.Executor {
|
func NewGitCloneExecutor(input NewGitCloneExecutorInput) common.Executor {
|
||||||
return func(ctx context.Context) error {
|
return func(ctx context.Context) error {
|
||||||
logger := common.Logger(ctx)
|
logger := common.Logger(ctx)
|
||||||
logger.Infof(" \u2601 git clone '%s' # ref=%s", input.URL, input.Ref)
|
logger.Infof("git clone '%s' # ref=%s", input.URL, input.Ref)
|
||||||
logger.Debugf(" cloning %s to %s", input.URL, input.Dir)
|
logger.Debugf(" cloning %s to %s", input.URL, input.Dir)
|
||||||
|
|
||||||
defer acquireCloneLock(input.Dir)()
|
defer AcquireCloneLock(input.Dir)()
|
||||||
|
|
||||||
refName := plumbing.ReferenceName("refs/heads/" + input.Ref)
|
refName := plumbing.ReferenceName("refs/heads/" + input.Ref)
|
||||||
r, err := CloneIfRequired(ctx, refName, input, logger)
|
r, err := CloneIfRequired(ctx, refName, input, logger)
|
||||||
|
|||||||
@@ -310,11 +310,11 @@ func TestAcquireCloneLock(t *testing.T) {
|
|||||||
t.Run("same directory serializes", func(t *testing.T) {
|
t.Run("same directory serializes", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
|
|
||||||
unlock1 := acquireCloneLock(dir)
|
unlock1 := AcquireCloneLock(dir)
|
||||||
|
|
||||||
secondAcquired := make(chan struct{})
|
secondAcquired := make(chan struct{})
|
||||||
go func() {
|
go func() {
|
||||||
unlock := acquireCloneLock(dir)
|
unlock := AcquireCloneLock(dir)
|
||||||
close(secondAcquired)
|
close(secondAcquired)
|
||||||
unlock()
|
unlock()
|
||||||
}()
|
}()
|
||||||
@@ -338,12 +338,12 @@ func TestAcquireCloneLock(t *testing.T) {
|
|||||||
dirA := t.TempDir()
|
dirA := t.TempDir()
|
||||||
dirB := t.TempDir()
|
dirB := t.TempDir()
|
||||||
|
|
||||||
unlockA := acquireCloneLock(dirA)
|
unlockA := AcquireCloneLock(dirA)
|
||||||
defer unlockA()
|
defer unlockA()
|
||||||
|
|
||||||
done := make(chan struct{})
|
done := make(chan struct{})
|
||||||
go func() {
|
go func() {
|
||||||
unlock := acquireCloneLock(dirB)
|
unlock := AcquireCloneLock(dirB)
|
||||||
unlock()
|
unlock()
|
||||||
close(done)
|
close(done)
|
||||||
}()
|
}()
|
||||||
|
|||||||
@@ -6,13 +6,21 @@ package container
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
"github.com/docker/go-connections/nat"
|
"github.com/docker/go-connections/nat"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// ExitCodeError reports a non-zero process exit code from a container command.
|
||||||
|
type ExitCodeError int
|
||||||
|
|
||||||
|
func (e ExitCodeError) Error() string {
|
||||||
|
return fmt.Sprintf("Process completed with exit code %d.", int(e))
|
||||||
|
}
|
||||||
|
|
||||||
// NewContainerInput the input for the New function
|
// NewContainerInput the input for the New function
|
||||||
type NewContainerInput struct {
|
type NewContainerInput struct {
|
||||||
Image string
|
Image string
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
"github.com/docker/cli/cli/config"
|
"github.com/docker/cli/cli/config"
|
||||||
"github.com/docker/cli/cli/config/credentials"
|
"github.com/docker/cli/cli/config/credentials"
|
||||||
|
|||||||
@@ -12,11 +12,10 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
"github.com/docker/docker/api/types"
|
"github.com/docker/docker/api/types"
|
||||||
"github.com/docker/docker/pkg/archive"
|
"github.com/docker/docker/pkg/archive"
|
||||||
// github.com/docker/docker/builder/dockerignore is deprecated
|
|
||||||
"github.com/moby/buildkit/frontend/dockerfile/dockerignore"
|
"github.com/moby/buildkit/frontend/dockerfile/dockerignore"
|
||||||
"github.com/moby/patternmatcher"
|
"github.com/moby/patternmatcher"
|
||||||
)
|
)
|
||||||
@@ -26,9 +25,9 @@ func NewDockerBuildExecutor(input NewDockerBuildExecutorInput) common.Executor {
|
|||||||
return func(ctx context.Context) error {
|
return func(ctx context.Context) error {
|
||||||
logger := common.Logger(ctx)
|
logger := common.Logger(ctx)
|
||||||
if input.Platform != "" {
|
if input.Platform != "" {
|
||||||
logger.Infof("%sdocker build -t %s --platform %s %s", logPrefix, input.ImageTag, input.Platform, input.ContextDir)
|
logger.Infof("docker build -t %s --platform %s %s", input.ImageTag, input.Platform, input.ContextDir)
|
||||||
} else {
|
} else {
|
||||||
logger.Infof("%sdocker build -t %s %s", logPrefix, input.ImageTag, input.ContextDir)
|
logger.Infof("docker build -t %s %s", input.ImageTag, input.ContextDir)
|
||||||
}
|
}
|
||||||
if common.Dryrun(ctx) {
|
if common.Dryrun(ctx) {
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -44,7 +44,7 @@ func TestImageExistsLocally(t *testing.T) {
|
|||||||
|
|
||||||
// Chose alpine latest because it's so small
|
// Chose alpine latest because it's so small
|
||||||
// maybe we should build an image instead so that tests aren't reliable on dockerhub
|
// maybe we should build an image instead so that tests aren't reliable on dockerhub
|
||||||
readerDefault, err := cli.ImagePull(ctx, "node:16-buster-slim", types.ImagePullOptions{
|
readerDefault, err := cli.ImagePull(ctx, "node:24-bookworm-slim", types.ImagePullOptions{
|
||||||
Platform: "linux/amd64",
|
Platform: "linux/amd64",
|
||||||
})
|
})
|
||||||
assert.NoError(t, err) //nolint:testifylint // pre-existing issue from nektos/act
|
assert.NoError(t, err) //nolint:testifylint // pre-existing issue from nektos/act
|
||||||
@@ -52,12 +52,12 @@ func TestImageExistsLocally(t *testing.T) {
|
|||||||
_, err = io.ReadAll(readerDefault)
|
_, err = io.ReadAll(readerDefault)
|
||||||
assert.NoError(t, err) //nolint:testifylint // pre-existing issue from nektos/act
|
assert.NoError(t, err) //nolint:testifylint // pre-existing issue from nektos/act
|
||||||
|
|
||||||
imageDefaultArchExists, err := ImageExistsLocally(ctx, "node:16-buster-slim", "linux/amd64")
|
imageDefaultArchExists, err := ImageExistsLocally(ctx, "node:24-bookworm-slim", "linux/amd64")
|
||||||
assert.NoError(t, err) //nolint:testifylint // pre-existing issue from nektos/act
|
assert.NoError(t, err) //nolint:testifylint // pre-existing issue from nektos/act
|
||||||
assert.True(t, imageDefaultArchExists)
|
assert.True(t, imageDefaultArchExists)
|
||||||
|
|
||||||
// Validate if another architecture platform can be pulled
|
// Validate if another architecture platform can be pulled
|
||||||
readerArm64, err := cli.ImagePull(ctx, "node:16-buster-slim", types.ImagePullOptions{
|
readerArm64, err := cli.ImagePull(ctx, "node:24-bookworm-slim", types.ImagePullOptions{
|
||||||
Platform: "linux/arm64",
|
Platform: "linux/arm64",
|
||||||
})
|
})
|
||||||
assert.NoError(t, err) //nolint:testifylint // pre-existing issue from nektos/act
|
assert.NoError(t, err) //nolint:testifylint // pre-existing issue from nektos/act
|
||||||
@@ -65,7 +65,7 @@ func TestImageExistsLocally(t *testing.T) {
|
|||||||
_, err = io.ReadAll(readerArm64)
|
_, err = io.ReadAll(readerArm64)
|
||||||
assert.NoError(t, err) //nolint:testifylint // pre-existing issue from nektos/act
|
assert.NoError(t, err) //nolint:testifylint // pre-existing issue from nektos/act
|
||||||
|
|
||||||
imageArm64Exists, err := ImageExistsLocally(ctx, "node:16-buster-slim", "linux/arm64")
|
imageArm64Exists, err := ImageExistsLocally(ctx, "node:24-bookworm-slim", "linux/arm64")
|
||||||
assert.NoError(t, err) //nolint:testifylint // pre-existing issue from nektos/act
|
assert.NoError(t, err) //nolint:testifylint // pre-existing issue from nektos/act
|
||||||
assert.True(t, imageArm64Exists)
|
assert.True(t, imageArm64Exists)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -26,8 +26,6 @@ type dockerMessage struct {
|
|||||||
Progress string `json:"progress"`
|
Progress string `json:"progress"`
|
||||||
}
|
}
|
||||||
|
|
||||||
const logPrefix = " \U0001F433 "
|
|
||||||
|
|
||||||
func logDockerResponse(logger logrus.FieldLogger, dockerResponse io.ReadCloser, isError bool) error {
|
func logDockerResponse(logger logrus.FieldLogger, dockerResponse io.ReadCloser, isError bool) error {
|
||||||
if dockerResponse == nil {
|
if dockerResponse == nil {
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ package container
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
"github.com/docker/docker/api/types"
|
"github.com/docker/docker/api/types"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
"github.com/distribution/reference"
|
"github.com/distribution/reference"
|
||||||
"github.com/docker/docker/api/types"
|
"github.com/docker/docker/api/types"
|
||||||
@@ -24,7 +24,7 @@ import (
|
|||||||
func NewDockerPullExecutor(input NewDockerPullExecutorInput) common.Executor {
|
func NewDockerPullExecutor(input NewDockerPullExecutorInput) common.Executor {
|
||||||
return func(ctx context.Context) error {
|
return func(ctx context.Context) error {
|
||||||
logger := common.Logger(ctx)
|
logger := common.Logger(ctx)
|
||||||
logger.Debugf("%sdocker pull %v", logPrefix, input.Image)
|
logger.Debugf("docker pull %v", input.Image)
|
||||||
|
|
||||||
if common.Dryrun(ctx) {
|
if common.Dryrun(ctx) {
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -20,8 +20,8 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/filecollector"
|
"gitea.com/gitea/runner/act/filecollector"
|
||||||
|
|
||||||
"github.com/Masterminds/semver"
|
"github.com/Masterminds/semver"
|
||||||
"github.com/docker/cli/cli/compose/loader"
|
"github.com/docker/cli/cli/compose/loader"
|
||||||
@@ -53,7 +53,7 @@ func NewContainer(input *NewContainerInput) ExecutionsEnvironment {
|
|||||||
|
|
||||||
func (cr *containerReference) ConnectToNetwork(name string) common.Executor {
|
func (cr *containerReference) ConnectToNetwork(name string) common.Executor {
|
||||||
return common.
|
return common.
|
||||||
NewDebugExecutor("%sdocker network connect %s %s", logPrefix, name, cr.input.Name).
|
NewDebugExecutor("docker network connect %s %s", name, cr.input.Name).
|
||||||
Then(
|
Then(
|
||||||
common.NewPipelineExecutor(
|
common.NewPipelineExecutor(
|
||||||
cr.connect(),
|
cr.connect(),
|
||||||
@@ -90,7 +90,7 @@ func supportsContainerImagePlatform(ctx context.Context, cli client.APIClient) b
|
|||||||
|
|
||||||
func (cr *containerReference) Create(capAdd, capDrop []string) common.Executor {
|
func (cr *containerReference) Create(capAdd, capDrop []string) common.Executor {
|
||||||
return common.
|
return common.
|
||||||
NewInfoExecutor("%sdocker create image=%s platform=%s entrypoint=%+q cmd=%+q network=%+q", logPrefix, cr.input.Image, cr.input.Platform, cr.input.Entrypoint, cr.input.Cmd, cr.input.NetworkMode).
|
NewInfoExecutor("docker create image=%s platform=%s entrypoint=%+q cmd=%+q network=%+q", cr.input.Image, cr.input.Platform, cr.input.Entrypoint, cr.input.Cmd, cr.input.NetworkMode).
|
||||||
Then(
|
Then(
|
||||||
common.NewPipelineExecutor(
|
common.NewPipelineExecutor(
|
||||||
cr.connect(),
|
cr.connect(),
|
||||||
@@ -102,7 +102,7 @@ func (cr *containerReference) Create(capAdd, capDrop []string) common.Executor {
|
|||||||
|
|
||||||
func (cr *containerReference) Start(attach bool) common.Executor {
|
func (cr *containerReference) Start(attach bool) common.Executor {
|
||||||
return common.
|
return common.
|
||||||
NewInfoExecutor("%sdocker run image=%s platform=%s entrypoint=%+q cmd=%+q network=%+q", logPrefix, cr.input.Image, cr.input.Platform, cr.input.Entrypoint, cr.input.Cmd, cr.input.NetworkMode).
|
NewInfoExecutor("docker run image=%s platform=%s entrypoint=%+q cmd=%+q network=%+q", cr.input.Image, cr.input.Platform, cr.input.Entrypoint, cr.input.Cmd, cr.input.NetworkMode).
|
||||||
Then(
|
Then(
|
||||||
common.NewPipelineExecutor(
|
common.NewPipelineExecutor(
|
||||||
cr.connect(),
|
cr.connect(),
|
||||||
@@ -125,7 +125,7 @@ func (cr *containerReference) Start(attach bool) common.Executor {
|
|||||||
|
|
||||||
func (cr *containerReference) Pull(forcePull bool) common.Executor {
|
func (cr *containerReference) Pull(forcePull bool) common.Executor {
|
||||||
return common.
|
return common.
|
||||||
NewInfoExecutor("%sdocker pull image=%s platform=%s username=%s forcePull=%t", logPrefix, cr.input.Image, cr.input.Platform, cr.input.Username, forcePull).
|
NewInfoExecutor("docker pull image=%s platform=%s username=%s forcePull=%t", cr.input.Image, cr.input.Platform, cr.input.Username, forcePull).
|
||||||
Then(
|
Then(
|
||||||
NewDockerPullExecutor(NewDockerPullExecutorInput{
|
NewDockerPullExecutor(NewDockerPullExecutorInput{
|
||||||
Image: cr.input.Image,
|
Image: cr.input.Image,
|
||||||
@@ -147,7 +147,7 @@ func (cr *containerReference) Copy(destPath string, files ...*FileEntry) common.
|
|||||||
|
|
||||||
func (cr *containerReference) CopyDir(destPath, srcPath string, useGitIgnore bool) common.Executor {
|
func (cr *containerReference) CopyDir(destPath, srcPath string, useGitIgnore bool) common.Executor {
|
||||||
return common.NewPipelineExecutor(
|
return common.NewPipelineExecutor(
|
||||||
common.NewInfoExecutor("%sdocker cp src=%s dst=%s", logPrefix, srcPath, destPath),
|
common.NewInfoExecutor("docker cp src=%s dst=%s", srcPath, destPath),
|
||||||
cr.copyDir(destPath, srcPath, useGitIgnore),
|
cr.copyDir(destPath, srcPath, useGitIgnore),
|
||||||
func(ctx context.Context) error {
|
func(ctx context.Context) error {
|
||||||
// If this fails, then folders have wrong permissions on non root container
|
// If this fails, then folders have wrong permissions on non root container
|
||||||
@@ -177,7 +177,7 @@ func (cr *containerReference) UpdateFromImageEnv(env *map[string]string) common.
|
|||||||
|
|
||||||
func (cr *containerReference) Exec(command []string, env map[string]string, user, workdir string) common.Executor {
|
func (cr *containerReference) Exec(command []string, env map[string]string, user, workdir string) common.Executor {
|
||||||
return common.NewPipelineExecutor(
|
return common.NewPipelineExecutor(
|
||||||
common.NewInfoExecutor("%sdocker exec cmd=[%s] user=%s workdir=%s", logPrefix, strings.Join(command, " "), user, workdir),
|
common.NewInfoExecutor("docker exec cmd=[%s] user=%s workdir=%s", strings.Join(command, " "), user, workdir),
|
||||||
cr.connect(),
|
cr.connect(),
|
||||||
cr.find(),
|
cr.find(),
|
||||||
cr.exec(command, env, user, workdir),
|
cr.exec(command, env, user, workdir),
|
||||||
@@ -633,14 +633,10 @@ func (cr *containerReference) exec(cmd []string, env map[string]string, user, wo
|
|||||||
return fmt.Errorf("failed to inspect exec: %w", err)
|
return fmt.Errorf("failed to inspect exec: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
switch inspectResp.ExitCode {
|
if inspectResp.ExitCode == 0 {
|
||||||
case 0:
|
|
||||||
return nil
|
return nil
|
||||||
case 127:
|
|
||||||
return fmt.Errorf("exitcode '%d': command not found, please refer to https://github.com/nektos/act/issues/107 for more information", inspectResp.ExitCode)
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("exitcode '%d': failure", inspectResp.ExitCode)
|
|
||||||
}
|
}
|
||||||
|
return ExitCodeError(inspectResp.ExitCode)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -930,7 +926,7 @@ func (cr *containerReference) wait() common.Executor {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return fmt.Errorf("exit with `FAILURE`: %v", statusCode)
|
return ExitCodeError(statusCode)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
"github.com/docker/docker/api/types"
|
"github.com/docker/docker/api/types"
|
||||||
"github.com/docker/docker/api/types/container"
|
"github.com/docker/docker/api/types/container"
|
||||||
@@ -23,6 +23,7 @@ import (
|
|||||||
"github.com/sirupsen/logrus/hooks/test"
|
"github.com/sirupsen/logrus/hooks/test"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/mock"
|
"github.com/stretchr/testify/mock"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestDocker(t *testing.T) {
|
func TestDocker(t *testing.T) {
|
||||||
@@ -85,6 +86,11 @@ func (m *mockDockerClient) ContainerExecInspect(ctx context.Context, execID stri
|
|||||||
return args.Get(0).(types.ContainerExecInspect), args.Error(1)
|
return args.Get(0).(types.ContainerExecInspect), args.Error(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (m *mockDockerClient) ContainerWait(ctx context.Context, containerID string, condition container.WaitCondition) (<-chan container.WaitResponse, <-chan error) {
|
||||||
|
args := m.Called(ctx, containerID, condition)
|
||||||
|
return args.Get(0).(<-chan container.WaitResponse), args.Get(1).(<-chan error)
|
||||||
|
}
|
||||||
|
|
||||||
func (m *mockDockerClient) CopyToContainer(ctx context.Context, id, path string, content io.Reader, options types.CopyToContainerOptions) error {
|
func (m *mockDockerClient) CopyToContainer(ctx context.Context, id, path string, content io.Reader, options types.CopyToContainerOptions) error {
|
||||||
args := m.Called(ctx, id, path, content, options)
|
args := m.Called(ctx, id, path, content, options)
|
||||||
return args.Error(0)
|
return args.Error(0)
|
||||||
@@ -174,12 +180,43 @@ func TestDockerExecFailure(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
err := cr.exec([]string{""}, map[string]string{}, "user", "workdir")(ctx)
|
err := cr.exec([]string{""}, map[string]string{}, "user", "workdir")(ctx)
|
||||||
assert.Error(t, err, "exit with `FAILURE`: 1") //nolint:testifylint // pre-existing issue from nektos/act
|
var exitErr ExitCodeError
|
||||||
|
require.ErrorAs(t, err, &exitErr)
|
||||||
|
assert.Equal(t, ExitCodeError(1), exitErr)
|
||||||
|
assert.Equal(t, "Process completed with exit code 1.", err.Error())
|
||||||
|
|
||||||
conn.AssertExpectations(t)
|
conn.AssertExpectations(t)
|
||||||
client.AssertExpectations(t)
|
client.AssertExpectations(t)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestDockerWaitFailure(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
statusCh := make(chan container.WaitResponse, 1)
|
||||||
|
statusCh <- container.WaitResponse{StatusCode: 2}
|
||||||
|
errCh := make(chan error, 1)
|
||||||
|
|
||||||
|
client := &mockDockerClient{}
|
||||||
|
client.On("ContainerWait", ctx, "123", container.WaitConditionNotRunning).
|
||||||
|
Return((<-chan container.WaitResponse)(statusCh), (<-chan error)(errCh))
|
||||||
|
|
||||||
|
cr := &containerReference{
|
||||||
|
id: "123",
|
||||||
|
cli: client,
|
||||||
|
input: &NewContainerInput{
|
||||||
|
Image: "image",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
err := cr.wait()(ctx)
|
||||||
|
var exitErr ExitCodeError
|
||||||
|
require.ErrorAs(t, err, &exitErr)
|
||||||
|
assert.Equal(t, ExitCodeError(2), exitErr)
|
||||||
|
assert.Equal(t, "Process completed with exit code 2.", err.Error())
|
||||||
|
|
||||||
|
client.AssertExpectations(t)
|
||||||
|
}
|
||||||
|
|
||||||
func TestDockerCopyTarStream(t *testing.T) {
|
func TestDockerCopyTarStream(t *testing.T) {
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
|
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"runtime"
|
"runtime"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
"github.com/docker/docker/api/types"
|
"github.com/docker/docker/api/types"
|
||||||
"github.com/pkg/errors"
|
"github.com/pkg/errors"
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ package container
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
"github.com/docker/docker/api/types/filters"
|
"github.com/docker/docker/api/types/filters"
|
||||||
"github.com/docker/docker/api/types/volume"
|
"github.com/docker/docker/api/types/volume"
|
||||||
@@ -42,7 +42,7 @@ func NewDockerVolumeRemoveExecutor(volumeName string, force bool) common.Executo
|
|||||||
func removeExecutor(volume string, force bool) common.Executor {
|
func removeExecutor(volume string, force bool) common.Executor {
|
||||||
return func(ctx context.Context) error {
|
return func(ctx context.Context) error {
|
||||||
logger := common.Logger(ctx)
|
logger := common.Logger(ctx)
|
||||||
logger.Debugf("%sdocker volume rm %s", logPrefix, volume)
|
logger.Debugf("docker volume rm %s", volume)
|
||||||
|
|
||||||
if common.Dryrun(ctx) {
|
if common.Dryrun(ctx) {
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -16,12 +16,14 @@ import (
|
|||||||
"os/exec"
|
"os/exec"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"runtime"
|
"runtime"
|
||||||
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/filecollector"
|
"gitea.com/gitea/runner/act/filecollector"
|
||||||
"gitea.com/gitea/act_runner/act/lookpath"
|
"gitea.com/gitea/runner/act/lookpath"
|
||||||
|
|
||||||
"github.com/go-git/go-billy/v5/helper/polyfill"
|
"github.com/go-git/go-billy/v5/helper/polyfill"
|
||||||
"github.com/go-git/go-billy/v5/osfs"
|
"github.com/go-git/go-billy/v5/osfs"
|
||||||
@@ -34,9 +36,15 @@ type HostEnvironment struct {
|
|||||||
TmpDir string
|
TmpDir string
|
||||||
ToolCache string
|
ToolCache string
|
||||||
Workdir string
|
Workdir string
|
||||||
ActPath string
|
// BindWorkdir is true when the app runner mounts the workspace on the host and
|
||||||
CleanUp func()
|
// deletes the task directory after the job; host teardown must not remove Workdir.
|
||||||
StdOut io.Writer
|
BindWorkdir bool
|
||||||
|
ActPath string
|
||||||
|
CleanUp func()
|
||||||
|
StdOut io.Writer
|
||||||
|
|
||||||
|
mu sync.Mutex
|
||||||
|
runningPIDs map[int]struct{}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (e *HostEnvironment) Create(_, _ []string) common.Executor {
|
func (e *HostEnvironment) Create(_, _ []string) common.Executor {
|
||||||
@@ -344,8 +352,30 @@ func (e *HostEnvironment) exec(ctx context.Context, command []string, cmdline st
|
|||||||
if ppty != nil {
|
if ppty != nil {
|
||||||
go writeKeepAlive(ppty)
|
go writeKeepAlive(ppty)
|
||||||
}
|
}
|
||||||
err = cmd.Run()
|
// Split Start/Wait so the PID can be registered before the process can exit;
|
||||||
|
// cmd.Run() would block until exit, by which time the PID may have been reused.
|
||||||
|
if err := cmd.Start(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if cmd.Process != nil {
|
||||||
|
e.mu.Lock()
|
||||||
|
if e.runningPIDs == nil {
|
||||||
|
e.runningPIDs = map[int]struct{}{}
|
||||||
|
}
|
||||||
|
e.runningPIDs[cmd.Process.Pid] = struct{}{}
|
||||||
|
e.mu.Unlock()
|
||||||
|
defer func(pid int) {
|
||||||
|
e.mu.Lock()
|
||||||
|
delete(e.runningPIDs, pid)
|
||||||
|
e.mu.Unlock()
|
||||||
|
}(cmd.Process.Pid)
|
||||||
|
}
|
||||||
|
err = cmd.Wait()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
var exitErr *exec.ExitError
|
||||||
|
if errors.As(err, &exitErr) {
|
||||||
|
return ExitCodeError(exitErr.ExitCode())
|
||||||
|
}
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if tty != nil {
|
if tty != nil {
|
||||||
@@ -385,12 +415,83 @@ func (e *HostEnvironment) UpdateFromEnv(srcPath string, env *map[string]string)
|
|||||||
return parseEnvFile(e, srcPath, env)
|
return parseEnvFile(e, srcPath, env)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func removePathWithRetry(ctx context.Context, path string) error {
|
||||||
|
if path == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
attempts := 1
|
||||||
|
delay := time.Duration(0)
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
attempts = 5
|
||||||
|
delay = 200 * time.Millisecond
|
||||||
|
}
|
||||||
|
var lastErr error
|
||||||
|
for i := 0; i < attempts; i++ {
|
||||||
|
if i > 0 {
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
return ctx.Err()
|
||||||
|
case <-time.After(delay):
|
||||||
|
}
|
||||||
|
}
|
||||||
|
lastErr = os.RemoveAll(path)
|
||||||
|
if lastErr == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return lastErr
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *HostEnvironment) terminateRunningProcesses(ctx context.Context) {
|
||||||
|
if runtime.GOOS != "windows" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
e.mu.Lock()
|
||||||
|
pids := make([]int, 0, len(e.runningPIDs))
|
||||||
|
for pid := range e.runningPIDs {
|
||||||
|
pids = append(pids, pid)
|
||||||
|
}
|
||||||
|
e.mu.Unlock()
|
||||||
|
|
||||||
|
if len(pids) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
logger := common.Logger(ctx)
|
||||||
|
for _, pid := range pids {
|
||||||
|
// Best-effort: forcibly terminate process tree to release file handles
|
||||||
|
// so that workspace cleanup can succeed on Windows.
|
||||||
|
cmd := exec.CommandContext(ctx, "taskkill", "/PID", strconv.Itoa(pid), "/T", "/F")
|
||||||
|
out, err := cmd.CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
logger.Debugf("taskkill failed for pid=%d: %v output=%s", pid, err, strings.TrimSpace(string(out)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func (e *HostEnvironment) Remove() common.Executor {
|
func (e *HostEnvironment) Remove() common.Executor {
|
||||||
return func(ctx context.Context) error {
|
return func(ctx context.Context) error {
|
||||||
|
// Ensure any lingering child processes are ended before attempting
|
||||||
|
// to remove the workspace (Windows file locks otherwise prevent cleanup).
|
||||||
|
e.terminateRunningProcesses(ctx)
|
||||||
|
|
||||||
|
// Only removes per-job misc state. Must not remove the cache/toolcache root.
|
||||||
if e.CleanUp != nil {
|
if e.CleanUp != nil {
|
||||||
e.CleanUp()
|
e.CleanUp()
|
||||||
}
|
}
|
||||||
return os.RemoveAll(e.Path)
|
logger := common.Logger(ctx)
|
||||||
|
var errs []error
|
||||||
|
if err := removePathWithRetry(ctx, e.Path); err != nil {
|
||||||
|
logger.Warnf("failed to remove host misc state %s: %v", e.Path, err)
|
||||||
|
errs = append(errs, err)
|
||||||
|
}
|
||||||
|
if !e.BindWorkdir && e.Workdir != "" {
|
||||||
|
if err := removePathWithRetry(ctx, e.Workdir); err != nil {
|
||||||
|
logger.Warnf("failed to remove host workspace %s: %v", e.Workdir, err)
|
||||||
|
errs = append(errs, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return errors.Join(errs...)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -11,9 +11,14 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"path"
|
"path"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
"runtime"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
|
"github.com/sirupsen/logrus"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Type assert HostEnvironment implements ExecutionsEnvironment
|
// Type assert HostEnvironment implements ExecutionsEnvironment
|
||||||
@@ -69,3 +74,76 @@ func TestGetContainerArchive(t *testing.T) {
|
|||||||
_, err = reader.Next()
|
_, err = reader.Next()
|
||||||
assert.ErrorIs(t, err, io.EOF)
|
assert.ErrorIs(t, err, io.EOF)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestHostEnvironmentExecExitCode(t *testing.T) {
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
t.Skip("uses POSIX shell")
|
||||||
|
}
|
||||||
|
dir := t.TempDir()
|
||||||
|
ctx := context.Background()
|
||||||
|
e := &HostEnvironment{
|
||||||
|
Path: filepath.Join(dir, "path"),
|
||||||
|
TmpDir: filepath.Join(dir, "tmp"),
|
||||||
|
ToolCache: filepath.Join(dir, "tool_cache"),
|
||||||
|
ActPath: filepath.Join(dir, "act_path"),
|
||||||
|
StdOut: io.Discard,
|
||||||
|
Workdir: filepath.Join(dir, "path"),
|
||||||
|
}
|
||||||
|
for _, p := range []string{e.Path, e.TmpDir, e.ToolCache, e.ActPath} {
|
||||||
|
assert.NoError(t, os.MkdirAll(p, 0o700)) //nolint:testifylint // test setup
|
||||||
|
}
|
||||||
|
|
||||||
|
err := e.Exec([]string{"sh", "-c", "exit 3"}, map[string]string{"PATH": os.Getenv("PATH")}, "", "")(ctx)
|
||||||
|
var exitErr ExitCodeError
|
||||||
|
require.ErrorAs(t, err, &exitErr)
|
||||||
|
assert.Equal(t, ExitCodeError(3), exitErr)
|
||||||
|
assert.Equal(t, "Process completed with exit code 3.", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHostEnvironmentRemoveCleansWorkdir(t *testing.T) {
|
||||||
|
logger := logrus.New()
|
||||||
|
ctx := common.WithLogger(context.Background(), logrus.NewEntry(logger))
|
||||||
|
base := t.TempDir()
|
||||||
|
miscRoot := filepath.Join(base, "misc")
|
||||||
|
path := filepath.Join(miscRoot, "hostexecutor")
|
||||||
|
require.NoError(t, os.MkdirAll(path, 0o700))
|
||||||
|
workdir := filepath.Join(base, "workspace", "owner", "repo")
|
||||||
|
require.NoError(t, os.MkdirAll(workdir, 0o700))
|
||||||
|
|
||||||
|
e := &HostEnvironment{
|
||||||
|
Path: path,
|
||||||
|
Workdir: workdir,
|
||||||
|
BindWorkdir: false,
|
||||||
|
CleanUp: func() {
|
||||||
|
_ = os.RemoveAll(miscRoot)
|
||||||
|
},
|
||||||
|
StdOut: os.Stdout,
|
||||||
|
}
|
||||||
|
require.NoError(t, e.Remove()(ctx))
|
||||||
|
_, err := os.Stat(workdir)
|
||||||
|
assert.ErrorIs(t, err, os.ErrNotExist)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHostEnvironmentRemoveSkipsWorkdirWhenBindWorkdir(t *testing.T) {
|
||||||
|
logger := logrus.New()
|
||||||
|
ctx := common.WithLogger(context.Background(), logrus.NewEntry(logger))
|
||||||
|
base := t.TempDir()
|
||||||
|
miscRoot := filepath.Join(base, "misc")
|
||||||
|
path := filepath.Join(miscRoot, "hostexecutor")
|
||||||
|
require.NoError(t, os.MkdirAll(path, 0o700))
|
||||||
|
workdir := filepath.Join(base, "workspace", "123", "owner", "repo")
|
||||||
|
require.NoError(t, os.MkdirAll(workdir, 0o700))
|
||||||
|
|
||||||
|
e := &HostEnvironment{
|
||||||
|
Path: path,
|
||||||
|
Workdir: workdir,
|
||||||
|
BindWorkdir: true,
|
||||||
|
CleanUp: func() {
|
||||||
|
_ = os.RemoveAll(miscRoot)
|
||||||
|
},
|
||||||
|
StdOut: os.Stdout,
|
||||||
|
}
|
||||||
|
require.NoError(t, e.Remove()(ctx))
|
||||||
|
_, err := os.Stat(workdir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ import (
|
|||||||
"io"
|
"io"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
)
|
)
|
||||||
|
|
||||||
func parseEnvFile(e Container, srcPath string, env *map[string]string) common.Executor {
|
func parseEnvFile(e Container, srcPath string, env *map[string]string) common.Executor {
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/go-git/go-git/v5/plumbing/format/gitignore"
|
"github.com/go-git/go-git/v5/plumbing/format/gitignore"
|
||||||
"github.com/rhysd/actionlint"
|
"github.com/rhysd/actionlint"
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ import (
|
|||||||
"path/filepath"
|
"path/filepath"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ import (
|
|||||||
"reflect"
|
"reflect"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/rhysd/actionlint"
|
"github.com/rhysd/actionlint"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ import (
|
|||||||
"math"
|
"math"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -73,10 +73,16 @@ func (cc *CopyCollector) WriteFile(fpath string, fi fs.FileInfo, linkName string
|
|||||||
if err := os.MkdirAll(filepath.Dir(fdestpath), 0o777); err != nil {
|
if err := os.MkdirAll(filepath.Dir(fdestpath), 0o777); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
// Remove any existing destination so we can overwrite read-only files
|
||||||
|
// (e.g. git pack files at mode 0444 trip EACCES on macOS and "Access is
|
||||||
|
// denied" on Windows when reopened with O_WRONLY) and so os.Symlink does
|
||||||
|
// not fail with EEXIST. os.Remove clears the Windows read-only attribute
|
||||||
|
// internally; on Unix unlink only needs write permission on the parent.
|
||||||
|
_ = os.Remove(fdestpath)
|
||||||
if f == nil {
|
if f == nil {
|
||||||
return os.Symlink(linkName, fdestpath)
|
return os.Symlink(linkName, fdestpath)
|
||||||
}
|
}
|
||||||
df, err := os.OpenFile(fdestpath, os.O_CREATE|os.O_WRONLY, fi.Mode())
|
df, err := os.OpenFile(fdestpath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, fi.Mode())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,7 +8,9 @@ import (
|
|||||||
"archive/tar"
|
"archive/tar"
|
||||||
"context"
|
"context"
|
||||||
"io"
|
"io"
|
||||||
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
"runtime"
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
@@ -20,6 +22,7 @@ import (
|
|||||||
"github.com/go-git/go-git/v5/plumbing/format/index"
|
"github.com/go-git/go-git/v5/plumbing/format/index"
|
||||||
"github.com/go-git/go-git/v5/storage/filesystem"
|
"github.com/go-git/go-git/v5/storage/filesystem"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
type memoryFs struct {
|
type memoryFs struct {
|
||||||
@@ -174,3 +177,47 @@ func TestSymlinks(t *testing.T) {
|
|||||||
assert.Equal(t, ".env", files["test.env"].Linkname)
|
assert.Equal(t, ".env", files["test.env"].Linkname)
|
||||||
assert.ErrorIs(t, err, io.EOF, "tar must be read cleanly to EOF")
|
assert.ErrorIs(t, err, io.EOF, "tar must be read cleanly to EOF")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Regression for https://gitea.com/gitea/runner/issues/876 and /941:
|
||||||
|
// re-copying an action directory must overwrite a pre-existing read-only
|
||||||
|
// file (e.g. a git pack .idx at mode 0444) instead of failing with EACCES
|
||||||
|
// on macOS or "Access is denied" on Windows.
|
||||||
|
func TestCopyCollectorWriteFileOverwritesReadOnlyFile(t *testing.T) {
|
||||||
|
dst := t.TempDir()
|
||||||
|
target := filepath.Join(dst, "sub", "pack.idx")
|
||||||
|
require.NoError(t, os.MkdirAll(filepath.Dir(target), 0o755))
|
||||||
|
require.NoError(t, os.WriteFile(target, []byte("old"), 0o444))
|
||||||
|
|
||||||
|
src := filepath.Join(t.TempDir(), "pack.idx")
|
||||||
|
require.NoError(t, os.WriteFile(src, []byte("new"), 0o444))
|
||||||
|
fi, err := os.Stat(src)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
cc := &CopyCollector{DstDir: dst}
|
||||||
|
require.NoError(t, cc.WriteFile("sub/pack.idx", fi, "", strings.NewReader("new")))
|
||||||
|
|
||||||
|
got, err := os.ReadFile(target)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "new", string(got))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Without the destination removal, os.Symlink fails with EEXIST when the
|
||||||
|
// path already holds a regular file from an earlier copy of the action.
|
||||||
|
func TestCopyCollectorWriteFileOverwritesFileWithSymlink(t *testing.T) {
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
t.Skip("creating symlinks requires elevated privileges on Windows")
|
||||||
|
}
|
||||||
|
dst := t.TempDir()
|
||||||
|
target := filepath.Join(dst, "link")
|
||||||
|
require.NoError(t, os.WriteFile(target, []byte("stale"), 0o644))
|
||||||
|
|
||||||
|
fi, err := os.Lstat(target)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
cc := &CopyCollector{DstDir: dst}
|
||||||
|
require.NoError(t, cc.WriteFile("link", fi, "target", nil))
|
||||||
|
|
||||||
|
resolved, err := os.Readlink(target)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "target", resolved)
|
||||||
|
}
|
||||||
|
|||||||
@@ -9,8 +9,8 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/common/git"
|
"gitea.com/gitea/runner/act/common/git"
|
||||||
)
|
)
|
||||||
|
|
||||||
type GithubContext struct {
|
type GithubContext struct {
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ jobs:
|
|||||||
with-volumes:
|
with-volumes:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
container:
|
container:
|
||||||
image: node:16-buster-slim
|
image: node:24-bookworm-slim
|
||||||
volumes:
|
volumes:
|
||||||
- my_docker_volume:/path/to/volume
|
- my_docker_volume:/path/to/volume
|
||||||
- /path/to/nonexist/directory
|
- /path/to/nonexist/directory
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"go.yaml.in/yaml/v4"
|
"go.yaml.in/yaml/v4"
|
||||||
|
|||||||
@@ -18,9 +18,10 @@ import (
|
|||||||
"runtime"
|
"runtime"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/container"
|
"gitea.com/gitea/runner/act/common/git"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/container"
|
||||||
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/kballard/go-shellquote"
|
"github.com/kballard/go-shellquote"
|
||||||
)
|
)
|
||||||
@@ -44,6 +45,11 @@ type runAction func(step actionStep, actionDir string, remoteAction *remoteActio
|
|||||||
//go:embed res/trampoline.js
|
//go:embed res/trampoline.js
|
||||||
var trampoline embed.FS
|
var trampoline embed.FS
|
||||||
|
|
||||||
|
var (
|
||||||
|
ContainerImageExistsLocally = container.ImageExistsLocally
|
||||||
|
ContainerNewDockerBuildExecutor = container.NewDockerBuildExecutor
|
||||||
|
)
|
||||||
|
|
||||||
func readActionImpl(ctx context.Context, step *model.Step, actionDir, actionPath string, readFile actionYamlReader, writeFile fileWriter) (*model.Action, error) {
|
func readActionImpl(ctx context.Context, step *model.Step, actionDir, actionPath string, readFile actionYamlReader, writeFile fileWriter) (*model.Action, error) {
|
||||||
logger := common.Logger(ctx)
|
logger := common.Logger(ctx)
|
||||||
allErrors := []error{}
|
allErrors := []error{}
|
||||||
@@ -148,6 +154,8 @@ func maybeCopyToActionDir(ctx context.Context, step actionStep, actionDir, actio
|
|||||||
return rc.JobContainer.CopyTarStream(ctx, containerActionDirCopy, ta)
|
return rc.JobContainer.CopyTarStream(ctx, containerActionDirCopy, ta)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
defer git.AcquireCloneLock(actionDir)()
|
||||||
|
|
||||||
if err := removeGitIgnore(ctx, actionDir); err != nil {
|
if err := removeGitIgnore(ctx, actionDir); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -197,7 +205,7 @@ func runActionImpl(step actionStep, actionDir string, remoteAction *remoteAction
|
|||||||
if remoteAction == nil {
|
if remoteAction == nil {
|
||||||
location = containerActionDir
|
location = containerActionDir
|
||||||
}
|
}
|
||||||
return execAsDocker(ctx, step, actionName, location, remoteAction == nil)
|
return execAsDocker(ctx, step, actionName, actionDir, location, remoteAction == nil)
|
||||||
case x.IsComposite():
|
case x.IsComposite():
|
||||||
if err := maybeCopyToActionDir(ctx, step, actionDir, actionPath, containerActionDir); err != nil {
|
if err := maybeCopyToActionDir(ctx, step, actionDir, actionPath, containerActionDir); err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -265,7 +273,7 @@ func removeGitIgnore(ctx context.Context, directory string) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: break out parts of function to reduce complexicity
|
// TODO: break out parts of function to reduce complexicity
|
||||||
func execAsDocker(ctx context.Context, step actionStep, actionName, basedir string, localAction bool) error {
|
func execAsDocker(ctx context.Context, step actionStep, actionName, actionDir, basedir string, localAction bool) error {
|
||||||
logger := common.Logger(ctx)
|
logger := common.Logger(ctx)
|
||||||
rc := step.getRunContext()
|
rc := step.getRunContext()
|
||||||
action := step.getActionModel()
|
action := step.getActionModel()
|
||||||
@@ -284,12 +292,12 @@ func execAsDocker(ctx context.Context, step actionStep, actionName, basedir stri
|
|||||||
image = strings.ToLower(image)
|
image = strings.ToLower(image)
|
||||||
contextDir, fileName := filepath.Split(filepath.Join(basedir, action.Runs.Image))
|
contextDir, fileName := filepath.Split(filepath.Join(basedir, action.Runs.Image))
|
||||||
|
|
||||||
anyArchExists, err := container.ImageExistsLocally(ctx, image, "any")
|
anyArchExists, err := ContainerImageExistsLocally(ctx, image, "any")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
correctArchExists, err := container.ImageExistsLocally(ctx, image, rc.Config.ContainerArchitecture)
|
correctArchExists, err := ContainerImageExistsLocally(ctx, image, rc.Config.ContainerArchitecture)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -321,13 +329,21 @@ func execAsDocker(ctx context.Context, step actionStep, actionName, basedir stri
|
|||||||
}
|
}
|
||||||
defer buildContext.Close()
|
defer buildContext.Close()
|
||||||
}
|
}
|
||||||
prepImage = container.NewDockerBuildExecutor(container.NewDockerBuildExecutorInput{
|
prepImage = ContainerNewDockerBuildExecutor(container.NewDockerBuildExecutorInput{
|
||||||
ContextDir: contextDir,
|
ContextDir: contextDir,
|
||||||
Dockerfile: fileName,
|
Dockerfile: fileName,
|
||||||
ImageTag: image,
|
ImageTag: image,
|
||||||
BuildContext: buildContext,
|
BuildContext: buildContext,
|
||||||
Platform: rc.Config.ContainerArchitecture,
|
Platform: rc.Config.ContainerArchitecture,
|
||||||
})
|
})
|
||||||
|
if buildContext == nil {
|
||||||
|
// Held across the whole build: the daemon drains contextDir lazily.
|
||||||
|
inner := prepImage
|
||||||
|
prepImage = func(ctx context.Context) error {
|
||||||
|
defer git.AcquireCloneLock(actionDir)()
|
||||||
|
return inner(ctx)
|
||||||
|
}
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
logger.Debugf("image '%s' for architecture '%s' already exists", image, rc.Config.ContainerArchitecture)
|
logger.Debugf("image '%s' for architecture '%s' already exists", image, rc.Config.ContainerArchitecture)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -11,8 +11,8 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
)
|
)
|
||||||
|
|
||||||
func evaluateCompositeInputAndEnv(ctx context.Context, parent *RunContext, step actionStep) map[string]string {
|
func evaluateCompositeInputAndEnv(ctx context.Context, parent *RunContext, step actionStep) map[string]string {
|
||||||
|
|||||||
@@ -9,9 +9,14 @@ import (
|
|||||||
"io"
|
"io"
|
||||||
"io/fs"
|
"io/fs"
|
||||||
"strings"
|
"strings"
|
||||||
|
"sync"
|
||||||
"testing"
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
"gitea.com/gitea/runner/act/common/git"
|
||||||
|
"gitea.com/gitea/runner/act/container"
|
||||||
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/mock"
|
"github.com/stretchr/testify/mock"
|
||||||
@@ -252,3 +257,153 @@ func TestActionRunner(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestMaybeCopyToActionDirHoldsCloneLock(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
actionDir := t.TempDir()
|
||||||
|
|
||||||
|
releaseCopy := make(chan struct{})
|
||||||
|
release := sync.OnceFunc(func() { close(releaseCopy) })
|
||||||
|
defer release()
|
||||||
|
|
||||||
|
copyEntered := make(chan struct{})
|
||||||
|
|
||||||
|
cm := &containerMock{}
|
||||||
|
cm.On("CopyDir", "/var/run/act/actions/", actionDir+"/", false).Return(func(ctx context.Context) error {
|
||||||
|
close(copyEntered)
|
||||||
|
<-releaseCopy
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
step := &stepActionRemote{
|
||||||
|
Step: &model.Step{Uses: "remote/action@v1"},
|
||||||
|
RunContext: &RunContext{
|
||||||
|
Config: &Config{},
|
||||||
|
JobContainer: cm,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
copyDone := make(chan error, 1)
|
||||||
|
go func() {
|
||||||
|
copyDone <- maybeCopyToActionDir(ctx, step, actionDir, "", "/var/run/act/actions/")
|
||||||
|
}()
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-copyEntered:
|
||||||
|
case err := <-copyDone:
|
||||||
|
t.Fatalf("maybeCopyToActionDir returned before CopyDir was entered: %v", err)
|
||||||
|
case <-time.After(time.Second):
|
||||||
|
t.Fatal("CopyDir was not entered within 1 second")
|
||||||
|
}
|
||||||
|
|
||||||
|
peerAcquired := make(chan struct{})
|
||||||
|
go func() {
|
||||||
|
unlock := git.AcquireCloneLock(actionDir)
|
||||||
|
close(peerAcquired)
|
||||||
|
unlock()
|
||||||
|
}()
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-peerAcquired:
|
||||||
|
t.Fatal("peer AcquireCloneLock returned while CopyDir was running")
|
||||||
|
case <-time.After(50 * time.Millisecond):
|
||||||
|
}
|
||||||
|
|
||||||
|
release()
|
||||||
|
|
||||||
|
select {
|
||||||
|
case err := <-copyDone:
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("maybeCopyToActionDir returned error: %v", err)
|
||||||
|
}
|
||||||
|
case <-time.After(time.Second):
|
||||||
|
t.Fatal("maybeCopyToActionDir did not return after CopyDir was unblocked")
|
||||||
|
}
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-peerAcquired:
|
||||||
|
case <-time.After(time.Second):
|
||||||
|
t.Fatal("peer AcquireCloneLock did not proceed after lock released")
|
||||||
|
}
|
||||||
|
|
||||||
|
cm.AssertExpectations(t)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExecAsDockerHoldsCloneLockForRemoteUncached(t *testing.T) {
|
||||||
|
actionDir := t.TempDir()
|
||||||
|
|
||||||
|
unlockOnce := sync.OnceFunc(git.AcquireCloneLock(actionDir))
|
||||||
|
defer unlockOnce()
|
||||||
|
|
||||||
|
innerEntered := make(chan struct{})
|
||||||
|
releaseInner := make(chan struct{})
|
||||||
|
releaseOnce := sync.OnceFunc(func() { close(releaseInner) })
|
||||||
|
defer releaseOnce()
|
||||||
|
|
||||||
|
origImageExists := ContainerImageExistsLocally
|
||||||
|
ContainerImageExistsLocally = func(_ context.Context, _, _ string) (bool, error) {
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
defer func() { ContainerImageExistsLocally = origImageExists }()
|
||||||
|
|
||||||
|
origBuildExec := ContainerNewDockerBuildExecutor
|
||||||
|
ContainerNewDockerBuildExecutor = func(_ container.NewDockerBuildExecutorInput) common.Executor {
|
||||||
|
return func(_ context.Context) error {
|
||||||
|
close(innerEntered)
|
||||||
|
<-releaseInner
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
defer func() { ContainerNewDockerBuildExecutor = origBuildExec }()
|
||||||
|
|
||||||
|
step := &stepActionRemote{
|
||||||
|
Step: &model.Step{ID: "1", Uses: "remote/action@v1", With: map[string]string{}},
|
||||||
|
RunContext: &RunContext{
|
||||||
|
Config: &Config{},
|
||||||
|
Run: &model.Run{
|
||||||
|
JobID: "1",
|
||||||
|
Workflow: &model.Workflow{
|
||||||
|
Name: "wf",
|
||||||
|
Jobs: map[string]*model.Job{"1": {}},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
JobContainer: &containerMock{},
|
||||||
|
},
|
||||||
|
action: &model.Action{Runs: model.ActionRuns{Using: "docker", Image: "Dockerfile"}},
|
||||||
|
env: map[string]string{},
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
done := make(chan error, 1)
|
||||||
|
go func() { done <- execAsDocker(ctx, step, "test-action", actionDir, actionDir, false) }()
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-innerEntered:
|
||||||
|
t.Fatal("inner build executor ran before clone lock was released")
|
||||||
|
case err := <-done:
|
||||||
|
t.Fatalf("execAsDocker returned before inner was entered: %v", err)
|
||||||
|
case <-time.After(50 * time.Millisecond):
|
||||||
|
}
|
||||||
|
|
||||||
|
unlockOnce()
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-innerEntered:
|
||||||
|
case err := <-done:
|
||||||
|
t.Fatalf("execAsDocker returned without entering inner: %v", err)
|
||||||
|
case <-time.After(time.Second):
|
||||||
|
t.Fatal("inner build executor not entered after lock released")
|
||||||
|
}
|
||||||
|
|
||||||
|
cancel()
|
||||||
|
releaseOnce()
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-done:
|
||||||
|
case <-time.After(time.Second):
|
||||||
|
t.Fatal("execAsDocker did not return after inner was released and ctx was canceled")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ import (
|
|||||||
"regexp"
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
)
|
)
|
||||||
|
|
||||||
var commandPatternGA *regexp.Regexp
|
var commandPatternGA *regexp.Regexp
|
||||||
@@ -120,7 +120,7 @@ func (rc *RunContext) setOutput(ctx context.Context, kvPairs map[string]string,
|
|||||||
|
|
||||||
result, ok := rc.StepResults[stepID]
|
result, ok := rc.StepResults[stepID]
|
||||||
if !ok {
|
if !ok {
|
||||||
logger.Infof(" \U00002757 no outputs used step '%s'", stepID)
|
logger.Infof("No outputs registered for step '%s'", stepID)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -11,8 +11,8 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/sirupsen/logrus/hooks/test"
|
"github.com/sirupsen/logrus/hooks/test"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
|||||||
@@ -8,8 +8,8 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"io"
|
"io"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/container"
|
"gitea.com/gitea/runner/act/container"
|
||||||
|
|
||||||
"github.com/stretchr/testify/mock"
|
"github.com/stretchr/testify/mock"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -15,10 +15,10 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/container"
|
"gitea.com/gitea/runner/act/container"
|
||||||
"gitea.com/gitea/act_runner/act/exprparser"
|
"gitea.com/gitea/runner/act/exprparser"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
_ "embed"
|
_ "embed"
|
||||||
|
|
||||||
|
|||||||
@@ -8,8 +8,8 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/exprparser"
|
"gitea.com/gitea/runner/act/exprparser"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
assert "github.com/stretchr/testify/assert"
|
assert "github.com/stretchr/testify/assert"
|
||||||
yaml "go.yaml.in/yaml/v4"
|
yaml "go.yaml.in/yaml/v4"
|
||||||
|
|||||||
@@ -10,8 +10,8 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
)
|
)
|
||||||
|
|
||||||
type jobInfo interface {
|
type jobInfo interface {
|
||||||
@@ -24,6 +24,13 @@ type jobInfo interface {
|
|||||||
result(result string)
|
result(result string)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// reportStepError emits the GitHub Actions ##[error] annotation and records
|
||||||
|
// the error against the job so the job is reported as failed.
|
||||||
|
func reportStepError(ctx context.Context, err error) {
|
||||||
|
common.Logger(ctx).Errorf("##[error]%v", err)
|
||||||
|
common.SetJobError(ctx, err)
|
||||||
|
}
|
||||||
|
|
||||||
func newJobExecutor(info jobInfo, sf stepFactory, rc *RunContext) common.Executor {
|
func newJobExecutor(info jobInfo, sf stepFactory, rc *RunContext) common.Executor {
|
||||||
steps := make([]common.Executor, 0)
|
steps := make([]common.Executor, 0)
|
||||||
preSteps := make([]common.Executor, 0)
|
preSteps := make([]common.Executor, 0)
|
||||||
@@ -32,7 +39,7 @@ func newJobExecutor(info jobInfo, sf stepFactory, rc *RunContext) common.Executo
|
|||||||
steps = append(steps, func(ctx context.Context) error {
|
steps = append(steps, func(ctx context.Context) error {
|
||||||
logger := common.Logger(ctx)
|
logger := common.Logger(ctx)
|
||||||
if len(info.matrix()) > 0 {
|
if len(info.matrix()) > 0 {
|
||||||
logger.Infof("\U0001F9EA Matrix: %v", info.matrix())
|
logger.Infof("Matrix: %v", info.matrix())
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
@@ -75,33 +82,36 @@ func newJobExecutor(info jobInfo, sf stepFactory, rc *RunContext) common.Executo
|
|||||||
|
|
||||||
preExec := step.pre()
|
preExec := step.pre()
|
||||||
preSteps = append(preSteps, useStepLogger(rc, stepModel, stepStagePre, func(ctx context.Context) error {
|
preSteps = append(preSteps, useStepLogger(rc, stepModel, stepStagePre, func(ctx context.Context) error {
|
||||||
logger := common.Logger(ctx)
|
|
||||||
preErr := preExec(ctx)
|
preErr := preExec(ctx)
|
||||||
if preErr != nil {
|
if preErr != nil {
|
||||||
logger.Errorf("%v", preErr)
|
reportStepError(ctx, preErr)
|
||||||
common.SetJobError(ctx, preErr)
|
|
||||||
} else if ctx.Err() != nil {
|
} else if ctx.Err() != nil {
|
||||||
logger.Errorf("%v", ctx.Err())
|
reportStepError(ctx, ctx.Err())
|
||||||
common.SetJobError(ctx, ctx.Err())
|
|
||||||
}
|
}
|
||||||
return preErr
|
return preErr
|
||||||
}))
|
}))
|
||||||
|
|
||||||
stepExec := step.main()
|
stepExec := step.main()
|
||||||
steps = append(steps, useStepLogger(rc, stepModel, stepStageMain, func(ctx context.Context) error {
|
steps = append(steps, useStepLogger(rc, stepModel, stepStageMain, func(ctx context.Context) error {
|
||||||
logger := common.Logger(ctx)
|
|
||||||
err := stepExec(ctx)
|
err := stepExec(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Errorf("%v", err)
|
reportStepError(ctx, err)
|
||||||
common.SetJobError(ctx, err)
|
|
||||||
} else if ctx.Err() != nil {
|
} else if ctx.Err() != nil {
|
||||||
logger.Errorf("%v", ctx.Err())
|
reportStepError(ctx, ctx.Err())
|
||||||
common.SetJobError(ctx, ctx.Err())
|
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}))
|
}))
|
||||||
|
|
||||||
postExec := useStepLogger(rc, stepModel, stepStagePost, step.post())
|
postFn := step.post()
|
||||||
|
postExec := useStepLogger(rc, stepModel, stepStagePost, func(ctx context.Context) error {
|
||||||
|
err := postFn(ctx)
|
||||||
|
if err != nil {
|
||||||
|
reportStepError(ctx, err)
|
||||||
|
} else if ctx.Err() != nil {
|
||||||
|
reportStepError(ctx, ctx.Err())
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
})
|
||||||
if postExecutor != nil {
|
if postExecutor != nil {
|
||||||
// run the post executor in reverse order
|
// run the post executor in reverse order
|
||||||
postExecutor = postExec.Finally(postExecutor)
|
postExecutor = postExec.Finally(postExecutor)
|
||||||
@@ -136,7 +146,7 @@ func newJobExecutor(info jobInfo, sf stepFactory, rc *RunContext) common.Executo
|
|||||||
// if !rc.IsHostEnv(ctx) && rc.Config.ContainerNetworkMode == "" {
|
// if !rc.IsHostEnv(ctx) && rc.Config.ContainerNetworkMode == "" {
|
||||||
// // clean network in docker mode only
|
// // clean network in docker mode only
|
||||||
// // if the value of `ContainerNetworkMode` is empty string,
|
// // if the value of `ContainerNetworkMode` is empty string,
|
||||||
// // it means that the network to which containers are connecting is created by `act_runner`,
|
// // it means that the network to which containers are connecting is created by `runner`,
|
||||||
// // so, we should remove the network at last.
|
// // so, we should remove the network at last.
|
||||||
// networkName, _ := rc.networkName()
|
// networkName, _ := rc.networkName()
|
||||||
// logger.Infof("Cleaning up network for job %s, and network name is: %s", rc.JobName, networkName)
|
// logger.Infof("Cleaning up network for job %s, and network name is: %s", rc.JobName, networkName)
|
||||||
@@ -196,7 +206,7 @@ func setJobResult(ctx context.Context, info jobInfo, rc *RunContext, success boo
|
|||||||
jobResultMessage = "failed"
|
jobResultMessage = "failed"
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.WithField("jobResult", jobResult).Infof("\U0001F3C1 Job %s", jobResultMessage)
|
logger.WithField("jobResult", jobResult).Infof("Job %s", jobResultMessage)
|
||||||
}
|
}
|
||||||
|
|
||||||
func setJobOutputs(ctx context.Context, rc *RunContext) {
|
func setJobOutputs(ctx context.Context, rc *RunContext) {
|
||||||
|
|||||||
@@ -12,9 +12,9 @@ import (
|
|||||||
"slices"
|
"slices"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/container"
|
"gitea.com/gitea/runner/act/container"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/mock"
|
"github.com/stretchr/testify/mock"
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ import (
|
|||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/filecollector"
|
"gitea.com/gitea/runner/act/filecollector"
|
||||||
)
|
)
|
||||||
|
|
||||||
type LocalRepositoryCache struct {
|
type LocalRepositoryCache struct {
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
|
|
||||||
"github.com/sirupsen/logrus"
|
"github.com/sirupsen/logrus"
|
||||||
"golang.org/x/term"
|
"golang.org/x/term"
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ package runner
|
|||||||
import (
|
import (
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"go.yaml.in/yaml/v4"
|
"go.yaml.in/yaml/v4"
|
||||||
|
|||||||
@@ -7,19 +7,15 @@ package runner
|
|||||||
import (
|
import (
|
||||||
"archive/tar"
|
"archive/tar"
|
||||||
"context"
|
"context"
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"io/fs"
|
|
||||||
"net/url"
|
"net/url"
|
||||||
"os"
|
|
||||||
"path"
|
"path"
|
||||||
"regexp"
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/common/git"
|
"gitea.com/gitea/runner/act/common/git"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
)
|
)
|
||||||
|
|
||||||
func newLocalReusableWorkflowExecutor(rc *RunContext) common.Executor {
|
func newLocalReusableWorkflowExecutor(rc *RunContext) common.Executor {
|
||||||
@@ -51,7 +47,7 @@ func newLocalReusableWorkflowExecutor(rc *RunContext) common.Executor {
|
|||||||
token := rc.Config.GetToken()
|
token := rc.Config.GetToken()
|
||||||
|
|
||||||
return common.NewPipelineExecutor(
|
return common.NewPipelineExecutor(
|
||||||
newMutexExecutor(cloneIfRequired(rc, *remoteReusableWorkflow, workflowDir, token)),
|
cloneRemoteReusableWorkflow(rc, remoteReusableWorkflow.CloneURL(), remoteReusableWorkflow.Ref, workflowDir, token),
|
||||||
newReusableWorkflowExecutor(rc, workflowDir, remoteReusableWorkflow.FilePath()),
|
newReusableWorkflowExecutor(rc, workflowDir, remoteReusableWorkflow.FilePath()),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -85,7 +81,7 @@ func newRemoteReusableWorkflowExecutor(rc *RunContext) common.Executor {
|
|||||||
token := getGitCloneToken(rc.Config, remoteReusableWorkflow.CloneURL())
|
token := getGitCloneToken(rc.Config, remoteReusableWorkflow.CloneURL())
|
||||||
|
|
||||||
return common.NewPipelineExecutor(
|
return common.NewPipelineExecutor(
|
||||||
newMutexExecutor(cloneIfRequired(rc, *remoteReusableWorkflow, workflowDir, token)),
|
cloneRemoteReusableWorkflow(rc, remoteReusableWorkflow.CloneURL(), remoteReusableWorkflow.Ref, workflowDir, token),
|
||||||
newReusableWorkflowExecutor(rc, workflowDir, remoteReusableWorkflow.FilePath()),
|
newReusableWorkflowExecutor(rc, workflowDir, remoteReusableWorkflow.FilePath()),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -125,46 +121,37 @@ func newActionCacheReusableWorkflowExecutor(rc *RunContext, filename string, rem
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var executorLock sync.Mutex
|
// cloneRemoteReusableWorkflow always invokes the clone executor — moving refs
|
||||||
|
// (branches, tags) must be re-resolved each run, matching GitHub Actions.
|
||||||
func newMutexExecutor(executor common.Executor) common.Executor {
|
//
|
||||||
|
// Callers must not change remoteReusableWorkflow.URL, because:
|
||||||
|
// 1. Gitea doesn't support specifying GithubContext.ServerURL by the GITHUB_SERVER_URL env
|
||||||
|
// 2. Gitea has already full URL with rc.Config.GitHubInstance when calling newRemoteReusableWorkflowWithPlat
|
||||||
|
//
|
||||||
|
// remoteReusableWorkflow.URL = rc.getGithubContext(ctx).ServerURL
|
||||||
|
func cloneRemoteReusableWorkflow(rc *RunContext, cloneURL, ref, targetDirectory, token string) common.Executor {
|
||||||
return func(ctx context.Context) error {
|
return func(ctx context.Context) error {
|
||||||
executorLock.Lock()
|
cloneURL = rc.NewExpressionEvaluator(ctx).Interpolate(ctx, cloneURL)
|
||||||
defer executorLock.Unlock()
|
return git.NewGitCloneExecutor(git.NewGitCloneExecutorInput{
|
||||||
|
URL: cloneURL,
|
||||||
return executor(ctx)
|
Ref: ref,
|
||||||
|
Dir: targetDirectory,
|
||||||
|
Token: token,
|
||||||
|
OfflineMode: rc.Config.ActionOfflineMode,
|
||||||
|
})(ctx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func cloneIfRequired(rc *RunContext, remoteReusableWorkflow remoteReusableWorkflow, targetDirectory, token string) common.Executor {
|
var modelNewWorkflowPlanner = model.NewWorkflowPlanner
|
||||||
return common.NewConditionalExecutor(
|
|
||||||
func(ctx context.Context) bool {
|
|
||||||
_, err := os.Stat(targetDirectory)
|
|
||||||
notExists := errors.Is(err, fs.ErrNotExist)
|
|
||||||
return notExists
|
|
||||||
},
|
|
||||||
func(ctx context.Context) error {
|
|
||||||
// interpolate the cloneURL
|
|
||||||
cloneURL := rc.NewExpressionEvaluator(ctx).Interpolate(ctx, remoteReusableWorkflow.CloneURL())
|
|
||||||
// Do not change the remoteReusableWorkflow.URL, because:
|
|
||||||
// 1. Gitea doesn't support specifying GithubContext.ServerURL by the GITHUB_SERVER_URL env
|
|
||||||
// 2. Gitea has already full URL with rc.Config.GitHubInstance when calling newRemoteReusableWorkflowWithPlat
|
|
||||||
// remoteReusableWorkflow.URL = rc.getGithubContext(ctx).ServerURL
|
|
||||||
return git.NewGitCloneExecutor(git.NewGitCloneExecutorInput{
|
|
||||||
URL: cloneURL,
|
|
||||||
Ref: remoteReusableWorkflow.Ref,
|
|
||||||
Dir: targetDirectory,
|
|
||||||
Token: token,
|
|
||||||
OfflineMode: rc.Config.ActionOfflineMode,
|
|
||||||
})(ctx)
|
|
||||||
},
|
|
||||||
nil,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func newReusableWorkflowExecutor(rc *RunContext, directory, workflow string) common.Executor {
|
func newReusableWorkflowExecutor(rc *RunContext, directory, workflow string) common.Executor {
|
||||||
return func(ctx context.Context) error {
|
return func(ctx context.Context) error {
|
||||||
planner, err := model.NewWorkflowPlanner(path.Join(directory, workflow), true)
|
// Scoped to the yaml read so concurrent invocations don't serialize
|
||||||
|
// on the whole job run.
|
||||||
|
planner, err := func() (model.WorkflowPlanner, error) {
|
||||||
|
defer git.AcquireCloneLock(directory)()
|
||||||
|
return modelNewWorkflowPlanner(path.Join(directory, workflow), true)
|
||||||
|
}()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -298,7 +285,7 @@ func setReusedWorkflowCallerResult(rc *RunContext, runner Runner) common.Executo
|
|||||||
rc.caller.setReusedWorkflowJobResult(rc.JobName, reusedWorkflowJobResult)
|
rc.caller.setReusedWorkflowJobResult(rc.JobName, reusedWorkflowJobResult)
|
||||||
} else {
|
} else {
|
||||||
rc.result(reusedWorkflowJobResult)
|
rc.result(reusedWorkflowJobResult)
|
||||||
logger.WithField("jobResult", reusedWorkflowJobResult).Infof("\U0001F3C1 Job %s", reusedWorkflowJobResultMessage)
|
logger.WithField("jobResult", reusedWorkflowJobResult).Infof("Job %s", reusedWorkflowJobResultMessage)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
134
act/runner/reusable_workflow_test.go
Normal file
134
act/runner/reusable_workflow_test.go
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
// Copyright 2026 The Gitea Authors. All rights reserved.
|
||||||
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
package runner
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path/filepath"
|
||||||
|
"sync"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"gitea.com/gitea/runner/act/common/git"
|
||||||
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Regression test for go-gitea/gitea#37483: a remote reusable workflow at a moving
|
||||||
|
// ref (branch/tag) must reflect the new tip on every invocation, not stay pinned
|
||||||
|
// to the cache populated on the first run.
|
||||||
|
func TestReusableWorkflowCachedBranchRefRefreshes(t *testing.T) {
|
||||||
|
if _, err := exec.LookPath("git"); err != nil {
|
||||||
|
t.Skip("git not available in PATH")
|
||||||
|
}
|
||||||
|
|
||||||
|
remoteDir := t.TempDir()
|
||||||
|
gitMust(t, "", "init", "--bare", "--initial-branch=master", remoteDir)
|
||||||
|
|
||||||
|
workDir := t.TempDir()
|
||||||
|
gitMust(t, "", "clone", remoteDir, workDir)
|
||||||
|
gitMust(t, workDir, "config", "user.email", "test@test")
|
||||||
|
gitMust(t, workDir, "config", "user.name", "test")
|
||||||
|
gitMust(t, workDir, "checkout", "-b", "master")
|
||||||
|
|
||||||
|
const workflowPath = ".gitea/workflows/reusable.yml"
|
||||||
|
tmpl := func(tag string) string {
|
||||||
|
return "name: reusable\non:\n workflow_call:\njobs:\n build:\n runs-on: ubuntu-latest\n steps:\n - run: echo " + tag + "\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NoError(t, os.MkdirAll(filepath.Join(workDir, ".gitea/workflows"), 0o755))
|
||||||
|
require.NoError(t, os.WriteFile(filepath.Join(workDir, workflowPath), []byte(tmpl("v1")), 0o644))
|
||||||
|
gitMust(t, workDir, "add", workflowPath)
|
||||||
|
gitMust(t, workDir, "commit", "-m", "v1")
|
||||||
|
gitMust(t, workDir, "push", "-u", "origin", "master")
|
||||||
|
|
||||||
|
rc := &RunContext{
|
||||||
|
Config: &Config{},
|
||||||
|
Run: &model.Run{
|
||||||
|
JobID: "j1",
|
||||||
|
Workflow: &model.Workflow{
|
||||||
|
Name: "wf",
|
||||||
|
Jobs: map[string]*model.Job{"j1": {}},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
cacheDir := t.TempDir()
|
||||||
|
|
||||||
|
require.NoError(t, cloneRemoteReusableWorkflow(rc, remoteDir, "master", cacheDir, "")(context.Background()))
|
||||||
|
got, err := os.ReadFile(filepath.Join(cacheDir, workflowPath))
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, tmpl("v1"), string(got))
|
||||||
|
|
||||||
|
// Branch tip moves; cache key (cacheDir) does not.
|
||||||
|
require.NoError(t, os.WriteFile(filepath.Join(workDir, workflowPath), []byte(tmpl("v2")), 0o644))
|
||||||
|
gitMust(t, workDir, "commit", "-am", "v2")
|
||||||
|
gitMust(t, workDir, "push", "origin", "master")
|
||||||
|
|
||||||
|
require.NoError(t, cloneRemoteReusableWorkflow(rc, remoteDir, "master", cacheDir, "")(context.Background()))
|
||||||
|
got, err = os.ReadFile(filepath.Join(cacheDir, workflowPath))
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, tmpl("v2"), string(got), "cached workflow file must reflect the updated branch tip")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewReusableWorkflowExecutorHoldsCloneLock(t *testing.T) {
|
||||||
|
workflowDir := t.TempDir()
|
||||||
|
|
||||||
|
unlockOnce := sync.OnceFunc(git.AcquireCloneLock(workflowDir))
|
||||||
|
defer unlockOnce()
|
||||||
|
|
||||||
|
plannerCalled := make(chan struct{})
|
||||||
|
|
||||||
|
origPlanner := modelNewWorkflowPlanner
|
||||||
|
modelNewWorkflowPlanner = func(string, bool) (model.WorkflowPlanner, error) {
|
||||||
|
close(plannerCalled)
|
||||||
|
return nil, errors.New("stop")
|
||||||
|
}
|
||||||
|
defer func() { modelNewWorkflowPlanner = origPlanner }()
|
||||||
|
|
||||||
|
rc := &RunContext{
|
||||||
|
Config: &Config{},
|
||||||
|
Run: &model.Run{Workflow: &model.Workflow{Jobs: map[string]*model.Job{}}},
|
||||||
|
}
|
||||||
|
exec := newReusableWorkflowExecutor(rc, workflowDir, "reusable.yml")
|
||||||
|
|
||||||
|
done := make(chan error, 1)
|
||||||
|
go func() { done <- exec(context.Background()) }()
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-plannerCalled:
|
||||||
|
t.Fatal("planner ran while clone lock was held")
|
||||||
|
case err := <-done:
|
||||||
|
t.Fatalf("executor returned before planner was reached: %v", err)
|
||||||
|
case <-time.After(50 * time.Millisecond):
|
||||||
|
}
|
||||||
|
|
||||||
|
unlockOnce()
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-plannerCalled:
|
||||||
|
case <-time.After(time.Second):
|
||||||
|
t.Fatal("planner not called after lock was released")
|
||||||
|
}
|
||||||
|
|
||||||
|
select {
|
||||||
|
case err := <-done:
|
||||||
|
require.Error(t, err)
|
||||||
|
case <-time.After(time.Second):
|
||||||
|
t.Fatal("executor did not return after planner ran")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func gitMust(t *testing.T, dir string, args ...string) {
|
||||||
|
t.Helper()
|
||||||
|
cmd := exec.Command("git", args...)
|
||||||
|
if dir != "" {
|
||||||
|
cmd.Dir = dir
|
||||||
|
}
|
||||||
|
out, err := cmd.CombinedOutput()
|
||||||
|
require.NoError(t, err, "git %v: %s", args, string(out))
|
||||||
|
}
|
||||||
@@ -23,10 +23,10 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/container"
|
"gitea.com/gitea/runner/act/container"
|
||||||
"gitea.com/gitea/act_runner/act/exprparser"
|
"gitea.com/gitea/runner/act/exprparser"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/docker/go-connections/nat"
|
"github.com/docker/go-connections/nat"
|
||||||
"github.com/opencontainers/selinux/go-selinux"
|
"github.com/opencontainers/selinux/go-selinux"
|
||||||
@@ -193,7 +193,7 @@ func (rc *RunContext) GetBindsAndMounts() ([]string, map[string]string) {
|
|||||||
func (rc *RunContext) startHostEnvironment() common.Executor {
|
func (rc *RunContext) startHostEnvironment() common.Executor {
|
||||||
return func(ctx context.Context) error {
|
return func(ctx context.Context) error {
|
||||||
logger := common.Logger(ctx)
|
logger := common.Logger(ctx)
|
||||||
rawLogger := logger.WithField("raw_output", true)
|
rawLogger := logger.WithField(rawOutputField, true)
|
||||||
logWriter := common.NewLineWriter(rc.commandHandler(ctx), func(s string) bool {
|
logWriter := common.NewLineWriter(rc.commandHandler(ctx), func(s string) bool {
|
||||||
if rc.Config.LogOutput {
|
if rc.Config.LogOutput {
|
||||||
rawLogger.Infof("%s", s)
|
rawLogger.Infof("%s", s)
|
||||||
@@ -220,11 +220,12 @@ func (rc *RunContext) startHostEnvironment() common.Executor {
|
|||||||
}
|
}
|
||||||
toolCache := filepath.Join(cacheDir, "tool_cache")
|
toolCache := filepath.Join(cacheDir, "tool_cache")
|
||||||
rc.JobContainer = &container.HostEnvironment{
|
rc.JobContainer = &container.HostEnvironment{
|
||||||
Path: path,
|
Path: path,
|
||||||
TmpDir: runnerTmp,
|
TmpDir: runnerTmp,
|
||||||
ToolCache: toolCache,
|
ToolCache: toolCache,
|
||||||
Workdir: rc.Config.Workdir,
|
Workdir: rc.Config.Workdir,
|
||||||
ActPath: actPath,
|
BindWorkdir: rc.Config.BindWorkdir,
|
||||||
|
ActPath: actPath,
|
||||||
CleanUp: func() {
|
CleanUp: func() {
|
||||||
os.RemoveAll(miscpath)
|
os.RemoveAll(miscpath)
|
||||||
},
|
},
|
||||||
@@ -259,11 +260,24 @@ func (rc *RunContext) startHostEnvironment() common.Executor {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// printStartJobContainerGroup mirrors actions/runner's "Starting job container"
|
||||||
|
// section: emit the group header and summary, return a closer for ::endgroup::.
|
||||||
|
func printStartJobContainerGroup(ctx context.Context, image, name, network string) func() {
|
||||||
|
rawLogger := common.Logger(ctx).WithField(rawOutputField, true)
|
||||||
|
rawLogger.Infof("::group::Starting job container")
|
||||||
|
rawLogger.Infof("image: %s", image)
|
||||||
|
rawLogger.Infof("name: %s", name)
|
||||||
|
rawLogger.Infof("network: %s", network)
|
||||||
|
return func() {
|
||||||
|
rawLogger.Infof("::endgroup::")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func (rc *RunContext) startJobContainer() common.Executor {
|
func (rc *RunContext) startJobContainer() common.Executor {
|
||||||
return func(ctx context.Context) error {
|
return func(ctx context.Context) error {
|
||||||
logger := common.Logger(ctx)
|
logger := common.Logger(ctx)
|
||||||
image := rc.platformImage(ctx)
|
image := rc.platformImage(ctx)
|
||||||
rawLogger := logger.WithField("raw_output", true)
|
rawLogger := logger.WithField(rawOutputField, true)
|
||||||
logWriter := common.NewLineWriter(rc.commandHandler(ctx), func(s string) bool {
|
logWriter := common.NewLineWriter(rc.commandHandler(ctx), func(s string) bool {
|
||||||
if rc.Config.LogOutput {
|
if rc.Config.LogOutput {
|
||||||
rawLogger.Infof("%s", s)
|
rawLogger.Infof("%s", s)
|
||||||
@@ -278,7 +292,6 @@ func (rc *RunContext) startJobContainer() common.Executor {
|
|||||||
return fmt.Errorf("failed to handle credentials: %s", err)
|
return fmt.Errorf("failed to handle credentials: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.Infof("\U0001f680 Start image=%s", image)
|
|
||||||
name := rc.jobContainerName()
|
name := rc.jobContainerName()
|
||||||
// For gitea, to support --volumes-from <container_name_or_id> in options.
|
// For gitea, to support --volumes-from <container_name_or_id> in options.
|
||||||
// We need to set the container name to the environment variable.
|
// We need to set the container name to the environment variable.
|
||||||
@@ -381,7 +394,7 @@ func (rc *RunContext) startJobContainer() common.Executor {
|
|||||||
if createAndDeleteNetwork {
|
if createAndDeleteNetwork {
|
||||||
// clean network if it has been created by act
|
// clean network if it has been created by act
|
||||||
// if using service containers
|
// if using service containers
|
||||||
// it means that the network to which containers are connecting is created by `act_runner`,
|
// it means that the network to which containers are connecting is created by `runner`,
|
||||||
// so, we should remove the network at last.
|
// so, we should remove the network at last.
|
||||||
logger.Infof("Cleaning up network for job %s, and network name is: %s", rc.JobName, networkName)
|
logger.Infof("Cleaning up network for job %s, and network name is: %s", rc.JobName, networkName)
|
||||||
if err := container.NewDockerNetworkRemoveExecutor(networkName)(ctx); err != nil {
|
if err := container.NewDockerNetworkRemoveExecutor(networkName)(ctx); err != nil {
|
||||||
@@ -423,6 +436,7 @@ func (rc *RunContext) startJobContainer() common.Executor {
|
|||||||
return errors.New("Failed to create job container")
|
return errors.New("Failed to create job container")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
defer printStartJobContainerGroup(ctx, image, name, networkName)()
|
||||||
return common.NewPipelineExecutor(
|
return common.NewPipelineExecutor(
|
||||||
rc.pullServicesImages(rc.Config.ForcePull),
|
rc.pullServicesImages(rc.Config.ForcePull),
|
||||||
rc.JobContainer.Pull(rc.Config.ForcePull),
|
rc.JobContainer.Pull(rc.Config.ForcePull),
|
||||||
@@ -729,7 +743,7 @@ func (rc *RunContext) isEnabled(ctx context.Context) (bool, error) {
|
|||||||
jobType, jobTypeErr := job.Type()
|
jobType, jobTypeErr := job.Type()
|
||||||
|
|
||||||
if runJobErr != nil {
|
if runJobErr != nil {
|
||||||
return false, fmt.Errorf(" \u274C Error in if-expression: \"if: %s\" (%s)", job.If.Value, runJobErr)
|
return false, fmt.Errorf("if-expression %q evaluation failed: %s", job.If.Value, runJobErr)
|
||||||
}
|
}
|
||||||
|
|
||||||
if jobType == model.JobTypeInvalid {
|
if jobType == model.JobTypeInvalid {
|
||||||
@@ -752,7 +766,7 @@ func (rc *RunContext) isEnabled(ctx context.Context) (bool, error) {
|
|||||||
img := rc.platformImage(ctx)
|
img := rc.platformImage(ctx)
|
||||||
if img == "" {
|
if img == "" {
|
||||||
for _, platformName := range rc.runsOnPlatformNames(ctx) {
|
for _, platformName := range rc.runsOnPlatformNames(ctx) {
|
||||||
l.Infof("\U0001F6A7 Skipping unsupported platform -- Try running with `-P %+v=...`", platformName)
|
l.Infof("Skipping unsupported platform -- Try running with `-P %+v=...`", platformName)
|
||||||
}
|
}
|
||||||
return false, nil
|
return false, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,6 +5,7 @@
|
|||||||
package runner
|
package runner
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
@@ -12,8 +13,9 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/exprparser"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/exprparser"
|
||||||
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
assert "github.com/stretchr/testify/assert"
|
assert "github.com/stretchr/testify/assert"
|
||||||
@@ -635,3 +637,25 @@ func TestCreateContainerNameBoundedForLongMatrixInput(t *testing.T) {
|
|||||||
assert.LessOrEqual(t, len(name+"-network"), 255)
|
assert.LessOrEqual(t, len(name+"-network"), 255)
|
||||||
assert.LessOrEqual(t, len(name+"-job1234567890"), 255)
|
assert.LessOrEqual(t, len(name+"-job1234567890"), 255)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestPrintStartJobContainerGroupGolden(t *testing.T) {
|
||||||
|
buf := &bytes.Buffer{}
|
||||||
|
logger := log.New()
|
||||||
|
logger.SetOutput(buf)
|
||||||
|
logger.SetLevel(log.InfoLevel)
|
||||||
|
logger.SetFormatter(&jobLogFormatter{color: cyan})
|
||||||
|
entry := logger.WithFields(log.Fields{"job": "j1"})
|
||||||
|
ctx := common.WithLogger(context.Background(), entry)
|
||||||
|
|
||||||
|
printStartJobContainerGroup(ctx, "node:20", "GITEA-WORKFLOW-build-JOB-test", "gitea-runner-network")()
|
||||||
|
|
||||||
|
want := strings.Join([]string{
|
||||||
|
"[j1] | ::group::Starting job container",
|
||||||
|
"[j1] | image: node:20",
|
||||||
|
"[j1] | name: GITEA-WORKFLOW-build-JOB-test",
|
||||||
|
"[j1] | network: gitea-runner-network",
|
||||||
|
"[j1] | ::endgroup::",
|
||||||
|
"",
|
||||||
|
}, "\n")
|
||||||
|
assert.Equal(t, want, buf.String())
|
||||||
|
}
|
||||||
|
|||||||
@@ -13,8 +13,8 @@ import (
|
|||||||
"sync"
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
docker_container "github.com/docker/docker/api/types/container"
|
docker_container "github.com/docker/docker/api/types/container"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
|
|||||||
@@ -16,8 +16,8 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/joho/godotenv"
|
"github.com/joho/godotenv"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
@@ -26,7 +26,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
baseImage = "node:16-buster-slim"
|
baseImage = "node:24-bookworm-slim"
|
||||||
platforms map[string]string
|
platforms map[string]string
|
||||||
logLevel = log.DebugLevel
|
logLevel = log.DebugLevel
|
||||||
workdir = "testdata"
|
workdir = "testdata"
|
||||||
@@ -230,11 +230,9 @@ func TestRunEvent(t *testing.T) {
|
|||||||
tables := []TestJobFileInfo{
|
tables := []TestJobFileInfo{
|
||||||
// Shells
|
// Shells
|
||||||
{workdir, "shells/defaults", "push", "", platforms, secrets},
|
{workdir, "shells/defaults", "push", "", platforms, secrets},
|
||||||
// TODO: figure out why it fails
|
|
||||||
// {workdir, "shells/custom", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}, }, // custom image with pwsh
|
|
||||||
{workdir, "shells/pwsh", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}, secrets}, // custom image with pwsh
|
{workdir, "shells/pwsh", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}, secrets}, // custom image with pwsh
|
||||||
{workdir, "shells/bash", "push", "", platforms, secrets},
|
{workdir, "shells/bash", "push", "", platforms, secrets},
|
||||||
{workdir, "shells/python", "push", "", map[string]string{"ubuntu-latest": "node:16-buster"}, secrets}, // slim doesn't have python
|
{workdir, "shells/python", "push", "", map[string]string{"ubuntu-latest": "node:24-bookworm"}, secrets}, // slim doesn't have python
|
||||||
{workdir, "shells/sh", "push", "", platforms, secrets},
|
{workdir, "shells/sh", "push", "", platforms, secrets},
|
||||||
|
|
||||||
// Local action
|
// Local action
|
||||||
@@ -463,7 +461,7 @@ func TestDryrunEvent(t *testing.T) {
|
|||||||
{workdir, "shells/defaults", "push", "", platforms, secrets},
|
{workdir, "shells/defaults", "push", "", platforms, secrets},
|
||||||
{workdir, "shells/pwsh", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}, secrets}, // custom image with pwsh
|
{workdir, "shells/pwsh", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}, secrets}, // custom image with pwsh
|
||||||
{workdir, "shells/bash", "push", "", platforms, secrets},
|
{workdir, "shells/bash", "push", "", platforms, secrets},
|
||||||
{workdir, "shells/python", "push", "", map[string]string{"ubuntu-latest": "node:16-buster"}, secrets}, // slim doesn't have python
|
{workdir, "shells/python", "push", "", map[string]string{"ubuntu-latest": "node:24-bookworm"}, secrets}, // slim doesn't have python
|
||||||
{workdir, "shells/sh", "push", "", platforms, secrets},
|
{workdir, "shells/sh", "push", "", platforms, secrets},
|
||||||
|
|
||||||
// Local action
|
// Local action
|
||||||
@@ -593,7 +591,7 @@ func TestRunWithService(t *testing.T) {
|
|||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
|
|
||||||
platforms := map[string]string{
|
platforms := map[string]string{
|
||||||
"ubuntu-latest": "node:12.20.1-buster-slim",
|
"ubuntu-latest": "node:24-bookworm-slim",
|
||||||
}
|
}
|
||||||
|
|
||||||
workflowPath := "services"
|
workflowPath := "services"
|
||||||
|
|||||||
@@ -13,10 +13,10 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/container"
|
"gitea.com/gitea/runner/act/container"
|
||||||
"gitea.com/gitea/act_runner/act/exprparser"
|
"gitea.com/gitea/runner/act/exprparser"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
)
|
)
|
||||||
|
|
||||||
type step interface {
|
type step interface {
|
||||||
@@ -107,7 +107,7 @@ func runStepExecutor(step step, stage stepStage, executor common.Executor) commo
|
|||||||
if strings.Contains(stepString, "::add-mask::") {
|
if strings.Contains(stepString, "::add-mask::") {
|
||||||
stepString = "add-mask command"
|
stepString = "add-mask command"
|
||||||
}
|
}
|
||||||
logger.Infof("\u2B50 Run %s %s", stage, stepString)
|
logger.Infof("Run %s %s", stage, stepString)
|
||||||
|
|
||||||
// Prepare and clean Runner File Commands
|
// Prepare and clean Runner File Commands
|
||||||
actPath := rc.JobContainer.GetActPath()
|
actPath := rc.JobContainer.GetActPath()
|
||||||
@@ -158,7 +158,7 @@ func runStepExecutor(step step, stage stepStage, executor common.Executor) commo
|
|||||||
err = executor(timeoutctx)
|
err = executor(timeoutctx)
|
||||||
|
|
||||||
if err == nil {
|
if err == nil {
|
||||||
logger.WithField("stepResult", stepResult.Outcome).Infof(" \u2705 Success - %s %s", stage, stepString)
|
logger.WithField("stepResult", stepResult.Outcome).Infof("Success - %s %s", stage, stepString)
|
||||||
} else {
|
} else {
|
||||||
stepResult.Outcome = model.StepStatusFailure
|
stepResult.Outcome = model.StepStatusFailure
|
||||||
|
|
||||||
@@ -169,6 +169,7 @@ func runStepExecutor(step step, stage stepStage, executor common.Executor) commo
|
|||||||
}
|
}
|
||||||
|
|
||||||
if continueOnError {
|
if continueOnError {
|
||||||
|
logger.Errorf("##[error]%v", err)
|
||||||
logger.Infof("Failed but continue next step")
|
logger.Infof("Failed but continue next step")
|
||||||
err = nil
|
err = nil
|
||||||
stepResult.Conclusion = model.StepStatusSuccess
|
stepResult.Conclusion = model.StepStatusSuccess
|
||||||
@@ -176,7 +177,9 @@ func runStepExecutor(step step, stage stepStage, executor common.Executor) commo
|
|||||||
stepResult.Conclusion = model.StepStatusFailure
|
stepResult.Conclusion = model.StepStatusFailure
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.WithField("stepResult", stepResult.Outcome).Errorf(" \u274C Failure - %s %s", stage, stepString)
|
// Infof: Errorf entries are promoted to the user log by the reporter,
|
||||||
|
// which would duplicate the ##[error] annotation emitted elsewhere.
|
||||||
|
logger.WithField("stepResult", stepResult.Outcome).Infof("Failure - %s %s", stage, stepString)
|
||||||
}
|
}
|
||||||
// Process Runner File Commands
|
// Process Runner File Commands
|
||||||
orgerr := err
|
orgerr := err
|
||||||
@@ -268,7 +271,7 @@ func isStepEnabled(ctx context.Context, expr string, step step, stage stepStage)
|
|||||||
|
|
||||||
runStep, err := EvalBool(ctx, rc.NewStepExpressionEvaluator(ctx, step), expr, defaultStatusCheck)
|
runStep, err := EvalBool(ctx, rc.NewStepExpressionEvaluator(ctx, step), expr, defaultStatusCheck)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false, fmt.Errorf(" \u274C Error in if-expression: \"if: %s\" (%s)", expr, err)
|
return false, fmt.Errorf("if-expression %q evaluation failed: %s", expr, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return runStep, nil
|
return runStep, nil
|
||||||
@@ -284,7 +287,7 @@ func isContinueOnError(ctx context.Context, expr string, step step, _ stepStage)
|
|||||||
|
|
||||||
continueOnError, err := EvalBool(ctx, rc.NewStepExpressionEvaluator(ctx, step), expr, exprparser.DefaultStatusCheckNone)
|
continueOnError, err := EvalBool(ctx, rc.NewStepExpressionEvaluator(ctx, step), expr, exprparser.DefaultStatusCheckNone)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false, fmt.Errorf(" \u274C Error in continue-on-error-expression: \"continue-on-error: %s\" (%s)", expr, err)
|
return false, fmt.Errorf("continue-on-error expression %q evaluation failed: %s", expr, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return continueOnError, nil
|
return continueOnError, nil
|
||||||
|
|||||||
@@ -15,8 +15,8 @@ import (
|
|||||||
"path"
|
"path"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
)
|
)
|
||||||
|
|
||||||
type stepActionLocal struct {
|
type stepActionLocal struct {
|
||||||
|
|||||||
@@ -12,8 +12,8 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/mock"
|
"github.com/stretchr/testify/mock"
|
||||||
|
|||||||
@@ -16,9 +16,9 @@ import (
|
|||||||
"regexp"
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/common/git"
|
"gitea.com/gitea/runner/act/common/git"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
gogit "github.com/go-git/go-git/v5"
|
gogit "github.com/go-git/go-git/v5"
|
||||||
)
|
)
|
||||||
@@ -145,6 +145,7 @@ func (sar *stepActionRemote) prepareActionExecutor() common.Executor {
|
|||||||
return common.NewPipelineExecutor(
|
return common.NewPipelineExecutor(
|
||||||
ntErr,
|
ntErr,
|
||||||
func(ctx context.Context) error {
|
func(ctx context.Context) error {
|
||||||
|
defer git.AcquireCloneLock(actionDir)()
|
||||||
actionModel, err := sar.readAction(ctx, sar.Step, actionDir, sar.remoteAction.Path, remoteReader(ctx), os.WriteFile)
|
actionModel, err := sar.readAction(ctx, sar.Step, actionDir, sar.remoteAction.Path, remoteReader(ctx), os.WriteFile)
|
||||||
sar.action = actionModel
|
sar.action = actionModel
|
||||||
return err
|
return err
|
||||||
|
|||||||
@@ -14,9 +14,9 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/common/git"
|
"gitea.com/gitea/runner/act/common/git"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/mock"
|
"github.com/stretchr/testify/mock"
|
||||||
|
|||||||
@@ -9,9 +9,9 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/container"
|
"gitea.com/gitea/runner/act/container"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/kballard/go-shellquote"
|
"github.com/kballard/go-shellquote"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -11,8 +11,8 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/container"
|
"gitea.com/gitea/runner/act/container"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/mock"
|
"github.com/stretchr/testify/mock"
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ package runner
|
|||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
)
|
)
|
||||||
|
|
||||||
type stepFactory interface {
|
type stepFactory interface {
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ package runner
|
|||||||
import (
|
import (
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -12,10 +12,10 @@ import (
|
|||||||
"slices"
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/container"
|
"gitea.com/gitea/runner/act/container"
|
||||||
"gitea.com/gitea/act_runner/act/lookpath"
|
"gitea.com/gitea/runner/act/lookpath"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/kballard/go-shellquote"
|
"github.com/kballard/go-shellquote"
|
||||||
yaml "go.yaml.in/yaml/v4"
|
yaml "go.yaml.in/yaml/v4"
|
||||||
|
|||||||
@@ -10,8 +10,8 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/sirupsen/logrus"
|
"github.com/sirupsen/logrus"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
|||||||
@@ -10,8 +10,8 @@ import (
|
|||||||
"io"
|
"io"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/container"
|
"gitea.com/gitea/runner/act/container"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/mock"
|
"github.com/stretchr/testify/mock"
|
||||||
|
|||||||
@@ -8,8 +8,8 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gitea.com/gitea/act_runner/act/common"
|
"gitea.com/gitea/runner/act/common"
|
||||||
"gitea.com/gitea/act_runner/act/model"
|
"gitea.com/gitea/runner/act/model"
|
||||||
|
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
name: 'Test'
|
name: 'Test'
|
||||||
description: 'Test'
|
description: 'Test'
|
||||||
runs:
|
runs:
|
||||||
using: 'node12'
|
using: 'node24'
|
||||||
main: 'index.js'
|
main: 'index.js'
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
FROM ubuntu:18.04
|
FROM ubuntu:24.04
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
# Container image that runs your code
|
# Container image that runs your code
|
||||||
FROM node:12-buster-slim
|
FROM node:24-bookworm-slim
|
||||||
|
|
||||||
# Copies your code file from your action repository to the filesystem path `/` of the container
|
# Copies your code file from your action repository to the filesystem path `/` of the container
|
||||||
COPY entrypoint.sh /entrypoint.sh
|
COPY entrypoint.sh /entrypoint.sh
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# Container image that runs your code
|
# Container image that runs your code
|
||||||
FROM node:16-buster-slim
|
FROM node:24-bookworm-slim
|
||||||
|
|
||||||
# Copies your code file from your action repository to the filesystem path `/` of the container
|
# Copies your code file from your action repository to the filesystem path `/` of the container
|
||||||
COPY entrypoint.sh /entrypoint.sh
|
COPY entrypoint.sh /entrypoint.sh
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ inputs:
|
|||||||
default: World
|
default: World
|
||||||
runs:
|
runs:
|
||||||
using: docker
|
using: docker
|
||||||
image: docker://node:16-buster-slim
|
image: docker://node:24-bookworm-slim
|
||||||
entrypoint: /bin/sh -c
|
entrypoint: /bin/sh -c
|
||||||
env:
|
env:
|
||||||
TEST: enabled
|
TEST: enabled
|
||||||
|
|||||||
13
act/runner/testdata/actions/node12/action.yml
vendored
13
act/runner/testdata/actions/node12/action.yml
vendored
@@ -1,13 +0,0 @@
|
|||||||
name: 'Hello World'
|
|
||||||
description: 'Greet someone and record the time'
|
|
||||||
inputs:
|
|
||||||
who-to-greet: # id of input
|
|
||||||
description: 'Who to greet'
|
|
||||||
required: true
|
|
||||||
default: 'World'
|
|
||||||
outputs:
|
|
||||||
time: # id of output
|
|
||||||
description: 'The time we greeted you'
|
|
||||||
runs:
|
|
||||||
using: 'node12'
|
|
||||||
main: 'dist/index.js'
|
|
||||||
15
act/runner/testdata/actions/node12/index.js
vendored
15
act/runner/testdata/actions/node12/index.js
vendored
@@ -1,15 +0,0 @@
|
|||||||
const core = require('@actions/core');
|
|
||||||
const github = require('@actions/github');
|
|
||||||
|
|
||||||
try {
|
|
||||||
// `who-to-greet` input defined in action metadata file
|
|
||||||
const nameToGreet = core.getInput('who-to-greet');
|
|
||||||
console.log(`Hello ${nameToGreet}!`);
|
|
||||||
const time = (new Date()).toTimeString();
|
|
||||||
core.setOutput("time", time);
|
|
||||||
// Get the JSON webhook payload for the event that triggered the workflow
|
|
||||||
const payload = JSON.stringify(github.context.payload, undefined, 2)
|
|
||||||
console.log(`The event payload: ${payload}`);
|
|
||||||
} catch (error) {
|
|
||||||
core.setFailed(error.message);
|
|
||||||
}
|
|
||||||
1
act/runner/testdata/actions/node12/node_modules/.bin/ncc
generated
vendored
1
act/runner/testdata/actions/node12/node_modules/.bin/ncc
generated
vendored
@@ -1 +0,0 @@
|
|||||||
../@vercel/ncc/dist/ncc/cli.js
|
|
||||||
1
act/runner/testdata/actions/node12/node_modules/.bin/uuid
generated
vendored
1
act/runner/testdata/actions/node12/node_modules/.bin/uuid
generated
vendored
@@ -1 +0,0 @@
|
|||||||
../uuid/dist/bin/uuid
|
|
||||||
244
act/runner/testdata/actions/node12/node_modules/.package-lock.json
generated
vendored
244
act/runner/testdata/actions/node12/node_modules/.package-lock.json
generated
vendored
@@ -1,244 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "node12",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"lockfileVersion": 3,
|
|
||||||
"requires": true,
|
|
||||||
"packages": {
|
|
||||||
"node_modules/@actions/core": {
|
|
||||||
"version": "1.10.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
|
|
||||||
"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
|
|
||||||
"dependencies": {
|
|
||||||
"@actions/http-client": "^2.0.1",
|
|
||||||
"uuid": "^8.3.2"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@actions/github": {
|
|
||||||
"version": "4.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/@actions/github/-/github-4.0.0.tgz",
|
|
||||||
"integrity": "sha512-Ej/Y2E+VV6sR9X7pWL5F3VgEWrABaT292DRqRU6R4hnQjPtC/zD3nagxVdXWiRQvYDh8kHXo7IDmG42eJ/dOMA==",
|
|
||||||
"dependencies": {
|
|
||||||
"@actions/http-client": "^1.0.8",
|
|
||||||
"@octokit/core": "^3.0.0",
|
|
||||||
"@octokit/plugin-paginate-rest": "^2.2.3",
|
|
||||||
"@octokit/plugin-rest-endpoint-methods": "^4.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@actions/github/node_modules/@actions/http-client": {
|
|
||||||
"version": "1.0.11",
|
|
||||||
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz",
|
|
||||||
"integrity": "sha512-VRYHGQV1rqnROJqdMvGUbY/Kn8vriQe/F9HR2AlYHzmKuM/p3kjNuXhmdBfcVgsvRWTz5C5XW5xvndZrVBuAYg==",
|
|
||||||
"dependencies": {
|
|
||||||
"tunnel": "0.0.6"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@actions/http-client": {
|
|
||||||
"version": "2.1.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.1.tgz",
|
|
||||||
"integrity": "sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw==",
|
|
||||||
"dependencies": {
|
|
||||||
"tunnel": "^0.0.6"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@octokit/auth-token": {
|
|
||||||
"version": "2.5.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz",
|
|
||||||
"integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==",
|
|
||||||
"dependencies": {
|
|
||||||
"@octokit/types": "^6.0.3"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@octokit/core": {
|
|
||||||
"version": "3.6.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz",
|
|
||||||
"integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==",
|
|
||||||
"dependencies": {
|
|
||||||
"@octokit/auth-token": "^2.4.4",
|
|
||||||
"@octokit/graphql": "^4.5.8",
|
|
||||||
"@octokit/request": "^5.6.3",
|
|
||||||
"@octokit/request-error": "^2.0.5",
|
|
||||||
"@octokit/types": "^6.0.3",
|
|
||||||
"before-after-hook": "^2.2.0",
|
|
||||||
"universal-user-agent": "^6.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@octokit/endpoint": {
|
|
||||||
"version": "6.0.12",
|
|
||||||
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz",
|
|
||||||
"integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==",
|
|
||||||
"dependencies": {
|
|
||||||
"@octokit/types": "^6.0.3",
|
|
||||||
"is-plain-object": "^5.0.0",
|
|
||||||
"universal-user-agent": "^6.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@octokit/graphql": {
|
|
||||||
"version": "4.8.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz",
|
|
||||||
"integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==",
|
|
||||||
"dependencies": {
|
|
||||||
"@octokit/request": "^5.6.0",
|
|
||||||
"@octokit/types": "^6.0.3",
|
|
||||||
"universal-user-agent": "^6.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@octokit/openapi-types": {
|
|
||||||
"version": "12.11.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz",
|
|
||||||
"integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ=="
|
|
||||||
},
|
|
||||||
"node_modules/@octokit/plugin-paginate-rest": {
|
|
||||||
"version": "2.21.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz",
|
|
||||||
"integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==",
|
|
||||||
"dependencies": {
|
|
||||||
"@octokit/types": "^6.40.0"
|
|
||||||
},
|
|
||||||
"peerDependencies": {
|
|
||||||
"@octokit/core": ">=2"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@octokit/plugin-rest-endpoint-methods": {
|
|
||||||
"version": "4.15.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-4.15.1.tgz",
|
|
||||||
"integrity": "sha512-4gQg4ySoW7ktKB0Mf38fHzcSffVZd6mT5deJQtpqkuPuAqzlED5AJTeW8Uk7dPRn7KaOlWcXB0MedTFJU1j4qA==",
|
|
||||||
"dependencies": {
|
|
||||||
"@octokit/types": "^6.13.0",
|
|
||||||
"deprecation": "^2.3.1"
|
|
||||||
},
|
|
||||||
"peerDependencies": {
|
|
||||||
"@octokit/core": ">=3"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@octokit/request": {
|
|
||||||
"version": "5.6.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz",
|
|
||||||
"integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==",
|
|
||||||
"dependencies": {
|
|
||||||
"@octokit/endpoint": "^6.0.1",
|
|
||||||
"@octokit/request-error": "^2.1.0",
|
|
||||||
"@octokit/types": "^6.16.1",
|
|
||||||
"is-plain-object": "^5.0.0",
|
|
||||||
"node-fetch": "^2.6.7",
|
|
||||||
"universal-user-agent": "^6.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@octokit/request-error": {
|
|
||||||
"version": "2.1.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz",
|
|
||||||
"integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==",
|
|
||||||
"dependencies": {
|
|
||||||
"@octokit/types": "^6.0.3",
|
|
||||||
"deprecation": "^2.0.0",
|
|
||||||
"once": "^1.4.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@octokit/types": {
|
|
||||||
"version": "6.41.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz",
|
|
||||||
"integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==",
|
|
||||||
"dependencies": {
|
|
||||||
"@octokit/openapi-types": "^12.11.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@vercel/ncc": {
|
|
||||||
"version": "0.24.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.24.1.tgz",
|
|
||||||
"integrity": "sha512-r9m7brz2hNmq5TF3sxrK4qR/FhXn44XIMglQUir4sT7Sh5GOaYXlMYikHFwJStf8rmQGTlvOoBXt4yHVonRG8A==",
|
|
||||||
"dev": true,
|
|
||||||
"bin": {
|
|
||||||
"ncc": "dist/ncc/cli.js"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/before-after-hook": {
|
|
||||||
"version": "2.2.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
|
|
||||||
"integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="
|
|
||||||
},
|
|
||||||
"node_modules/deprecation": {
|
|
||||||
"version": "2.3.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
|
|
||||||
"integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="
|
|
||||||
},
|
|
||||||
"node_modules/is-plain-object": {
|
|
||||||
"version": "5.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
|
|
||||||
"integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=0.10.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/node-fetch": {
|
|
||||||
"version": "2.6.12",
|
|
||||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz",
|
|
||||||
"integrity": "sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==",
|
|
||||||
"dependencies": {
|
|
||||||
"whatwg-url": "^5.0.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": "4.x || >=6.0.0"
|
|
||||||
},
|
|
||||||
"peerDependencies": {
|
|
||||||
"encoding": "^0.1.0"
|
|
||||||
},
|
|
||||||
"peerDependenciesMeta": {
|
|
||||||
"encoding": {
|
|
||||||
"optional": true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/once": {
|
|
||||||
"version": "1.4.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
|
|
||||||
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
|
|
||||||
"dependencies": {
|
|
||||||
"wrappy": "1"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/tr46": {
|
|
||||||
"version": "0.0.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
|
|
||||||
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
|
|
||||||
},
|
|
||||||
"node_modules/tunnel": {
|
|
||||||
"version": "0.0.6",
|
|
||||||
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
|
|
||||||
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=0.6.11 <=0.7.0 || >=0.7.3"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/universal-user-agent": {
|
|
||||||
"version": "6.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz",
|
|
||||||
"integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w=="
|
|
||||||
},
|
|
||||||
"node_modules/uuid": {
|
|
||||||
"version": "8.3.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
|
|
||||||
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
|
|
||||||
"bin": {
|
|
||||||
"uuid": "dist/bin/uuid"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/webidl-conversions": {
|
|
||||||
"version": "3.0.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
|
|
||||||
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
|
|
||||||
},
|
|
||||||
"node_modules/whatwg-url": {
|
|
||||||
"version": "5.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
|
|
||||||
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
|
|
||||||
"dependencies": {
|
|
||||||
"tr46": "~0.0.3",
|
|
||||||
"webidl-conversions": "^3.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/wrappy": {
|
|
||||||
"version": "1.0.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
|
||||||
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
9
act/runner/testdata/actions/node12/node_modules/@actions/core/LICENSE.md
generated
vendored
9
act/runner/testdata/actions/node12/node_modules/@actions/core/LICENSE.md
generated
vendored
@@ -1,9 +0,0 @@
|
|||||||
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright 2019 GitHub
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
15
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/command.d.ts
generated
vendored
15
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/command.d.ts
generated
vendored
@@ -1,15 +0,0 @@
|
|||||||
export interface CommandProperties {
|
|
||||||
[key: string]: any;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Commands
|
|
||||||
*
|
|
||||||
* Command Format:
|
|
||||||
* ::name key=value,key=value::message
|
|
||||||
*
|
|
||||||
* Examples:
|
|
||||||
* ::warning::This is the message
|
|
||||||
* ::set-env name=MY_VAR::some value
|
|
||||||
*/
|
|
||||||
export declare function issueCommand(command: string, properties: CommandProperties, message: any): void;
|
|
||||||
export declare function issue(name: string, message?: string): void;
|
|
||||||
92
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/command.js
generated
vendored
92
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/command.js
generated
vendored
@@ -1,92 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
||||||
if (k2 === undefined) k2 = k;
|
|
||||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
||||||
}) : (function(o, m, k, k2) {
|
|
||||||
if (k2 === undefined) k2 = k;
|
|
||||||
o[k2] = m[k];
|
|
||||||
}));
|
|
||||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
||||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
||||||
}) : function(o, v) {
|
|
||||||
o["default"] = v;
|
|
||||||
});
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
||||||
__setModuleDefault(result, mod);
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
exports.issue = exports.issueCommand = void 0;
|
|
||||||
const os = __importStar(require("os"));
|
|
||||||
const utils_1 = require("./utils");
|
|
||||||
/**
|
|
||||||
* Commands
|
|
||||||
*
|
|
||||||
* Command Format:
|
|
||||||
* ::name key=value,key=value::message
|
|
||||||
*
|
|
||||||
* Examples:
|
|
||||||
* ::warning::This is the message
|
|
||||||
* ::set-env name=MY_VAR::some value
|
|
||||||
*/
|
|
||||||
function issueCommand(command, properties, message) {
|
|
||||||
const cmd = new Command(command, properties, message);
|
|
||||||
process.stdout.write(cmd.toString() + os.EOL);
|
|
||||||
}
|
|
||||||
exports.issueCommand = issueCommand;
|
|
||||||
function issue(name, message = '') {
|
|
||||||
issueCommand(name, {}, message);
|
|
||||||
}
|
|
||||||
exports.issue = issue;
|
|
||||||
const CMD_STRING = '::';
|
|
||||||
class Command {
|
|
||||||
constructor(command, properties, message) {
|
|
||||||
if (!command) {
|
|
||||||
command = 'missing.command';
|
|
||||||
}
|
|
||||||
this.command = command;
|
|
||||||
this.properties = properties;
|
|
||||||
this.message = message;
|
|
||||||
}
|
|
||||||
toString() {
|
|
||||||
let cmdStr = CMD_STRING + this.command;
|
|
||||||
if (this.properties && Object.keys(this.properties).length > 0) {
|
|
||||||
cmdStr += ' ';
|
|
||||||
let first = true;
|
|
||||||
for (const key in this.properties) {
|
|
||||||
if (this.properties.hasOwnProperty(key)) {
|
|
||||||
const val = this.properties[key];
|
|
||||||
if (val) {
|
|
||||||
if (first) {
|
|
||||||
first = false;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
cmdStr += ',';
|
|
||||||
}
|
|
||||||
cmdStr += `${key}=${escapeProperty(val)}`;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
|
|
||||||
return cmdStr;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function escapeData(s) {
|
|
||||||
return utils_1.toCommandValue(s)
|
|
||||||
.replace(/%/g, '%25')
|
|
||||||
.replace(/\r/g, '%0D')
|
|
||||||
.replace(/\n/g, '%0A');
|
|
||||||
}
|
|
||||||
function escapeProperty(s) {
|
|
||||||
return utils_1.toCommandValue(s)
|
|
||||||
.replace(/%/g, '%25')
|
|
||||||
.replace(/\r/g, '%0D')
|
|
||||||
.replace(/\n/g, '%0A')
|
|
||||||
.replace(/:/g, '%3A')
|
|
||||||
.replace(/,/g, '%2C');
|
|
||||||
}
|
|
||||||
//# sourceMappingURL=command.js.map
|
|
||||||
1
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/command.js.map
generated
vendored
1
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/command.js.map
generated
vendored
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,mCAAsC;AAWtC;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAY;IAEZ,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,OAAO,GAAG,EAAE;IAC9C,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,IAAI,CAAA;AAEvB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,IAAI,KAAK,GAAG,IAAI,CAAA;YAChB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,IAAI,KAAK,EAAE;4BACT,KAAK,GAAG,KAAK,CAAA;yBACd;6BAAM;4BACL,MAAM,IAAI,GAAG,CAAA;yBACd;wBAED,MAAM,IAAI,GAAG,GAAG,IAAI,cAAc,CAAC,GAAG,CAAC,EAAE,CAAA;qBAC1C;iBACF;aACF;SACF;QAED,MAAM,IAAI,GAAG,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAA;QACpD,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED,SAAS,UAAU,CAAC,CAAM;IACxB,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AAC1B,CAAC;AAED,SAAS,cAAc,CAAC,CAAM;IAC5B,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"}
|
|
||||||
198
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/core.d.ts
generated
vendored
198
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/core.d.ts
generated
vendored
@@ -1,198 +0,0 @@
|
|||||||
/**
|
|
||||||
* Interface for getInput options
|
|
||||||
*/
|
|
||||||
export interface InputOptions {
|
|
||||||
/** Optional. Whether the input is required. If required and not present, will throw. Defaults to false */
|
|
||||||
required?: boolean;
|
|
||||||
/** Optional. Whether leading/trailing whitespace will be trimmed for the input. Defaults to true */
|
|
||||||
trimWhitespace?: boolean;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* The code to exit an action
|
|
||||||
*/
|
|
||||||
export declare enum ExitCode {
|
|
||||||
/**
|
|
||||||
* A code indicating that the action was successful
|
|
||||||
*/
|
|
||||||
Success = 0,
|
|
||||||
/**
|
|
||||||
* A code indicating that the action was a failure
|
|
||||||
*/
|
|
||||||
Failure = 1
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Optional properties that can be sent with annotatation commands (notice, error, and warning)
|
|
||||||
* See: https://docs.github.com/en/rest/reference/checks#create-a-check-run for more information about annotations.
|
|
||||||
*/
|
|
||||||
export interface AnnotationProperties {
|
|
||||||
/**
|
|
||||||
* A title for the annotation.
|
|
||||||
*/
|
|
||||||
title?: string;
|
|
||||||
/**
|
|
||||||
* The path of the file for which the annotation should be created.
|
|
||||||
*/
|
|
||||||
file?: string;
|
|
||||||
/**
|
|
||||||
* The start line for the annotation.
|
|
||||||
*/
|
|
||||||
startLine?: number;
|
|
||||||
/**
|
|
||||||
* The end line for the annotation. Defaults to `startLine` when `startLine` is provided.
|
|
||||||
*/
|
|
||||||
endLine?: number;
|
|
||||||
/**
|
|
||||||
* The start column for the annotation. Cannot be sent when `startLine` and `endLine` are different values.
|
|
||||||
*/
|
|
||||||
startColumn?: number;
|
|
||||||
/**
|
|
||||||
* The start column for the annotation. Cannot be sent when `startLine` and `endLine` are different values.
|
|
||||||
* Defaults to `startColumn` when `startColumn` is provided.
|
|
||||||
*/
|
|
||||||
endColumn?: number;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Sets env variable for this action and future actions in the job
|
|
||||||
* @param name the name of the variable to set
|
|
||||||
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
|
|
||||||
*/
|
|
||||||
export declare function exportVariable(name: string, val: any): void;
|
|
||||||
/**
|
|
||||||
* Registers a secret which will get masked from logs
|
|
||||||
* @param secret value of the secret
|
|
||||||
*/
|
|
||||||
export declare function setSecret(secret: string): void;
|
|
||||||
/**
|
|
||||||
* Prepends inputPath to the PATH (for this action and future actions)
|
|
||||||
* @param inputPath
|
|
||||||
*/
|
|
||||||
export declare function addPath(inputPath: string): void;
|
|
||||||
/**
|
|
||||||
* Gets the value of an input.
|
|
||||||
* Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
|
|
||||||
* Returns an empty string if the value is not defined.
|
|
||||||
*
|
|
||||||
* @param name name of the input to get
|
|
||||||
* @param options optional. See InputOptions.
|
|
||||||
* @returns string
|
|
||||||
*/
|
|
||||||
export declare function getInput(name: string, options?: InputOptions): string;
|
|
||||||
/**
|
|
||||||
* Gets the values of an multiline input. Each value is also trimmed.
|
|
||||||
*
|
|
||||||
* @param name name of the input to get
|
|
||||||
* @param options optional. See InputOptions.
|
|
||||||
* @returns string[]
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
export declare function getMultilineInput(name: string, options?: InputOptions): string[];
|
|
||||||
/**
|
|
||||||
* Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
|
|
||||||
* Support boolean input list: `true | True | TRUE | false | False | FALSE` .
|
|
||||||
* The return value is also in boolean type.
|
|
||||||
* ref: https://yaml.org/spec/1.2/spec.html#id2804923
|
|
||||||
*
|
|
||||||
* @param name name of the input to get
|
|
||||||
* @param options optional. See InputOptions.
|
|
||||||
* @returns boolean
|
|
||||||
*/
|
|
||||||
export declare function getBooleanInput(name: string, options?: InputOptions): boolean;
|
|
||||||
/**
|
|
||||||
* Sets the value of an output.
|
|
||||||
*
|
|
||||||
* @param name name of the output to set
|
|
||||||
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
|
||||||
*/
|
|
||||||
export declare function setOutput(name: string, value: any): void;
|
|
||||||
/**
|
|
||||||
* Enables or disables the echoing of commands into stdout for the rest of the step.
|
|
||||||
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
export declare function setCommandEcho(enabled: boolean): void;
|
|
||||||
/**
|
|
||||||
* Sets the action status to failed.
|
|
||||||
* When the action exits it will be with an exit code of 1
|
|
||||||
* @param message add error issue message
|
|
||||||
*/
|
|
||||||
export declare function setFailed(message: string | Error): void;
|
|
||||||
/**
|
|
||||||
* Gets whether Actions Step Debug is on or not
|
|
||||||
*/
|
|
||||||
export declare function isDebug(): boolean;
|
|
||||||
/**
|
|
||||||
* Writes debug message to user log
|
|
||||||
* @param message debug message
|
|
||||||
*/
|
|
||||||
export declare function debug(message: string): void;
|
|
||||||
/**
|
|
||||||
* Adds an error issue
|
|
||||||
* @param message error issue message. Errors will be converted to string via toString()
|
|
||||||
* @param properties optional properties to add to the annotation.
|
|
||||||
*/
|
|
||||||
export declare function error(message: string | Error, properties?: AnnotationProperties): void;
|
|
||||||
/**
|
|
||||||
* Adds a warning issue
|
|
||||||
* @param message warning issue message. Errors will be converted to string via toString()
|
|
||||||
* @param properties optional properties to add to the annotation.
|
|
||||||
*/
|
|
||||||
export declare function warning(message: string | Error, properties?: AnnotationProperties): void;
|
|
||||||
/**
|
|
||||||
* Adds a notice issue
|
|
||||||
* @param message notice issue message. Errors will be converted to string via toString()
|
|
||||||
* @param properties optional properties to add to the annotation.
|
|
||||||
*/
|
|
||||||
export declare function notice(message: string | Error, properties?: AnnotationProperties): void;
|
|
||||||
/**
|
|
||||||
* Writes info to log with console.log.
|
|
||||||
* @param message info message
|
|
||||||
*/
|
|
||||||
export declare function info(message: string): void;
|
|
||||||
/**
|
|
||||||
* Begin an output group.
|
|
||||||
*
|
|
||||||
* Output until the next `groupEnd` will be foldable in this group
|
|
||||||
*
|
|
||||||
* @param name The name of the output group
|
|
||||||
*/
|
|
||||||
export declare function startGroup(name: string): void;
|
|
||||||
/**
|
|
||||||
* End an output group.
|
|
||||||
*/
|
|
||||||
export declare function endGroup(): void;
|
|
||||||
/**
|
|
||||||
* Wrap an asynchronous function call in a group.
|
|
||||||
*
|
|
||||||
* Returns the same type as the function itself.
|
|
||||||
*
|
|
||||||
* @param name The name of the group
|
|
||||||
* @param fn The function to wrap in the group
|
|
||||||
*/
|
|
||||||
export declare function group<T>(name: string, fn: () => Promise<T>): Promise<T>;
|
|
||||||
/**
|
|
||||||
* Saves state for current action, the state can only be retrieved by this action's post job execution.
|
|
||||||
*
|
|
||||||
* @param name name of the state to store
|
|
||||||
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
|
||||||
*/
|
|
||||||
export declare function saveState(name: string, value: any): void;
|
|
||||||
/**
|
|
||||||
* Gets the value of an state set by this action's main execution.
|
|
||||||
*
|
|
||||||
* @param name name of the state to get
|
|
||||||
* @returns string
|
|
||||||
*/
|
|
||||||
export declare function getState(name: string): string;
|
|
||||||
export declare function getIDToken(aud?: string): Promise<string>;
|
|
||||||
/**
|
|
||||||
* Summary exports
|
|
||||||
*/
|
|
||||||
export { summary } from './summary';
|
|
||||||
/**
|
|
||||||
* @deprecated use core.summary
|
|
||||||
*/
|
|
||||||
export { markdownSummary } from './summary';
|
|
||||||
/**
|
|
||||||
* Path exports
|
|
||||||
*/
|
|
||||||
export { toPosixPath, toWin32Path, toPlatformPath } from './path-utils';
|
|
||||||
336
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/core.js
generated
vendored
336
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/core.js
generated
vendored
@@ -1,336 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
||||||
if (k2 === undefined) k2 = k;
|
|
||||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
||||||
}) : (function(o, m, k, k2) {
|
|
||||||
if (k2 === undefined) k2 = k;
|
|
||||||
o[k2] = m[k];
|
|
||||||
}));
|
|
||||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
||||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
||||||
}) : function(o, v) {
|
|
||||||
o["default"] = v;
|
|
||||||
});
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
||||||
__setModuleDefault(result, mod);
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
||||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
||||||
return new (P || (P = Promise))(function (resolve, reject) {
|
|
||||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
||||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
||||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
||||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
||||||
});
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
|
|
||||||
const command_1 = require("./command");
|
|
||||||
const file_command_1 = require("./file-command");
|
|
||||||
const utils_1 = require("./utils");
|
|
||||||
const os = __importStar(require("os"));
|
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const oidc_utils_1 = require("./oidc-utils");
|
|
||||||
/**
|
|
||||||
* The code to exit an action
|
|
||||||
*/
|
|
||||||
var ExitCode;
|
|
||||||
(function (ExitCode) {
|
|
||||||
/**
|
|
||||||
* A code indicating that the action was successful
|
|
||||||
*/
|
|
||||||
ExitCode[ExitCode["Success"] = 0] = "Success";
|
|
||||||
/**
|
|
||||||
* A code indicating that the action was a failure
|
|
||||||
*/
|
|
||||||
ExitCode[ExitCode["Failure"] = 1] = "Failure";
|
|
||||||
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
|
|
||||||
//-----------------------------------------------------------------------
|
|
||||||
// Variables
|
|
||||||
//-----------------------------------------------------------------------
|
|
||||||
/**
|
|
||||||
* Sets env variable for this action and future actions in the job
|
|
||||||
* @param name the name of the variable to set
|
|
||||||
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
|
|
||||||
*/
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
||||||
function exportVariable(name, val) {
|
|
||||||
const convertedVal = utils_1.toCommandValue(val);
|
|
||||||
process.env[name] = convertedVal;
|
|
||||||
const filePath = process.env['GITHUB_ENV'] || '';
|
|
||||||
if (filePath) {
|
|
||||||
return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
|
|
||||||
}
|
|
||||||
command_1.issueCommand('set-env', { name }, convertedVal);
|
|
||||||
}
|
|
||||||
exports.exportVariable = exportVariable;
|
|
||||||
/**
|
|
||||||
* Registers a secret which will get masked from logs
|
|
||||||
* @param secret value of the secret
|
|
||||||
*/
|
|
||||||
function setSecret(secret) {
|
|
||||||
command_1.issueCommand('add-mask', {}, secret);
|
|
||||||
}
|
|
||||||
exports.setSecret = setSecret;
|
|
||||||
/**
|
|
||||||
* Prepends inputPath to the PATH (for this action and future actions)
|
|
||||||
* @param inputPath
|
|
||||||
*/
|
|
||||||
function addPath(inputPath) {
|
|
||||||
const filePath = process.env['GITHUB_PATH'] || '';
|
|
||||||
if (filePath) {
|
|
||||||
file_command_1.issueFileCommand('PATH', inputPath);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
command_1.issueCommand('add-path', {}, inputPath);
|
|
||||||
}
|
|
||||||
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
|
|
||||||
}
|
|
||||||
exports.addPath = addPath;
|
|
||||||
/**
|
|
||||||
* Gets the value of an input.
|
|
||||||
* Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
|
|
||||||
* Returns an empty string if the value is not defined.
|
|
||||||
*
|
|
||||||
* @param name name of the input to get
|
|
||||||
* @param options optional. See InputOptions.
|
|
||||||
* @returns string
|
|
||||||
*/
|
|
||||||
function getInput(name, options) {
|
|
||||||
const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
|
|
||||||
if (options && options.required && !val) {
|
|
||||||
throw new Error(`Input required and not supplied: ${name}`);
|
|
||||||
}
|
|
||||||
if (options && options.trimWhitespace === false) {
|
|
||||||
return val;
|
|
||||||
}
|
|
||||||
return val.trim();
|
|
||||||
}
|
|
||||||
exports.getInput = getInput;
|
|
||||||
/**
|
|
||||||
* Gets the values of an multiline input. Each value is also trimmed.
|
|
||||||
*
|
|
||||||
* @param name name of the input to get
|
|
||||||
* @param options optional. See InputOptions.
|
|
||||||
* @returns string[]
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
function getMultilineInput(name, options) {
|
|
||||||
const inputs = getInput(name, options)
|
|
||||||
.split('\n')
|
|
||||||
.filter(x => x !== '');
|
|
||||||
if (options && options.trimWhitespace === false) {
|
|
||||||
return inputs;
|
|
||||||
}
|
|
||||||
return inputs.map(input => input.trim());
|
|
||||||
}
|
|
||||||
exports.getMultilineInput = getMultilineInput;
|
|
||||||
/**
|
|
||||||
* Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
|
|
||||||
* Support boolean input list: `true | True | TRUE | false | False | FALSE` .
|
|
||||||
* The return value is also in boolean type.
|
|
||||||
* ref: https://yaml.org/spec/1.2/spec.html#id2804923
|
|
||||||
*
|
|
||||||
* @param name name of the input to get
|
|
||||||
* @param options optional. See InputOptions.
|
|
||||||
* @returns boolean
|
|
||||||
*/
|
|
||||||
function getBooleanInput(name, options) {
|
|
||||||
const trueValue = ['true', 'True', 'TRUE'];
|
|
||||||
const falseValue = ['false', 'False', 'FALSE'];
|
|
||||||
const val = getInput(name, options);
|
|
||||||
if (trueValue.includes(val))
|
|
||||||
return true;
|
|
||||||
if (falseValue.includes(val))
|
|
||||||
return false;
|
|
||||||
throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
|
|
||||||
`Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
|
|
||||||
}
|
|
||||||
exports.getBooleanInput = getBooleanInput;
|
|
||||||
/**
|
|
||||||
* Sets the value of an output.
|
|
||||||
*
|
|
||||||
* @param name name of the output to set
|
|
||||||
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
|
||||||
*/
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
||||||
function setOutput(name, value) {
|
|
||||||
const filePath = process.env['GITHUB_OUTPUT'] || '';
|
|
||||||
if (filePath) {
|
|
||||||
return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
|
|
||||||
}
|
|
||||||
process.stdout.write(os.EOL);
|
|
||||||
command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
|
|
||||||
}
|
|
||||||
exports.setOutput = setOutput;
|
|
||||||
/**
|
|
||||||
* Enables or disables the echoing of commands into stdout for the rest of the step.
|
|
||||||
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
function setCommandEcho(enabled) {
|
|
||||||
command_1.issue('echo', enabled ? 'on' : 'off');
|
|
||||||
}
|
|
||||||
exports.setCommandEcho = setCommandEcho;
|
|
||||||
//-----------------------------------------------------------------------
|
|
||||||
// Results
|
|
||||||
//-----------------------------------------------------------------------
|
|
||||||
/**
|
|
||||||
* Sets the action status to failed.
|
|
||||||
* When the action exits it will be with an exit code of 1
|
|
||||||
* @param message add error issue message
|
|
||||||
*/
|
|
||||||
function setFailed(message) {
|
|
||||||
process.exitCode = ExitCode.Failure;
|
|
||||||
error(message);
|
|
||||||
}
|
|
||||||
exports.setFailed = setFailed;
|
|
||||||
//-----------------------------------------------------------------------
|
|
||||||
// Logging Commands
|
|
||||||
//-----------------------------------------------------------------------
|
|
||||||
/**
|
|
||||||
* Gets whether Actions Step Debug is on or not
|
|
||||||
*/
|
|
||||||
function isDebug() {
|
|
||||||
return process.env['RUNNER_DEBUG'] === '1';
|
|
||||||
}
|
|
||||||
exports.isDebug = isDebug;
|
|
||||||
/**
|
|
||||||
* Writes debug message to user log
|
|
||||||
* @param message debug message
|
|
||||||
*/
|
|
||||||
function debug(message) {
|
|
||||||
command_1.issueCommand('debug', {}, message);
|
|
||||||
}
|
|
||||||
exports.debug = debug;
|
|
||||||
/**
|
|
||||||
* Adds an error issue
|
|
||||||
* @param message error issue message. Errors will be converted to string via toString()
|
|
||||||
* @param properties optional properties to add to the annotation.
|
|
||||||
*/
|
|
||||||
function error(message, properties = {}) {
|
|
||||||
command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
|
|
||||||
}
|
|
||||||
exports.error = error;
|
|
||||||
/**
|
|
||||||
* Adds a warning issue
|
|
||||||
* @param message warning issue message. Errors will be converted to string via toString()
|
|
||||||
* @param properties optional properties to add to the annotation.
|
|
||||||
*/
|
|
||||||
function warning(message, properties = {}) {
|
|
||||||
command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
|
|
||||||
}
|
|
||||||
exports.warning = warning;
|
|
||||||
/**
|
|
||||||
* Adds a notice issue
|
|
||||||
* @param message notice issue message. Errors will be converted to string via toString()
|
|
||||||
* @param properties optional properties to add to the annotation.
|
|
||||||
*/
|
|
||||||
function notice(message, properties = {}) {
|
|
||||||
command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
|
|
||||||
}
|
|
||||||
exports.notice = notice;
|
|
||||||
/**
|
|
||||||
* Writes info to log with console.log.
|
|
||||||
* @param message info message
|
|
||||||
*/
|
|
||||||
function info(message) {
|
|
||||||
process.stdout.write(message + os.EOL);
|
|
||||||
}
|
|
||||||
exports.info = info;
|
|
||||||
/**
|
|
||||||
* Begin an output group.
|
|
||||||
*
|
|
||||||
* Output until the next `groupEnd` will be foldable in this group
|
|
||||||
*
|
|
||||||
* @param name The name of the output group
|
|
||||||
*/
|
|
||||||
function startGroup(name) {
|
|
||||||
command_1.issue('group', name);
|
|
||||||
}
|
|
||||||
exports.startGroup = startGroup;
|
|
||||||
/**
|
|
||||||
* End an output group.
|
|
||||||
*/
|
|
||||||
function endGroup() {
|
|
||||||
command_1.issue('endgroup');
|
|
||||||
}
|
|
||||||
exports.endGroup = endGroup;
|
|
||||||
/**
|
|
||||||
* Wrap an asynchronous function call in a group.
|
|
||||||
*
|
|
||||||
* Returns the same type as the function itself.
|
|
||||||
*
|
|
||||||
* @param name The name of the group
|
|
||||||
* @param fn The function to wrap in the group
|
|
||||||
*/
|
|
||||||
function group(name, fn) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
startGroup(name);
|
|
||||||
let result;
|
|
||||||
try {
|
|
||||||
result = yield fn();
|
|
||||||
}
|
|
||||||
finally {
|
|
||||||
endGroup();
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.group = group;
|
|
||||||
//-----------------------------------------------------------------------
|
|
||||||
// Wrapper action state
|
|
||||||
//-----------------------------------------------------------------------
|
|
||||||
/**
|
|
||||||
* Saves state for current action, the state can only be retrieved by this action's post job execution.
|
|
||||||
*
|
|
||||||
* @param name name of the state to store
|
|
||||||
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
|
||||||
*/
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
||||||
function saveState(name, value) {
|
|
||||||
const filePath = process.env['GITHUB_STATE'] || '';
|
|
||||||
if (filePath) {
|
|
||||||
return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
|
|
||||||
}
|
|
||||||
command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
|
|
||||||
}
|
|
||||||
exports.saveState = saveState;
|
|
||||||
/**
|
|
||||||
* Gets the value of an state set by this action's main execution.
|
|
||||||
*
|
|
||||||
* @param name name of the state to get
|
|
||||||
* @returns string
|
|
||||||
*/
|
|
||||||
function getState(name) {
|
|
||||||
return process.env[`STATE_${name}`] || '';
|
|
||||||
}
|
|
||||||
exports.getState = getState;
|
|
||||||
function getIDToken(aud) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
return yield oidc_utils_1.OidcClient.getIDToken(aud);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.getIDToken = getIDToken;
|
|
||||||
/**
|
|
||||||
* Summary exports
|
|
||||||
*/
|
|
||||||
var summary_1 = require("./summary");
|
|
||||||
Object.defineProperty(exports, "summary", { enumerable: true, get: function () { return summary_1.summary; } });
|
|
||||||
/**
|
|
||||||
* @deprecated use core.summary
|
|
||||||
*/
|
|
||||||
var summary_2 = require("./summary");
|
|
||||||
Object.defineProperty(exports, "markdownSummary", { enumerable: true, get: function () { return summary_2.markdownSummary; } });
|
|
||||||
/**
|
|
||||||
* Path exports
|
|
||||||
*/
|
|
||||||
var path_utils_1 = require("./path-utils");
|
|
||||||
Object.defineProperty(exports, "toPosixPath", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });
|
|
||||||
Object.defineProperty(exports, "toWin32Path", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });
|
|
||||||
Object.defineProperty(exports, "toPlatformPath", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });
|
|
||||||
//# sourceMappingURL=core.js.map
|
|
||||||
1
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/core.js.map
generated
vendored
1
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/core.js.map
generated
vendored
File diff suppressed because one or more lines are too long
2
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/file-command.d.ts
generated
vendored
2
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/file-command.d.ts
generated
vendored
@@ -1,2 +0,0 @@
|
|||||||
export declare function issueFileCommand(command: string, message: any): void;
|
|
||||||
export declare function prepareKeyValueMessage(key: string, value: any): string;
|
|
||||||
58
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/file-command.js
generated
vendored
58
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/file-command.js
generated
vendored
@@ -1,58 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
// For internal use, subject to change.
|
|
||||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
||||||
if (k2 === undefined) k2 = k;
|
|
||||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
||||||
}) : (function(o, m, k, k2) {
|
|
||||||
if (k2 === undefined) k2 = k;
|
|
||||||
o[k2] = m[k];
|
|
||||||
}));
|
|
||||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
||||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
||||||
}) : function(o, v) {
|
|
||||||
o["default"] = v;
|
|
||||||
});
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
||||||
__setModuleDefault(result, mod);
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
|
|
||||||
// We use any as a valid input type
|
|
||||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
||||||
const fs = __importStar(require("fs"));
|
|
||||||
const os = __importStar(require("os"));
|
|
||||||
const uuid_1 = require("uuid");
|
|
||||||
const utils_1 = require("./utils");
|
|
||||||
function issueFileCommand(command, message) {
|
|
||||||
const filePath = process.env[`GITHUB_${command}`];
|
|
||||||
if (!filePath) {
|
|
||||||
throw new Error(`Unable to find environment variable for file command ${command}`);
|
|
||||||
}
|
|
||||||
if (!fs.existsSync(filePath)) {
|
|
||||||
throw new Error(`Missing file at path: ${filePath}`);
|
|
||||||
}
|
|
||||||
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
|
|
||||||
encoding: 'utf8'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.issueFileCommand = issueFileCommand;
|
|
||||||
function prepareKeyValueMessage(key, value) {
|
|
||||||
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
|
|
||||||
const convertedValue = utils_1.toCommandValue(value);
|
|
||||||
// These should realistically never happen, but just in case someone finds a
|
|
||||||
// way to exploit uuid generation let's not allow keys or values that contain
|
|
||||||
// the delimiter.
|
|
||||||
if (key.includes(delimiter)) {
|
|
||||||
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
|
|
||||||
}
|
|
||||||
if (convertedValue.includes(delimiter)) {
|
|
||||||
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
|
|
||||||
}
|
|
||||||
return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
|
|
||||||
}
|
|
||||||
exports.prepareKeyValueMessage = prepareKeyValueMessage;
|
|
||||||
//# sourceMappingURL=file-command.js.map
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"file-command.js","sourceRoot":"","sources":["../src/file-command.ts"],"names":[],"mappings":";AAAA,uCAAuC;;;;;;;;;;;;;;;;;;;;;;AAEvC,mCAAmC;AACnC,uDAAuD;AAEvD,uCAAwB;AACxB,uCAAwB;AACxB,+BAAiC;AACjC,mCAAsC;AAEtC,SAAgB,gBAAgB,CAAC,OAAe,EAAE,OAAY;IAC5D,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,OAAO,EAAE,CAAC,CAAA;IACjD,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,IAAI,KAAK,CACb,wDAAwD,OAAO,EAAE,CAClE,CAAA;KACF;IACD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;QAC5B,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAA;KACrD;IAED,EAAE,CAAC,cAAc,CAAC,QAAQ,EAAE,GAAG,sBAAc,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,EAAE;QACjE,QAAQ,EAAE,MAAM;KACjB,CAAC,CAAA;AACJ,CAAC;AAdD,4CAcC;AAED,SAAgB,sBAAsB,CAAC,GAAW,EAAE,KAAU;IAC5D,MAAM,SAAS,GAAG,gBAAgB,SAAM,EAAE,EAAE,CAAA;IAC5C,MAAM,cAAc,GAAG,sBAAc,CAAC,KAAK,CAAC,CAAA;IAE5C,4EAA4E;IAC5E,6EAA6E;IAC7E,iBAAiB;IACjB,IAAI,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QAC3B,MAAM,IAAI,KAAK,CACb,4DAA4D,SAAS,GAAG,CACzE,CAAA;KACF;IAED,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CACb,6DAA6D,SAAS,GAAG,CAC1E,CAAA;KACF;IAED,OAAO,GAAG,GAAG,KAAK,SAAS,GAAG,EAAE,CAAC,GAAG,GAAG,cAAc,GAAG,EAAE,CAAC,GAAG,GAAG,SAAS,EAAE,CAAA;AAC9E,CAAC;AApBD,wDAoBC"}
|
|
||||||
7
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/oidc-utils.d.ts
generated
vendored
7
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/oidc-utils.d.ts
generated
vendored
@@ -1,7 +0,0 @@
|
|||||||
export declare class OidcClient {
|
|
||||||
private static createHttpClient;
|
|
||||||
private static getRequestToken;
|
|
||||||
private static getIDTokenUrl;
|
|
||||||
private static getCall;
|
|
||||||
static getIDToken(audience?: string): Promise<string>;
|
|
||||||
}
|
|
||||||
77
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/oidc-utils.js
generated
vendored
77
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/oidc-utils.js
generated
vendored
@@ -1,77 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
||||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
||||||
return new (P || (P = Promise))(function (resolve, reject) {
|
|
||||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
||||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
||||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
||||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
||||||
});
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
exports.OidcClient = void 0;
|
|
||||||
const http_client_1 = require("@actions/http-client");
|
|
||||||
const auth_1 = require("@actions/http-client/lib/auth");
|
|
||||||
const core_1 = require("./core");
|
|
||||||
class OidcClient {
|
|
||||||
static createHttpClient(allowRetry = true, maxRetry = 10) {
|
|
||||||
const requestOptions = {
|
|
||||||
allowRetries: allowRetry,
|
|
||||||
maxRetries: maxRetry
|
|
||||||
};
|
|
||||||
return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
|
|
||||||
}
|
|
||||||
static getRequestToken() {
|
|
||||||
const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
|
|
||||||
if (!token) {
|
|
||||||
throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
|
|
||||||
}
|
|
||||||
return token;
|
|
||||||
}
|
|
||||||
static getIDTokenUrl() {
|
|
||||||
const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
|
|
||||||
if (!runtimeUrl) {
|
|
||||||
throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
|
|
||||||
}
|
|
||||||
return runtimeUrl;
|
|
||||||
}
|
|
||||||
static getCall(id_token_url) {
|
|
||||||
var _a;
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
const httpclient = OidcClient.createHttpClient();
|
|
||||||
const res = yield httpclient
|
|
||||||
.getJson(id_token_url)
|
|
||||||
.catch(error => {
|
|
||||||
throw new Error(`Failed to get ID Token. \n
|
|
||||||
Error Code : ${error.statusCode}\n
|
|
||||||
Error Message: ${error.result.message}`);
|
|
||||||
});
|
|
||||||
const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
|
|
||||||
if (!id_token) {
|
|
||||||
throw new Error('Response json body do not have ID Token field');
|
|
||||||
}
|
|
||||||
return id_token;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
static getIDToken(audience) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
try {
|
|
||||||
// New ID Token is requested from action service
|
|
||||||
let id_token_url = OidcClient.getIDTokenUrl();
|
|
||||||
if (audience) {
|
|
||||||
const encodedAudience = encodeURIComponent(audience);
|
|
||||||
id_token_url = `${id_token_url}&audience=${encodedAudience}`;
|
|
||||||
}
|
|
||||||
core_1.debug(`ID token url is ${id_token_url}`);
|
|
||||||
const id_token = yield OidcClient.getCall(id_token_url);
|
|
||||||
core_1.setSecret(id_token);
|
|
||||||
return id_token;
|
|
||||||
}
|
|
||||||
catch (error) {
|
|
||||||
throw new Error(`Error message: ${error.message}`);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.OidcClient = OidcClient;
|
|
||||||
//# sourceMappingURL=oidc-utils.js.map
|
|
||||||
1
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/oidc-utils.js.map
generated
vendored
1
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/oidc-utils.js.map
generated
vendored
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"oidc-utils.js","sourceRoot":"","sources":["../src/oidc-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AAGA,sDAA+C;AAC/C,wDAAqE;AACrE,iCAAuC;AAKvC,MAAa,UAAU;IACb,MAAM,CAAC,gBAAgB,CAC7B,UAAU,GAAG,IAAI,EACjB,QAAQ,GAAG,EAAE;QAEb,MAAM,cAAc,GAAmB;YACrC,YAAY,EAAE,UAAU;YACxB,UAAU,EAAE,QAAQ;SACrB,CAAA;QAED,OAAO,IAAI,wBAAU,CACnB,qBAAqB,EACrB,CAAC,IAAI,8BAAuB,CAAC,UAAU,CAAC,eAAe,EAAE,CAAC,CAAC,EAC3D,cAAc,CACf,CAAA;IACH,CAAC;IAEO,MAAM,CAAC,eAAe;QAC5B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAA;QAC3D,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CACb,2DAA2D,CAC5D,CAAA;SACF;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,MAAM,CAAC,aAAa;QAC1B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAA;QAC9D,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;QACD,OAAO,UAAU,CAAA;IACnB,CAAC;IAEO,MAAM,CAAO,OAAO,CAAC,YAAoB;;;YAC/C,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,EAAE,CAAA;YAEhD,MAAM,GAAG,GAAG,MAAM,UAAU;iBACzB,OAAO,CAAgB,YAAY,CAAC;iBACpC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACb,MAAM,IAAI,KAAK,CACb;uBACa,KAAK,CAAC,UAAU;yBACd,KAAK,CAAC,MAAM,CAAC,OAAO,EAAE,CACtC,CAAA;YACH,CAAC,CAAC,CAAA;YAEJ,MAAM,QAAQ,SAAG,GAAG,CAAC,MAAM,0CAAE,KAAK,CAAA;YAClC,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;aACjE;YACD,OAAO,QAAQ,CAAA;;KAChB;IAED,MAAM,CAAO,UAAU,CAAC,QAAiB;;YACvC,IAAI;gBACF,gDAAgD;gBAChD,IAAI,YAAY,GAAW,UAAU,CAAC,aAAa,EAAE,CAAA;gBACrD,IAAI,QAAQ,EAAE;oBACZ,MAAM,eAAe,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAA;oBACpD,YAAY,GAAG,GAAG,YAAY,aAAa,eAAe,EAAE,CAAA;iBAC7D;gBAED,YAAK,CAAC,mBAAmB,YAAY,EAAE,CAAC,CAAA;gBAExC,MAAM,QAAQ,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;gBACvD,gBAAS,CAAC,QAAQ,CAAC,CAAA;gBACnB,OAAO,QAAQ,CAAA;aAChB;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,kBAAkB,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;QACH,CAAC;KAAA;CACF;AAzED,gCAyEC"}
|
|
||||||
25
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/path-utils.d.ts
generated
vendored
25
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/path-utils.d.ts
generated
vendored
@@ -1,25 +0,0 @@
|
|||||||
/**
|
|
||||||
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
|
|
||||||
* replaced with /.
|
|
||||||
*
|
|
||||||
* @param pth. Path to transform.
|
|
||||||
* @return string Posix path.
|
|
||||||
*/
|
|
||||||
export declare function toPosixPath(pth: string): string;
|
|
||||||
/**
|
|
||||||
* toWin32Path converts the given path to the win32 form. On Linux, / will be
|
|
||||||
* replaced with \\.
|
|
||||||
*
|
|
||||||
* @param pth. Path to transform.
|
|
||||||
* @return string Win32 path.
|
|
||||||
*/
|
|
||||||
export declare function toWin32Path(pth: string): string;
|
|
||||||
/**
|
|
||||||
* toPlatformPath converts the given path to a platform-specific path. It does
|
|
||||||
* this by replacing instances of / and \ with the platform-specific path
|
|
||||||
* separator.
|
|
||||||
*
|
|
||||||
* @param pth The path to platformize.
|
|
||||||
* @return string The platform-specific path.
|
|
||||||
*/
|
|
||||||
export declare function toPlatformPath(pth: string): string;
|
|
||||||
58
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/path-utils.js
generated
vendored
58
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/path-utils.js
generated
vendored
@@ -1,58 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
||||||
if (k2 === undefined) k2 = k;
|
|
||||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
||||||
}) : (function(o, m, k, k2) {
|
|
||||||
if (k2 === undefined) k2 = k;
|
|
||||||
o[k2] = m[k];
|
|
||||||
}));
|
|
||||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
||||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
||||||
}) : function(o, v) {
|
|
||||||
o["default"] = v;
|
|
||||||
});
|
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
||||||
__setModuleDefault(result, mod);
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
|
|
||||||
const path = __importStar(require("path"));
|
|
||||||
/**
|
|
||||||
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
|
|
||||||
* replaced with /.
|
|
||||||
*
|
|
||||||
* @param pth. Path to transform.
|
|
||||||
* @return string Posix path.
|
|
||||||
*/
|
|
||||||
function toPosixPath(pth) {
|
|
||||||
return pth.replace(/[\\]/g, '/');
|
|
||||||
}
|
|
||||||
exports.toPosixPath = toPosixPath;
|
|
||||||
/**
|
|
||||||
* toWin32Path converts the given path to the win32 form. On Linux, / will be
|
|
||||||
* replaced with \\.
|
|
||||||
*
|
|
||||||
* @param pth. Path to transform.
|
|
||||||
* @return string Win32 path.
|
|
||||||
*/
|
|
||||||
function toWin32Path(pth) {
|
|
||||||
return pth.replace(/[/]/g, '\\');
|
|
||||||
}
|
|
||||||
exports.toWin32Path = toWin32Path;
|
|
||||||
/**
|
|
||||||
* toPlatformPath converts the given path to a platform-specific path. It does
|
|
||||||
* this by replacing instances of / and \ with the platform-specific path
|
|
||||||
* separator.
|
|
||||||
*
|
|
||||||
* @param pth The path to platformize.
|
|
||||||
* @return string The platform-specific path.
|
|
||||||
*/
|
|
||||||
function toPlatformPath(pth) {
|
|
||||||
return pth.replace(/[/\\]/g, path.sep);
|
|
||||||
}
|
|
||||||
exports.toPlatformPath = toPlatformPath;
|
|
||||||
//# sourceMappingURL=path-utils.js.map
|
|
||||||
1
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/path-utils.js.map
generated
vendored
1
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/path-utils.js.map
generated
vendored
@@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"path-utils.js","sourceRoot":"","sources":["../src/path-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAE5B;;;;;;GAMG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,GAAG,CAAC,CAAA;AAClC,CAAC;AAFD,kCAEC;AAED;;;;;;GAMG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,OAAO,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;AAClC,CAAC;AAFD,kCAEC;AAED;;;;;;;GAOG;AACH,SAAgB,cAAc,CAAC,GAAW;IACxC,OAAO,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,wCAEC"}
|
|
||||||
202
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/summary.d.ts
generated
vendored
202
act/runner/testdata/actions/node12/node_modules/@actions/core/lib/summary.d.ts
generated
vendored
@@ -1,202 +0,0 @@
|
|||||||
export declare const SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
|
|
||||||
export declare const SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
|
|
||||||
export declare type SummaryTableRow = (SummaryTableCell | string)[];
|
|
||||||
export interface SummaryTableCell {
|
|
||||||
/**
|
|
||||||
* Cell content
|
|
||||||
*/
|
|
||||||
data: string;
|
|
||||||
/**
|
|
||||||
* Render cell as header
|
|
||||||
* (optional) default: false
|
|
||||||
*/
|
|
||||||
header?: boolean;
|
|
||||||
/**
|
|
||||||
* Number of columns the cell extends
|
|
||||||
* (optional) default: '1'
|
|
||||||
*/
|
|
||||||
colspan?: string;
|
|
||||||
/**
|
|
||||||
* Number of rows the cell extends
|
|
||||||
* (optional) default: '1'
|
|
||||||
*/
|
|
||||||
rowspan?: string;
|
|
||||||
}
|
|
||||||
export interface SummaryImageOptions {
|
|
||||||
/**
|
|
||||||
* The width of the image in pixels. Must be an integer without a unit.
|
|
||||||
* (optional)
|
|
||||||
*/
|
|
||||||
width?: string;
|
|
||||||
/**
|
|
||||||
* The height of the image in pixels. Must be an integer without a unit.
|
|
||||||
* (optional)
|
|
||||||
*/
|
|
||||||
height?: string;
|
|
||||||
}
|
|
||||||
export interface SummaryWriteOptions {
|
|
||||||
/**
|
|
||||||
* Replace all existing content in summary file with buffer contents
|
|
||||||
* (optional) default: false
|
|
||||||
*/
|
|
||||||
overwrite?: boolean;
|
|
||||||
}
|
|
||||||
/**
 * Builds step-summary content in an in-memory buffer and writes it to the
 * summary file located via the environment (see {@link Summary.write}).
 * All `add*` methods return the instance, so calls can be chained.
 */
declare class Summary {
    private _buffer;
    private _filePath?;
    constructor();
    /**
     * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
     * Also checks r/w permissions.
     *
     * @returns step summary file path
     */
    private filePath;
    /**
     * Wraps content in an HTML tag, adding any HTML attributes
     *
     * @param {string} tag HTML tag to wrap
     * @param {string | null} content content within the tag
     * @param {Record<string, string>} attrs key-value list of HTML attributes to add
     *
     * @returns {string} content wrapped in HTML element
     */
    private wrap;
    /**
     * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
     *
     * @param {SummaryWriteOptions} [options] (optional) options for write operation
     *
     * @returns {Promise<Summary>} summary instance
     */
    write(options?: SummaryWriteOptions): Promise<Summary>;
    /**
     * Clears the summary buffer and wipes the summary file
     *
     * @returns {Promise<Summary>} summary instance
     */
    clear(): Promise<Summary>;
    /**
     * Returns the current summary buffer as a string
     *
     * @returns {string} string of summary buffer
     */
    stringify(): string;
    /**
     * If the summary buffer is empty
     *
     * @returns {boolean} true if the buffer is empty
     */
    isEmptyBuffer(): boolean;
    /**
     * Resets the summary buffer without writing to summary file
     *
     * @returns {Summary} summary instance
     */
    emptyBuffer(): Summary;
    /**
     * Adds raw text to the summary buffer
     *
     * @param {string} text content to add
     * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
     *
     * @returns {Summary} summary instance
     */
    addRaw(text: string, addEOL?: boolean): Summary;
    /**
     * Adds the operating system-specific end-of-line marker to the buffer
     *
     * @returns {Summary} summary instance
     */
    addEOL(): Summary;
    /**
     * Adds an HTML codeblock to the summary buffer
     *
     * @param {string} code content to render within fenced code block
     * @param {string} lang (optional) language to syntax highlight code
     *
     * @returns {Summary} summary instance
     */
    addCodeBlock(code: string, lang?: string): Summary;
    /**
     * Adds an HTML list to the summary buffer
     *
     * @param {string[]} items list of items to render
     * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
     *
     * @returns {Summary} summary instance
     */
    addList(items: string[], ordered?: boolean): Summary;
    /**
     * Adds an HTML table to the summary buffer
     *
     * @param {SummaryTableRow[]} rows table rows
     *
     * @returns {Summary} summary instance
     */
    addTable(rows: SummaryTableRow[]): Summary;
    /**
     * Adds a collapsible HTML details element to the summary buffer
     *
     * @param {string} label text for the closed state
     * @param {string} content collapsible content
     *
     * @returns {Summary} summary instance
     */
    addDetails(label: string, content: string): Summary;
    /**
     * Adds an HTML image tag to the summary buffer
     *
     * @param {string} src path to the image you want to embed
     * @param {string} alt text description of the image
     * @param {SummaryImageOptions} options (optional) additional image attributes
     *
     * @returns {Summary} summary instance
     */
    addImage(src: string, alt: string, options?: SummaryImageOptions): Summary;
    /**
     * Adds an HTML section heading element
     *
     * @param {string} text heading text
     * @param {number | string} [level=1] (optional) the heading level, default: 1
     *
     * @returns {Summary} summary instance
     */
    addHeading(text: string, level?: number | string): Summary;
    /**
     * Adds an HTML thematic break (<hr>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addSeparator(): Summary;
    /**
     * Adds an HTML line break (<br>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addBreak(): Summary;
    /**
     * Adds an HTML blockquote to the summary buffer
     *
     * @param {string} text quote text
     * @param {string} cite (optional) citation url
     *
     * @returns {Summary} summary instance
     */
    addQuote(text: string, cite?: string): Summary;
    /**
     * Adds an HTML anchor tag to the summary buffer
     *
     * @param {string} text link text/content
     * @param {string} href hyperlink
     *
     * @returns {Summary} summary instance
     */
    addLink(text: string, href: string): Summary;
}
/**
 * @deprecated use `core.summary`
 */
export declare const markdownSummary: Summary;
/** Shared instance used to build and write the step's summary. */
export declare const summary: Summary;
export {};
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user