Mirror of https://github.com/go-gitea/gitea.git

Commit 9ab4910c37: Merge branch 'main' into lunny/remove_unsupported_signing
.github/workflows/cron-licenses.yml (4 changes, vendored)

@@ -9,8 +9,10 @@ jobs:
   cron-licenses:
     runs-on: ubuntu-latest
     if: github.repository == 'go-gitea/gitea'
+    permissions:
+      contents: write
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-go@v6
         with:
           go-version-file: go.mod
.github/workflows/cron-translations.yml (4 changes, vendored)

@@ -9,8 +9,10 @@ jobs:
   crowdin-pull:
     runs-on: ubuntu-latest
     if: github.repository == 'go-gitea/gitea'
+    permissions:
+      contents: write
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: crowdin/github-action@v1
         with:
           upload_sources: true
.github/workflows/files-changed.yml (4 changes, vendored)

@@ -24,6 +24,8 @@ jobs:
   detect:
     runs-on: ubuntu-latest
     timeout-minutes: 3
+    permissions:
+      contents: read
     outputs:
       backend: ${{ steps.changes.outputs.backend }}
       frontend: ${{ steps.changes.outputs.frontend }}
@@ -34,7 +36,7 @@ jobs:
       swagger: ${{ steps.changes.outputs.swagger }}
       yaml: ${{ steps.changes.outputs.yaml }}
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: dorny/paths-filter@v3
         id: changes
         with:
.github/workflows/pull-compliance.yml (50 changes, vendored)

@@ -10,13 +10,17 @@ concurrency:
 jobs:
   files-changed:
     uses: ./.github/workflows/files-changed.yml
+    permissions:
+      contents: read
 
   lint-backend:
     if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-go@v6
         with:
           go-version-file: go.mod
@@ -30,8 +34,10 @@ jobs:
     if: needs.files-changed.outputs.templates == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: astral-sh/setup-uv@v6
       - run: uv python install 3.12
       - uses: pnpm/action-setup@v4
@@ -46,8 +52,10 @@ jobs:
     if: needs.files-changed.outputs.yaml == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: astral-sh/setup-uv@v6
       - run: uv python install 3.12
       - run: make deps-py
@@ -57,8 +65,10 @@ jobs:
     if: needs.files-changed.outputs.swagger == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: pnpm/action-setup@v4
       - uses: actions/setup-node@v5
         with:
@@ -70,8 +80,10 @@ jobs:
     if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.frontend == 'true' || needs.files-changed.outputs.actions == 'true' || needs.files-changed.outputs.docs == 'true' || needs.files-changed.outputs.templates == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-go@v6
         with:
           go-version-file: go.mod
@@ -82,8 +94,10 @@ jobs:
     if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-go@v6
         with:
           go-version-file: go.mod
@@ -99,8 +113,10 @@ jobs:
     if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-go@v6
         with:
           go-version-file: go.mod
@@ -114,8 +130,10 @@ jobs:
     if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-go@v6
         with:
           go-version-file: go.mod
@@ -127,8 +145,10 @@ jobs:
     if: needs.files-changed.outputs.frontend == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: pnpm/action-setup@v4
       - uses: actions/setup-node@v5
         with:
@@ -143,8 +163,10 @@ jobs:
     if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-go@v6
         with:
           go-version-file: go.mod
@@ -175,8 +197,10 @@ jobs:
     if: needs.files-changed.outputs.docs == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: pnpm/action-setup@v4
       - uses: actions/setup-node@v5
         with:
@@ -188,8 +212,10 @@ jobs:
     if: needs.files-changed.outputs.actions == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-go@v6
         with:
           go-version-file: go.mod
.github/workflows/pull-db-tests.yml (22 changes, vendored)

@@ -10,11 +10,15 @@ concurrency:
 jobs:
   files-changed:
     uses: ./.github/workflows/files-changed.yml
+    permissions:
+      contents: read
 
   test-pgsql:
     if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     services:
       pgsql:
         image: postgres:14
@@ -38,7 +42,7 @@ jobs:
         ports:
           - "9000:9000"
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-go@v6
         with:
           go-version-file: go.mod
@@ -65,8 +69,10 @@ jobs:
     if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-go@v6
         with:
           go-version-file: go.mod
@@ -90,6 +96,8 @@ jobs:
     if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     services:
       elasticsearch:
         image: elasticsearch:7.5.0
@@ -124,7 +132,7 @@ jobs:
         ports:
          - 10000:10000
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
      - uses: actions/setup-go@v6
        with:
          go-version-file: go.mod
@@ -152,6 +160,8 @@ jobs:
     if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     services:
       mysql:
         # the bitnami mysql image has more options than the official one, it's easier to customize
@@ -177,7 +187,7 @@ jobs:
           - "587:587"
           - "993:993"
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-go@v6
         with:
           go-version-file: go.mod
@@ -203,6 +213,8 @@ jobs:
     if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     services:
       mssql:
         image: mcr.microsoft.com/mssql/server:2019-latest
@@ -217,7 +229,7 @@ jobs:
         ports:
           - 10000:10000
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-go@v6
         with:
           go-version-file: go.mod
.github/workflows/pull-docker-dryrun.yml (6 changes, vendored)

@@ -10,13 +10,17 @@ concurrency:
 jobs:
   files-changed:
     uses: ./.github/workflows/files-changed.yml
+    permissions:
+      contents: read
 
   container:
     if: needs.files-changed.outputs.docker == 'true' || needs.files-changed.outputs.actions == 'true'
     needs: files-changed
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: docker/setup-buildx-action@v3
       - name: Build regular container image
         uses: docker/build-push-action@v5
.github/workflows/release-nightly.yml (8 changes, vendored)

@@ -11,8 +11,10 @@ concurrency:
 jobs:
   nightly-binary:
     runs-on: namespace-profile-gitea-release-binary
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       # fetch all commits instead of only the last as some branches are long lived and could have many between versions
       # fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567
       - run: git fetch --unshallow --quiet --tags --force
@@ -56,12 +58,14 @@ jobs:
       - name: upload binaries to s3
         run: |
           aws s3 sync dist/release s3://${{ secrets.AWS_S3_BUCKET }}/gitea/${{ steps.clean_name.outputs.branch }} --no-progress
 
   nightly-container:
     runs-on: namespace-profile-gitea-release-docker
     permissions:
+      contents: read
       packages: write # to publish to ghcr.io
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       # fetch all commits instead of only the last as some branches are long lived and could have many between versions
       # fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567
       - run: git fetch --unshallow --quiet --tags --force
.github/workflows/release-tag-rc.yml (8 changes, vendored)

@@ -12,8 +12,10 @@ concurrency:
 jobs:
   binary:
     runs-on: namespace-profile-gitea-release-binary
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       # fetch all commits instead of only the last as some branches are long lived and could have many between versions
       # fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567
       - run: git fetch --unshallow --quiet --tags --force
@@ -66,12 +68,14 @@ jobs:
           gh release create ${{ github.ref_name }} --title ${{ github.ref_name }} --draft --notes-from-tag dist/release/*
         env:
           GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
 
   container:
     runs-on: namespace-profile-gitea-release-docker
     permissions:
+      contents: read
       packages: write # to publish to ghcr.io
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       # fetch all commits instead of only the last as some branches are long lived and could have many between versions
       # fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567
       - run: git fetch --unshallow --quiet --tags --force
.github/workflows/release-tag-version.yml (7 changes, vendored)

@@ -15,9 +15,10 @@ jobs:
   binary:
     runs-on: namespace-profile-gitea-release-binary
     permissions:
+      contents: read
       packages: write # to publish to ghcr.io
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       # fetch all commits instead of only the last as some branches are long lived and could have many between versions
       # fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567
       - run: git fetch --unshallow --quiet --tags --force
@@ -70,12 +71,14 @@ jobs:
           gh release create ${{ github.ref_name }} --title ${{ github.ref_name }} --notes-from-tag dist/release/*
         env:
           GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
 
   container:
     runs-on: namespace-profile-gitea-release-docker
     permissions:
+      contents: read
       packages: write # to publish to ghcr.io
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       # fetch all commits instead of only the last as some branches are long lived and could have many between versions
       # fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567
       - run: git fetch --unshallow --quiet --tags --force
@@ -18,6 +18,23 @@ import (
 	"xorm.io/xorm"
 )
 
+// AdminUserOrderByMap represents all possible admin user search orders
+// This should only be used for admin API endpoints as we should not expose "updated" ordering which could expose recent user activity including logins.
+var AdminUserOrderByMap = map[string]map[string]db.SearchOrderBy{
+	"asc": {
+		"name":    db.SearchOrderByAlphabetically,
+		"created": db.SearchOrderByOldest,
+		"updated": db.SearchOrderByLeastUpdated,
+		"id":      db.SearchOrderByID,
+	},
+	"desc": {
+		"name":    db.SearchOrderByAlphabeticallyReverse,
+		"created": db.SearchOrderByNewest,
+		"updated": db.SearchOrderByRecentUpdated,
+		"id":      db.SearchOrderByIDReverse,
+	},
+}
+
 // SearchUserOptions contains the options for searching
 type SearchUserOptions struct {
 	db.ListOptions
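As an aside, a minimal sketch of how a caller might consult a direction/key map shaped like AdminUserOrderByMap; the package, the SearchOrderBy values, and the resolveOrder helper below are illustrative stand-ins, not Gitea's actual handler code:

package main

import "fmt"

type SearchOrderBy string

// orderByMap mirrors the shape of AdminUserOrderByMap: direction -> sort key -> order clause.
var orderByMap = map[string]map[string]SearchOrderBy{
	"asc":  {"name": "name ASC", "created": "id ASC"},
	"desc": {"name": "name DESC", "created": "id DESC"},
}

// resolveOrder falls back to a safe default for unknown directions or keys,
// so a sort key the map does not expose (here "updated") simply cannot be requested.
func resolveOrder(direction, key string) SearchOrderBy {
	if byKey, ok := orderByMap[direction]; ok {
		if order, ok := byKey[key]; ok {
			return order
		}
	}
	return "name ASC"
}

func main() {
	fmt.Println(resolveOrder("desc", "created")) // id DESC
	fmt.Println(resolveOrder("desc", "updated")) // name ASC: not exposed by this map
}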
@@ -5,12 +5,10 @@ package charset
 
 import (
 	"bytes"
-	"fmt"
 	"io"
 	"strings"
 	"unicode/utf8"
 
-	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/util"
 
@@ -23,60 +21,39 @@ import (
 var UTF8BOM = []byte{'\xef', '\xbb', '\xbf'}
 
 type ConvertOpts struct {
 	KeepBOM bool
+	ErrorReplacement []byte
+	ErrorReturnOrigin bool
 }
 
+var ToUTF8WithFallbackReaderPrefetchSize = 16 * 1024
+
 // ToUTF8WithFallbackReader detects the encoding of content and converts to UTF-8 reader if possible
 func ToUTF8WithFallbackReader(rd io.Reader, opts ConvertOpts) io.Reader {
-	buf := make([]byte, 2048)
+	buf := make([]byte, ToUTF8WithFallbackReaderPrefetchSize)
 	n, err := util.ReadAtMost(rd, buf)
 	if err != nil {
-		return io.MultiReader(bytes.NewReader(MaybeRemoveBOM(buf[:n], opts)), rd)
-	}
-
-	charsetLabel, err := DetectEncoding(buf[:n])
-	if err != nil || charsetLabel == "UTF-8" {
-		return io.MultiReader(bytes.NewReader(MaybeRemoveBOM(buf[:n], opts)), rd)
-	}
-
-	encoding, _ := charset.Lookup(charsetLabel)
-	if encoding == nil {
+		// read error occurs, don't do any processing
 		return io.MultiReader(bytes.NewReader(buf[:n]), rd)
 	}
 
-	return transform.NewReader(
-		io.MultiReader(
-			bytes.NewReader(MaybeRemoveBOM(buf[:n], opts)),
-			rd,
-		),
-		encoding.NewDecoder(),
-	)
-}
-
-// ToUTF8 converts content to UTF8 encoding
-func ToUTF8(content []byte, opts ConvertOpts) (string, error) {
-	charsetLabel, err := DetectEncoding(content)
-	if err != nil {
-		return "", err
-	} else if charsetLabel == "UTF-8" {
-		return string(MaybeRemoveBOM(content, opts)), nil
+	charsetLabel, _ := DetectEncoding(buf[:n])
+	if charsetLabel == "UTF-8" {
+		// is utf-8, try to remove BOM and read it as-is
+		return io.MultiReader(bytes.NewReader(maybeRemoveBOM(buf[:n], opts)), rd)
 	}
 
 	encoding, _ := charset.Lookup(charsetLabel)
 	if encoding == nil {
-		return string(content), fmt.Errorf("Unknown encoding: %s", charsetLabel)
+		// unknown charset, don't do any processing
+		return io.MultiReader(bytes.NewReader(buf[:n]), rd)
 	}
 
-	// If there is an error, we concatenate the nicely decoded part and the
-	// original left over. This way we won't lose much data.
-	result, n, err := transform.Bytes(encoding.NewDecoder(), content)
-	if err != nil {
-		result = append(result, content[n:]...)
-	}
-
-	result = MaybeRemoveBOM(result, opts)
-
-	return string(result), err
+	// convert from charset to utf-8
+	return transform.NewReader(
+		io.MultiReader(bytes.NewReader(buf[:n]), rd),
+		encoding.NewDecoder(),
+	)
 }
 
 // ToUTF8WithFallback detects the encoding of content and converts to UTF-8 if possible
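The rewritten reader prefetches up to ToUTF8WithFallbackReaderPrefetchSize bytes, detects a charset from the prefix, then stitches the prefetched bytes back in front of the remaining stream through a streaming decoder. A self-contained sketch of that x/text pattern, using a fixed label where the real code calls DetectEncoding:

package main

import (
	"fmt"
	"io"
	"strings"

	"golang.org/x/net/html/charset" // provides Lookup, as used in the diff
	"golang.org/x/text/transform"
)

func main() {
	// "Décor\n" encoded as ISO-8859-1: the lone 0xE9 byte is invalid UTF-8.
	latin1 := "D\xe9cor\n"

	// Lookup is non-nil for this well-known label; real code must check for nil.
	enc, _ := charset.Lookup("ISO-8859-1")

	// transform.NewReader decodes lazily, so large inputs are never fully buffered.
	rd := transform.NewReader(strings.NewReader(latin1), enc.NewDecoder())
	out, err := io.ReadAll(rd)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%q\n", out) // "Décor\n", now valid UTF-8
}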
@@ -85,73 +62,84 @@ func ToUTF8WithFallback(content []byte, opts ConvertOpts) []byte {
 	return bs
 }
 
-// ToUTF8DropErrors makes sure the return string is valid utf-8; attempts conversion if possible
-func ToUTF8DropErrors(content []byte, opts ConvertOpts) []byte {
-	charsetLabel, err := DetectEncoding(content)
-	if err != nil || charsetLabel == "UTF-8" {
-		return MaybeRemoveBOM(content, opts)
+func ToUTF8DropErrors(content []byte) []byte {
+	return ToUTF8(content, ConvertOpts{ErrorReplacement: []byte{' '}})
+}
+
+func ToUTF8(content []byte, opts ConvertOpts) []byte {
+	charsetLabel, _ := DetectEncoding(content)
+	if charsetLabel == "UTF-8" {
+		return maybeRemoveBOM(content, opts)
 	}
 
 	encoding, _ := charset.Lookup(charsetLabel)
 	if encoding == nil {
+		setting.PanicInDevOrTesting("unsupported detected charset %q, it shouldn't happen", charsetLabel)
 		return content
 	}
 
-	// We ignore any non-decodable parts from the file.
-	// Some parts might be lost
 	var decoded []byte
 	decoder := encoding.NewDecoder()
 	idx := 0
-	for {
+	for idx < len(content) {
 		result, n, err := transform.Bytes(decoder, content[idx:])
 		decoded = append(decoded, result...)
 		if err == nil {
 			break
 		}
-		decoded = append(decoded, ' ')
-		idx = idx + n + 1
-		if idx >= len(content) {
-			break
+		if opts.ErrorReturnOrigin {
+			return content
 		}
+		if opts.ErrorReplacement == nil {
+			decoded = append(decoded, content[idx+n])
+		} else {
+			decoded = append(decoded, opts.ErrorReplacement...)
+		}
+		idx += n + 1
 	}
-	return MaybeRemoveBOM(decoded, opts)
+	return maybeRemoveBOM(decoded, opts)
 }
 
-// MaybeRemoveBOM removes a UTF-8 BOM from a []byte when opts.KeepBOM is false
-func MaybeRemoveBOM(content []byte, opts ConvertOpts) []byte {
+// maybeRemoveBOM removes a UTF-8 BOM from a []byte when opts.KeepBOM is false
+func maybeRemoveBOM(content []byte, opts ConvertOpts) []byte {
 	if opts.KeepBOM {
 		return content
 	}
-	if len(content) > 2 && bytes.Equal(content[0:3], UTF8BOM) {
-		return content[3:]
-	}
-	return content
+	return bytes.TrimPrefix(content, UTF8BOM)
 }
 
 // DetectEncoding detect the encoding of content
-func DetectEncoding(content []byte) (string, error) {
+// it always returns a detected or guessed "encoding" string, no matter error happens or not
+func DetectEncoding(content []byte) (encoding string, _ error) {
 	// First we check if the content represents valid utf8 content excepting a truncated character at the end.
 
 	// Now we could decode all the runes in turn but this is not necessarily the cheapest thing to do
-	// instead we walk backwards from the end to trim off a the incomplete character
+	// instead we walk backwards from the end to trim off the incomplete character
 	toValidate := content
 	end := len(toValidate) - 1
 
-	if end < 0 {
-		// no-op
-	} else if toValidate[end]>>5 == 0b110 {
-		// Incomplete 1 byte extension e.g. © <c2><a9> which has been truncated to <c2>
-		toValidate = toValidate[:end]
-	} else if end > 0 && toValidate[end]>>6 == 0b10 && toValidate[end-1]>>4 == 0b1110 {
-		// Incomplete 2 byte extension e.g. ⛔ <e2><9b><94> which has been truncated to <e2><9b>
-		toValidate = toValidate[:end-1]
-	} else if end > 1 && toValidate[end]>>6 == 0b10 && toValidate[end-1]>>6 == 0b10 && toValidate[end-2]>>3 == 0b11110 {
-		// Incomplete 3 byte extension e.g. 💩 <f0><9f><92><a9> which has been truncated to <f0><9f><92>
-		toValidate = toValidate[:end-2]
+	// U+0000  U+007F    0yyyzzzz
+	// U+0080  U+07FF    110xxxyy 10yyzzzz
+	// U+0800  U+FFFF    1110wwww 10xxxxyy 10yyzzzz
+	// U+010000 U+10FFFF 11110uvv 10vvwwww 10xxxxyy 10yyzzzz
+	cnt := 0
+	for end >= 0 && cnt < 4 {
+		c := toValidate[end]
+		if c>>5 == 0b110 || c>>4 == 0b1110 || c>>3 == 0b11110 {
+			// a leading byte
+			toValidate = toValidate[:end]
+			break
+		} else if c>>6 == 0b10 {
+			// a continuation byte
+			end--
+		} else {
+			// not an utf-8 byte
+			break
+		}
+		cnt++
 	}
 
 	if utf8.Valid(toValidate) {
-		log.Debug("Detected encoding: utf-8 (fast)")
 		return "UTF-8", nil
 	}
 
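The replacement loop in DetectEncoding collapses the three hard-coded truncation cases into one backwards walk over trailing continuation bytes. A standard-library-only sketch of the same classification (function name and harness are illustrative, not Gitea's API):

package main

import (
	"fmt"
	"unicode/utf8"
)

// trimIncompleteTail mirrors the new loop: walk back over at most three trailing
// continuation bytes (10xxxxxx); if they hang off a leading byte (110xxxxx,
// 1110xxxx or 11110xxx), cut that final truncated character off before validating.
func trimIncompleteTail(b []byte) []byte {
	end := len(b) - 1
	cnt := 0
	for end >= 0 && cnt < 4 {
		c := b[end]
		if c>>5 == 0b110 || c>>4 == 0b1110 || c>>3 == 0b11110 {
			return b[:end] // leading byte of a truncated character: drop it
		} else if c>>6 == 0b10 {
			end-- // continuation byte: keep walking back
		} else {
			break // ASCII or a non-UTF-8 byte: nothing to trim
		}
		cnt++
	}
	return b
}

func main() {
	full := []byte("a💩")      // 💩 encodes as f0 9f 92 a9
	cut := full[:len(full)-1] // truncate the final byte, leaving a f0 9f 92
	fmt.Println(utf8.Valid(cut))                     // false
	fmt.Println(utf8.Valid(trimIncompleteTail(cut))) // true: "a" remains
}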
@@ -160,7 +148,7 @@ func DetectEncoding(content []byte) (string, error) {
 	if len(content) < 1024 {
 		// Check if original content is valid
 		if _, err := textDetector.DetectBest(content); err != nil {
-			return "", err
+			return util.IfZero(setting.Repository.AnsiCharset, "UTF-8"), err
 		}
 		times := 1024 / len(content)
 		detectContent = make([]byte, 0, times*len(content))
@@ -171,14 +159,10 @@ func DetectEncoding(content []byte) (string, error) {
 		detectContent = content
 	}
 
-	// Now we can't use DetectBest or just results[0] because the result isn't stable - so we need a tie break
+	// Now we can't use DetectBest or just results[0] because the result isn't stable - so we need a tie-break
 	results, err := textDetector.DetectAll(detectContent)
 	if err != nil {
-		if err == chardet.NotDetectedError && len(setting.Repository.AnsiCharset) > 0 {
-			log.Debug("Using default AnsiCharset: %s", setting.Repository.AnsiCharset)
-			return setting.Repository.AnsiCharset, nil
-		}
-		return "", err
+		return util.IfZero(setting.Repository.AnsiCharset, "UTF-8"), err
 	}
 
 	topConfidence := results[0].Confidence
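Both failure paths above now funnel through util.IfZero, so detection errors fall back to the configured ANSI charset when set and to UTF-8 otherwise. A hedged stand-in for the assumed semantics of that gitea-internal helper (which lives in modules/util):

package main

import "fmt"

// ifZero mimics the assumed behavior of gitea's util.IfZero:
// return def when v is its type's zero value, otherwise return v.
func ifZero[T comparable](v, def T) T {
	var zero T
	if v == zero {
		return def
	}
	return v
}

func main() {
	fmt.Println(ifZero("", "UTF-8"))        // UTF-8 (no AnsiCharset configured)
	fmt.Println(ifZero("GB18030", "UTF-8")) // GB18030 (configured fallback wins)
}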
@@ -201,11 +185,9 @@ func DetectEncoding(content []byte) (string, error) {
 	}
 
 	// FIXME: to properly decouple this function the fallback ANSI charset should be passed as an argument
-	if topResult.Charset != "UTF-8" && len(setting.Repository.AnsiCharset) > 0 {
-		log.Debug("Using default AnsiCharset: %s", setting.Repository.AnsiCharset)
+	if topResult.Charset != "UTF-8" && setting.Repository.AnsiCharset != "" {
 		return setting.Repository.AnsiCharset, err
 	}
 
-	log.Debug("Detected encoding: %s", topResult.Charset)
-	return topResult.Charset, err
+	return topResult.Charset, nil
 }
@@ -4,108 +4,89 @@
 package charset
 
 import (
-	"bytes"
 	"io"
+	"os"
 	"strings"
 	"testing"
 
 	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/test"
 
 	"github.com/stretchr/testify/assert"
 )
 
-func resetDefaultCharsetsOrder() {
-	defaultDetectedCharsetsOrder := make([]string, 0, len(setting.Repository.DetectedCharsetsOrder))
-	for _, charset := range setting.Repository.DetectedCharsetsOrder {
-		defaultDetectedCharsetsOrder = append(defaultDetectedCharsetsOrder, strings.ToLower(strings.TrimSpace(charset)))
-	}
+func TestMain(m *testing.M) {
 	setting.Repository.DetectedCharsetScore = map[string]int{}
-	i := 0
-	for _, charset := range defaultDetectedCharsetsOrder {
-		canonicalCharset := strings.ToLower(strings.TrimSpace(charset))
-		if _, has := setting.Repository.DetectedCharsetScore[canonicalCharset]; !has {
-			setting.Repository.DetectedCharsetScore[canonicalCharset] = i
-			i++
-		}
+	for i, charset := range setting.Repository.DetectedCharsetsOrder {
+		setting.Repository.DetectedCharsetScore[strings.ToLower(charset)] = i
 	}
+	os.Exit(m.Run())
 }
 
 func TestMaybeRemoveBOM(t *testing.T) {
-	res := MaybeRemoveBOM([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
+	res := maybeRemoveBOM([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
 	assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
 
-	res = MaybeRemoveBOM([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
+	res = maybeRemoveBOM([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
 	assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
 }
 
 func TestToUTF8(t *testing.T) {
-	resetDefaultCharsetsOrder()
-
 	// Note: golang compiler seems so behave differently depending on the current
 	// locale, so some conversions might behave differently. For that reason, we don't
 	// depend on particular conversions but in expected behaviors.
 
-	res, err := ToUTF8([]byte{0x41, 0x42, 0x43}, ConvertOpts{})
-	assert.NoError(t, err)
-	assert.Equal(t, "ABC", res)
+	res := ToUTF8([]byte{0x41, 0x42, 0x43}, ConvertOpts{})
+	assert.Equal(t, "ABC", string(res))
 
 	// "áéíóú"
-	res, err = ToUTF8([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
-	assert.NoError(t, err)
-	assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, []byte(res))
+	res = ToUTF8([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
+	assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
 
 	// "áéíóú"
-	res, err = ToUTF8([]byte{
+	res = ToUTF8([]byte{
 		0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3,
 		0xc3, 0xba,
 	}, ConvertOpts{})
-	assert.NoError(t, err)
-	assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, []byte(res))
+	assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
 
-	res, err = ToUTF8([]byte{
+	res = ToUTF8([]byte{
 		0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63,
 		0xF3, 0x6D, 0x6F, 0x20, 0xF1, 0x6F, 0x73, 0x41, 0x41, 0x41, 0x2e,
 	}, ConvertOpts{})
-	assert.NoError(t, err)
 	stringMustStartWith(t, "Hola,", res)
 	stringMustEndWith(t, "AAA.", res)
 
-	res, err = ToUTF8([]byte{
+	res = ToUTF8([]byte{
 		0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63,
 		0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73, 0x41, 0x41, 0x41, 0x2e,
 	}, ConvertOpts{})
-	assert.NoError(t, err)
 	stringMustStartWith(t, "Hola,", res)
 	stringMustEndWith(t, "AAA.", res)
 
-	res, err = ToUTF8([]byte{
+	res = ToUTF8([]byte{
 		0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63,
 		0xF3, 0x6D, 0x6F, 0x20, 0x81, 0xA4, 0x6F, 0x73, 0x41, 0x41, 0x41, 0x2e,
 	}, ConvertOpts{})
-	assert.NoError(t, err)
 	stringMustStartWith(t, "Hola,", res)
 	stringMustEndWith(t, "AAA.", res)
 
 	// Japanese (Shift-JIS)
 	// 日属秘ぞしちゅ。
-	res, err = ToUTF8([]byte{
+	res = ToUTF8([]byte{
 		0x93, 0xFA, 0x91, 0xAE, 0x94, 0xE9, 0x82, 0xBC, 0x82, 0xB5, 0x82,
 		0xBF, 0x82, 0xE3, 0x81, 0x42,
 	}, ConvertOpts{})
-	assert.NoError(t, err)
 	assert.Equal(t, []byte{
 		0xE6, 0x97, 0xA5, 0xE5, 0xB1, 0x9E, 0xE7, 0xA7, 0x98, 0xE3,
 		0x81, 0x9E, 0xE3, 0x81, 0x97, 0xE3, 0x81, 0xA1, 0xE3, 0x82, 0x85, 0xE3, 0x80, 0x82,
-	},
-		[]byte(res))
+	}, res)
 
-	res, err = ToUTF8([]byte{0x00, 0x00, 0x00, 0x00}, ConvertOpts{})
-	assert.NoError(t, err)
-	assert.Equal(t, []byte{0x00, 0x00, 0x00, 0x00}, []byte(res))
+	res = ToUTF8([]byte{0x00, 0x00, 0x00, 0x00}, ConvertOpts{})
+	assert.Equal(t, []byte{0x00, 0x00, 0x00, 0x00}, res)
 }
 
 func TestToUTF8WithFallback(t *testing.T) {
-	resetDefaultCharsetsOrder()
-
 	// "ABC"
 	res := ToUTF8WithFallback([]byte{0x41, 0x42, 0x43}, ConvertOpts{})
 	assert.Equal(t, []byte{0x41, 0x42, 0x43}, res)
@@ -152,54 +133,58 @@ func TestToUTF8WithFallback(t *testing.T) {
 }
 
 func TestToUTF8DropErrors(t *testing.T) {
-	resetDefaultCharsetsOrder()
-
 	// "ABC"
-	res := ToUTF8DropErrors([]byte{0x41, 0x42, 0x43}, ConvertOpts{})
+	res := ToUTF8DropErrors([]byte{0x41, 0x42, 0x43})
 	assert.Equal(t, []byte{0x41, 0x42, 0x43}, res)
 
 	// "áéíóú"
-	res = ToUTF8DropErrors([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
+	res = ToUTF8DropErrors([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
 	assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
 
 	// UTF8 BOM + "áéíóú"
-	res = ToUTF8DropErrors([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
+	res = ToUTF8DropErrors([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
 	assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
 
 	// "Hola, así cómo ños"
-	res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0xF1, 0x6F, 0x73}, ConvertOpts{})
+	res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0xF1, 0x6F, 0x73})
 	assert.Equal(t, []byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73}, res[:8])
 	assert.Equal(t, []byte{0x73}, res[len(res)-1:])
 
 	// "Hola, así cómo "
 	minmatch := []byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xC3, 0xAD, 0x20, 0x63, 0xC3, 0xB3, 0x6D, 0x6F, 0x20}
 
-	res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73}, ConvertOpts{})
+	res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73})
 	// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
 	assert.Equal(t, minmatch, res[0:len(minmatch)])
 
-	res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x81, 0xA4, 0x6F, 0x73}, ConvertOpts{})
+	res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x81, 0xA4, 0x6F, 0x73})
 	// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
 	assert.Equal(t, minmatch, res[0:len(minmatch)])
 
 	// Japanese (Shift-JIS)
 	// "日属秘ぞしちゅ。"
-	res = ToUTF8DropErrors([]byte{0x93, 0xFA, 0x91, 0xAE, 0x94, 0xE9, 0x82, 0xBC, 0x82, 0xB5, 0x82, 0xBF, 0x82, 0xE3, 0x81, 0x42}, ConvertOpts{})
+	res = ToUTF8DropErrors([]byte{0x93, 0xFA, 0x91, 0xAE, 0x94, 0xE9, 0x82, 0xBC, 0x82, 0xB5, 0x82, 0xBF, 0x82, 0xE3, 0x81, 0x42})
 	assert.Equal(t, []byte{
 		0xE6, 0x97, 0xA5, 0xE5, 0xB1, 0x9E, 0xE7, 0xA7, 0x98, 0xE3,
 		0x81, 0x9E, 0xE3, 0x81, 0x97, 0xE3, 0x81, 0xA1, 0xE3, 0x82, 0x85, 0xE3, 0x80, 0x82,
 	}, res)
 
-	res = ToUTF8DropErrors([]byte{0x00, 0x00, 0x00, 0x00}, ConvertOpts{})
+	res = ToUTF8DropErrors([]byte{0x00, 0x00, 0x00, 0x00})
 	assert.Equal(t, []byte{0x00, 0x00, 0x00, 0x00}, res)
 }
 
 func TestDetectEncoding(t *testing.T) {
-	resetDefaultCharsetsOrder()
-
 	testSuccess := func(b []byte, expected string) {
 		encoding, err := DetectEncoding(b)
 		assert.NoError(t, err)
 		assert.Equal(t, expected, encoding)
 	}
 
+	// invalid bytes
+	encoding, err := DetectEncoding([]byte{0xfa})
+	assert.Error(t, err)
+	assert.Equal(t, "UTF-8", encoding)
+
 	// utf-8
 	b := []byte("just some ascii")
 	testSuccess(b, "UTF-8")
@@ -214,169 +199,49 @@ func TestDetectEncoding(t *testing.T) {
 
 	// iso-8859-1: d<accented e>cor<newline>
 	b = []byte{0x44, 0xe9, 0x63, 0x6f, 0x72, 0x0a}
-	encoding, err := DetectEncoding(b)
+	encoding, err = DetectEncoding(b)
 	assert.NoError(t, err)
 	assert.Contains(t, encoding, "ISO-8859-1")
 
-	old := setting.Repository.AnsiCharset
-	setting.Repository.AnsiCharset = "placeholder"
-	defer func() {
-		setting.Repository.AnsiCharset = old
-	}()
-	testSuccess(b, "placeholder")
-
-	// invalid bytes
-	b = []byte{0xfa}
-	_, err = DetectEncoding(b)
-	assert.Error(t, err)
+	defer test.MockVariableValue(&setting.Repository.AnsiCharset, "MyEncoding")()
+	testSuccess(b, "MyEncoding")
 }
 
-func stringMustStartWith(t *testing.T, expected, value string) {
-	assert.Equal(t, expected, value[:len(expected)])
+func stringMustStartWith(t *testing.T, expected string, value []byte) {
+	assert.Equal(t, expected, string(value[:len(expected)]))
 }
 
-func stringMustEndWith(t *testing.T, expected, value string) {
-	assert.Equal(t, expected, value[len(value)-len(expected):])
+func stringMustEndWith(t *testing.T, expected string, value []byte) {
+	assert.Equal(t, expected, string(value[len(value)-len(expected):]))
 }
 
 func TestToUTF8WithFallbackReader(t *testing.T) {
-	resetDefaultCharsetsOrder()
+	test.MockVariableValue(&ToUTF8WithFallbackReaderPrefetchSize)
 
-	for testLen := range 2048 {
-		pattern := " test { () }\n"
-		input := ""
-		for len(input) < testLen {
-			input += pattern
-		}
-		input = input[:testLen]
-		input += "// Выключаем"
-		rd := ToUTF8WithFallbackReader(bytes.NewReader([]byte(input)), ConvertOpts{})
+	block := "aá啊🤔"
+	runes := []rune(block)
+	assert.Len(t, string(runes[0]), 1)
+	assert.Len(t, string(runes[1]), 2)
+	assert.Len(t, string(runes[2]), 3)
+	assert.Len(t, string(runes[3]), 4)
+
+	content := strings.Repeat(block, 2)
+	for i := 1; i < len(content); i++ {
+		encoding, err := DetectEncoding([]byte(content[:i]))
+		assert.NoError(t, err)
+		assert.Equal(t, "UTF-8", encoding)
+
+		ToUTF8WithFallbackReaderPrefetchSize = i
+		rd := ToUTF8WithFallbackReader(strings.NewReader(content), ConvertOpts{})
 		r, _ := io.ReadAll(rd)
-		assert.Equalf(t, input, string(r), "testing string len=%d", testLen)
+		assert.Equal(t, content, string(r))
+	}
+
+	for _, r := range runes {
+		content = "abc abc " + string(r) + string(r) + string(r)
+		for i := 0; i < len(content); i++ {
+			encoding, err := DetectEncoding([]byte(content[:i]))
+			assert.NoError(t, err)
+			assert.Equal(t, "UTF-8", encoding)
+		}
 	}
-
-	truncatedOneByteExtension := failFastBytes
-	encoding, _ := DetectEncoding(truncatedOneByteExtension)
-	assert.Equal(t, "UTF-8", encoding)
-
-	truncatedTwoByteExtension := failFastBytes
-	truncatedTwoByteExtension[len(failFastBytes)-1] = 0x9b
-	truncatedTwoByteExtension[len(failFastBytes)-2] = 0xe2
-
-	encoding, _ = DetectEncoding(truncatedTwoByteExtension)
-	assert.Equal(t, "UTF-8", encoding)
-
-	truncatedThreeByteExtension := failFastBytes
-	truncatedThreeByteExtension[len(failFastBytes)-1] = 0x92
-	truncatedThreeByteExtension[len(failFastBytes)-2] = 0x9f
-	truncatedThreeByteExtension[len(failFastBytes)-3] = 0xf0
-
-	encoding, _ = DetectEncoding(truncatedThreeByteExtension)
-	assert.Equal(t, "UTF-8", encoding)
-}
-
-var failFastBytes = []byte{
-	0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x74, 0x6f,
-	0x6f, 0x6c, 0x73, 0x2e, 0x61, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x73, 0x6b, 0x64, 0x65, 0x66, 0x73, 0x2e, 0x63, 0x6f, 0x6e,
-	0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x4f, 0x73, 0x0a, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x6f, 0x72, 0x67,
-	0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x62, 0x6f, 0x6f,
-	0x74, 0x2e, 0x67, 0x72, 0x61, 0x64, 0x6c, 0x65, 0x2e, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x2e, 0x72, 0x75, 0x6e, 0x2e, 0x42,
-	0x6f, 0x6f, 0x74, 0x52, 0x75, 0x6e, 0x0a, 0x0a, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x20, 0x7b, 0x0a, 0x20, 0x20,
-	0x20, 0x20, 0x69, 0x64, 0x28, 0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d,
-	0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x62, 0x6f, 0x6f, 0x74, 0x22, 0x29, 0x0a, 0x7d, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x65,
-	0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65,
-	0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a,
-	0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a, 0x61, 0x70, 0x69, 0x22, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d,
-	0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74,
-	0x28, 0x22, 0x3a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a, 0x61, 0x70, 0x69, 0x2d, 0x64, 0x6f, 0x63, 0x73, 0x22, 0x29,
-	0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e,
-	0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a, 0x64, 0x62,
-	0x22, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69,
-	0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a,
-	0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x22, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65,
-	0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a,
-	0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2d, 0x66,
-	0x73, 0x22, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74,
-	0x69, 0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
-	0x3a, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2d, 0x6d, 0x71, 0x22, 0x29, 0x29, 0x0a, 0x0a,
-	0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22,
-	0x6a, 0x66, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x65, 0x3a, 0x70, 0x65, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e,
-	0x2d, 0x61, 0x75, 0x74, 0x68, 0x2d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2d, 0x73, 0x74, 0x61, 0x72, 0x74,
-	0x65, 0x72, 0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74,
-	0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6a, 0x66, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x65, 0x3a, 0x70, 0x65, 0x2d, 0x63,
-	0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2d, 0x68, 0x61, 0x6c, 0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c,
-	0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6a, 0x66, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x2e,
-	0x70, 0x65, 0x3a, 0x70, 0x65, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2d, 0x63, 0x6f, 0x72, 0x65, 0x22, 0x29, 0x0a,
-	0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28,
-	0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b,
-	0x2e, 0x62, 0x6f, 0x6f, 0x74, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x62, 0x6f, 0x6f, 0x74, 0x2d, 0x73, 0x74,
-	0x61, 0x72, 0x74, 0x65, 0x72, 0x2d, 0x77, 0x65, 0x62, 0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c,
-	0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69,
-	0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x62, 0x6f, 0x6f, 0x74, 0x3a, 0x73, 0x70, 0x72,
-	0x69, 0x6e, 0x67, 0x2d, 0x62, 0x6f, 0x6f, 0x74, 0x2d, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x72, 0x2d, 0x61, 0x6f, 0x70,
-	0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f,
-	0x6e, 0x28, 0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f,
-	0x72, 0x6b, 0x2e, 0x62, 0x6f, 0x6f, 0x74, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x62, 0x6f, 0x6f, 0x74, 0x2d,
-	0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x72, 0x2d, 0x61, 0x63, 0x74, 0x75, 0x61, 0x74, 0x6f, 0x72, 0x22, 0x29, 0x0a, 0x20,
-	0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f,
-	0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x63,
-	0x6c, 0x6f, 0x75, 0x64, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2d, 0x73, 0x74,
-	0x61, 0x72, 0x74, 0x65, 0x72, 0x2d, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x22, 0x29, 0x0a, 0x20, 0x20,
|
|
||||||
0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f, 0x72,
|
|
||||||
0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x63, 0x6c,
|
|
||||||
0x6f, 0x75, 0x64, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2d, 0x73, 0x74, 0x61,
|
|
||||||
0x72, 0x74, 0x65, 0x72, 0x2d, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6c, 0x2d, 0x61, 0x6c, 0x6c, 0x22, 0x29, 0x0a, 0x20, 0x20,
|
|
||||||
0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f, 0x72,
|
|
||||||
0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x63, 0x6c,
|
|
||||||
0x6f, 0x75, 0x64, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2d, 0x73, 0x74, 0x61,
|
|
||||||
0x72, 0x74, 0x65, 0x72, 0x2d, 0x73, 0x6c, 0x65, 0x75, 0x74, 0x68, 0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d,
|
|
||||||
0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70,
|
|
||||||
0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x72, 0x65, 0x74, 0x72, 0x79, 0x3a,
|
|
||||||
0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x72, 0x65, 0x74, 0x72, 0x79, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20,
|
|
||||||
0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x63, 0x68, 0x2e, 0x71,
|
|
||||||
0x6f, 0x73, 0x2e, 0x6c, 0x6f, 0x67, 0x62, 0x61, 0x63, 0x6b, 0x3a, 0x6c, 0x6f, 0x67, 0x62, 0x61, 0x63, 0x6b, 0x2d, 0x63,
|
|
||||||
0x6c, 0x61, 0x73, 0x73, 0x69, 0x63, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d,
|
|
||||||
0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x69, 0x6f, 0x2e, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x6d, 0x65,
|
|
||||||
0x74, 0x65, 0x72, 0x3a, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x2d, 0x72, 0x65, 0x67, 0x69, 0x73,
|
|
||||||
0x74, 0x72, 0x79, 0x2d, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20,
|
|
||||||
0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6b, 0x6f, 0x74,
|
|
||||||
0x6c, 0x69, 0x6e, 0x28, 0x22, 0x73, 0x74, 0x64, 0x6c, 0x69, 0x62, 0x22, 0x29, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20,
|
|
||||||
0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f,
|
|
||||||
0x2f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x54, 0x65, 0x73, 0x74, 0x20, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64,
|
|
||||||
0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f,
|
|
||||||
0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74,
|
|
||||||
0x65, 0x73, 0x74, 0x49, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6a,
|
|
||||||
0x66, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x65, 0x3a, 0x70, 0x65, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2d,
|
|
||||||
0x74, 0x65, 0x73, 0x74, 0x22, 0x29, 0x0a, 0x7d, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x20, 0x70, 0x61, 0x74, 0x63, 0x68, 0x4a,
|
|
||||||
0x61, 0x72, 0x20, 0x62, 0x79, 0x20, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x2e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72,
|
|
||||||
0x69, 0x6e, 0x67, 0x28, 0x4a, 0x61, 0x72, 0x3a, 0x3a, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20,
|
|
||||||
0x20, 0x20, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x65, 0x72, 0x2e,
|
|
||||||
0x73, 0x65, 0x74, 0x28, 0x22, 0x70, 0x61, 0x74, 0x63, 0x68, 0x65, 0x64, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20,
|
|
||||||
0x76, 0x61, 0x6c, 0x20, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x70, 0x61, 0x74, 0x68,
|
|
||||||
0x20, 0x62, 0x79, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x67,
|
|
||||||
0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x6e, 0x69, 0x66, 0x65, 0x73, 0x74,
|
|
||||||
0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65,
|
|
||||||
0x73, 0x28, 0x22, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x2d, 0x50, 0x61, 0x74, 0x68, 0x22, 0x20, 0x74, 0x6f, 0x20, 0x6f, 0x62,
|
|
||||||
0x6a, 0x65, 0x63, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70,
|
|
||||||
0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x20, 0x76, 0x61, 0x6c, 0x20, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x20, 0x3d,
|
|
||||||
0x20, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x3a, 0x2f, 0x2b, 0x22, 0x2e, 0x74, 0x6f, 0x52, 0x65, 0x67, 0x65, 0x78, 0x28, 0x29,
|
|
||||||
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64,
|
|
||||||
0x65, 0x20, 0x66, 0x75, 0x6e, 0x20, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x3a, 0x20, 0x53, 0x74,
|
|
||||||
0x72, 0x69, 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x70,
|
|
||||||
0x61, 0x74, 0x68, 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x6f, 0x53, 0x74, 0x72, 0x69,
|
|
||||||
0x6e, 0x67, 0x28, 0x22, 0x20, 0x22, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
|
|
||||||
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x74, 0x2e, 0x74, 0x6f, 0x55, 0x52, 0x49, 0x28, 0x29, 0x2e, 0x74, 0x6f, 0x55,
|
|
||||||
0x52, 0x4c, 0x28, 0x29, 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c,
|
|
||||||
0x61, 0x63, 0x65, 0x46, 0x69, 0x72, 0x73, 0x74, 0x28, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x2c, 0x20, 0x22, 0x2f,
|
|
||||||
0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20,
|
|
||||||
0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x7d, 0x0a, 0x0a, 0x74, 0x61, 0x73,
|
|
||||||
0x6b, 0x73, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x3c, 0x42, 0x6f, 0x6f, 0x74, 0x52, 0x75, 0x6e, 0x3e, 0x28, 0x22, 0x62,
|
|
||||||
0x6f, 0x6f, 0x74, 0x52, 0x75, 0x6e, 0x22, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x4f,
|
|
||||||
0x73, 0x2e, 0x69, 0x73, 0x46, 0x61, 0x6d, 0x69, 0x6c, 0x79, 0x28, 0x4f, 0x73, 0x2e, 0x46, 0x41, 0x4d, 0x49, 0x4c, 0x59,
|
|
||||||
0x5f, 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, 0x53, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
|
|
||||||
0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x70, 0x61, 0x74, 0x68, 0x20, 0x3d, 0x20, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x28, 0x73,
|
|
||||||
0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x65, 0x74, 0x73, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x28, 0x22, 0x6d, 0x61, 0x69,
|
|
||||||
0x6e, 0x22, 0x29, 0x2e, 0x6d, 0x61, 0x70, 0x20, 0x7b, 0x20, 0x69, 0x74, 0x2e, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x20,
|
|
||||||
0x7d, 0x2c, 0x20, 0x70, 0x61, 0x74, 0x63, 0x68, 0x4a, 0x61, 0x72, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a,
|
|
||||||
0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0xd0,
|
|
||||||
}
|
}
|
||||||
|
|||||||
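Decoded to ASCII, failFastBytes is a Gradle Kotlin build script whose final bytes spell "    // Ð": the trailing 0xd0 is the lead byte of a two-byte UTF-8 sequence with its continuation byte cut off, which is exactly the truncated-tail case the assertions above exercise. A minimal standalone sketch of that property (illustrative only, not part of the patch):

	package main

	import (
		"fmt"
		"unicode/utf8"
	)

	func main() {
		tail := []byte{0x2f, 0x2f, 0x20, 0xd0} // "// " plus a truncated two-byte rune
		fmt.Println(utf8.Valid(tail))          // false, yet the detector is still expected to answer "UTF-8"
	}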
@@ -19,7 +19,6 @@ import (
 	charsetModule "code.gitea.io/gitea/modules/charset"
 	"code.gitea.io/gitea/modules/container"
 	"code.gitea.io/gitea/modules/httpcache"
-	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/typesniffer"
 	"code.gitea.io/gitea/modules/util"
@@ -109,11 +108,7 @@ func setServeHeadersByFile(r *http.Request, w http.ResponseWriter, mineBuf []byt
 	}
 
 	if isPlain {
-		charset, err := charsetModule.DetectEncoding(mineBuf)
-		if err != nil {
-			log.Error("Detect raw file %s charset failed: %v, using by default utf-8", opts.Filename, err)
-			charset = "utf-8"
-		}
+		charset, _ := charsetModule.DetectEncoding(mineBuf)
 		opts.ContentTypeCharset = strings.ToLower(charset)
 	}
@@ -203,7 +203,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
 		RepoID:    repo.ID,
 		CommitID:  commitSha,
 		Filename:  update.Filename,
-		Content:   string(charset.ToUTF8DropErrors(fileContents, charset.ConvertOpts{})),
+		Content:   string(charset.ToUTF8DropErrors(fileContents)),
 		Language:  analyze.GetCodeLanguage(update.Filename, fileContents),
 		UpdatedAt: time.Now().UTC(),
 	})
@@ -191,7 +191,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
 		Doc(map[string]any{
 			"repo_id":    repo.ID,
 			"filename":   update.Filename,
-			"content":    string(charset.ToUTF8DropErrors(fileContents, charset.ConvertOpts{})),
+			"content":    string(charset.ToUTF8DropErrors(fileContents)),
 			"commit_id":  sha,
 			"language":   analyze.GetCodeLanguage(update.Filename, fileContents),
 			"updated_at": timeutil.TimeStampNow(),
@@ -62,7 +62,28 @@ type PackageMetadata struct {
 	Author         User            `json:"author"`
 	ReadmeFilename string          `json:"readmeFilename,omitempty"`
 	Users          map[string]bool `json:"users,omitempty"`
-	License        string          `json:"license,omitempty"`
+	License        License         `json:"license,omitempty"`
+}
+
+type License string
+
+func (l *License) UnmarshalJSON(data []byte) error {
+	switch data[0] {
+	case '"':
+		var value string
+		if err := json.Unmarshal(data, &value); err != nil {
+			return err
+		}
+		*l = License(value)
+	case '{':
+		var values map[string]any
+		if err := json.Unmarshal(data, &values); err != nil {
+			return err
+		}
+		value, _ := values["type"].(string)
+		*l = License(value)
+	}
+	return nil
 }
 
 // PackageMetadataVersion documentation: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#version
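For context: npm metadata allows "license" to be either a plain SPDX string or a legacy object carrying a "type" field, which is why the new UnmarshalJSON branches on the first byte of the raw value. Assuming the License type added above, both shapes decode to the same result (a sketch):

	var l License
	_ = json.Unmarshal([]byte(`"MIT"`), &l)           // modern string form: l == "MIT"
	_ = json.Unmarshal([]byte(`{"type": "MIT"}`), &l) // legacy object form: l == "MIT" as well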
@@ -74,7 +95,7 @@ type PackageMetadataVersion struct {
 	Description  string            `json:"description"`
 	Author       User              `json:"author"`
 	Homepage     string            `json:"homepage,omitempty"`
-	License      string            `json:"license,omitempty"`
+	License      License           `json:"license,omitempty"`
 	Repository   Repository        `json:"repository"`
 	Keywords     []string          `json:"keywords,omitempty"`
 	Dependencies map[string]string `json:"dependencies,omitempty"`
@@ -13,6 +13,7 @@ import (
 	"code.gitea.io/gitea/modules/json"
 
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 )
 
 func TestParsePackage(t *testing.T) {
@@ -291,11 +292,36 @@ func TestParsePackage(t *testing.T) {
 		assert.Equal(t, packageDescription, p.Metadata.Readme)
 		assert.Equal(t, packageAuthor, p.Metadata.Author)
 		assert.Equal(t, packageBin, p.Metadata.Bin["bin"])
-		assert.Equal(t, "MIT", p.Metadata.License)
+		assert.Equal(t, "MIT", string(p.Metadata.License))
 		assert.Equal(t, "https://gitea.io/", p.Metadata.ProjectURL)
 		assert.Contains(t, p.Metadata.Dependencies, "package")
 		assert.Equal(t, "1.2.0", p.Metadata.Dependencies["package"])
 		assert.Equal(t, repository.Type, p.Metadata.Repository.Type)
 		assert.Equal(t, repository.URL, p.Metadata.Repository.URL)
 	})
 
+	t.Run("ValidLicenseMap", func(t *testing.T) {
+		packageJSON := `{
+			"versions": {
+				"0.1.1": {
+					"name": "dev-null",
+					"version": "0.1.1",
+					"license": {
+						"type": "MIT"
+					},
+					"dist": {
+						"integrity": "sha256-"
+					}
+				}
+			},
+			"_attachments": {
+				"foo": {
+					"data": "AAAA"
+				}
+			}
+		}`
+
+		p, err := ParsePackage(strings.NewReader(packageJSON))
+		require.NoError(t, err)
+		require.Equal(t, "MIT", string(p.Metadata.License))
+	})
 }
@@ -12,7 +12,7 @@ type Metadata struct {
 	Name         string            `json:"name,omitempty"`
 	Description  string            `json:"description,omitempty"`
 	Author       string            `json:"author,omitempty"`
-	License      string            `json:"license,omitempty"`
+	License      License           `json:"license,omitempty"`
 	ProjectURL   string            `json:"project_url,omitempty"`
 	Keywords     []string          `json:"keywords,omitempty"`
 	Dependencies map[string]string `json:"dependencies,omitempty"`
@@ -240,4 +240,5 @@ func PanicInDevOrTesting(msg string, a ...any) {
 	if !IsProd || IsInTesting {
 		panic(fmt.Sprintf(msg, a...))
 	}
+	log.Error(msg, a...)
 }
@@ -414,22 +414,116 @@ func SearchUsers(ctx *context.APIContext) {
 	// in: query
 	// description: page size of results
 	// type: integer
+	// - name: sort
+	//   in: query
+	//   description: sort users by attribute. Supported values are
+	//                "name", "created", "updated" and "id".
+	//                Default is "name"
+	//   type: string
+	// - name: order
+	//   in: query
+	//   description: sort order, either "asc" (ascending) or "desc" (descending).
+	//                Default is "asc", ignored if "sort" is not specified.
+	//   type: string
+	// - name: q
+	//   in: query
+	//   description: search term (username, full name, email)
+	//   type: string
+	// - name: visibility
+	//   in: query
+	//   description: visibility filter. Supported values are
+	//                "public", "limited" and "private".
+	//   type: string
+	// - name: is_active
+	//   in: query
+	//   description: filter active users
+	//   type: boolean
+	// - name: is_admin
+	//   in: query
+	//   description: filter admin users
+	//   type: boolean
+	// - name: is_restricted
+	//   in: query
+	//   description: filter restricted users
+	//   type: boolean
+	// - name: is_2fa_enabled
+	//   in: query
+	//   description: filter 2FA enabled users
+	//   type: boolean
+	// - name: is_prohibit_login
+	//   in: query
+	//   description: filter login prohibited users
+	//   type: boolean
 	// responses:
 	//   "200":
 	//     "$ref": "#/responses/UserList"
 	//   "403":
 	//     "$ref": "#/responses/forbidden"
+	//   "422":
+	//     "$ref": "#/responses/validationError"
 
 	listOptions := utils.GetListOptions(ctx)
+
-	users, maxResults, err := user_model.SearchUsers(ctx, user_model.SearchUserOptions{
-		Actor:       ctx.Doer,
-		Types:       []user_model.UserType{user_model.UserTypeIndividual},
-		LoginName:   ctx.FormTrim("login_name"),
-		SourceID:    ctx.FormInt64("source_id"),
-		OrderBy:     db.SearchOrderByAlphabetically,
-		ListOptions: listOptions,
-	})
+	orderBy := db.SearchOrderByAlphabetically
+	sortMode := ctx.FormString("sort")
+	if len(sortMode) > 0 {
+		sortOrder := ctx.FormString("order")
+		if len(sortOrder) == 0 {
+			sortOrder = "asc"
+		}
+		if searchModeMap, ok := user_model.AdminUserOrderByMap[sortOrder]; ok {
+			if order, ok := searchModeMap[sortMode]; ok {
+				orderBy = order
+			} else {
+				ctx.APIError(http.StatusUnprocessableEntity, fmt.Errorf("Invalid sort mode: \"%s\"", sortMode))
+				return
+			}
+		} else {
+			ctx.APIError(http.StatusUnprocessableEntity, fmt.Errorf("Invalid sort order: \"%s\"", sortOrder))
+			return
+		}
+	}
+
+	var visible []api.VisibleType
+	visibilityParam := ctx.FormString("visibility")
+	if len(visibilityParam) > 0 {
+		if visibility, ok := api.VisibilityModes[visibilityParam]; ok {
+			visible = []api.VisibleType{visibility}
+		} else {
+			ctx.APIError(http.StatusUnprocessableEntity, fmt.Errorf("Invalid visibility: \"%s\"", visibilityParam))
+			return
+		}
+	}
+
+	searchOpts := user_model.SearchUserOptions{
+		Actor:         ctx.Doer,
+		Types:         []user_model.UserType{user_model.UserTypeIndividual},
+		LoginName:     ctx.FormTrim("login_name"),
+		SourceID:      ctx.FormInt64("source_id"),
+		Keyword:       ctx.FormTrim("q"),
+		Visible:       visible,
+		OrderBy:       orderBy,
+		ListOptions:   listOptions,
+		SearchByEmail: true,
+	}
+
+	if ctx.FormString("is_active") != "" {
+		searchOpts.IsActive = optional.Some(ctx.FormBool("is_active"))
+	}
+	if ctx.FormString("is_admin") != "" {
+		searchOpts.IsAdmin = optional.Some(ctx.FormBool("is_admin"))
+	}
+	if ctx.FormString("is_restricted") != "" {
+		searchOpts.IsRestricted = optional.Some(ctx.FormBool("is_restricted"))
+	}
+	if ctx.FormString("is_2fa_enabled") != "" {
+		searchOpts.IsTwoFactorEnabled = optional.Some(ctx.FormBool("is_2fa_enabled"))
+	}
+	if ctx.FormString("is_prohibit_login") != "" {
+		searchOpts.IsProhibitLogin = optional.Some(ctx.FormBool("is_prohibit_login"))
+	}
+
+	users, maxResults, err := user_model.SearchUsers(ctx, searchOpts)
 	if err != nil {
 		ctx.APIErrorInternal(err)
 		return
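With these parameters wired through, a request such as GET /api/v1/users/search?q=alice&sort=created&order=desc&is_active=true (hypothetical values) is sorted and filtered server-side, and an unrecognized sort, order, or visibility value now yields the documented 422 validation error instead of being silently ignored.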
@@ -317,11 +317,7 @@ func EditFile(ctx *context.Context) {
 			ctx.ServerError("ReadAll", err)
 			return
 		}
-		if content, err := charset.ToUTF8(buf, charset.ConvertOpts{KeepBOM: true}); err != nil {
-			ctx.Data["FileContent"] = string(buf)
-		} else {
-			ctx.Data["FileContent"] = content
-		}
+		ctx.Data["FileContent"] = string(charset.ToUTF8(buf, charset.ConvertOpts{KeepBOM: true, ErrorReturnOrigin: true}))
 	}
 }
@@ -36,9 +36,7 @@ func CherryPick(ctx *context.Context) {
 		ctx.Data["commit_message"] = "revert " + cherryPickCommit.Message()
 	} else {
 		ctx.Data["CherryPickType"] = "cherry-pick"
-		splits := strings.SplitN(cherryPickCommit.Message(), "\n", 2)
-		ctx.Data["commit_summary"] = splits[0]
-		ctx.Data["commit_message"] = splits[1]
+		ctx.Data["commit_summary"], ctx.Data["commit_message"], _ = strings.Cut(cherryPickCommit.Message(), "\n")
 	}
 
 	ctx.HTML(http.StatusOK, tplCherryPick)
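The strings.Cut rewrite also removes a latent panic: for a commit message containing no newline, strings.SplitN(msg, "\n", 2) returns a single element, so indexing splits[1] would be out of range, while Cut degrades gracefully. The stdlib semantics, as a sketch:

	summary, body, found := strings.Cut("fix: one-line message", "\n")
	// summary == "fix: one-line message", body == "", found == false; nothing to panic on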
@@ -835,11 +835,11 @@ parsingLoop:
 			if buffer.Len() == 0 {
 				continue
 			}
-			charsetLabel, err := charset.DetectEncoding(buffer.Bytes())
-			if charsetLabel != "UTF-8" && err == nil {
-				encoding, _ := stdcharset.Lookup(charsetLabel)
-				if encoding != nil {
-					diffLineTypeDecoders[lineType] = encoding.NewDecoder()
+			charsetLabel, _ := charset.DetectEncoding(buffer.Bytes())
+			if charsetLabel != "UTF-8" {
+				charsetEncoding, _ := stdcharset.Lookup(charsetLabel)
+				if charsetEncoding != nil {
+					diffLineTypeDecoders[lineType] = charsetEncoding.NewDecoder()
 				}
 			}
 		}
@@ -1325,10 +1325,10 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
 		shouldFullFileHighlight := !setting.Git.DisableDiffHighlight && attrDiff.Value() == ""
 		if shouldFullFileHighlight {
 			if limitedContent.LeftContent != nil && limitedContent.LeftContent.buf.Len() < MaxDiffHighlightEntireFileSize {
-				diffFile.highlightedLeftLines = highlightCodeLines(diffFile, true /* left */, limitedContent.LeftContent.buf.String())
+				diffFile.highlightedLeftLines = highlightCodeLines(diffFile, true /* left */, limitedContent.LeftContent.buf.Bytes())
 			}
 			if limitedContent.RightContent != nil && limitedContent.RightContent.buf.Len() < MaxDiffHighlightEntireFileSize {
-				diffFile.highlightedRightLines = highlightCodeLines(diffFile, false /* right */, limitedContent.RightContent.buf.String())
+				diffFile.highlightedRightLines = highlightCodeLines(diffFile, false /* right */, limitedContent.RightContent.buf.Bytes())
 			}
 		}
 	}
@@ -1336,9 +1336,34 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
 	return diff, nil
 }
 
-func highlightCodeLines(diffFile *DiffFile, isLeft bool, content string) map[int]template.HTML {
+func splitHighlightLines(buf []byte) (ret [][]byte) {
+	lineCount := bytes.Count(buf, []byte("\n")) + 1
+	ret = make([][]byte, 0, lineCount)
+	nlTagClose := []byte("\n</")
+	for {
+		pos := bytes.IndexByte(buf, '\n')
+		if pos == -1 {
+			ret = append(ret, buf)
+			return ret
+		}
+		// Chroma highlighting output sometimes has "</span>" right after "\n", sometimes before:
+		// * "<span>text\n</span>"
+		// * "<span>text</span>\n"
+		if bytes.HasPrefix(buf[pos:], nlTagClose) {
+			pos1 := bytes.IndexByte(buf[pos:], '>')
+			if pos1 != -1 {
+				pos += pos1
+			}
+		}
+		ret = append(ret, buf[:pos+1])
+		buf = buf[pos+1:]
+	}
+}
+
+func highlightCodeLines(diffFile *DiffFile, isLeft bool, rawContent []byte) map[int]template.HTML {
+	content := util.UnsafeBytesToString(charset.ToUTF8(rawContent, charset.ConvertOpts{}))
 	highlightedNewContent, _ := highlight.Code(diffFile.Name, diffFile.Language, content)
-	splitLines := strings.Split(string(highlightedNewContent), "\n")
+	splitLines := splitHighlightLines([]byte(highlightedNewContent))
 	lines := make(map[int]template.HTML, len(splitLines))
 	// only save the highlighted lines we need, but not the whole file, to save memory
 	for _, sec := range diffFile.Sections {
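The custom splitter exists because a plain strings.Split on "\n" would separate Chroma's "<span>text\n</span>" form in the middle of a tag pair. A quick illustration, assuming the splitHighlightLines above:

	lines := splitHighlightLines([]byte("<span>a\n</span><span>b</span>"))
	// lines[0] == "<span>a\n</span>": the closing tag stays with its own line,
	// lines[1] == "<span>b</span>":   so every entry is a balanced HTML fragment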
@@ -5,6 +5,7 @@
 package gitdiff
 
 import (
+	"html/template"
 	"strconv"
 	"strings"
 	"testing"
@@ -1106,3 +1107,41 @@ func TestDiffLine_GetExpandDirection(t *testing.T) {
 		assert.Equal(t, c.direction, c.diffLine.GetExpandDirection(), "case %s expected direction: %s", c.name, c.direction)
 	}
 }
+
+func TestHighlightCodeLines(t *testing.T) {
+	t.Run("CharsetDetecting", func(t *testing.T) {
+		diffFile := &DiffFile{
+			Name:     "a.c",
+			Language: "c",
+			Sections: []*DiffSection{
+				{
+					Lines: []*DiffLine{{LeftIdx: 1}},
+				},
+			},
+		}
+		ret := highlightCodeLines(diffFile, true, []byte("// abc\xcc def\xcd")) // ISO-8859-1 bytes
+		assert.Equal(t, "<span class=\"c1\">// abcÌ defÍ\n</span>", string(ret[0]))
+	})
+
+	t.Run("LeftLines", func(t *testing.T) {
+		diffFile := &DiffFile{
+			Name:     "a.c",
+			Language: "c",
+			Sections: []*DiffSection{
+				{
+					Lines: []*DiffLine{
+						{LeftIdx: 1},
+						{LeftIdx: 2},
+						{LeftIdx: 3},
+					},
+				},
+			},
+		}
+		const nl = "\n"
+		ret := highlightCodeLines(diffFile, true, []byte("a\nb\n"))
+		assert.Equal(t, map[int]template.HTML{
+			0: `<span class="n">a</span>` + nl,
+			1: `<span class="n">b</span>`,
+		}, ret)
+	})
+}
@@ -25,12 +25,12 @@ func TestDiffWithHighlight(t *testing.T) {
 
 	t.Run("CleanUp", func(t *testing.T) {
 		hcd := newHighlightCodeDiff()
-		codeA := template.HTML(`<span class="cm>this is a comment</span>`)
-		codeB := template.HTML(`<span class="cm>this is updated comment</span>`)
+		codeA := template.HTML(`<span class="cm">this is a comment</span>`)
+		codeB := template.HTML(`<span class="cm">this is updated comment</span>`)
 		outDel := hcd.diffLineWithHighlight(DiffLineDel, codeA, codeB)
-		assert.Equal(t, `<span class="cm>this is <span class="removed-code">a</span> comment</span>`, string(outDel))
+		assert.Equal(t, `<span class="cm">this is <span class="removed-code">a</span> comment</span>`, string(outDel))
 		outAdd := hcd.diffLineWithHighlight(DiffLineAdd, codeA, codeB)
-		assert.Equal(t, `<span class="cm>this is <span class="added-code">updated</span> comment</span>`, string(outAdd))
+		assert.Equal(t, `<span class="cm">this is <span class="added-code">updated</span> comment</span>`, string(outAdd))
 	})
 
 	t.Run("OpenCloseTags", func(t *testing.T) {
@@ -4,10 +4,8 @@
 package sender
 
 import (
+	"errors"
 	"io"
-
-	"code.gitea.io/gitea/modules/log"
-	"code.gitea.io/gitea/modules/setting"
 )
 
 type Sender interface {
@@ -16,23 +14,18 @@ type Sender interface {
 
 var Send = send
 
-func send(sender Sender, msgs ...*Message) error {
-	if setting.MailService == nil {
-		log.Error("Mailer: Send is being invoked but mail service hasn't been initialized")
-		return nil
-	}
-	for _, msg := range msgs {
-		m := msg.ToMessage()
-		froms := m.GetFrom()
-		to, err := m.GetRecipients()
-		if err != nil {
-			return err
-		}
-
-		// TODO: implement sending from multiple addresses
-		if err := sender.Send(froms[0].Address, to, m); err != nil {
-			return err
-		}
-	}
-	return nil
+func send(sender Sender, msg *Message) error {
+	m := msg.ToMessage()
+	froms := m.GetFrom()
+	to, err := m.GetRecipients()
+	if err != nil {
+		return err
+	}
+
+	// TODO: implement sending from multiple addresses
+	if len(froms) == 0 {
+		// FIXME: no idea why sometimes the "froms" can be empty, need to figure out the root problem
+		return errors.New("no FROM specified")
+	}
+	return sender.Send(froms[0].Address, to, m)
 }
@@ -177,7 +177,7 @@ func substGiteaTemplateFile(ctx context.Context, tmpDir, tmpDirSubPath string, t
 	}
 
 	generatedContent := generateExpansion(ctx, string(content), templateRepo, generateRepo)
-	substSubPath := filepath.Clean(filePathSanitize(generateExpansion(ctx, tmpDirSubPath, templateRepo, generateRepo)))
+	substSubPath := filePathSanitize(generateExpansion(ctx, tmpDirSubPath, templateRepo, generateRepo))
 	newLocalPath := filepath.Join(tmpDir, substSubPath)
 	regular, err := util.IsRegularFile(newLocalPath)
 	if canWrite := regular || errors.Is(err, fs.ErrNotExist); !canWrite {
@@ -358,5 +358,5 @@ func filePathSanitize(s string) string {
 		}
 		fields[i] = field
 	}
-	return filepath.FromSlash(strings.Join(fields, "/"))
+	return filepath.Clean(filepath.FromSlash(strings.Trim(strings.Join(fields, "/"), "/")))
 }
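Folding the Clean and Trim steps into filePathSanitize changes its edge cases, which the updated test in the next hunk pins down. The stdlib behavior it now inherits, as a sketch:

	strings.Trim("//a/", "/")                                  // "a"
	filepath.Clean(filepath.FromSlash(strings.Trim("/", "/"))) // ".", since an empty path cleans to "."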
@@ -54,19 +54,24 @@ text/*.txt
 }
 
 func TestFilePathSanitize(t *testing.T) {
-	assert.Equal(t, "test_CON", filePathSanitize("test_CON"))
-	assert.Equal(t, "test CON", filePathSanitize("test CON "))
-	assert.Equal(t, "__/traverse/__", filePathSanitize(".. /traverse/ .."))
-	assert.Equal(t, "./__/a/_git/b_", filePathSanitize("./../a/.git/ b: "))
+	// path clean
+	assert.Equal(t, "a", filePathSanitize("//a/"))
+	assert.Equal(t, "_a", filePathSanitize(`\a`))
+	assert.Equal(t, "__/a/__", filePathSanitize(".. /a/ .."))
+	assert.Equal(t, "__/a/_git/b_", filePathSanitize("./../a/.git/ b: "))
+
+	// Windows reserved names
 	assert.Equal(t, "_", filePathSanitize("CoN"))
 	assert.Equal(t, "_", filePathSanitize("LpT1"))
 	assert.Equal(t, "_", filePathSanitize("CoM1"))
+	assert.Equal(t, "test_CON", filePathSanitize("test_CON"))
+	assert.Equal(t, "test CON", filePathSanitize("test CON "))
+
+	// special chars
 	assert.Equal(t, "_", filePathSanitize("\u0000"))
-	assert.Equal(t, "目标", filePathSanitize("目标"))
-	// unlike filepath.Clean, it only sanitizes, doesn't change the separator layout
-	assert.Equal(t, "", filePathSanitize("")) //nolint:testifylint // for easy reading
+	assert.Equal(t, ".", filePathSanitize(""))
 	assert.Equal(t, ".", filePathSanitize("."))
-	assert.Equal(t, "/", filePathSanitize("/"))
+	assert.Equal(t, ".", filePathSanitize("/"))
 }
 
 func TestProcessGiteaTemplateFile(t *testing.T) {
@@ -4,7 +4,7 @@
 <div class="ui form">
 	<div class="field">
 		<label>{{svg "octicon-terminal"}} {{ctx.Locale.Tr "packages.pypi.install"}}</label>
-		<div class="markup"><pre class="code-block"><code>pip install --index-url <origin-url data-url="{{AppSubUrl}}/api/packages/{{.PackageDescriptor.Owner.Name}}/pypi/simple/"></origin-url> --extra-index-url https://pypi.org/ {{.PackageDescriptor.Package.Name}}</code></pre></div>
+		<div class="markup"><pre class="code-block"><code>pip install --index-url <origin-url data-url="{{AppSubUrl}}/api/packages/{{.PackageDescriptor.Owner.Name}}/pypi/simple/"></origin-url> --extra-index-url https://pypi.org/simple {{.PackageDescriptor.Package.Name}}</code></pre></div>
 	</div>
 	<div class="field">
 		<label>{{ctx.Locale.Tr "packages.registry.documentation" "PyPI" "https://docs.gitea.com/usage/packages/pypi/"}}</label>
@@ -96,7 +96,7 @@
 		</div>
 	{{else if eq .Type 2}}
 		<div class="timeline-item event" id="{{.HashTag}}">
-			<span class="badge tw-bg-red tw-text-white">{{svg "octicon-circle-slash"}}</span>
+			<span class="badge tw-bg-red tw-text-white">{{svg "octicon-issue-closed"}}</span>
 			{{if not .OriginalAuthor}}
 				{{template "shared/user/avatarlink" dict "user" .Poster}}
 			{{end}}
@@ -62,7 +62,7 @@
 			{{if not .IsDisplayingSource}}data-raw-file-link="{{$.RawFileLink}}"{{end}}
 			data-tooltip-content="{{if .CanCopyContent}}{{ctx.Locale.Tr "copy_content"}}{{else}}{{ctx.Locale.Tr "copy_type_unsupported"}}{{end}}"
 		>{{svg "octicon-copy"}}</a>
-		{{if .EnableFeed}}
+		{{if and .EnableFeed .RefFullName.IsBranch}}
 			<a class="btn-octicon" href="{{$.RepoLink}}/rss/{{$.RefTypeNameSubURL}}/{{PathEscapeSegments .TreePath}}" data-tooltip-content="{{ctx.Locale.Tr "rss_feed"}}">
 				{{svg "octicon-rss"}}
 			</a>
57
templates/swagger/v1_json.tmpl
generated
@@ -781,6 +781,60 @@
           "description": "page size of results",
           "name": "limit",
           "in": "query"
+        },
+        {
+          "type": "string",
+          "description": "sort users by attribute. Supported values are \"name\", \"created\", \"updated\" and \"id\". Default is \"name\"",
+          "name": "sort",
+          "in": "query"
+        },
+        {
+          "type": "string",
+          "description": "sort order, either \"asc\" (ascending) or \"desc\" (descending). Default is \"asc\", ignored if \"sort\" is not specified.",
+          "name": "order",
+          "in": "query"
+        },
+        {
+          "type": "string",
+          "description": "search term (username, full name, email)",
+          "name": "q",
+          "in": "query"
+        },
+        {
+          "type": "string",
+          "description": "visibility filter. Supported values are \"public\", \"limited\" and \"private\".",
+          "name": "visibility",
+          "in": "query"
+        },
+        {
+          "type": "boolean",
+          "description": "filter active users",
+          "name": "is_active",
+          "in": "query"
+        },
+        {
+          "type": "boolean",
+          "description": "filter admin users",
+          "name": "is_admin",
+          "in": "query"
+        },
+        {
+          "type": "boolean",
+          "description": "filter restricted users",
+          "name": "is_restricted",
+          "in": "query"
+        },
+        {
+          "type": "boolean",
+          "description": "filter 2FA enabled users",
+          "name": "is_2fa_enabled",
+          "in": "query"
+        },
+        {
+          "type": "boolean",
+          "description": "filter login prohibited users",
+          "name": "is_prohibit_login",
+          "in": "query"
+        }
       ],
       "responses": {
@@ -789,6 +843,9 @@
         },
         "403": {
           "$ref": "#/responses/forbidden"
+        },
+        "422": {
+          "$ref": "#/responses/validationError"
         }
       }
     },
@@ -4,6 +4,7 @@
 package migrations
 
 import (
+	"bytes"
 	"compress/gzip"
 	"context"
 	"database/sql"
@@ -21,7 +22,6 @@ import (
 	"code.gitea.io/gitea/models/migrations"
 	migrate_base "code.gitea.io/gitea/models/migrations/base"
 	"code.gitea.io/gitea/models/unittest"
-	"code.gitea.io/gitea/modules/charset"
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/setting"
@@ -108,11 +108,11 @@ func readSQLFromFile(version string) (string, error) {
 	}
 	defer gr.Close()
 
-	bytes, err := io.ReadAll(gr)
+	buf, err := io.ReadAll(gr)
 	if err != nil {
 		return "", err
 	}
-	return string(charset.MaybeRemoveBOM(bytes, charset.ConvertOpts{})), nil
+	return string(bytes.TrimPrefix(buf, []byte{'\xef', '\xbb', '\xbf'})), nil
 }
 
 func restoreOldDB(t *testing.T, version string) {
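With the charset import gone, the BOM strip is inlined: 0xEF 0xBB 0xBF is the UTF-8 byte-order mark that charset.MaybeRemoveBOM previously dropped. The same trim, as a sketch:

	buf := []byte("\xef\xbb\xbfSELECT 1;")
	fmt.Println(string(bytes.TrimPrefix(buf, []byte{0xef, 0xbb, 0xbf}))) // "SELECT 1;"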
@@ -39,6 +39,8 @@
 
   --gap-inline: 0.25rem; /* gap for inline texts and elements, for example: the spaces for sentence with labels, button text, etc */
   --gap-block: 0.5rem; /* gap for element blocks, for example: spaces between buttons, menu image & title, header icon & title etc */
+
+  --background-view-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAG0lEQVQYlWN4+vTpf3SMDTAMBYXYBLFpHgoKAeiOf0SGE9kbAAAAAElFTkSuQmCC") right bottom var(--color-primary-light-7);
 }
 
 @media (min-width: 768px) and (max-width: 1200px) {
@@ -13,7 +13,7 @@
 
 .image-diff-container img {
   border: 1px solid var(--color-primary-light-7);
-  background: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAG0lEQVQYlWN4+vTpf3SMDTAMBYXYBLFpHgoKAeiOf0SGE9kbAAAAAElFTkSuQmCC") right bottom var(--color-primary-light-7);
+  background: var(--background-view-image);
 }
 
 .image-diff-container .before-container {
@@ -81,6 +81,7 @@
 .view-raw img[src$=".svg" i] {
   max-height: 600px !important;
   max-width: 600px !important;
+  background: var(--background-view-image);
 }
 
 .file-view-render-container {
@@ -2,20 +2,10 @@
 import {SvgIcon} from '../svg.ts';
 import {isPlainClick} from '../utils/dom.ts';
 import {shallowRef} from 'vue';
-import {type createViewFileTreeStore} from './ViewFileTreeStore.ts';
-
-export type Item = {
-  entryName: string;
-  entryMode: 'blob' | 'exec' | 'tree' | 'commit' | 'symlink' | 'unknown';
-  entryIcon: string;
-  entryIconOpen: string;
-  fullPath: string;
-  submoduleUrl?: string;
-  children?: Item[];
-};
+import type {createViewFileTreeStore, FileTreeItem} from './ViewFileTreeStore.ts';
 
 const props = defineProps<{
-  item: Item,
+  item: FileTreeItem,
   store: ReturnType<typeof createViewFileTreeStore>
 }>();
 
@@ -3,11 +3,20 @@ import {GET} from '../modules/fetch.ts';
 import {pathEscapeSegments} from '../utils/url.ts';
 import {createElementFromHTML} from '../utils/dom.ts';
 import {html} from '../utils/html.ts';
-import type {Item} from './ViewFileTreeItem.vue';
+
+export type FileTreeItem = {
+  entryName: string;
+  entryMode: 'blob' | 'exec' | 'tree' | 'commit' | 'symlink' | 'unknown';
+  entryIcon: string;
+  entryIconOpen: string;
+  fullPath: string;
+  submoduleUrl?: string;
+  children?: Array<FileTreeItem>;
+};
 
 export function createViewFileTreeStore(props: {repoLink: string, treePath: string, currentRefNameSubURL: string}) {
   const store = reactive({
-    rootFiles: [] as Array<Item>,
+    rootFiles: [] as Array<FileTreeItem>,
     selectedItem: props.treePath,
 
     async loadChildren(treePath: string, subPath: string = '') {
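Moving the FileTreeItem type out of the .vue component and into ViewFileTreeStore.ts reverses the previous import direction, where the store imported a type from the component it feeds; presumably that circular dependency is the motivation, since no runtime behavior changes here.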
@ -3,7 +3,33 @@ import {hideElem, loadElem, queryElemChildren, queryElems} from '../utils/dom.ts
|
|||||||
import {parseDom} from '../utils.ts';
|
import {parseDom} from '../utils.ts';
|
||||||
import {fomanticQuery} from '../modules/fomantic/base.ts';
|
import {fomanticQuery} from '../modules/fomantic/base.ts';
|
||||||
|
|
||||||
function getDefaultSvgBoundsIfUndefined(text: string, src: string) {
|
type ImageContext = {
|
||||||
|
imageBefore: HTMLImageElement | undefined,
|
||||||
|
imageAfter: HTMLImageElement | undefined,
|
||||||
|
sizeBefore: {width: number, height: number},
|
||||||
|
sizeAfter: {width: number, height: number},
|
||||||
|
maxSize: {width: number, height: number},
|
||||||
|
ratio: [number, number, number, number],
|
||||||
|
};
|
||||||
|
|
||||||
|
type ImageInfo = {
|
||||||
|
path: string | null,
|
||||||
|
mime: string | null,
|
||||||
|
images: NodeListOf<HTMLImageElement>,
|
||||||
|
boundsInfo: HTMLElement | null,
|
||||||
|
};
|
||||||
|
|
||||||
|
type Bounds = {
|
||||||
|
width: number,
|
||||||
|
height: number,
|
||||||
|
} | null;
|
||||||
|
|
||||||
|
type SvgBoundsInfo = {
|
||||||
|
before: Bounds,
|
||||||
|
after: Bounds,
|
||||||
|
};
|
||||||
|
|
||||||
|
function getDefaultSvgBoundsIfUndefined(text: string, src: string): Bounds | null {
|
||||||
const defaultSize = 300;
|
const defaultSize = 300;
|
||||||
const maxSize = 99999;
|
const maxSize = 99999;
|
||||||
|
|
||||||
@ -38,14 +64,14 @@ function getDefaultSvgBoundsIfUndefined(text: string, src: string) {
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
function createContext(imageAfter: HTMLImageElement, imageBefore: HTMLImageElement) {
|
function createContext(imageAfter: HTMLImageElement, imageBefore: HTMLImageElement, svgBoundsInfo: SvgBoundsInfo): ImageContext {
|
||||||
const sizeAfter = {
|
const sizeAfter = {
|
||||||
width: imageAfter?.width || 0,
|
width: svgBoundsInfo.after?.width || imageAfter?.width || 0,
|
||||||
height: imageAfter?.height || 0,
|
height: svgBoundsInfo.after?.height || imageAfter?.height || 0,
|
||||||
};
|
};
|
||||||
const sizeBefore = {
|
const sizeBefore = {
|
||||||
width: imageBefore?.width || 0,
|
width: svgBoundsInfo.before?.width || imageBefore?.width || 0,
|
||||||
height: imageBefore?.height || 0,
|
height: svgBoundsInfo.before?.height || imageBefore?.height || 0,
|
||||||
};
|
};
|
||||||
const maxSize = {
|
const maxSize = {
|
||||||
width: Math.max(sizeBefore.width, sizeAfter.width),
|
width: Math.max(sizeBefore.width, sizeAfter.width),
|
||||||
@ -80,7 +106,7 @@ class ImageDiff {
|
|||||||
// the container may be hidden by "viewed" checkbox, so use the parent's width for reference
|
// the container may be hidden by "viewed" checkbox, so use the parent's width for reference
|
||||||
this.diffContainerWidth = Math.max(containerEl.closest('.diff-file-box')!.clientWidth - 300, 100);
|
this.diffContainerWidth = Math.max(containerEl.closest('.diff-file-box')!.clientWidth - 300, 100);
|
||||||
|
|
||||||
const imageInfos = [{
|
const imagePair: [ImageInfo, ImageInfo] = [{
|
||||||
path: containerEl.getAttribute('data-path-after'),
|
path: containerEl.getAttribute('data-path-after'),
|
||||||
mime: containerEl.getAttribute('data-mime-after'),
|
mime: containerEl.getAttribute('data-mime-after'),
|
||||||
images: containerEl.querySelectorAll<HTMLImageElement>('img.image-after'), // matches 3 <img>
|
images: containerEl.querySelectorAll<HTMLImageElement>('img.image-after'), // matches 3 <img>
|
||||||
@ -92,7 +118,8 @@ class ImageDiff {
|
|||||||
boundsInfo: containerEl.querySelector('.bounds-info-before'),
|
boundsInfo: containerEl.querySelector('.bounds-info-before'),
|
||||||
}];
|
}];
|
||||||
|
|
||||||
await Promise.all(imageInfos.map(async (info) => {
|
const svgBoundsInfo: SvgBoundsInfo = {before: null, after: null};
|
||||||
|
await Promise.all(imagePair.map(async (info, index) => {
|
||||||
const [success] = await Promise.all(Array.from(info.images, (img) => {
|
const [success] = await Promise.all(Array.from(info.images, (img) => {
|
||||||
return loadElem(img, info.path!);
|
return loadElem(img, info.path!);
|
||||||
}));
|
}));
|
||||||
@ -102,115 +129,112 @@ class ImageDiff {
|
|||||||
const resp = await GET(info.path!);
|
const resp = await GET(info.path!);
|
||||||
const text = await resp.text();
|
const text = await resp.text();
|
||||||
const bounds = getDefaultSvgBoundsIfUndefined(text, info.path!);
|
const bounds = getDefaultSvgBoundsIfUndefined(text, info.path!);
|
||||||
|
svgBoundsInfo[index === 0 ? 'after' : 'before'] = bounds;
|
||||||
if (bounds) {
|
if (bounds) {
|
||||||
for (const el of info.images) {
|
|
||||||
el.setAttribute('width', String(bounds.width));
|
|
||||||
el.setAttribute('height', String(bounds.height));
|
|
||||||
}
|
|
||||||
hideElem(info.boundsInfo!);
|
hideElem(info.boundsInfo!);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}));
|
}));
|
||||||
|
|
||||||
const imagesAfter = imageInfos[0].images;
|
const imagesAfter = imagePair[0].images;
|
||||||
const imagesBefore = imageInfos[1].images;
|
const imagesBefore = imagePair[1].images;
|
||||||
|
|
||||||
this.initSideBySide(createContext(imagesAfter[0], imagesBefore[0]));
|
this.initSideBySide(createContext(imagesAfter[0], imagesBefore[0], svgBoundsInfo));
|
||||||
if (imagesAfter.length > 0 && imagesBefore.length > 0) {
|
if (imagesAfter.length > 0 && imagesBefore.length > 0) {
|
||||||
this.initSwipe(createContext(imagesAfter[1], imagesBefore[1]));
|
this.initSwipe(createContext(imagesAfter[1], imagesBefore[1], svgBoundsInfo));
|
||||||
this.initOverlay(createContext(imagesAfter[2], imagesBefore[2]));
|
this.initOverlay(createContext(imagesAfter[2], imagesBefore[2], svgBoundsInfo));
|
||||||
}
|
}
|
||||||
queryElemChildren(containerEl, '.image-diff-tabs', (el) => el.classList.remove('is-loading'));
|
queryElemChildren(containerEl, '.image-diff-tabs', (el) => el.classList.remove('is-loading'));
|
||||||
}
|
}
|
||||||
-  initSideBySide(sizes: Record<string, any>) {
+  initSideBySide(ctx: ImageContext) {
     let factor = 1;
-    if (sizes.maxSize.width > (this.diffContainerWidth - 24) / 2) {
-      factor = (this.diffContainerWidth - 24) / 2 / sizes.maxSize.width;
+    if (ctx.maxSize.width > (this.diffContainerWidth - 24) / 2) {
+      factor = (this.diffContainerWidth - 24) / 2 / ctx.maxSize.width;
     }

-    const widthChanged = sizes.imageAfter && sizes.imageBefore && sizes.imageAfter.naturalWidth !== sizes.imageBefore.naturalWidth;
-    const heightChanged = sizes.imageAfter && sizes.imageBefore && sizes.imageAfter.naturalHeight !== sizes.imageBefore.naturalHeight;
-    if (sizes.imageAfter) {
+    const widthChanged = ctx.imageAfter && ctx.imageBefore && ctx.imageAfter.naturalWidth !== ctx.imageBefore.naturalWidth;
+    const heightChanged = ctx.imageAfter && ctx.imageBefore && ctx.imageAfter.naturalHeight !== ctx.imageBefore.naturalHeight;
+    if (ctx.imageAfter) {
       const boundsInfoAfterWidth = this.containerEl.querySelector('.bounds-info-after .bounds-info-width');
       if (boundsInfoAfterWidth) {
-        boundsInfoAfterWidth.textContent = `${sizes.imageAfter.naturalWidth}px`;
+        boundsInfoAfterWidth.textContent = `${ctx.imageAfter.naturalWidth}px`;
         boundsInfoAfterWidth.classList.toggle('green', widthChanged);
       }
       const boundsInfoAfterHeight = this.containerEl.querySelector('.bounds-info-after .bounds-info-height');
       if (boundsInfoAfterHeight) {
-        boundsInfoAfterHeight.textContent = `${sizes.imageAfter.naturalHeight}px`;
+        boundsInfoAfterHeight.textContent = `${ctx.imageAfter.naturalHeight}px`;
         boundsInfoAfterHeight.classList.toggle('green', heightChanged);
       }
     }

-    if (sizes.imageBefore) {
+    if (ctx.imageBefore) {
       const boundsInfoBeforeWidth = this.containerEl.querySelector('.bounds-info-before .bounds-info-width');
       if (boundsInfoBeforeWidth) {
-        boundsInfoBeforeWidth.textContent = `${sizes.imageBefore.naturalWidth}px`;
+        boundsInfoBeforeWidth.textContent = `${ctx.imageBefore.naturalWidth}px`;
         boundsInfoBeforeWidth.classList.toggle('red', widthChanged);
       }
       const boundsInfoBeforeHeight = this.containerEl.querySelector('.bounds-info-before .bounds-info-height');
       if (boundsInfoBeforeHeight) {
-        boundsInfoBeforeHeight.textContent = `${sizes.imageBefore.naturalHeight}px`;
+        boundsInfoBeforeHeight.textContent = `${ctx.imageBefore.naturalHeight}px`;
         boundsInfoBeforeHeight.classList.toggle('red', heightChanged);
       }
     }

-    if (sizes.imageAfter) {
-      const container = sizes.imageAfter.parentNode;
-      sizes.imageAfter.style.width = `${sizes.sizeAfter.width * factor}px`;
-      sizes.imageAfter.style.height = `${sizes.sizeAfter.height * factor}px`;
+    if (ctx.imageAfter) {
+      const container = ctx.imageAfter.parentNode as HTMLElement;
+      ctx.imageAfter.style.width = `${ctx.sizeAfter.width * factor}px`;
+      ctx.imageAfter.style.height = `${ctx.sizeAfter.height * factor}px`;
       container.style.margin = '10px auto';
-      container.style.width = `${sizes.sizeAfter.width * factor + 2}px`;
-      container.style.height = `${sizes.sizeAfter.height * factor + 2}px`;
+      container.style.width = `${ctx.sizeAfter.width * factor + 2}px`;
+      container.style.height = `${ctx.sizeAfter.height * factor + 2}px`;
     }

-    if (sizes.imageBefore) {
-      const container = sizes.imageBefore.parentNode;
-      sizes.imageBefore.style.width = `${sizes.sizeBefore.width * factor}px`;
-      sizes.imageBefore.style.height = `${sizes.sizeBefore.height * factor}px`;
+    if (ctx.imageBefore) {
+      const container = ctx.imageBefore.parentNode as HTMLElement;
+      ctx.imageBefore.style.width = `${ctx.sizeBefore.width * factor}px`;
+      ctx.imageBefore.style.height = `${ctx.sizeBefore.height * factor}px`;
       container.style.margin = '10px auto';
-      container.style.width = `${sizes.sizeBefore.width * factor + 2}px`;
-      container.style.height = `${sizes.sizeBefore.height * factor + 2}px`;
+      container.style.width = `${ctx.sizeBefore.width * factor + 2}px`;
+      container.style.height = `${ctx.sizeBefore.height * factor + 2}px`;
     }
   }
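In initSideBySide the scale factor splits the available width between the two images and only ever shrinks them (factor stays at or below 1); the extra 2px on the containers presumably accounts for a 1px border. A worked example with hypothetical numbers:

    // assume diffContainerWidth = 1000 and maxSize.width = 600
    // per-side budget: (1000 - 24) / 2 = 488
    // factor = 488 / 600 ≈ 0.813 → a 600×400 image renders at ≈ 488×325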
-  initSwipe(sizes: Record<string, any>) {
+  initSwipe(ctx: ImageContext) {
     let factor = 1;
-    if (sizes.maxSize.width > this.diffContainerWidth - 12) {
-      factor = (this.diffContainerWidth - 12) / sizes.maxSize.width;
+    if (ctx.maxSize.width > this.diffContainerWidth - 12) {
+      factor = (this.diffContainerWidth - 12) / ctx.maxSize.width;
     }

-    if (sizes.imageAfter) {
-      const imgParent = sizes.imageAfter.parentNode;
-      const swipeFrame = imgParent.parentNode;
-      sizes.imageAfter.style.width = `${sizes.sizeAfter.width * factor}px`;
-      sizes.imageAfter.style.height = `${sizes.sizeAfter.height * factor}px`;
-      imgParent.style.margin = `0px ${sizes.ratio[0] * factor}px`;
-      imgParent.style.width = `${sizes.sizeAfter.width * factor + 2}px`;
-      imgParent.style.height = `${sizes.sizeAfter.height * factor + 2}px`;
-      swipeFrame.style.padding = `${sizes.ratio[1] * factor}px 0 0 0`;
-      swipeFrame.style.width = `${sizes.maxSize.width * factor + 2}px`;
+    if (ctx.imageAfter) {
+      const imgParent = ctx.imageAfter.parentNode as HTMLElement;
+      const swipeFrame = imgParent.parentNode as HTMLElement;
+      ctx.imageAfter.style.width = `${ctx.sizeAfter.width * factor}px`;
+      ctx.imageAfter.style.height = `${ctx.sizeAfter.height * factor}px`;
+      imgParent.style.margin = `0px ${ctx.ratio[0] * factor}px`;
+      imgParent.style.width = `${ctx.sizeAfter.width * factor + 2}px`;
+      imgParent.style.height = `${ctx.sizeAfter.height * factor + 2}px`;
+      swipeFrame.style.padding = `${ctx.ratio[1] * factor}px 0 0 0`;
+      swipeFrame.style.width = `${ctx.maxSize.width * factor + 2}px`;
     }

-    if (sizes.imageBefore) {
-      const imgParent = sizes.imageBefore.parentNode;
-      const swipeFrame = imgParent.parentNode;
-      sizes.imageBefore.style.width = `${sizes.sizeBefore.width * factor}px`;
-      sizes.imageBefore.style.height = `${sizes.sizeBefore.height * factor}px`;
-      imgParent.style.margin = `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`;
-      imgParent.style.width = `${sizes.sizeBefore.width * factor + 2}px`;
-      imgParent.style.height = `${sizes.sizeBefore.height * factor + 2}px`;
-      swipeFrame.style.width = `${sizes.maxSize.width * factor + 2}px`;
-      swipeFrame.style.height = `${sizes.maxSize.height * factor + 2}px`;
+    if (ctx.imageBefore) {
+      const imgParent = ctx.imageBefore.parentNode as HTMLElement;
+      const swipeFrame = imgParent.parentNode as HTMLElement;
+      ctx.imageBefore.style.width = `${ctx.sizeBefore.width * factor}px`;
+      ctx.imageBefore.style.height = `${ctx.sizeBefore.height * factor}px`;
+      imgParent.style.margin = `${ctx.ratio[3] * factor}px ${ctx.ratio[2] * factor}px`;
+      imgParent.style.width = `${ctx.sizeBefore.width * factor + 2}px`;
+      imgParent.style.height = `${ctx.sizeBefore.height * factor + 2}px`;
+      swipeFrame.style.width = `${ctx.maxSize.width * factor + 2}px`;
+      swipeFrame.style.height = `${ctx.maxSize.height * factor + 2}px`;
     }

     // extra height for inner "position: absolute" elements
     const swipe = this.containerEl.querySelector<HTMLElement>('.diff-swipe');
     if (swipe) {
-      swipe.style.width = `${sizes.maxSize.width * factor + 2}px`;
-      swipe.style.height = `${sizes.maxSize.height * factor + 30}px`;
+      swipe.style.width = `${ctx.maxSize.width * factor + 2}px`;
+      swipe.style.height = `${ctx.maxSize.height * factor + 30}px`;
     }

     this.containerEl.querySelector('.swipe-bar')!.addEventListener('mousedown', (e) => {
@@ -237,40 +261,40 @@ class ImageDiff {
       document.addEventListener('mouseup', removeEventListeners);
     }
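The four `ratio` entries act as centering offsets inside the shared maxSize frame: indexes 0/1 offset the after image horizontally/vertically (see the margin and padding lines above), while 2/3 do the same for the before image. Their computation lives in createContext, outside the visible hunks; a plausible reconstruction, offered only as a sketch:

    // assumed computation — not shown in this diff
    const ratio: [number, number, number, number] = [
      Math.floor((maxSize.width - sizeAfter.width) / 2),    // after: horizontal
      Math.floor((maxSize.height - sizeAfter.height) / 2),  // after: vertical
      Math.floor((maxSize.width - sizeBefore.width) / 2),   // before: horizontal
      Math.floor((maxSize.height - sizeBefore.height) / 2), // before: vertical
    ];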
-  initOverlay(sizes: Record<string, any>) {
+  initOverlay(ctx: ImageContext) {
     let factor = 1;
-    if (sizes.maxSize.width > this.diffContainerWidth - 12) {
-      factor = (this.diffContainerWidth - 12) / sizes.maxSize.width;
+    if (ctx.maxSize.width > this.diffContainerWidth - 12) {
+      factor = (this.diffContainerWidth - 12) / ctx.maxSize.width;
     }

-    if (sizes.imageAfter) {
-      const container = sizes.imageAfter.parentNode;
-      sizes.imageAfter.style.width = `${sizes.sizeAfter.width * factor}px`;
-      sizes.imageAfter.style.height = `${sizes.sizeAfter.height * factor}px`;
-      container.style.margin = `${sizes.ratio[1] * factor}px ${sizes.ratio[0] * factor}px`;
-      container.style.width = `${sizes.sizeAfter.width * factor + 2}px`;
-      container.style.height = `${sizes.sizeAfter.height * factor + 2}px`;
+    if (ctx.imageAfter) {
+      const container = ctx.imageAfter.parentNode as HTMLElement;
+      ctx.imageAfter.style.width = `${ctx.sizeAfter.width * factor}px`;
+      ctx.imageAfter.style.height = `${ctx.sizeAfter.height * factor}px`;
+      container.style.margin = `${ctx.ratio[1] * factor}px ${ctx.ratio[0] * factor}px`;
+      container.style.width = `${ctx.sizeAfter.width * factor + 2}px`;
+      container.style.height = `${ctx.sizeAfter.height * factor + 2}px`;
     }

-    if (sizes.imageBefore) {
-      const container = sizes.imageBefore.parentNode;
-      const overlayFrame = container.parentNode;
-      sizes.imageBefore.style.width = `${sizes.sizeBefore.width * factor}px`;
-      sizes.imageBefore.style.height = `${sizes.sizeBefore.height * factor}px`;
-      container.style.margin = `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`;
-      container.style.width = `${sizes.sizeBefore.width * factor + 2}px`;
-      container.style.height = `${sizes.sizeBefore.height * factor + 2}px`;
+    if (ctx.imageBefore) {
+      const container = ctx.imageBefore.parentNode as HTMLElement;
+      const overlayFrame = container.parentNode as HTMLElement;
+      ctx.imageBefore.style.width = `${ctx.sizeBefore.width * factor}px`;
+      ctx.imageBefore.style.height = `${ctx.sizeBefore.height * factor}px`;
+      container.style.margin = `${ctx.ratio[3] * factor}px ${ctx.ratio[2] * factor}px`;
+      container.style.width = `${ctx.sizeBefore.width * factor + 2}px`;
+      container.style.height = `${ctx.sizeBefore.height * factor + 2}px`;

       // some inner elements are `position: absolute`, so the container's height must be large enough
-      overlayFrame.style.width = `${sizes.maxSize.width * factor + 2}px`;
-      overlayFrame.style.height = `${sizes.maxSize.height * factor + 2}px`;
+      overlayFrame.style.width = `${ctx.maxSize.width * factor + 2}px`;
+      overlayFrame.style.height = `${ctx.maxSize.height * factor + 2}px`;
     }

     const rangeInput = this.containerEl.querySelector<HTMLInputElement>('input[type="range"]')!;

     function updateOpacity() {
-      if (sizes.imageAfter) {
-        sizes.imageAfter.parentNode.style.opacity = `${Number(rangeInput.value) / 100}`;
+      if (ctx.imageAfter) {
+        (ctx.imageAfter.parentNode as HTMLElement).style.opacity = `${Number(rangeInput.value) / 100}`;
       }
     }
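updateOpacity maps the 0–100 range input onto a 0–1 CSS opacity on the after image's container, cross-fading it over the before image. The hunk ends before the wiring; presumably it continues along these lines (assumed, for illustration only):

    rangeInput.addEventListener('input', updateOpacity); // assumed — not shown in the diff
    updateOpacity(); // apply the initial slider position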
4 web_src/js/globals.d.ts vendored
@@ -12,8 +12,8 @@ declare module '*.vue' {
   import type {DefineComponent} from 'vue';
   const component: DefineComponent<unknown, unknown, any>;
   export default component;
-  // List of named exports from vue components, used to make `tsc` output clean.
-  // To actually lint .vue files, `vue-tsc` is used because `tsc` can not parse them.
+  // Here we declare all exports from vue files so `tsc` or `tsgo` can work for
+  // non-vue files. To lint .vue files, `vue-tsc` must be used.
   export function initDashboardRepoList(): void;
   export function initRepositoryActionView(): void;
 }
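The rewritten comment states the intent more precisely: these ambient declarations let plain `tsc` (or `tsgo`) type-check non-vue code that imports named exports from `.vue` modules, while `vue-tsc` remains the tool that actually parses the single-file components. A new export would follow the same pattern, e.g. (hypothetical name; ambient module declarations merge):

    declare module '*.vue' {
      export function initMyNewFeature(): void; // hypothetical export, for illustration
    }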