mirror of https://github.com/go-gitea/gitea.git
Compare commits
84 Commits
48cb4ddc81...0bbfaef053
| Author | SHA1 | Date |
|---|---|---|
| | 0bbfaef053 | |
| | 0991eb3839 | |
| | a9d4c149f6 | |
| | 308119f1c7 | |
| | 2acc230cf0 | |
| | bb6778500f | |
| | 8cef3d3217 | |
| | 6c62a513e8 | |
| | 5684f5e707 | |
| | 8c9518c130 | |
| | 8de55d67c1 | |
| | 88d70d305b | |
| | 22f63d918f | |
| | 19028303b1 | |
| | 29b28002aa | |
| | 618e2d8106 | |
| | 485d8f1121 | |
| | 181db69e0c | |
| | a46b16f10f | |
| | 1748045285 | |
| | f114c388ff | |
| | 94c6d46faa | |
| | 7436c6297d | |
| | ddd1e6ca83 | |
| | 0548c10293 | |
| | 7de114a332 | |
| | 4fc626daa1 | |
| | 81adb01713 | |
| | 0990eb44ce | |
| | 40dec17b5c | |
| | 90eb831418 | |
| | 1c28c470f8 | |
| | e0f3b30895 | |
| | 719b151058 | |
| | 4f32d32812 | |
| | cda90eca31 | |
| | d462ce149d | |
| | b8c9a0c323 | |
| | 7346ae7cd4 | |
| | 0ea958dc58 | |
| | 67083437cd | |
| | b18c047d62 | |
| | 8efc4ca334 | |
| | 46a1d52235 | |
| | a2ae7c69da | |
| | 7954f25290 | |
| | 416ff1fd31 | |
| | 0e6c1224e5 | |
| | b38813878c | |
| | 08c634b7b7 | |
| | dfea75371c | |
| | 1f35435b81 | |
| | 71e4740946 | |
| | ecc6685c20 | |
| | a14db5c5e3 | |
| | ee334886f3 | |
| | 1376cf7481 | |
| | f214bb40a3 | |
| | 224aa64cd9 | |
| | 1e644e39f9 | |
| | 037f72bdb3 | |
| | 4cbb482554 | |
| | 439ebe7031 | |
| | 3a37d63d61 | |
| | 24ce2058e8 | |
| | 6b8b580218 | |
| | bbee652e29 | |
| | 637070e07b | |
| | 0d3e9956cd | |
| | 28debdbe00 | |
| | dcc9206a59 | |
| | bc28654b49 | |
| | d21ce9fa07 | |
| | 8fed27bf6a | |
| | 65986f423f | |
| | 18bafcc378 | |
| | 8d135ef5cf | |
| | d5893ee260 | |
| | 06ccb3a1d4 | |
| | 94db956e31 | |
| | c9505a26b9 | |
| | fbc3796f9e | |
| | d5afdccde8 | |
| | 17cfae82a5 | |
@@ -4,10 +4,10 @@
   "features": {
     // installs nodejs into container
     "ghcr.io/devcontainers/features/node:1": {
-      "version": "20"
+      "version": "lts"
     },
     "ghcr.io/devcontainers/features/git-lfs:1.2.2": {},
-    "ghcr.io/devcontainers-contrib/features/poetry:2": {},
+    "ghcr.io/devcontainers-extra/features/poetry:2": {},
     "ghcr.io/devcontainers/features/python:1": {
       "version": "3.12"
     },
@@ -36,15 +36,6 @@ _testmain.go
 coverage.all
 cpu.out
 
-/modules/migration/bindata.go
-/modules/migration/bindata.go.hash
-/modules/options/bindata.go
-/modules/options/bindata.go.hash
-/modules/public/bindata.go
-/modules/public/bindata.go.hash
-/modules/templates/bindata.go
-/modules/templates/bindata.go.hash
-
 *.db
 *.log
 
@@ -37,7 +37,7 @@ jobs:
           python-version: "3.12"
       - uses: actions/setup-node@v4
         with:
-          node-version: 22
+          node-version: 24
           cache: npm
           cache-dependency-path: package-lock.json
       - run: pip install poetry

@@ -66,7 +66,7 @@ jobs:
       - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
-          node-version: 22
+          node-version: 24
          cache: npm
          cache-dependency-path: package-lock.json
      - run: make deps-frontend

@@ -137,7 +137,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version: 22
+          node-version: 24
           cache: npm
           cache-dependency-path: package-lock.json
       - run: make deps-frontend

@@ -186,7 +186,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version: 22
+          node-version: 24
           cache: npm
           cache-dependency-path: package-lock.json
       - run: make deps-frontend
@@ -25,7 +25,7 @@ jobs:
           check-latest: true
       - uses: actions/setup-node@v4
         with:
-          node-version: 22
+          node-version: 24
           cache: npm
           cache-dependency-path: package-lock.json
       - run: make deps-frontend frontend deps-backend

@@ -22,7 +22,7 @@ jobs:
           check-latest: true
       - uses: actions/setup-node@v4
         with:
-          node-version: 22
+          node-version: 24
           cache: npm
           cache-dependency-path: package-lock.json
       - run: make deps-frontend deps-backend

@@ -23,7 +23,7 @@ jobs:
           check-latest: true
       - uses: actions/setup-node@v4
         with:
-          node-version: 22
+          node-version: 24
           cache: npm
           cache-dependency-path: package-lock.json
       - run: make deps-frontend deps-backend

@@ -27,7 +27,7 @@ jobs:
           check-latest: true
       - uses: actions/setup-node@v4
         with:
-          node-version: 22
+          node-version: 24
           cache: npm
           cache-dependency-path: package-lock.json
       - run: make deps-frontend deps-backend
@@ -42,14 +42,10 @@ _testmain.go
 coverage.all
 cpu.out
 
-/modules/migration/bindata.go
-/modules/migration/bindata.go.hash
-/modules/options/bindata.go
-/modules/options/bindata.go.hash
-/modules/public/bindata.go
-/modules/public/bindata.go.hash
-/modules/templates/bindata.go
-/modules/templates/bindata.go.hash
+/modules/migration/bindata.*
+/modules/options/bindata.*
+/modules/public/bindata.*
+/modules/templates/bindata.*
 
 *.db
 *.log
.ignore (3 changed lines)

@@ -1,9 +1,6 @@
 *.min.css
 *.min.js
 /assets/*.json
-/modules/options/bindata.go
-/modules/public/bindata.go
-/modules/templates/bindata.go
 /options/gitignore
 /options/license
 /public/assets
|||
423
CHANGELOG.md
423
CHANGELOG.md
|
|
@ -4,6 +4,429 @@ This changelog goes through the changes that have been made in each release
|
|||
without substantial changes to our git log; to see the highlights of what has
|
||||
been added to each release, please refer to the [blog](https://blog.gitea.com).
|
||||
|
||||
## [1.24.0](https://github.com/go-gitea/gitea/releases/tag/1.24.0) - 2025-05-26
|
||||
|
||||
* BREAKING
|
||||
* Make Gitea always use its internal config, ignore `/etc/gitconfig` (#33076)
|
||||
* Improve log format (#33814)
|
||||
* Fix markdown render behaviors (#34122)
|
||||
* Add package version api endpoints (#34173)
|
||||
|
||||
* FEATURES
|
||||
* Enforce two-factor auth (2FA: TOTP or WebAuthn) (#34187)
|
||||
* Add fullscreen mode as a more efficient operation way to view projects (#34081)
|
||||
* Add anonymous access support for private/unlisted repositories (#34051)
|
||||
* Support public code/issue access for private repositories (#33127)
|
||||
* Add middleware for request prioritization (#33951)
|
||||
* Add cli flags LDAP group configuration (#33933)
|
||||
* Add file tree to file view page (#32721)
|
||||
* Add material icons for file list (#33837)
|
||||
* Artifacts download api for artifact actions v4 (#33510)
|
||||
* Support choose email when creating a commit via web UI (#33432)
|
||||
* Add basic auth support to rss/atom feeds (#33371)
|
||||
* Add sorting by exclusive labels (issue priority) (#33206)
|
||||
* Add sub issue list support (#32940)
|
||||
* Private README.md for organization (#32872)
|
||||
* Email option to embed images as base64 instead of link (#32061)
|
||||
* Option to delay conflict checking of old pull requests until page view (#27779)
|
||||
* Worktime tracking for the organization level (#19808)
|
||||
|
||||
* PERFORMANCE
|
||||
* Add cache for common package queries (#22491)
|
||||
* Move issue pin to an standalone table for querying performance (#33452)
|
||||
* Improve commits list performance to reduce unnecessary database queries (#33528)
|
||||
* Optimize total count of feed when loading activities in user dashboard. (#33841)
|
||||
* Optimize heatmap query (#33853)
|
||||
* Only use prev and next buttons for pagination on user dashboard (#33981)
|
||||
* Improve pull request list API performance (#34052)
|
||||
* Cache GPG keys, emails and users when list commits (#34086)
|
||||
* Refactor Git Attribute & performance optimization (#34154)
|
||||
* Performance optimization for tags synchronization (#34355) #34522
|
||||
|
||||
* ENHANCEMENTS
|
||||
* Code
|
||||
* Display when a release attachment was uploaded (#34261)
|
||||
* Support creating relative link to raw path in markdown (#34105)
|
||||
* Improve code block readability and isolate copy button (#34009)
|
||||
* Improve repository commit view (#33877)
|
||||
* Full-file syntax highlighting for diff pages (#33766)
|
||||
* Clone repository with Tea CLI (#33725)
|
||||
* Improve sync fork behavior (#33319)
|
||||
* Make git clone URL could use current signed-in user (#33091)
|
||||
* Add submodule diff links (#33097)
|
||||
* Link to tree views of submodules if possible (#33424)
|
||||
* Only keep popular licenses (#33832)
|
||||
* De-emphasize signed commits (#31160)
|
||||
|
||||
* Actions
|
||||
* Add flat-square action badge style (#34062)
|
||||
* Update action status badge layout (#34018)
|
||||
* Download actions job logs from API (#33858)
|
||||
* Always show the "rerun" button for action jobs (#33692)
|
||||
* Add auto-expanding running actions step (#30058)
|
||||
* Update status check for all supported on.pull_request.types in Gitea (#33117)
|
||||
* Workflow_dispatch use workflow from trigger branch (#33098)
|
||||
* Add action auto-scroll (#30057)
|
||||
* Add workflow_job webhook (#33694)
|
||||
* Add a button editing action secret (#34462)
|
||||
|
||||
* Pull Request
|
||||
* Auto expand "New PR" form (#33971)
|
||||
* Mark parent directory as viewed when all files are viewed (#33958)
|
||||
* Show info about maintainers are allowed to edit a PR (#33738)
|
||||
* Automerge supports deleting branch automatically after merging (#32343)
|
||||
* Add additional command hints for PowerShell & CMD (#33548)
|
||||
|
||||
* Issues
|
||||
* Allow filtering issues by any assignee (#33343)
|
||||
* Show warning on navigation if currently editing comment or title (#32920)
|
||||
* Make tracked time representation display as hours (#33315)
|
||||
* Add No Results Prompt Message on Issue List Page (#33699)
|
||||
* Add sort option recentclose for issues and pulls (#34525) #34539
|
||||
|
||||
* Packages
|
||||
* Link to nuget dependencies (#26554)
|
||||
* Add composor source field (#33502)
|
||||
|
||||
* Administration
|
||||
* Improve navbar: add "admin" tip, add "active" style (#32927)
|
||||
* Add a option "--user-type bot" to admin user create, improve role display (#27885)
|
||||
* Improve admin user view page (#33735)
|
||||
* Support performance trace (#32973)
|
||||
* Change pprof labels to be prometheus compatible (#32865)
|
||||
* Allow admins and org owners to change org member public status (#28294)
|
||||
* Optimize the installation page (#32994)
|
||||
* Make public URL generation configurable (#34250)
|
||||
* Add a --fullname arg to gitea admin user create. (#34241)
|
||||
|
||||
* Others
|
||||
* Improve oauth2 error handling (#33969)
|
||||
* Fail mirroring more gracefully (#34002)
|
||||
* Align User Details Page Header Layout with Design Specifications (#34192)
|
||||
* Webhook add X-Gitea-Hook-Installation-Target-Type Header (#33752)
|
||||
* Optimize the dashboard (#32990)
|
||||
* Improve button layout on small screens (#33633)
|
||||
* Add cropping support when modifying the user/org/repo avatar (#33498)
|
||||
* Make ROOT_URL support using request Host header (#32564)
|
||||
* Add `show more` organizations icon in user's profile (#32986)
|
||||
* Introduce `--page-space-bottom` at 64px (#30692)
|
||||
* Improve theme display (#30671)
|
||||
* Add alphabetical project sorting (#33504)
|
||||
* Add global lock for migrations to make upgrade more safe with multiple replications (#33706)
|
||||
* Add descriptions for private repo public access settings and improve the UI (#34057)
|
||||
|
||||
* API
|
||||
* Actions Runner rest api (#33873)
|
||||
* Inclusion of rename organization api (#33303)
|
||||
* Add API to support link package to repository and unlink it (#33481)
|
||||
* Add API endpoint to request contents of multiple files simultaniously (#34139)
|
||||
* Actions artifacts API list/download check status upload confirmed (#34273)
|
||||
* Add API routes to lock and unlock issues (#34165)
|
||||
* Fix some user name usages (#33689)
|
||||
* Allow filtering /repos/{owner}/{repo}/pulls by target base branch queryparam (#33684)
|
||||
* Improve swagger generation (#33664)
|
||||
* Support Ephemeral action runners (#33570)
|
||||
* Support workflow event dispatch via API (#33545)
|
||||
* Support workflow event dispatch via API (#32059)
|
||||
* Added Description Field for Secrets and Variables (#33526)
|
||||
* Reject star-related requests if stars are disabled (#33208)
|
||||
* Let API create and edit system webhooks, attempt 2 (#33180)
|
||||
* Use `Project-URL` metadata field to get a PyPI package's homepage URL (#33089)
|
||||
* Add `last_committer_date` and `last_author_date` for file contents API (#32921)
|
||||
|
||||
* REFACTORS
|
||||
* Remove context from git struct (#33793)
|
||||
* Refactor admin/common.ts (#33788)
|
||||
* Refactor repo-settings.ts (#33785)
|
||||
* Refactor repo-issue.ts (#33784)
|
||||
* Small refactor to reduce unnecessary database queries and remove duplicated functions (#33779)
|
||||
* Refactor initRepoBranchTagSelector to use new init framework (#33776)
|
||||
* Refactor buttons to use new init framework (#33774)
|
||||
* Refactor markup and pdf-viewer to use new init framework (#33772)
|
||||
* Refactor error system (#33771)
|
||||
* Refactor mail code (#33768)
|
||||
* Update TypeScript types (#33799)
|
||||
* Refactor older tests to use testify (#33140)
|
||||
* Move notifywatch to service layer (#33825)
|
||||
* Decouple context from repository related structs (#33823)
|
||||
* Remove context from mail struct (#33811)
|
||||
* Refactor dropdown ellipsis (#34123)
|
||||
* Refactor functions to reduce repopath expose (#33892)
|
||||
* Refactor repo-diff.ts (#33746)
|
||||
* Refactor web route handler (#33488)
|
||||
* Refactor user & avatar (#33433)
|
||||
* Refactor user package (#33423)
|
||||
* Refactor decouple context from migration structs (#33399)
|
||||
* Refactor context flash msg and global variables (#33375)
|
||||
* Refactor response writer & access logger (#33323)
|
||||
* Refactor ref type (#33242)
|
||||
* Refactor context repository (#33202)
|
||||
* Refactor legacy JS (#33115)
|
||||
* Refactor legacy line-number and scroll code (#33094)
|
||||
* Refactor env var related code (#33075)
|
||||
* Move SetMerged to service layer (#33045)
|
||||
* Merge updatecommentattachment functions (#33044)
|
||||
* Refactor pull-request compare&create page (#33071)
|
||||
* Refactor repo-new.ts (#33070)
|
||||
* Refactor pagination (#33037)
|
||||
* Refactor tests (#33021)
|
||||
* Refactor markup render to fix various path problems (#34114)
|
||||
* Refactor Branch struct in package modules/git (#33980)
|
||||
* Don't create duplicated functions for code repositories and wiki repositories (#33924)
|
||||
* Move git references checking to gitrepo packages to reduce expose of repository path (#33891)
|
||||
* Refactor cache-control (#33861)
|
||||
* Decouple diff stats query from actual diffing (#33810)
|
||||
* Move part of updating protected branch logic to service layer (#33742)
|
||||
* Decouple Batch from git.Repository to simplify usage without requiring the creation of a Repository struct. (#34001)
|
||||
* Refactor tmpl and blob_excerpt (#32967)
|
||||
* Refactor template & test related code (#32938)
|
||||
* Refactor db package and remove unnecessary `DumpTables` (#32930)
|
||||
* Refactor pprof labels and process desc (#32909)
|
||||
* Refactor repo-projects.ts (#32892)
|
||||
* Refactor getpatch/getdiff functions and remove unnecessary fallback (#32817)
|
||||
* Uniform all temporary directories and allow customizing temp path (#32352)
|
||||
* Remove context from retry downloader (#33871)
|
||||
* Refactor global init code and add more comments (#33755)
|
||||
* Remove some unnecessary template helpers (#33069)
|
||||
* Move and rename UpdateRepository (#34136)
|
||||
* Move hooks function to gitrepo and reduce expose repopath (#33890)
|
||||
* Add abstraction layer to delete repository from disk (#33879)
|
||||
* Add abstraction layer to check if the repository exists on disk (#33874)
|
||||
* Move ParseCommitWithSSHSignature to service layer (#34087)
|
||||
* Move duplicated functions (#33977)
|
||||
* Extract code to their own functions for push update (#33944)
|
||||
* Move gitgraph from modules to services layer (#33527)
|
||||
* Move commits signature and verify functions to service layers (#33605)
|
||||
* Use `CloseIssue` and `ReopenIssue` instead of `ChangeStatus` (#32467)
|
||||
* Refactor arch route handlers (#32993)
|
||||
* Refactor "string truncate" (#32984)
|
||||
* Refactor arch route handlers (#32972)
|
||||
* Clarify path param naming (#32969)
|
||||
* Refactor request context (#32956)
|
||||
* Move some errors to their own sub packages (#32880)
|
||||
* Move RepoTransfer from models to models/repo sub package (#32506)
|
||||
* Move delete deploy keys into service layer (#32201)
|
||||
* Refactor webhook events (#33337)
|
||||
* Move some Actions related functions from `routers` to `services` (#33280)
|
||||
* Refactor RefName (#33234)
|
||||
* Refactor context RefName and RepoAssignment (#33226)
|
||||
* Refactor repository transfer (#33211)
|
||||
* Refactor error system (#33626)
|
||||
* Refactor error system (#33610)
|
||||
* Refactor package (routes and error handling, npm peer dependency) (#33111)
|
||||
* Use test context in tests and new loop system in benchmarks (#33648)
|
||||
* Some small refactors (#33144)
|
||||
* Simplify context ref name (#33267)
|
||||
|
||||
* BUGFIXES
|
||||
* Fix some dropdown problems on the issue sidebar (#34308) #34327
|
||||
* Do not return archive download URLs in API if downloads are disabled (#34324) #34338
|
||||
* Fix LFS files being editable in web UI (#34356) #34362
|
||||
* Fix only text/* being viewable in web UI (#34374) #34378
|
||||
* Fix LFS file not stored in LFS when uploaded/edited via API or web UI (#34367)
|
||||
* Grey out expired artifact on Artifacts list (#34314) #34404
|
||||
* Fix incorrect divergence cache after switching default branch (#34370) #34406
|
||||
* Refactor commit message rendering and fix bugs (#34412) #34414
|
||||
* Merge and tweak markup editor expander CSS (#34409) #34415
|
||||
* Fix GetUsersByEmails (#34423) #34425
|
||||
* Only git operations should update last changed of a repository (#34388) #34427
|
||||
* Fix comment textarea scroll issue in Firefox (#34438) #34446
|
||||
* Fix repo broken check (#34444) #34452
|
||||
* Fix remove org user failure on mssql (#34449) #34453
|
||||
* Fix Workflow run Not Found page (#34459) #34466
|
||||
* When updating comment, if the content is the same, just return and not update the database (#34422) #34464
|
||||
* Fix project board view (#34470) #34475
|
||||
* Fix get / delete runner to use consistent http 404 and 500 status (#34480) #34488
|
||||
* Fix url validation in webhook add/edit API (#34492) #34496
|
||||
* Fix edithook api can not update package, status and workflow_job events (#34495) #34499
|
||||
* Fix ephemeral runner deletion (#34447) #34513
|
||||
* Don't display error log when .git-blame-ignore-revs doesn't exist (#34457)
|
||||
* Only allow admins to rename default/protected branches (#33276)
|
||||
* Improve "lock conversation" UI (#34207)
|
||||
* Fix incorrect file links (#34189)
|
||||
* Optimize Overflow Menu (#34183)
|
||||
* Check user/org repo limit instead of doer (#34147)
|
||||
* Make markdown render match GitHub's behavior (#34129)
|
||||
* Fix team permission (#34128)
|
||||
* Correctly handle submodule view and avoid throwing 500 error (#34121)
|
||||
* Fix users being able bypass limits with repo transfers (#34031)
|
||||
* Avoid creating unnecessary temporary cat file sub process (#33942)
|
||||
* Refactor organization menu (#33928)
|
||||
* Fix various Fomantic UI and htmx problems (#33851)
|
||||
* Fix 500 error when error occurred in migration page (#33256)
|
||||
* Validate that the tag doesn't exist when creating a tag via the web (#33241)
|
||||
* Add missed transaction on setmerged (#33079)
|
||||
* Rework create/fork/adopt/generate repository to make sure resources will be cleanup once failed (#31035)
|
||||
* Valid email address should only start with alphanumeric (#28174)
|
||||
* Fix webhook url (#34186)
|
||||
* Fix "toAbsoluteLocaleDate" test when system locale is not en-US (#33939)
|
||||
* Fix file name could not be searched if the file was not a text file when using the Bleve indexer (#33959)
|
||||
* Fix cannot delete runners via the modal dialog (#33895)
|
||||
* Fix unpin hint on the pinned pull requests (#33207)
|
||||
* Fix parentCommit invalid memory address or nil pointer dereference. (#33204)
|
||||
* Fix comment header padding (#33377)
|
||||
* Fix some migration and repo name problems (#33986)
|
||||
* Fix various trivial frontend problems (#34263)
|
||||
* Fix Set Email Preference dropdown and button placement (#34255)
|
||||
* Fix quoted replies incorrectly render user input as part of the quote (#34216)
|
||||
* Fix button alignments and remove unnecessary styles (#34206)
|
||||
* Restore form inputs on organization create error (#34201)
|
||||
* Try to fix ACME (3rd) (#33807)
|
||||
* Fix incorrect ref "blob" (#33240)
|
||||
* Fix dynamic content loading init problem (#33748)
|
||||
* Fix git empty check and HEAD request (#33690)
|
||||
* Fix Untranslated Text on Actions Page (#33635)
|
||||
* Fix issue label delete incorrect labels webhook payload (#34575)
|
||||
* Fix incorrect page navigation with up and down arrow on last item of dashboard repos (#34570)
|
||||
* Fix/improve avatar sync from LDAP (#34573)
|
||||
* Fix some trivial problems (#34579)
|
||||
* Retain issue sort type when a keyword search is introduced (#34559)
|
||||
* Always use an empty line to separate the commit message and trailer (#34512)
|
||||
* Fix line-button issue after file selection in file tree (#34574)
|
||||
* Fix doctor deleting orphaned issues attachments (#34142)
|
||||
* Add webhook assigning test and fix possible bug (#34420)
|
||||
* Fix possible nil description of pull request when migrating from CodeCommit (#34541)
|
||||
* Refactor commit reader (#34542)
|
||||
* Fix possible pull request broken when leave the page immediately after clicking the update button #34509
|
||||
* Ignore "Close" error when uploading container blob (#34620)
|
||||
* Fix missed merge commit sha and time when migrating from codecommit (#34645)
|
||||
* Fix GetUsersByEmails (#34643)
|
||||
* Misc CSS fixes (#34638)
|
||||
* Add codecommit to supported services in api docs (#34626)
|
||||
* Validate hex colors when creating/editing labels (#34623)
|
||||
* Fix possible pull request broken when leave the page immediately after clicking the update button (#34509)
|
||||
* Fix margin issue in markup paragraph rendering (#34599)
|
||||
* Fix migration pull request title too long (#34577)
|
||||
* Fix footnote jump behavior on the issue page. (#34621)
|
||||
* Fix "oras" OCI client compatibility (#34666)
|
||||
* Fix last admin check when syncing users (#34649)
|
||||
* Fix skip paths check on tag push events in workflows (#34602) #34670
|
||||
|
||||
* MISC
|
||||
|
||||
* Bump to alpine 3.22 (#34613)
|
||||
* Make pull request and issue history more compact (#34588)
|
||||
* Run integration tests against postgres 14 (#34514) #34536
|
||||
* Enable addtional linters (#34085)
|
||||
* Enable testifylint rules (#34075)
|
||||
* Enable staticcheck QFxxxx rules (#34064)
|
||||
* Improve Actions test (#32883)
|
||||
* Drop fomantic build (#33845)
|
||||
* Go1.24 (#33562)
|
||||
* Run yamllint with strict mode, fix issue (#33551)
|
||||
* Disable cron task to update license (#33486)
|
||||
* Optimize makefile help information generation (#33390)
|
||||
* Convert github.com/xanzy/go-gitlab into gitlab.com/gitlab-org/api/client-go (#33126)
|
||||
* Add missed changelogs (#33649)
|
||||
* Update .changelog file to add performance label group (#33472)
|
||||
* Add missing POPULATE_SQUASH_COMMENT_WITH_COMMIT_MESSAGES in app.example.ini (#33363)
|
||||
* Update README screenshots (#33347)
|
||||
* Update unrs-resolver (#34279)
|
||||
* Update go&js dependencies (#34262)
|
||||
* Optimize the calling code of queryElems (#34235)
|
||||
* Update protected_branch.tmpl (#34193)
|
||||
* Feat/optimize span svg layout (#34185)
|
||||
* Set MERMAID_MAX_SOURCE_CHARACTERS to 50000 (#34152)
|
||||
* Update JS and PY deps (#34143)
|
||||
* Add Chinese translations for README files (#34132)
|
||||
* Use `overflow-wrap: anywhere` to replace `word-break: break-all` (#34126)
|
||||
* Clarify ownership in password change error messages (#34092)
|
||||
* Add toggleClass function in dom.ts (#34063)
|
||||
* Update to golangci-lint v2 (#34054)
|
||||
* Update Makefile test comments (#34013)
|
||||
* Update go mod dependencies (#33988)
|
||||
* Use filepath.Join instead of path.Join for file system file operations (#33978)
|
||||
* Prepare common tmpl functions in a middleware (#33957)
|
||||
* Remove unused or abused styles (#33918)
|
||||
* Update JS and PY deps, misc tweaks (#33903)
|
||||
* Try to figure out attribute checker problem (#33901)
|
||||
* Add lock for a repository pull mirror (#33876)
|
||||
* Fine tune push mirror UI (#33866)
|
||||
* Improve issue & code search (#33860)
|
||||
* Use pullrequestlist instead of []*pullrequest (#33765)
|
||||
* Upgrade act to 0.261.4 and actions-proto-go to v0.4.1 (#33760)
|
||||
* Align sidebar gears to the right (#33721)
|
||||
* Update Go dependencies (skip blevesearch, meilisearch) (#33655)
|
||||
* Add migrations and doctor fixes (#33556)
|
||||
* Remove "class-name" from svg icon (#33540)
|
||||
* Update MAINTAINERS (#33529)
|
||||
* Add "No data available" display when list is empty (#33517)
|
||||
* Use `git diff-tree` for `DiffFileTree` on diff pages (#33514)
|
||||
* Give organisation members access to organisation feeds (#33508)
|
||||
* Update feishu icon (#33470)
|
||||
* Hide/disable unusable UI elements when a repository is archived (#33459)
|
||||
* Update `@github/text-expander-element` to 2.9.0 (#33435)
|
||||
* Do not access GitRepo when a repo is being created (#33380)
|
||||
* Fix incorrect ref usages (#33301)
|
||||
* Prepare for support performance trace (#33286)
|
||||
* Enable Typescript `noImplicitThis` (#33250)
|
||||
* Remove unused CSS styles and move some styles to proper files (#33217)
|
||||
* Add .run to gitignore (#33175)
|
||||
* Fix typo in gitea downloader test and add missing codebase in `ToGitServiceType` (#33146)
|
||||
* Remove extended glob pattern from branch protection UI (#33125)
|
||||
* Clean up legacy form CSS styles (#33081)
|
||||
* Unset XDG_HOME_CONFIG as gitea manages configuration locations (#33067)
|
||||
* Add IntelliJ Gateway's .uuid to gitignore (#33052)
|
||||
* User facing messages for AGit errors (#33012)
|
||||
* Always show assignees on right (#33006)
|
||||
* Fix eslint (#33002)
|
||||
* Update JS dependencies (#32914)
|
||||
* Bump x/net (#32896) (#32900)
|
||||
* Only activity tab needs heatmap data loading (#34652)
|
||||
|
||||
## [1.23.8](https://github.com/go-gitea/gitea/releases/tag/1.23.8) - 2025-05-11
|
||||
|
||||
* SECURITY
|
||||
* Fix a bug when uploading file via lfs ssh command (#34408) (#34411)
|
||||
* Update net package (#34228) (#34232)
|
||||
* BUGFIXES
|
||||
* Fix releases sidebar navigation link (#34436) #34439
|
||||
* Fix bug webhook milestone is not right. (#34419) #34429
|
||||
* Fix two missed null value checks on the wiki page. (#34205) (#34215)
|
||||
* Swift files can be passed either as file or as form value (#34068) (#34236)
|
||||
* Fix bug when API get pull changed files for deleted head repository (#34333) (#34368)
|
||||
* Upgrade github v61 -> v71 to fix migrating bug (#34389)
|
||||
* Fix bug when visiting comparation page (#34334) (#34364)
|
||||
* Fix wrong review requests when updating the pull request (#34286) (#34304)
|
||||
* Fix github migration error when using multiple tokens (#34144) (#34302)
|
||||
* Explicitly not update indexes when sync database schemas (#34281) (#34295)
|
||||
* Fix panic when comment is nil (#34257) (#34277)
|
||||
* Fix project board links to related Pull Requests (#34213) (#34222)
|
||||
* Don't assume the default wiki branch is master in the wiki API (#34244) (#34245)
|
||||
* DOCUMENTATION
|
||||
* Update token creation API swagger documentation (#34288) (#34296)
|
||||
* MISC
|
||||
* Fix CI Build (#34315)
|
||||
* Add riscv64 support (#34199) (#34204)
|
||||
* Bump go version in go.mod (#34160)
|
||||
* remove hardcoded 'code' string in clone_panel.tmpl (#34153) (#34158)
|
||||
|
||||
## [1.23.7](https://github.com/go-gitea/gitea/releases/tag/1.23.7) - 2025-04-07
|
||||
|
||||
* Enhancements
|
||||
* Add a config option to block "expensive" pages for anonymous users (#34024) (#34071)
|
||||
* Also check default ssh-cert location for host (#34099) (#34100) (#34116)
|
||||
* BUGFIXES
|
||||
* Fix discord webhook 400 status code when description limit is exceeded (#34084) (#34124)
|
||||
* Get changed files based on merge base when checking `pull_request` actions trigger (#34106) (#34120)
|
||||
* Fix invalid version in RPM package path (#34112) (#34115)
|
||||
* Return default avatar url when user id is zero rather than updating database (#34094) (#34095)
|
||||
* Add additional ReplaceAll in pathsep to cater for different pathsep (#34061) (#34070)
|
||||
* Try to fix check-attr bug (#34029) (#34033)
|
||||
* Git client will follow 301 but 307 (#34005) (#34010)
|
||||
* Fix block expensive for 1.23 (#34127)
|
||||
* Fix markdown frontmatter rendering (#34102) (#34107)
|
||||
* Add new CLI flags to set name and scopes when creating a user with access token (#34080) (#34103)
|
||||
* Do not show 500 error when default branch doesn't exist (#34096) (#34097)
|
||||
* Hide activity contributors, recent commits and code frequrency left tabs if there is no code permission (#34053) (#34065)
|
||||
* Simplify emoji rendering (#34048) (#34049)
|
||||
* Adjust the layout of the toolbar on the Issues/Projects page (#33667) (#34047)
|
||||
* Pull request updates will also trigger code owners review requests (#33744) (#34045)
|
||||
* Fix org repo creation being limited by user limits (#34030) (#34044)
|
||||
* Fix git client accessing renamed repo (#34034) (#34043)
|
||||
* Fix the issue with error message logging for the `check-attr` command on Windows OS. (#34035) (#34036)
|
||||
* Polyfill WeakRef (#34025) (#34028)
|
||||
|
||||
## [1.23.6](https://github.com/go-gitea/gitea/releases/tag/v1.23.6) - 2025-03-24
|
||||
|
||||
* SECURITY
|
||||
|
|
|
|||
Makefile (38 changed lines)

@@ -36,7 +36,8 @@ XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest
 GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1
 GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1
 ACTIONLINT_PACKAGE ?= github.com/rhysd/actionlint/cmd/actionlint@v1
-GOPLS_PACKAGE ?= golang.org/x/tools/gopls@v0.17.1
+GOPLS_PACKAGE ?= golang.org/x/tools/gopls@v0.19.0
+GOPLS_MODERNIZE_PACKAGE ?= golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@v0.19.0
 
 DOCKER_IMAGE ?= gitea/gitea
 DOCKER_TAG ?= latest

@@ -120,8 +121,7 @@ WEBPACK_CONFIGS := webpack.config.js tailwind.config.js
 WEBPACK_DEST := public/assets/js/index.js public/assets/css/index.css
 WEBPACK_DEST_ENTRIES := public/assets/js public/assets/css public/assets/fonts
 
-BINDATA_DEST := modules/public/bindata.go modules/options/bindata.go modules/templates/bindata.go
-BINDATA_HASH := $(addsuffix .hash,$(BINDATA_DEST))
+BINDATA_DEST_WILDCARD := modules/migration/bindata.* modules/public/bindata.* modules/options/bindata.* modules/templates/bindata.*
 
 GENERATED_GO_DEST := modules/charset/invisible_gen.go modules/charset/ambiguous_gen.go
 

@@ -149,14 +149,8 @@ SPELLCHECK_FILES := $(GO_DIRS) $(WEB_DIRS) templates options/locale/locale_en-US
 EDITORCONFIG_FILES := templates .github/workflows options/locale/locale_en-US.ini
 
 GO_SOURCES := $(wildcard *.go)
-GO_SOURCES += $(shell find $(GO_DIRS) -type f -name "*.go" ! -path modules/options/bindata.go ! -path modules/public/bindata.go ! -path modules/templates/bindata.go)
+GO_SOURCES += $(shell find $(GO_DIRS) -type f -name "*.go")
 GO_SOURCES += $(GENERATED_GO_DEST)
-GO_SOURCES_NO_BINDATA := $(GO_SOURCES)
 
 ifeq ($(filter $(TAGS_SPLIT),bindata),bindata)
-	GO_SOURCES += $(BINDATA_DEST)
+	GENERATED_GO_DEST += $(BINDATA_DEST)
 endif
 
 # Force installation of playwright dependencies by setting this flag
 ifdef DEPS_PLAYWRIGHT

@@ -226,7 +220,7 @@ clean-all: clean ## delete backend, frontend and integration files
 
 .PHONY: clean
 clean: ## delete backend and integration files
-	rm -rf $(EXECUTABLE) $(DIST) $(BINDATA_DEST) $(BINDATA_HASH) \
+	rm -rf $(EXECUTABLE) $(DIST) $(BINDATA_DEST_WILDCARD) \
 		integrations*.test \
 		e2e*.test \
 		tests/integration/gitea-integration-* \

@@ -237,7 +231,7 @@ clean: ## delete backend and integration files
 		tests/e2e/reports/ tests/e2e/test-artifacts/ tests/e2e/test-snapshots/
 
 .PHONY: fmt
-fmt: ## format the Go code
+fmt: ## format the Go and template code
 	@GOFUMPT_PACKAGE=$(GOFUMPT_PACKAGE) $(GO) run build/code-batch-process.go gitea-fmt -w '{file-list}'
 	$(eval TEMPLATES := $(shell find templates -type f -name '*.tmpl'))
 	@# strip whitespace after '{{' or '(' and before '}}' or ')' unless there is only

@@ -256,6 +250,19 @@ fmt-check: fmt
 		exit 1; \
 	fi
 
+.PHONY: fix
+fix: ## apply automated fixes to Go code
+	$(GO) run $(GOPLS_MODERNIZE_PACKAGE) -fix ./...
+
+.PHONY: fix-check
+fix-check: fix
+	@diff=$$(git diff --color=always $(GO_SOURCES)); \
+	if [ -n "$$diff" ]; then \
+		echo "Please run 'make fix' and commit the result:"; \
+		printf "%s" "$${diff}"; \
+		exit 1; \
+	fi
+
 .PHONY: $(TAGS_EVIDENCE)
 $(TAGS_EVIDENCE):
 	@mkdir -p $(MAKE_EVIDENCE_DIR)

@@ -268,7 +275,7 @@ endif
 .PHONY: generate-swagger
 generate-swagger: $(SWAGGER_SPEC) ## generate the swagger spec from code comments
 
-$(SWAGGER_SPEC): $(GO_SOURCES_NO_BINDATA) $(SWAGGER_SPEC_INPUT)
+$(SWAGGER_SPEC): $(GO_SOURCES) $(SWAGGER_SPEC_INPUT)
 	$(GO) run $(SWAGGER_PACKAGE) generate spec --exclude "$(SWAGGER_EXCLUDE)" --input "$(SWAGGER_SPEC_INPUT)" --output './$(SWAGGER_SPEC)'
 
 .PHONY: swagger-check

@@ -295,7 +302,7 @@ checks: checks-frontend checks-backend ## run various consistency checks
 checks-frontend: lockfile-check svg-check ## check frontend files
 
 .PHONY: checks-backend
-checks-backend: tidy-check swagger-check fmt-check swagger-validate security-check ## check backend files
+checks-backend: tidy-check swagger-check fmt-check fix-check swagger-validate security-check ## check backend files
 
 .PHONY: lint
 lint: lint-frontend lint-backend lint-spell ## lint everything

@@ -373,7 +380,7 @@ lint-go-gitea-vet: ## lint go files with gitea-vet
 .PHONY: lint-go-gopls
 lint-go-gopls: ## lint go files with gopls
 	@echo "Running gopls check..."
-	@GO=$(GO) GOPLS_PACKAGE=$(GOPLS_PACKAGE) tools/lint-go-gopls.sh $(GO_SOURCES_NO_BINDATA)
+	@GO=$(GO) GOPLS_PACKAGE=$(GOPLS_PACKAGE) tools/lint-go-gopls.sh $(GO_SOURCES)
 
 .PHONY: lint-editorconfig
 lint-editorconfig:

@@ -816,6 +823,7 @@ deps-tools: ## install tool dependencies
 	$(GO) install $(GOVULNCHECK_PACKAGE) & \
 	$(GO) install $(ACTIONLINT_PACKAGE) & \
 	$(GO) install $(GOPLS_PACKAGE) & \
+	$(GO) install $(GOPLS_MODERNIZE_PACKAGE) & \
 	wait
 
 node_modules: package-lock.json
build.go (11 changed lines)

@@ -5,19 +5,10 @@
 package main
 
-// Libraries that are included to vendor utilities used during build.
+// Libraries that are included to vendor utilities used during Makefile build.
 // These libraries will not be included in a normal compilation.
 
 import (
-	// for embed
-	_ "github.com/shurcooL/vfsgen"
-
 	// for cover merge
 	_ "golang.org/x/tools/cover"
 
 	// for vet
 	_ "code.gitea.io/gitea-vet"
-
-	// for swagger
-	_ "github.com/go-swagger/go-swagger/cmd/swagger"
 )
@@ -6,87 +6,22 @@
 package main
 
 import (
-	"bytes"
-	"crypto/sha1"
 	"fmt"
-	"log"
-	"net/http"
 	"os"
-	"path/filepath"
-	"strconv"
 
-	"github.com/shurcooL/vfsgen"
+	"code.gitea.io/gitea/modules/assetfs"
 )
 
-func needsUpdate(dir, filename string) (bool, []byte) {
-	needRegen := false
-	_, err := os.Stat(filename)
-	if err != nil {
-		needRegen = true
-	}
-
-	oldHash, err := os.ReadFile(filename + ".hash")
-	if err != nil {
-		oldHash = []byte{}
-	}
-
-	hasher := sha1.New()
-
-	err = filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
-		if err != nil {
-			return err
-		}
-		info, err := d.Info()
-		if err != nil {
-			return err
-		}
-		_, _ = hasher.Write([]byte(d.Name()))
-		_, _ = hasher.Write([]byte(info.ModTime().String()))
-		_, _ = hasher.Write([]byte(strconv.FormatInt(info.Size(), 16)))
-		return nil
-	})
-	if err != nil {
-		return true, oldHash
-	}
-
-	newHash := hasher.Sum([]byte{})
-
-	if bytes.Compare(oldHash, newHash) != 0 {
-		return true, newHash
-	}
-
-	return needRegen, newHash
-}
-
 func main() {
-	if len(os.Args) < 4 {
-		log.Fatal("Insufficient number of arguments. Need: directory packageName filename")
+	if len(os.Args) != 3 {
+		fmt.Println("usage: ./generate-bindata {local-directory} {bindata-filename}")
+		os.Exit(1)
 	}
 
-	dir, packageName, filename := os.Args[1], os.Args[2], os.Args[3]
-	var useGlobalModTime bool
-	if len(os.Args) == 5 {
-		useGlobalModTime, _ = strconv.ParseBool(os.Args[4])
+	dir, filename := os.Args[1], os.Args[2]
+	fmt.Printf("generating bindata for %s to %s\n", dir, filename)
+	if err := assetfs.GenerateEmbedBindata(dir, filename); err != nil {
+		fmt.Printf("failed: %s\n", err.Error())
+		os.Exit(1)
 	}
-
-	update, newHash := needsUpdate(dir, filename)
-
-	if !update {
-		fmt.Printf("bindata for %s already up-to-date\n", packageName)
-		return
-	}
-
-	fmt.Printf("generating bindata for %s\n", packageName)
-	var fsTemplates http.FileSystem = http.Dir(dir)
-	err := vfsgen.Generate(fsTemplates, vfsgen.Options{
-		PackageName:      packageName,
-		BuildTags:        "bindata",
-		VariableName:     "Assets",
-		Filename:         filename,
-		UseGlobalModTime: useGlobalModTime,
-	})
-	if err != nil {
-		log.Fatalf("%v\n", err)
-	}
-	_ = os.WriteFile(filename+".hash", newHash, 0o666)
 }
@@ -39,12 +39,10 @@ func smtpCLIFlags() []cli.Flag {
 		&cli.BoolFlag{
 			Name: "force-smtps",
 			Usage: "SMTPS is always used on port 465. Set this to force SMTPS on other ports.",
-			Value: true,
 		},
 		&cli.BoolFlag{
 			Name: "skip-verify",
 			Usage: "Skip TLS verify.",
-			Value: true,
 		},
 		&cli.StringFlag{
 			Name: "helo-hostname",

@@ -54,7 +52,6 @@ func smtpCLIFlags() []cli.Flag {
 		&cli.BoolFlag{
 			Name: "disable-helo",
 			Usage: "Disable SMTP helo.",
-			Value: true,
 		},
 		&cli.StringFlag{
 			Name: "allowed-domains",

@@ -64,7 +61,6 @@ func smtpCLIFlags() []cli.Flag {
 		&cli.BoolFlag{
 			Name: "skip-local-2fa",
 			Usage: "Skip 2FA to log on.",
-			Value: true,
 		},
 		&cli.BoolFlag{
 			Name: "active",
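Dropping `Value: true` flips these booleans from default-on to default-off, so callers now opt in with the bare flag instead of opting out with `--flag=false`. A minimal sketch of that behavior, assuming urfave/cli v3's `Command.Bool` accessor; the `force-smtps` flag name comes from the hunk above, the surrounding command is illustrative only:

```go
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/urfave/cli/v3"
)

func main() {
	cmd := &cli.Command{
		Name: "demo",
		Flags: []cli.Flag{
			// Previously this flag carried "Value: true" and defaulted to on;
			// without it the default is false, so users pass --force-smtps to
			// enable it rather than --force-smtps=false to disable it.
			&cli.BoolFlag{Name: "force-smtps", Usage: "Force SMTPS on other ports."},
		},
		Action: func(_ context.Context, c *cli.Command) error {
			fmt.Println("force-smtps:", c.Bool("force-smtps"))
			return nil
		},
	}
	if err := cmd.Run(context.Background(), os.Args); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
```

The updated `--force-smtps` and `--skip-verify` test arguments elsewhere in this comparison follow the same opt-in pattern.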
@ -60,10 +60,8 @@ func TestAddSMTP(t *testing.T) {
|
|||
Auth: "PLAIN",
|
||||
Host: "localhost",
|
||||
Port: 25,
|
||||
// ForceSMTPS: true,
|
||||
// SkipVerify: true,
|
||||
},
|
||||
TwoFactorPolicy: "skip",
|
||||
TwoFactorPolicy: "",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -73,12 +71,12 @@ func TestAddSMTP(t *testing.T) {
|
|||
"--host", "localhost",
|
||||
"--port", "25",
|
||||
"--auth-type", "LOGIN",
|
||||
"--force-smtps=false",
|
||||
"--skip-verify=false",
|
||||
"--force-smtps",
|
||||
"--skip-verify",
|
||||
"--helo-hostname", "example.com",
|
||||
"--disable-helo=false",
|
||||
"--disable-helo=true",
|
||||
"--allowed-domains", "example.com,example.org",
|
||||
"--skip-local-2fa=false",
|
||||
"--skip-local-2fa",
|
||||
"--active=false",
|
||||
},
|
||||
source: &auth_model.Source{
|
||||
|
|
@ -89,13 +87,13 @@ func TestAddSMTP(t *testing.T) {
|
|||
Auth: "LOGIN",
|
||||
Host: "localhost",
|
||||
Port: 25,
|
||||
ForceSMTPS: false,
|
||||
SkipVerify: false,
|
||||
ForceSMTPS: true,
|
||||
SkipVerify: true,
|
||||
HeloHostname: "example.com",
|
||||
DisableHelo: false,
|
||||
DisableHelo: true,
|
||||
AllowedDomains: "example.com,example.org",
|
||||
},
|
||||
TwoFactorPolicy: "",
|
||||
TwoFactorPolicy: "skip",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
@ -157,13 +155,10 @@ func TestUpdateSMTP(t *testing.T) {
|
|||
Name: "old name",
|
||||
IsActive: true,
|
||||
Cfg: &smtp.Source{
|
||||
Auth: "PLAIN",
|
||||
Host: "old host",
|
||||
Port: 26,
|
||||
ForceSMTPS: true,
|
||||
SkipVerify: true,
|
||||
Auth: "PLAIN",
|
||||
Host: "old host",
|
||||
Port: 26,
|
||||
},
|
||||
TwoFactorPolicy: "",
|
||||
},
|
||||
args: []string{
|
||||
"--id", "1",
|
||||
|
|
@ -177,13 +172,10 @@ func TestUpdateSMTP(t *testing.T) {
|
|||
Name: "test",
|
||||
IsActive: true,
|
||||
Cfg: &smtp.Source{
|
||||
Auth: "PLAIN",
|
||||
Host: "localhost",
|
||||
Port: 25,
|
||||
ForceSMTPS: true,
|
||||
SkipVerify: true,
|
||||
Auth: "PLAIN",
|
||||
Host: "localhost",
|
||||
Port: 25,
|
||||
},
|
||||
TwoFactorPolicy: "skip",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -197,10 +189,7 @@ func TestUpdateSMTP(t *testing.T) {
|
|||
Auth: "PLAIN",
|
||||
Host: "old host",
|
||||
Port: 26,
|
||||
ForceSMTPS: true,
|
||||
SkipVerify: true,
|
||||
HeloHostname: "old.example.com",
|
||||
DisableHelo: false,
|
||||
AllowedDomains: "old.example.com",
|
||||
},
|
||||
TwoFactorPolicy: "",
|
||||
|
|
@ -211,12 +200,12 @@ func TestUpdateSMTP(t *testing.T) {
|
|||
"--host", "localhost",
|
||||
"--port", "25",
|
||||
"--auth-type", "LOGIN",
|
||||
"--force-smtps=false",
|
||||
"--skip-verify=false",
|
||||
"--force-smtps",
|
||||
"--skip-verify",
|
||||
"--helo-hostname", "example.com",
|
||||
"--disable-helo=true",
|
||||
"--disable-helo",
|
||||
"--allowed-domains", "example.com,example.org",
|
||||
"--skip-local-2fa=true",
|
||||
"--skip-local-2fa",
|
||||
"--active=false",
|
||||
},
|
||||
authSource: &auth_model.Source{
|
||||
|
|
@ -228,8 +217,8 @@ func TestUpdateSMTP(t *testing.T) {
|
|||
Auth: "LOGIN",
|
||||
Host: "localhost",
|
||||
Port: 25,
|
||||
ForceSMTPS: false,
|
||||
SkipVerify: false,
|
||||
ForceSMTPS: true,
|
||||
SkipVerify: true,
|
||||
HeloHostname: "example.com",
|
||||
DisableHelo: true,
|
||||
AllowedDomains: "example.com,example.org",
|
||||
|
|
@ -252,11 +241,8 @@ func TestUpdateSMTP(t *testing.T) {
|
|||
Name: "test",
|
||||
IsActive: true,
|
||||
Cfg: &smtp.Source{
|
||||
Auth: "PLAIN",
|
||||
SkipVerify: true,
|
||||
ForceSMTPS: true,
|
||||
Auth: "PLAIN",
|
||||
},
|
||||
TwoFactorPolicy: "skip",
|
||||
}, nil
|
||||
},
|
||||
|
||||
|
|
|
|||
|
|
@@ -156,8 +156,8 @@ func runCert(_ context.Context, c *cli.Command) error {
 		BasicConstraintsValid: true,
 	}
 
-	hosts := strings.Split(c.String("host"), ",")
-	for _, h := range hosts {
+	hosts := strings.SplitSeq(c.String("host"), ",")
+	for h := range hosts {
 		if ip := net.ParseIP(h); ip != nil {
 			template.IPAddresses = append(template.IPAddresses, ip)
 		} else {
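For reference, a small self-contained sketch of the `strings.Split` to `strings.SplitSeq` change applied above; `SplitSeq` (new in Go 1.24, consistent with the `go 1.24.4` bump in this comparison's go.mod) yields substrings lazily instead of allocating a slice. The host list value here is made up for illustration:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	hosts := "localhost,127.0.0.1,::1" // illustrative input only

	// Pre-1.24 form: Split builds a slice containing every part up front.
	for _, h := range strings.Split(hosts, ",") {
		fmt.Println(h)
	}

	// Go 1.24 form: SplitSeq returns an iterator, so parts are produced lazily.
	for h := range strings.SplitSeq(hosts, ",") {
		fmt.Println(h)
	}
}
```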
cmd/cmd.go (10 changed lines)

@@ -132,3 +132,13 @@ func PrepareConsoleLoggerLevel(defaultLevel log.Level) func(context.Context, *cl
 		return ctx, nil
 	}
 }
+
+func isValidDefaultSubCommand(cmd *cli.Command) (string, bool) {
+	// Dirty patch for urfave/cli's strange design.
+	// "./gitea bad-cmd" should not start the web server.
+	rootArgs := cmd.Root().Args().Slice()
+	if len(rootArgs) != 0 && rootArgs[0] != cmd.Name {
+		return rootArgs[0], false
+	}
+	return "", true
+}
@@ -0,0 +1,38 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+	"context"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/urfave/cli/v3"
+)
+
+func TestDefaultCommand(t *testing.T) {
+	test := func(t *testing.T, args []string, expectedRetName string, expectedRetValid bool) {
+		called := false
+		cmd := &cli.Command{
+			DefaultCommand: "test",
+			Commands: []*cli.Command{
+				{
+					Name: "test",
+					Action: func(ctx context.Context, command *cli.Command) error {
+						retName, retValid := isValidDefaultSubCommand(command)
+						assert.Equal(t, expectedRetName, retName)
+						assert.Equal(t, expectedRetValid, retValid)
+						called = true
+						return nil
+					},
+				},
+			},
+		}
+		assert.NoError(t, cmd.Run(t.Context(), args))
+		assert.True(t, called)
+	}
+	test(t, []string{"./gitea"}, "", true)
+	test(t, []string{"./gitea", "test"}, "", true)
+	test(t, []string{"./gitea", "other"}, "other", false)
+}
@@ -137,8 +137,8 @@ func runDumpRepository(ctx context.Context, cmd *cli.Command) error {
 		opts.PullRequests = true
 		opts.ReleaseAssets = true
 	} else {
-		units := strings.Split(cmd.String("units"), ",")
-		for _, unit := range units {
+		units := strings.SplitSeq(cmd.String("units"), ",")
+		for unit := range units {
 			switch strings.ToLower(strings.TrimSpace(unit)) {
 			case "":
 				continue
@@ -118,7 +118,7 @@ func initEmbeddedExtractor(c *cli.Command) error {
 
 func runList(_ context.Context, c *cli.Command) error {
 	if err := runListDo(c); err != nil {
-		fmt.Fprintf(os.Stderr, "%v\n", err)
+		_, _ = fmt.Fprintf(os.Stderr, "%v\n", err)
 		return err
 	}
 	return nil

@@ -126,7 +126,7 @@ func runList(_ context.Context, c *cli.Command) error {
 
 func runView(_ context.Context, c *cli.Command) error {
 	if err := runViewDo(c); err != nil {
-		fmt.Fprintf(os.Stderr, "%v\n", err)
+		_, _ = fmt.Fprintf(os.Stderr, "%v\n", err)
 		return err
 	}
 	return nil

@@ -134,7 +134,7 @@ func runView(_ context.Context, c *cli.Command) error {
 
 func runExtract(_ context.Context, c *cli.Command) error {
 	if err := runExtractDo(c); err != nil {
-		fmt.Fprintf(os.Stderr, "%v\n", err)
+		_, _ = fmt.Fprintf(os.Stderr, "%v\n", err)
 		return err
 	}
 	return nil

@@ -217,7 +217,7 @@ func runExtractDo(c *cli.Command) error {
 	for _, a := range matchedAssetFiles {
 		if err := extractAsset(destdir, a, overwrite, rename); err != nil {
 			// Non-fatal error
-			fmt.Fprintf(os.Stderr, "%s: %v", a.path, err)
+			_, _ = fmt.Fprintf(os.Stderr, "%s: %v\n", a.path, err)
 		}
 	}
 
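These hunks only make the discarded return values of `fmt.Fprintf` explicit. A tiny sketch of the idiom, with a placeholder error value:

```go
package main

import (
	"errors"
	"fmt"
	"os"
)

func main() {
	err := errors.New("asset not found") // placeholder error for the sketch

	// fmt.Fprintf returns (n int, err error); assigning both results to the
	// blank identifier documents that the write error is intentionally
	// ignored, which satisfies linters such as errcheck without changing behavior.
	_, _ = fmt.Fprintf(os.Stderr, "%v\n", err)
}
```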
@@ -480,7 +480,7 @@ func hookPrintResult(output, isCreate bool, branch, url string) {
 func pushOptions() map[string]string {
 	opts := make(map[string]string)
 	if pushCount, err := strconv.Atoi(os.Getenv(private.GitPushOptionCount)); err == nil {
-		for idx := 0; idx < pushCount; idx++ {
+		for idx := range pushCount {
 			opt := os.Getenv(fmt.Sprintf("GIT_PUSH_OPTION_%d", idx))
 			kv := strings.SplitN(opt, "=", 2)
 			if len(kv) == 2 {

@@ -732,7 +732,7 @@ func readPktLine(ctx context.Context, in *bufio.Reader, requestType pktLineType)
 
 	// read prefix
 	lengthBytes := make([]byte, 4)
-	for i := 0; i < 4; i++ {
+	for i := range 4 {
 		lengthBytes[i], err = in.ReadByte()
 		if err != nil {
 			return nil, fail(ctx, "Protocol: stdin error", "Pkt-Line: read stdin failed : %v", err)
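Both hunks above replace counted loops with Go 1.22+ integer `range` loops, which iterate from 0 up to n-1. A minimal sketch of the equivalence, using a made-up push-option count:

```go
package main

import "fmt"

func main() {
	pushCount := 3 // e.g. a value parsed from GIT_PUSH_OPTION_COUNT

	// Classic three-clause counting loop.
	for idx := 0; idx < pushCount; idx++ {
		fmt.Printf("GIT_PUSH_OPTION_%d\n", idx)
	}

	// Go 1.22+ equivalent: ranging over an integer yields 0..pushCount-1.
	for idx := range pushCount {
		fmt.Printf("GIT_PUSH_OPTION_%d\n", idx)
	}
}
```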
@@ -152,6 +152,8 @@ func NewMainApp(appVer AppVersion) *cli.Command {
 		CmdDocs,
 	}
 
+	// TODO: we should eventually drop the default command,
+	// but not sure whether it would break Windows users who used to double-click the EXE to run.
 	app.DefaultCommand = CmdWeb.Name
 
 	app.Flags = append(app.Flags, cli.VersionFlag)
@@ -119,7 +119,6 @@ var (
 			Name:    "rotate",
 			Aliases: []string{"r"},
 			Usage:   "Rotate logs",
-			Value:   true,
 		},
 		&cli.Int64Flag{
 			Name: "max-size",

@@ -130,7 +129,6 @@ var (
 			Name:    "daily",
 			Aliases: []string{"d"},
 			Usage:   "Rotate logs daily",
-			Value:   true,
 		},
 		&cli.IntFlag{
 			Name: "max-days",

@@ -141,7 +139,6 @@ var (
 			Name:    "compress",
 			Aliases: []string{"z"},
 			Usage:   "Compress rotated logs",
-			Value:   true,
 		},
 		&cli.IntFlag{
 			Name: "compression-level",
@@ -212,7 +212,7 @@ func runServ(ctx context.Context, c *cli.Command) error {
 	if git.DefaultFeatures().SupportProcReceive {
 		// for AGit Flow
 		if cmd == "ssh_info" {
-			fmt.Print(`{"type":"gitea","version":1}`)
+			fmt.Print(`{"type":"agit","version":1}`)
 			return nil
 		}
 	}
@@ -251,6 +251,10 @@ func runWeb(_ context.Context, cmd *cli.Command) error {
 		}
 	}()
 
+	if subCmdName, valid := isValidDefaultSubCommand(cmd); !valid {
+		return fmt.Errorf("unknown command: %s", subCmdName)
+	}
+
 	managerCtx, cancel := context.WithCancel(context.Background())
 	graceful.InitManager(managerCtx)
 	defer cancel()
@@ -337,8 +337,8 @@ func determineRemote(ctx context.Context, forkUser string) (string, string, erro
 		fmt.Fprintf(os.Stderr, "Unable to list git remotes:\n%s\n", string(out))
 		return "", "", fmt.Errorf("unable to determine forked remote: %w", err)
 	}
-	lines := strings.Split(string(out), "\n")
-	for _, line := range lines {
+	lines := strings.SplitSeq(string(out), "\n")
+	for line := range lines {
 		fields := strings.Split(line, "\t")
 		name, remote := fields[0], fields[1]
 		// only look at pushers

@@ -356,12 +356,12 @@ func determineRemote(ctx context.Context, forkUser string) (string, string, erro
 		if !strings.Contains(remote, forkUser) {
 			continue
 		}
-		if strings.HasPrefix(remote, "git@github.com:") {
-			forkUser = strings.TrimPrefix(remote, "git@github.com:")
-		} else if strings.HasPrefix(remote, "https://github.com/") {
-			forkUser = strings.TrimPrefix(remote, "https://github.com/")
-		} else if strings.HasPrefix(remote, "https://www.github.com/") {
-			forkUser = strings.TrimPrefix(remote, "https://www.github.com/")
+		if after, ok := strings.CutPrefix(remote, "git@github.com:"); ok {
+			forkUser = after
+		} else if after, ok := strings.CutPrefix(remote, "https://github.com/"); ok {
+			forkUser = after
+		} else if after, ok := strings.CutPrefix(remote, "https://www.github.com/"); ok {
+			forkUser = after
 		} else if forkUser == "" {
 			return "", "", fmt.Errorf("unable to extract forkUser from remote %s: %s", name, remote)
 		}
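The second hunk swaps the `HasPrefix`/`TrimPrefix` pairs for `strings.CutPrefix`, which reports whether the prefix matched and returns the remainder in one call. A standalone sketch with a made-up remote URL:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	remote := "git@github.com:someuser/gitea.git" // made-up remote for the sketch

	// Older pattern: check the prefix, then strip it with a second call.
	if strings.HasPrefix(remote, "git@github.com:") {
		fmt.Println(strings.TrimPrefix(remote, "git@github.com:"))
	}

	// CutPrefix does both at once: ok reports whether the prefix matched,
	// and after holds the string with the prefix removed.
	if after, ok := strings.CutPrefix(remote, "git@github.com:"); ok {
		fmt.Println(after)
	}
}
```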
@@ -1186,17 +1186,24 @@ LEVEL = Info
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;
-;; GPG key to use to sign commits, Defaults to the default - that is the value of git config --get user.signingkey
+;; GPG or SSH key to use to sign commits, Defaults to the default - that is the value of git config --get user.signingkey
+;; Depending on the value of SIGNING_FORMAT this is either:
+;; - openpgp: the GPG key ID
+;; - ssh: the path to the ssh public key "/path/to/key.pub": where "/path/to/key" is the private key, use ssh-keygen -t ed25519 to generate a new key pair without password
 ;; run in the context of the RUN_USER
 ;; Switch to none to stop signing completely
 ;SIGNING_KEY = default
 ;;
-;; If a SIGNING_KEY ID is provided and is not set to default, use the provided Name and Email address as the signer.
+;; If a SIGNING_KEY ID is provided and is not set to default, use the provided Name and Email address as the signer and the signing format.
 ;; These should match a publicized name and email address for the key. (When SIGNING_KEY is default these are set to
-;; the results of git config --get user.name and git config --get user.email respectively and can only be overridden
+;; the results of git config --get user.name, git config --get user.email and git config --default openpgp --get gpg.format respectively and can only be overridden
 ;; by setting the SIGNING_KEY ID to the correct ID.)
 ;SIGNING_NAME =
 ;SIGNING_EMAIL =
+;; SIGNING_FORMAT can be one of:
+;; - openpgp (default): use GPG to sign commits
+;; - ssh: use SSH to sign commits
+;SIGNING_FORMAT = openpgp
 ;;
 ;; Sets the default trust model for repositories. Options are: collaborator, committer, collaboratorcommitter
 ;DEFAULT_TRUST_MODEL = collaborator

@@ -1223,6 +1230,13 @@ LEVEL = Info
 ;; - commitssigned: require that all the commits in the head branch are signed.
 ;; - approved: only sign when merging an approved pr to a protected branch
 ;MERGES = pubkey, twofa, basesigned, commitssigned
+;;
+;; Determines which additional ssh keys are trusted for all signed commits regardless of the user
+;; This is useful for ssh signing key rotation.
+;; Exposes the provided SIGNING_NAME and SIGNING_EMAIL as the signer, regardless of the SIGNING_FORMAT value.
+;; Multiple keys should be comma separated.
+;; E.g."ssh-<algorithm> <key>". or "ssh-<algorithm> <key1>, ssh-<algorithm> <key2>".
+;TRUSTED_SSH_KEYS =
 
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|||
49
go.mod
49
go.mod
|
|
@ -1,6 +1,6 @@
|
|||
module code.gitea.io/gitea
|
||||
|
||||
go 1.24.2
|
||||
go 1.24.4
|
||||
|
||||
// rfc5280 said: "The serial number is an integer assigned by the CA to each certificate."
|
||||
// But some CAs use negative serial number, just relax the check. related:
|
||||
|
|
@ -51,7 +51,7 @@ require (
|
|||
github.com/gliderlabs/ssh v0.3.8
|
||||
github.com/go-ap/activitypub v0.0.0-20250409143848-7113328b1f3d
|
||||
github.com/go-ap/jsonld v0.0.0-20221030091449-f2a191312c73
|
||||
github.com/go-chi/chi/v5 v5.2.1
|
||||
github.com/go-chi/chi/v5 v5.2.2
|
||||
github.com/go-chi/cors v1.2.1
|
||||
github.com/go-co-op/gocron v1.37.0
|
||||
github.com/go-enry/go-enry/v2 v2.9.2
|
||||
|
|
@ -60,7 +60,6 @@ require (
|
|||
github.com/go-ldap/ldap/v3 v3.4.11
|
||||
github.com/go-redsync/redsync/v4 v4.13.0
|
||||
github.com/go-sql-driver/mysql v1.9.2
|
||||
github.com/go-swagger/go-swagger v0.31.0
|
||||
github.com/go-webauthn/webauthn v0.12.3
|
||||
github.com/gobwas/glob v0.2.3
|
||||
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f
|
||||
|
|
@ -92,7 +91,7 @@ require (
|
|||
github.com/minio/minio-go/v7 v7.0.91
|
||||
github.com/msteinert/pam v1.2.0
|
||||
github.com/nektos/act v0.2.63
|
||||
github.com/niklasfasching/go-org v1.7.0
|
||||
github.com/niklasfasching/go-org v1.8.0
|
||||
github.com/olivere/elastic/v7 v7.0.32
|
||||
github.com/opencontainers/go-digest v1.0.0
|
||||
github.com/opencontainers/image-spec v1.1.1
|
||||
|
|
@ -105,7 +104,6 @@ require (
|
|||
github.com/santhosh-tekuri/jsonschema/v5 v5.3.1
|
||||
github.com/sassoftware/go-rpmutils v0.4.0
|
||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3
|
||||
github.com/shurcooL/vfsgen v0.0.0-20230704071429-0000e147ea92
|
||||
github.com/stretchr/testify v1.10.0
|
||||
github.com/syndtr/goleveldb v1.0.0
|
||||
github.com/tstranex/u2f v1.0.0
|
||||
|
|
@ -126,7 +124,6 @@ require (
|
|||
golang.org/x/sync v0.15.0
|
||||
golang.org/x/sys v0.33.0
|
||||
golang.org/x/text v0.26.0
|
||||
golang.org/x/tools v0.33.0
|
||||
google.golang.org/grpc v1.72.0
|
||||
google.golang.org/protobuf v1.36.6
|
||||
gopkg.in/ini.v1 v1.67.0
|
||||
|
|
@ -144,15 +141,11 @@ require (
|
|||
git.sr.ht/~mariusor/go-xsd-duration v0.0.0-20220703122237-02e73435a078 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
||||
github.com/DataDog/zstd v1.5.7 // indirect
|
||||
github.com/Masterminds/goutils v1.1.1 // indirect
|
||||
github.com/Masterminds/semver/v3 v3.3.1 // indirect
|
||||
github.com/Masterminds/sprig/v3 v3.3.0 // indirect
|
||||
github.com/Microsoft/go-winio v0.6.2 // indirect
|
||||
github.com/RoaringBitmap/roaring/v2 v2.4.5 // indirect
|
||||
github.com/andybalholm/brotli v1.1.1 // indirect
|
||||
github.com/andybalholm/cascadia v1.3.3 // indirect
|
||||
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
||||
github.com/aws/aws-sdk-go-v2 v1.36.3 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 // indirect
|
||||
|
|
@ -195,7 +188,6 @@ require (
|
|||
github.com/dlclark/regexp2 v1.11.5 // indirect
|
||||
github.com/emersion/go-sasl v0.0.0-20241020182733-b788ff22d5a6 // indirect
|
||||
github.com/fatih/color v1.18.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/fxamacker/cbor/v2 v2.8.0 // indirect
|
||||
github.com/git-lfs/pktline v0.0.0-20230103162542-ca444d533ef1 // indirect
|
||||
github.com/go-ap/errors v0.0.0-20250409143711-5686c11ae650 // indirect
|
||||
|
|
@ -204,18 +196,6 @@ require (
|
|||
github.com/go-fed/httpsig v1.1.1-0.20201223112313-55836744818e // indirect
|
||||
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
|
||||
github.com/go-ini/ini v1.67.0 // indirect
|
||||
github.com/go-openapi/analysis v0.23.0 // indirect
|
||||
github.com/go-openapi/errors v0.22.1 // indirect
|
||||
github.com/go-openapi/inflect v0.21.2 // indirect
|
||||
github.com/go-openapi/jsonpointer v0.21.1 // indirect
|
||||
github.com/go-openapi/jsonreference v0.21.0 // indirect
|
||||
github.com/go-openapi/loads v0.22.0 // indirect
|
||||
github.com/go-openapi/runtime v0.28.0 // indirect
|
||||
github.com/go-openapi/spec v0.21.0 // indirect
|
||||
github.com/go-openapi/strfmt v0.23.0 // indirect
|
||||
github.com/go-openapi/swag v0.23.1 // indirect
|
||||
github.com/go-openapi/validate v0.24.0 // indirect
|
||||
github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
|
||||
github.com/go-webauthn/x v0.1.20 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/golang-jwt/jwt/v4 v4.5.2 // indirect
|
||||
|
|
@ -229,7 +209,6 @@ require (
|
|||
github.com/google/go-querystring v1.1.0 // indirect
|
||||
github.com/google/go-tpm v0.9.3 // indirect
|
||||
github.com/gorilla/css v1.0.1 // indirect
|
||||
github.com/gorilla/handlers v1.5.2 // indirect
|
||||
github.com/gorilla/mux v1.8.1 // indirect
|
||||
github.com/gorilla/securecookie v1.1.2 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
|
|
@ -237,12 +216,9 @@ require (
|
|||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
|
||||
github.com/jessevdk/go-flags v1.6.1 // indirect
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/kevinburke/ssh_config v1.2.0 // indirect
|
||||
github.com/klauspost/pgzip v1.2.6 // indirect
|
||||
github.com/kr/pretty v0.3.1 // indirect
|
||||
github.com/kr/text v0.2.0 // indirect
|
||||
github.com/libdns/libdns v1.0.0-beta.1 // indirect
|
||||
github.com/mailru/easyjson v0.9.0 // indirect
|
||||
github.com/markbates/going v1.0.3 // indirect
|
||||
|
|
@ -253,19 +229,15 @@ require (
|
|||
github.com/miekg/dns v1.1.65 // indirect
|
||||
github.com/minio/crc64nvme v1.0.1 // indirect
|
||||
github.com/minio/md5-simd v1.1.2 // indirect
|
||||
github.com/mitchellh/copystructure v1.2.0 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||
github.com/mitchellh/reflectwalk v1.0.2 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450 // indirect
|
||||
github.com/mschoch/smat v0.2.0 // indirect
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||
github.com/nwaples/rardecode v1.1.3 // indirect
|
||||
github.com/oklog/ulid v1.3.1 // indirect
|
||||
github.com/olekukonko/tablewriter v0.0.5 // indirect
|
||||
github.com/onsi/ginkgo v1.16.5 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.22 // indirect
|
||||
github.com/pjbgf/sha1cd v0.3.2 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||
|
|
@ -274,22 +246,11 @@ require (
|
|||
github.com/prometheus/procfs v0.16.1 // indirect
|
||||
github.com/rhysd/actionlint v1.7.7 // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
||||
github.com/rs/xid v1.6.0 // indirect
|
||||
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
||||
github.com/sagikazarmark/locafero v0.9.0 // indirect
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c // indirect
|
||||
github.com/sirupsen/logrus v1.9.3 // indirect
|
||||
github.com/skeema/knownhosts v1.3.1 // indirect
|
||||
github.com/sourcegraph/conc v0.3.0 // indirect
|
||||
github.com/spf13/afero v1.14.0 // indirect
|
||||
github.com/spf13/cast v1.7.1 // indirect
|
||||
github.com/spf13/pflag v1.0.6 // indirect
|
||||
github.com/spf13/viper v1.20.1 // indirect
|
||||
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect
|
||||
github.com/subosito/gotenv v1.6.0 // indirect
|
||||
github.com/toqueteos/webbrowser v1.2.0 // indirect
|
||||
github.com/unknwon/com v1.0.1 // indirect
|
||||
github.com/valyala/fastjson v1.6.4 // indirect
|
||||
github.com/x448/float16 v0.8.4 // indirect
|
||||
|
|
@ -300,7 +261,6 @@ require (
|
|||
github.com/zeebo/assert v1.3.0 // indirect
|
||||
github.com/zeebo/blake3 v0.2.4 // indirect
|
||||
go.etcd.io/bbolt v1.4.0 // indirect
|
||||
go.mongodb.org/mongo-driver v1.17.3 // indirect
|
||||
go.uber.org/atomic v1.11.0 // indirect
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
go.uber.org/zap v1.27.0 // indirect
|
||||
|
|
@ -308,6 +268,7 @@ require (
|
|||
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect
|
||||
golang.org/x/mod v0.25.0 // indirect
|
||||
golang.org/x/time v0.11.0 // indirect
|
||||
golang.org/x/tools v0.33.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250422160041-2d3770c4ea7f // indirect
|
||||
gopkg.in/warnings.v0 v0.1.2 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
|
|
@ -315,8 +276,6 @@ require (
|
|||
|
||||
replace github.com/hashicorp/go-version => github.com/6543/go-version v1.3.1
|
||||
|
||||
replace github.com/shurcooL/vfsgen => github.com/lunny/vfsgen v0.0.0-20220105142115-2c99e1ffdfa0
|
||||
|
||||
replace github.com/nektos/act => gitea.com/gitea/act v0.261.6
|
||||
|
||||
// TODO: the only difference is in `PutObject`: the fork doesn't use `NewVerifyingReader(r, sha256.New(), oid, expectedSize)`, need to figure out why
|
||||
|
|
|
|||
go.sum (83 changes)
@ -62,12 +62,6 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03
|
|||
github.com/DataDog/zstd v1.5.7 h1:ybO8RBeh29qrxIhCA9E8gKY6xfONU9T6G6aP9DTKfLE=
|
||||
github.com/DataDog/zstd v1.5.7/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw=
|
||||
github.com/Julusian/godocdown v0.0.0-20170816220326-6d19f8ff2df8/go.mod h1:INZr5t32rG59/5xeltqoCJoNY7e5x/3xoY9WSWVWg74=
|
||||
github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
|
||||
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
|
||||
github.com/Masterminds/semver/v3 v3.3.1 h1:QtNSWtVZ3nBfk8mAOu/B6v7FMJ+NHTIgUPi7rj+4nv4=
|
||||
github.com/Masterminds/semver/v3 v3.3.1/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
|
||||
github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe3tPhs=
|
||||
github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0=
|
||||
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
|
||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||
|
|
@ -103,8 +97,6 @@ github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuW
|
|||
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
|
||||
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
|
||||
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so=
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
|
||||
github.com/aws/aws-sdk-go-v2 v1.36.3 h1:mJoei2CxPutQVxaATCzDUjcZEjVRdpsiiXi2o38yqWM=
|
||||
github.com/aws/aws-sdk-go-v2 v1.36.3/go.mod h1:LLXuLpgzEbD766Z5ECcRmi8AzSwfZItDtmABVkRLGzg=
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.17.67 h1:9KxtdcIA/5xPNQyZRgUSpYOE6j9Bc4+D7nZua0KGYOM=
|
||||
|
|
@ -274,12 +266,8 @@ github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
|
|||
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
|
||||
github.com/felixge/fgprof v0.9.5 h1:8+vR6yu2vvSKn08urWyEuxx75NWPEvybbkBirEpsbVY=
|
||||
github.com/felixge/fgprof v0.9.5/go.mod h1:yKl+ERSa++RYOs32d8K6WEXCB4uXdLls4ZaZPpayhMM=
|
||||
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
|
||||
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
|
||||
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
|
||||
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
|
||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
|
||||
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
|
||||
|
|
@ -301,8 +289,8 @@ github.com/go-ap/jsonld v0.0.0-20221030091449-f2a191312c73/go.mod h1:jyveZeGw5La
|
|||
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 h1:BP4M0CvQ4S3TGls2FvczZtj5Re/2ZzkV9VwqPHH/3Bo=
|
||||
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0=
|
||||
github.com/go-chi/chi/v5 v5.0.1/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
|
||||
github.com/go-chi/chi/v5 v5.2.1 h1:KOIHODQj58PmL80G2Eak4WdvUzjSJSm0vG72crDCqb8=
|
||||
github.com/go-chi/chi/v5 v5.2.1/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
|
||||
github.com/go-chi/chi/v5 v5.2.2 h1:CMwsvRVTbXVytCk1Wd72Zy1LAsAh9GxMmSNWLHCG618=
|
||||
github.com/go-chi/chi/v5 v5.2.2/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
|
||||
github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4=
|
||||
github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58=
|
||||
github.com/go-co-op/gocron v1.37.0 h1:ZYDJGtQ4OMhTLKOKMIch+/CY70Brbb1dGdooLEhh7b0=
|
||||
|
|
@ -325,28 +313,6 @@ github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
|
|||
github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
|
||||
github.com/go-ldap/ldap/v3 v3.4.11 h1:4k0Yxweg+a3OyBLjdYn5OKglv18JNvfDykSoI8bW0gU=
|
||||
github.com/go-ldap/ldap/v3 v3.4.11/go.mod h1:bY7t0FLK8OAVpp/vV6sSlpz3EQDGcQwc8pF0ujLgKvM=
|
||||
github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU=
|
||||
github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo=
|
||||
github.com/go-openapi/errors v0.22.1 h1:kslMRRnK7NCb/CvR1q1VWuEQCEIsBGn5GgKD9e+HYhU=
|
||||
github.com/go-openapi/errors v0.22.1/go.mod h1:+n/5UdIqdVnLIJ6Q9Se8HNGUXYaY6CN8ImWzfi/Gzp0=
|
||||
github.com/go-openapi/inflect v0.21.2 h1:0gClGlGcxifcJR56zwvhaOulnNgnhc4qTAkob5ObnSM=
|
||||
github.com/go-openapi/inflect v0.21.2/go.mod h1:INezMuUu7SJQc2AyR3WO0DqqYUJSj8Kb4hBd7WtjlAw=
|
||||
github.com/go-openapi/jsonpointer v0.21.1 h1:whnzv/pNXtK2FbX/W9yJfRmE2gsmkfahjMKB0fZvcic=
|
||||
github.com/go-openapi/jsonpointer v0.21.1/go.mod h1:50I1STOfbY1ycR8jGz8DaMeLCdXiI6aDteEdRNNzpdk=
|
||||
github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ=
|
||||
github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4=
|
||||
github.com/go-openapi/loads v0.22.0 h1:ECPGd4jX1U6NApCGG1We+uEozOAvXvJSF4nnwHZ8Aco=
|
||||
github.com/go-openapi/loads v0.22.0/go.mod h1:yLsaTCS92mnSAZX5WWoxszLj0u+Ojl+Zs5Stn1oF+rs=
|
||||
github.com/go-openapi/runtime v0.28.0 h1:gpPPmWSNGo214l6n8hzdXYhPuJcGtziTOgUpvsFWGIQ=
|
||||
github.com/go-openapi/runtime v0.28.0/go.mod h1:QN7OzcS+XuYmkQLw05akXk0jRH/eZ3kb18+1KwW9gyc=
|
||||
github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY=
|
||||
github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk=
|
||||
github.com/go-openapi/strfmt v0.23.0 h1:nlUS6BCqcnAk0pyhi9Y+kdDVZdZMHfEKQiS4HaMgO/c=
|
||||
github.com/go-openapi/strfmt v0.23.0/go.mod h1:NrtIpfKtWIygRkKVsxh7XQMDQW5HKQl6S5ik2elW+K4=
|
||||
github.com/go-openapi/swag v0.23.1 h1:lpsStH0n2ittzTnbaSloVZLuB5+fvSY/+hnagBjSNZU=
|
||||
github.com/go-openapi/swag v0.23.1/go.mod h1:STZs8TbRvEQQKUA+JZNAm3EWlgaOBGpyFDqQnDHMef0=
|
||||
github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58=
|
||||
github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ=
|
||||
github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg=
|
||||
github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
|
||||
github.com/go-redis/redis/v7 v7.4.1 h1:PASvf36gyUpr2zdOUS/9Zqc80GbM+9BDyiJSJDDOrTI=
|
||||
|
|
@ -357,13 +323,9 @@ github.com/go-redsync/redsync/v4 v4.13.0 h1:49X6GJfnbLGaIpBBREM/zA4uIMDXKAh1NDkv
|
|||
github.com/go-redsync/redsync/v4 v4.13.0/go.mod h1:HMW4Q224GZQz6x1Xc7040Yfgacukdzu7ifTDAKiyErQ=
|
||||
github.com/go-sql-driver/mysql v1.9.2 h1:4cNKDYQ1I84SXslGddlsrMhc8k4LeDVj6Ad6WRjiHuU=
|
||||
github.com/go-sql-driver/mysql v1.9.2/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
|
||||
github.com/go-swagger/go-swagger v0.31.0 h1:H8eOYQnY2u7vNKWDNykv2xJP3pBhRG/R+SOCAmKrLlc=
|
||||
github.com/go-swagger/go-swagger v0.31.0/go.mod h1:WSigRRWEig8zV6t6Sm8Y+EmUjlzA/HoaZJ5edupq7po=
|
||||
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
|
||||
github.com/go-test/deep v1.1.0 h1:WOcxcdHcvdgThNXjw0t76K42FXTU7HpNQWHpA2HHNlg=
|
||||
github.com/go-test/deep v1.1.0/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
|
||||
github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
|
||||
github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
|
||||
github.com/go-webauthn/webauthn v0.12.3 h1:hHQl1xkUuabUU9uS+ISNCMLs9z50p9mDUZI/FmkayNE=
|
||||
github.com/go-webauthn/webauthn v0.12.3/go.mod h1:4JRe8Z3W7HIw8NGEWn2fnUwecoDzkkeach/NnvhkqGY=
|
||||
github.com/go-webauthn/x v0.1.20 h1:brEBDqfiPtNNCdS/peu8gARtq8fIPsHz0VzpPjGvgiw=
|
||||
|
|
@ -446,8 +408,6 @@ github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
|
|||
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
|
||||
github.com/gorilla/feeds v1.2.0 h1:O6pBiXJ5JHhPvqy53NsjKOThq+dNFm8+DFrxBEdzSCc=
|
||||
github.com/gorilla/feeds v1.2.0/go.mod h1:WMib8uJP3BbY+X8Szd1rA5Pzhdfh+HCCAYT2z7Fza6Y=
|
||||
github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE=
|
||||
github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w=
|
||||
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/gorilla/pat v0.0.0-20180118222023-199c85a7f6d1 h1:LqbZZ9sNMWVjeXS4NN5oVvhMjDyLhmA1LG86oSo+IqY=
|
||||
|
|
@ -497,8 +457,6 @@ github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh6
|
|||
github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs=
|
||||
github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY=
|
||||
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
|
||||
github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4=
|
||||
github.com/jessevdk/go-flags v1.6.1/go.mod h1:Mk8T1hIAWpOiJiHa9rJASDK2UGWji0EuPGBnNLMooyc=
|
||||
github.com/jhillyerd/enmime v1.3.0 h1:LV5kzfLidiOr8qRGIpYYmUZCnhrPbcFAnAFUnWn99rw=
|
||||
github.com/jhillyerd/enmime v1.3.0/go.mod h1:6c6jg5HdRRV2FtvVL69LjiX1M8oE0xDX9VEhV3oy4gs=
|
||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||
|
|
@ -540,8 +498,6 @@ github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
|||
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||
github.com/libdns/libdns v1.0.0-beta.1 h1:KIf4wLfsrEpXpZ3vmc/poM8zCATXT2klbdPe6hyOBjQ=
|
||||
github.com/libdns/libdns v1.0.0-beta.1/go.mod h1:4Bj9+5CQiNMVGf87wjX4CY3HQJypUHRuLvlsfsZqLWQ=
|
||||
github.com/lunny/vfsgen v0.0.0-20220105142115-2c99e1ffdfa0 h1:F/3FfGmKdiKFa8kL3YrpZ7pe9H4l4AzA1pbaOUnRvPI=
|
||||
github.com/lunny/vfsgen v0.0.0-20220105142115-2c99e1ffdfa0/go.mod h1:JEfTc3+2DF9Z4PXhLLvXL42zexJyh8rIq3OzUj/0rAk=
|
||||
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
|
||||
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||
github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4=
|
||||
|
|
@ -577,14 +533,10 @@ github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
|
|||
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
|
||||
github.com/minio/minio-go/v7 v7.0.91 h1:tWLZnEfo3OZl5PoXQwcwTAPNNrjyWwOh6cbZitW5JQc=
|
||||
github.com/minio/minio-go/v7 v7.0.91/go.mod h1:uvMUcGrpgeSAAI6+sD3818508nUyMULw94j2Nxku/Go=
|
||||
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
|
||||
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
|
||||
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
|
||||
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
|
||||
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
|
|
@ -599,16 +551,14 @@ github.com/msteinert/pam v1.2.0 h1:mYfjlvN2KYs2Pb9G6nb/1f/nPfAttT/Jee5Sq9r3bGE=
|
|||
github.com/msteinert/pam v1.2.0/go.mod h1:d2n0DCUK8rGecChV3JzvmsDjOY4R7AYbsNxAT+ftQl0=
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
|
||||
github.com/niklasfasching/go-org v1.7.0 h1:vyMdcMWWTe/XmANk19F4k8XGBYg0GQ/gJGMimOjGMek=
|
||||
github.com/niklasfasching/go-org v1.7.0/go.mod h1:WuVm4d45oePiE0eX25GqTDQIt/qPW1T9DGkRscqLW5o=
|
||||
github.com/niklasfasching/go-org v1.8.0 h1:WyGLaajLLp8JbQzkmapZ1y0MOzKuKV47HkZRloi+HGY=
|
||||
github.com/niklasfasching/go-org v1.8.0/go.mod h1:e2A9zJs7cdONrEGs3gvxCcaAEpwwPNPG7csDpXckMNg=
|
||||
github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
|
||||
github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc=
|
||||
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
|
||||
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
|
||||
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
|
||||
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
|
||||
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
|
||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
|
||||
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
|
||||
github.com/olivere/elastic/v7 v7.0.32 h1:R7CXvbu8Eq+WlsLgxmKVKPox0oOwAE/2T9Si5BnvK6E=
|
||||
|
|
@ -629,8 +579,6 @@ github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJw
|
|||
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
|
||||
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
|
||||
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||
github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
|
||||
github.com/pierrec/lz4/v4 v4.1.2/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
|
||||
|
|
@ -674,7 +622,6 @@ github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
|||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
||||
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
|
||||
github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o=
|
||||
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
|
||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||
github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
|
||||
|
|
@ -682,8 +629,6 @@ github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
|
|||
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/sagikazarmark/locafero v0.9.0 h1:GbgQGNtTrEmddYDSAH9QLRyfAHY12md+8YFTqyMTC9k=
|
||||
github.com/sagikazarmark/locafero v0.9.0/go.mod h1:UBUyz37V+EdMS3hDF3QWIiVr/2dPrx49OMO0Bn0hJqk=
|
||||
github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4=
|
||||
github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY=
|
||||
github.com/sassoftware/go-rpmutils v0.4.0 h1:ojND82NYBxgwrV+mX1CWsd5QJvvEZTKddtCdFLPWhpg=
|
||||
|
|
@ -692,10 +637,6 @@ github.com/serenize/snaker v0.0.0-20171204205717-a683aaf2d516/go.mod h1:Yow6lPLS
|
|||
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
|
||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
|
||||
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
|
||||
github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c h1:aqg5Vm5dwtvL+YgDpBcK1ITf3o96N/K7/wsRXQnUTEs=
|
||||
github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c/go.mod h1:owqhoLW1qZoYLZzLnBw+QkPP9WZnjlSWihhxAJC1+/M=
|
||||
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
|
||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
|
|
@ -707,22 +648,12 @@ github.com/smartystreets/assertions v1.1.1/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYl
|
|||
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
|
||||
github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337 h1:WN9BUFbdyOsSH/XohnWpXOlq9NBD5sGAB2FciQMUEe8=
|
||||
github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
|
||||
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
|
||||
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
|
||||
github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
|
||||
github.com/spf13/afero v1.14.0 h1:9tH6MapGnn/j0eb0yIXiLjERO8RB6xIVZRDCX7PtqWA=
|
||||
github.com/spf13/afero v1.14.0/go.mod h1:acJQ8t0ohCGuMN3O+Pv0V0hgMxNYDlvdk+VTfyZmbYo=
|
||||
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
||||
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
|
||||
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
|
||||
github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
|
||||
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
|
||||
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
|
||||
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
|
||||
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
|
||||
github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4=
|
||||
github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4=
|
||||
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf h1:pvbZ0lM0XWPBqUKqFU8cmavspvIl9nulOYwdy6IFRRo=
|
||||
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf/go.mod h1:RJID2RhlZKId02nZ62WenDCkgHFerpIOmW0iT7GKmXM=
|
||||
github.com/stephens2424/writerset v1.0.2/go.mod h1:aS2JhsMn6eA7e82oNmW4rfsgAOp9COBTTl8mzkwADnc=
|
||||
|
|
@ -745,13 +676,9 @@ github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOf
|
|||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/stvp/tempredis v0.0.0-20181119212430-b82af8480203 h1:QVqDTf3h2WHt08YuiTGPZLls0Wq99X9bWd0Q5ZSBesM=
|
||||
github.com/stvp/tempredis v0.0.0-20181119212430-b82af8480203/go.mod h1:oqN97ltKNihBbwlX8dLpwxCl3+HnXKV/R0e+sRLd9C8=
|
||||
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
|
||||
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
|
||||
github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE=
|
||||
github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ=
|
||||
github.com/tinylib/msgp v1.1.0/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
|
||||
github.com/toqueteos/webbrowser v1.2.0 h1:tVP/gpK69Fx+qMJKsLE7TD8LuGWPnEV71wBN9rrstGQ=
|
||||
github.com/toqueteos/webbrowser v1.2.0/go.mod h1:XWoZq4cyp9WeUeak7w7LXRUQf1F1ATJMir8RTqb4ayM=
|
||||
github.com/tstranex/u2f v1.0.0 h1:HhJkSzDDlVSVIVt7pDJwCHQj67k7A5EeBgPmeD+pVsQ=
|
||||
github.com/tstranex/u2f v1.0.0/go.mod h1:eahSLaqAS0zsIEv80+vXT7WanXs7MQQDg3j3wGBSayo=
|
||||
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
|
||||
|
|
@ -809,8 +736,6 @@ gitlab.com/gitlab-org/api/client-go v0.127.0/go.mod h1:bYC6fPORKSmtuPRyD9Z2rtbAj
|
|||
go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
|
||||
go.etcd.io/bbolt v1.4.0 h1:TU77id3TnN/zKr7CO/uk+fBCwF2jGcMuw2B/FMAzYIk=
|
||||
go.etcd.io/bbolt v1.4.0/go.mod h1:AsD+OCi/qPN1giOX1aiLAha3o1U8rAz65bvN4j0sRuk=
|
||||
go.mongodb.org/mongo-driver v1.17.3 h1:TQyXhnsWfWtgAhMtOgtYHMTkZIfBTpMTsMnd9ZBeHxQ=
|
||||
go.mongodb.org/mongo-driver v1.17.3/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
|
||||
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
|
||||
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
|
||||
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
|
||||
|
|
|
|||
|
|
@@ -166,6 +166,17 @@ func (run *ActionRun) GetPullRequestEventPayload() (*api.PullRequestPayload, error) {
    return nil, fmt.Errorf("event %s is not a pull request event", run.Event)
}

func (run *ActionRun) GetWorkflowRunEventPayload() (*api.WorkflowRunPayload, error) {
    if run.Event == webhook_module.HookEventWorkflowRun {
        var payload api.WorkflowRunPayload
        if err := json.Unmarshal([]byte(run.EventPayload), &payload); err != nil {
            return nil, err
        }
        return &payload, nil
    }
    return nil, fmt.Errorf("event %s is not a workflow run event", run.Event)
}

func (run *ActionRun) IsSchedule() bool {
    return run.ScheduleID > 0
}
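The new GetWorkflowRunEventPayload above mirrors the pull request getter: check the trigger event, then unmarshal the JSON stored in EventPayload. A self-contained sketch of that pattern (the payload struct, field set, and event name below are stand-ins, not the real api types):

package main

import (
    "encoding/json"
    "fmt"
)

// workflowRunPayload stands in for api.WorkflowRunPayload, which has many more fields.
type workflowRunPayload struct {
    Action string `json:"action"`
}

// run mirrors the two ActionRun fields the getter relies on.
type run struct {
    Event        string
    EventPayload string
}

// getWorkflowRunEventPayload only unmarshals when the trigger event matches,
// otherwise it reports an error, like the method in the diff above.
func getWorkflowRunEventPayload(r run) (*workflowRunPayload, error) {
    if r.Event != "workflow_run" {
        return nil, fmt.Errorf("event %s is not a workflow run event", r.Event)
    }
    var payload workflowRunPayload
    if err := json.Unmarshal([]byte(r.EventPayload), &payload); err != nil {
        return nil, err
    }
    return &payload, nil
}

func main() {
    p, err := getWorkflowRunEventPayload(run{Event: "workflow_run", EventPayload: `{"action":"completed"}`})
    fmt.Println(p.Action, err) // completed <nil>
}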
|
||||
|
|
|
|||
|
|
@@ -80,22 +80,31 @@ type FindRunJobOptions struct {
func (opts FindRunJobOptions) ToConds() builder.Cond {
    cond := builder.NewCond()
    if opts.RunID > 0 {
        cond = cond.And(builder.Eq{"run_id": opts.RunID})
        cond = cond.And(builder.Eq{"`action_run_job`.run_id": opts.RunID})
    }
    if opts.RepoID > 0 {
        cond = cond.And(builder.Eq{"repo_id": opts.RepoID})
    }
    if opts.OwnerID > 0 {
        cond = cond.And(builder.Eq{"owner_id": opts.OwnerID})
        cond = cond.And(builder.Eq{"`action_run_job`.repo_id": opts.RepoID})
    }
    if opts.CommitSHA != "" {
        cond = cond.And(builder.Eq{"commit_sha": opts.CommitSHA})
        cond = cond.And(builder.Eq{"`action_run_job`.commit_sha": opts.CommitSHA})
    }
    if len(opts.Statuses) > 0 {
        cond = cond.And(builder.In("status", opts.Statuses))
        cond = cond.And(builder.In("`action_run_job`.status", opts.Statuses))
    }
    if opts.UpdatedBefore > 0 {
        cond = cond.And(builder.Lt{"updated": opts.UpdatedBefore})
        cond = cond.And(builder.Lt{"`action_run_job`.updated": opts.UpdatedBefore})
    }
    return cond
}

func (opts FindRunJobOptions) ToJoins() []db.JoinFunc {
    if opts.OwnerID > 0 {
        return []db.JoinFunc{
            func(sess db.Engine) error {
                sess.Join("INNER", "repository", "repository.id = repo_id AND repository.owner_id = ?", opts.OwnerID)
                return nil
            },
        }
    }
    return nil
}
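In the compare above, every column in ToConds gains an explicit `action_run_job`. prefix because the new ToJoins can add an INNER JOIN on repository; once two tables appear in the query, bare names such as repo_id become ambiguous. A rough standalone sketch of that idea using the xorm builder package the file already imports (table name and values are illustrative):

package main

import (
    "fmt"

    "xorm.io/builder"
)

func main() {
    // Both action_run_job and repository have a repo_id column, so once a JOIN
    // is involved the condition must name the table explicitly.
    cond := builder.NewCond().
        And(builder.Eq{"`action_run_job`.repo_id": 5}).
        And(builder.In("`action_run_job`.status", 1, 2))

    sql, args, err := builder.ToSQL(cond)
    fmt.Println(sql, args, err)
    // e.g. `action_run_job`.repo_id=? AND `action_run_job`.status IN (?,?) [5 1 2] <nil>
}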
|
||||
|
|
|
|||
|
|
@ -72,39 +72,50 @@ type FindRunOptions struct {
|
|||
TriggerEvent webhook_module.HookEventType
|
||||
Approved bool // not util.OptionalBool, it works only when it's true
|
||||
Status []Status
|
||||
CommitSHA string
|
||||
}
|
||||
|
||||
func (opts FindRunOptions) ToConds() builder.Cond {
|
||||
cond := builder.NewCond()
|
||||
if opts.RepoID > 0 {
|
||||
cond = cond.And(builder.Eq{"repo_id": opts.RepoID})
|
||||
}
|
||||
if opts.OwnerID > 0 {
|
||||
cond = cond.And(builder.Eq{"owner_id": opts.OwnerID})
|
||||
cond = cond.And(builder.Eq{"`action_run`.repo_id": opts.RepoID})
|
||||
}
|
||||
if opts.WorkflowID != "" {
|
||||
cond = cond.And(builder.Eq{"workflow_id": opts.WorkflowID})
|
||||
cond = cond.And(builder.Eq{"`action_run`.workflow_id": opts.WorkflowID})
|
||||
}
|
||||
if opts.TriggerUserID > 0 {
|
||||
cond = cond.And(builder.Eq{"trigger_user_id": opts.TriggerUserID})
|
||||
cond = cond.And(builder.Eq{"`action_run`.trigger_user_id": opts.TriggerUserID})
|
||||
}
|
||||
if opts.Approved {
|
||||
cond = cond.And(builder.Gt{"approved_by": 0})
|
||||
cond = cond.And(builder.Gt{"`action_run`.approved_by": 0})
|
||||
}
|
||||
if len(opts.Status) > 0 {
|
||||
cond = cond.And(builder.In("status", opts.Status))
|
||||
cond = cond.And(builder.In("`action_run`.status", opts.Status))
|
||||
}
|
||||
if opts.Ref != "" {
|
||||
cond = cond.And(builder.Eq{"ref": opts.Ref})
|
||||
cond = cond.And(builder.Eq{"`action_run`.ref": opts.Ref})
|
||||
}
|
||||
if opts.TriggerEvent != "" {
|
||||
cond = cond.And(builder.Eq{"trigger_event": opts.TriggerEvent})
|
||||
cond = cond.And(builder.Eq{"`action_run`.trigger_event": opts.TriggerEvent})
|
||||
}
|
||||
if opts.CommitSHA != "" {
|
||||
cond = cond.And(builder.Eq{"`action_run`.commit_sha": opts.CommitSHA})
|
||||
}
|
||||
return cond
|
||||
}
|
||||
|
||||
func (opts FindRunOptions) ToJoins() []db.JoinFunc {
|
||||
if opts.OwnerID > 0 {
|
||||
return []db.JoinFunc{func(sess db.Engine) error {
|
||||
sess.Join("INNER", "repository", "repository.id = repo_id AND repository.owner_id = ?", opts.OwnerID)
|
||||
return nil
|
||||
}}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (opts FindRunOptions) ToOrders() string {
|
||||
return "`id` DESC"
|
||||
return "`action_run`.`id` DESC"
|
||||
}
|
||||
|
||||
type StatusInfo struct {
|
||||
|
|
|
|||
|
|
@@ -4,6 +4,8 @@
package actions

import (
    "slices"

    "code.gitea.io/gitea/modules/translation"

    runnerv1 "code.gitea.io/actions-proto-go/runner/v1"

@@ -88,12 +90,7 @@ func (s Status) IsBlocked() bool {

// In returns whether s is one of the given statuses
func (s Status) In(statuses ...Status) bool {
    for _, v := range statuses {
        if s == v {
            return true
        }
    }
    return false
    return slices.Contains(statuses, s)
}

func (s Status) AsResult() runnerv1.Result {
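Several hunks in this set replace hand-rolled membership loops with slices.Contains from the standard library (Go 1.21+), as Status.In does above. A small standalone example of the idiom (the Status values are made up for illustration):

package main

import (
    "fmt"
    "slices"
)

type Status int

const (
    StatusSuccess Status = iota
    StatusFailure
    StatusRunning
)

// in reports whether s is one of the given statuses, mirroring the
// slices.Contains form the diff switches to.
func in(s Status, statuses ...Status) bool {
    return slices.Contains(statuses, s)
}

func main() {
    fmt.Println(in(StatusRunning, StatusSuccess, StatusRunning)) // true
    fmt.Println(in(StatusFailure, StatusSuccess, StatusRunning)) // false
}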
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import (
|
|||
"fmt"
|
||||
"net/url"
|
||||
"path"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
|
@ -125,12 +126,7 @@ func (at ActionType) String() string {
|
|||
}
|
||||
|
||||
func (at ActionType) InActions(actions ...string) bool {
|
||||
for _, action := range actions {
|
||||
if action == at.String() {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
return slices.Contains(actions, at.String())
|
||||
}
|
||||
|
||||
// Action represents user operation type and other information to
|
||||
|
|
@ -191,7 +187,7 @@ func (a *Action) LoadActUser(ctx context.Context) {
|
|||
return
|
||||
}
|
||||
var err error
|
||||
a.ActUser, err = user_model.GetUserByID(ctx, a.ActUserID)
|
||||
a.ActUser, err = user_model.GetPossibleUserByID(ctx, a.ActUserID)
|
||||
if err == nil {
|
||||
return
|
||||
} else if user_model.IsErrUserNotExist(err) {
|
||||
|
|
|
|||
|
|
@ -208,10 +208,7 @@ func (nl NotificationList) LoadRepos(ctx context.Context) (repo_model.Repository
|
|||
repos := make(map[int64]*repo_model.Repository, len(repoIDs))
|
||||
left := len(repoIDs)
|
||||
for left > 0 {
|
||||
limit := db.DefaultMaxInSize
|
||||
if left < limit {
|
||||
limit = left
|
||||
}
|
||||
limit := min(left, db.DefaultMaxInSize)
|
||||
rows, err := db.GetEngine(ctx).
|
||||
In("id", repoIDs[:limit]).
|
||||
Rows(new(repo_model.Repository))
|
||||
|
|
@ -282,10 +279,7 @@ func (nl NotificationList) LoadIssues(ctx context.Context) ([]int, error) {
|
|||
issues := make(map[int64]*issues_model.Issue, len(issueIDs))
|
||||
left := len(issueIDs)
|
||||
for left > 0 {
|
||||
limit := db.DefaultMaxInSize
|
||||
if left < limit {
|
||||
limit = left
|
||||
}
|
||||
limit := min(left, db.DefaultMaxInSize)
|
||||
rows, err := db.GetEngine(ctx).
|
||||
In("id", issueIDs[:limit]).
|
||||
Rows(new(issues_model.Issue))
|
||||
|
|
@ -377,10 +371,7 @@ func (nl NotificationList) LoadUsers(ctx context.Context) ([]int, error) {
|
|||
users := make(map[int64]*user_model.User, len(userIDs))
|
||||
left := len(userIDs)
|
||||
for left > 0 {
|
||||
limit := db.DefaultMaxInSize
|
||||
if left < limit {
|
||||
limit = left
|
||||
}
|
||||
limit := min(left, db.DefaultMaxInSize)
|
||||
rows, err := db.GetEngine(ctx).
|
||||
In("id", userIDs[:limit]).
|
||||
Rows(new(user_model.User))
|
||||
|
|
@ -428,10 +419,7 @@ func (nl NotificationList) LoadComments(ctx context.Context) ([]int, error) {
|
|||
comments := make(map[int64]*issues_model.Comment, len(commentIDs))
|
||||
left := len(commentIDs)
|
||||
for left > 0 {
|
||||
limit := db.DefaultMaxInSize
|
||||
if left < limit {
|
||||
limit = left
|
||||
}
|
||||
limit := min(left, db.DefaultMaxInSize)
|
||||
rows, err := db.GetEngine(ctx).
|
||||
In("id", commentIDs[:limit]).
|
||||
Rows(new(issues_model.Comment))
|
||||
|
|
|
|||
|
|
@@ -139,10 +139,7 @@ func GetActivityStatsTopAuthors(ctx context.Context, repo *repo_model.Repository
        return v[i].Commits > v[j].Commits
    })

    cnt := count
    if cnt > len(v) {
        cnt = len(v)
    }
    cnt := min(count, len(v))

    return v[:cnt], nil
}
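This hunk, like the chunked loaders elsewhere in the set, replaces a manual clamp with the built-in min function (Go 1.21+). A standalone sketch of the batching pattern that those loaders use (the batch size and IDs are illustrative, not Gitea's values):

package main

import "fmt"

const defaultMaxInSize = 3 // stand-in for db.DefaultMaxInSize

func main() {
    ids := []int64{1, 2, 3, 4, 5, 6, 7}
    left := len(ids)
    for left > 0 {
        // min replaces the old "limit := defaultMaxInSize; if left < limit { limit = left }" dance.
        limit := min(left, defaultMaxInSize)
        fmt.Println("batch:", ids[:limit])
        ids = ids[limit:]
        left -= limit
    }
}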
|
||||
|
|
|
|||
|
|
@ -213,12 +213,7 @@ func GetRequiredScopes(level AccessTokenScopeLevel, scopeCategories ...AccessTok
|
|||
|
||||
// ContainsCategory checks if a list of categories contains a specific category
|
||||
func ContainsCategory(categories []AccessTokenScopeCategory, category AccessTokenScopeCategory) bool {
|
||||
for _, c := range categories {
|
||||
if c == category {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
return slices.Contains(categories, category)
|
||||
}
|
||||
|
||||
// GetScopeLevelFromAccessMode converts permission access mode to scope level
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ import (
|
|||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/models/db"
|
||||
|
|
@ -511,12 +512,7 @@ func (grant *OAuth2Grant) IncreaseCounter(ctx context.Context) error {
|
|||
|
||||
// ScopeContains returns true if the grant scope contains the specified scope
|
||||
func (grant *OAuth2Grant) ScopeContains(scope string) bool {
|
||||
for _, currentScope := range strings.Split(grant.Scope, " ") {
|
||||
if scope == currentScope {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
return slices.Contains(strings.Split(grant.Scope, " "), scope)
|
||||
}
|
||||
|
||||
// SetNonce updates the current nonce value of a grant
|
||||
|
|
|
|||
|
|
@@ -67,7 +67,7 @@ func contextSafetyCheck(e Engine) {
    _ = e.SQL("SELECT 1").Iterate(&m{}, func(int, any) error {
        callers := make([]uintptr, 32)
        callerNum := runtime.Callers(1, callers)
        for i := 0; i < callerNum; i++ {
        for i := range callerNum {
            if funcName := runtime.FuncForPC(callers[i]).Name(); funcName == "xorm.io/xorm.(*Session).Iterate" {
                contextSafetyDeniedFuncPCs = append(contextSafetyDeniedFuncPCs, callers[i])
            }
@@ -82,7 +82,7 @@ func contextSafetyCheck(e Engine) {
    // it should be very fast: xxxx ns/op
    callers := make([]uintptr, 32)
    callerNum := runtime.Callers(3, callers) // skip 3: runtime.Callers, contextSafetyCheck, GetEngine
    for i := 0; i < callerNum; i++ {
    for i := range callerNum {
        if slices.Contains(contextSafetyDeniedFuncPCs, callers[i]) {
            panic(errors.New("using database context in an iterator would cause corrupted results"))
        }
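The callerNum loops above use Go 1.22's ability to range directly over an integer. A minimal sketch of that form (the value 4 simply stands in for whatever runtime.Callers returns):

package main

import "fmt"

func main() {
    callerNum := 4 // e.g. the number of frames reported by runtime.Callers
    // Equivalent to: for i := 0; i < callerNum; i++
    for i := range callerNum {
        fmt.Println("frame", i)
    }
}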
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ package db
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
|
||||
|
|
@ -80,10 +81,8 @@ func IsUsableName(reservedNames, reservedPatterns []string, name string) error {
|
|||
return util.NewInvalidArgumentErrorf("name is empty")
|
||||
}
|
||||
|
||||
for i := range reservedNames {
|
||||
if name == reservedNames[i] {
|
||||
return ErrNameReserved{name}
|
||||
}
|
||||
if slices.Contains(reservedNames, name) {
|
||||
return ErrNameReserved{name}
|
||||
}
|
||||
|
||||
for _, pat := range reservedPatterns {
|
||||
|
|
|
|||
|
|
@ -46,10 +46,7 @@ func (f *file) readAt(fileMeta *dbfsMeta, offset int64, p []byte) (n int, err er
|
|||
blobPos := int(offset % f.blockSize)
|
||||
blobOffset := offset - int64(blobPos)
|
||||
blobRemaining := int(f.blockSize) - blobPos
|
||||
needRead := len(p)
|
||||
if needRead > blobRemaining {
|
||||
needRead = blobRemaining
|
||||
}
|
||||
needRead := min(len(p), blobRemaining)
|
||||
if blobOffset+int64(blobPos)+int64(needRead) > fileMeta.FileSize {
|
||||
needRead = int(fileMeta.FileSize - blobOffset - int64(blobPos))
|
||||
}
|
||||
|
|
@ -66,14 +63,8 @@ func (f *file) readAt(fileMeta *dbfsMeta, offset int64, p []byte) (n int, err er
|
|||
blobData = nil
|
||||
}
|
||||
|
||||
canCopy := len(blobData) - blobPos
|
||||
if canCopy <= 0 {
|
||||
canCopy = 0
|
||||
}
|
||||
realRead := needRead
|
||||
if realRead > canCopy {
|
||||
realRead = canCopy
|
||||
}
|
||||
canCopy := max(len(blobData)-blobPos, 0)
|
||||
realRead := min(needRead, canCopy)
|
||||
if realRead > 0 {
|
||||
copy(p[:realRead], fileData.BlobData[blobPos:blobPos+realRead])
|
||||
}
|
||||
|
|
@ -113,10 +104,7 @@ func (f *file) Write(p []byte) (n int, err error) {
|
|||
blobPos := int(f.offset % f.blockSize)
|
||||
blobOffset := f.offset - int64(blobPos)
|
||||
blobRemaining := int(f.blockSize) - blobPos
|
||||
needWrite := len(p)
|
||||
if needWrite > blobRemaining {
|
||||
needWrite = blobRemaining
|
||||
}
|
||||
needWrite := min(len(p), blobRemaining)
|
||||
buf := make([]byte, f.blockSize)
|
||||
readBytes, err := f.readAt(fileMeta, blobOffset, buf)
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@
|
|||
ref: "refs/heads/master"
|
||||
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
|
||||
event: "push"
|
||||
trigger_event: "push"
|
||||
is_fork_pull_request: 0
|
||||
status: 1
|
||||
started: 1683636528
|
||||
|
|
@ -28,6 +29,7 @@
|
|||
ref: "refs/heads/master"
|
||||
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
|
||||
event: "push"
|
||||
trigger_event: "push"
|
||||
is_fork_pull_request: 0
|
||||
status: 1
|
||||
started: 1683636528
|
||||
|
|
@ -47,6 +49,7 @@
|
|||
ref: "refs/heads/master"
|
||||
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
|
||||
event: "push"
|
||||
trigger_event: "push"
|
||||
is_fork_pull_request: 0
|
||||
status: 6 # running
|
||||
started: 1683636528
|
||||
|
|
@ -66,6 +69,47 @@
|
|||
ref: "refs/heads/test"
|
||||
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
|
||||
event: "push"
|
||||
trigger_event: "push"
|
||||
is_fork_pull_request: 0
|
||||
status: 1
|
||||
started: 1683636528
|
||||
stopped: 1683636626
|
||||
created: 1683636108
|
||||
updated: 1683636626
|
||||
need_approval: 0
|
||||
approved_by: 0
|
||||
-
|
||||
id: 802
|
||||
title: "workflow run list"
|
||||
repo_id: 5
|
||||
owner_id: 3
|
||||
workflow_id: "test.yaml"
|
||||
index: 191
|
||||
trigger_user_id: 1
|
||||
ref: "refs/heads/test"
|
||||
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
|
||||
event: "push"
|
||||
trigger_event: "push"
|
||||
is_fork_pull_request: 0
|
||||
status: 1
|
||||
started: 1683636528
|
||||
stopped: 1683636626
|
||||
created: 1683636108
|
||||
updated: 1683636626
|
||||
need_approval: 0
|
||||
approved_by: 0
|
||||
-
|
||||
id: 803
|
||||
title: "workflow run list for user"
|
||||
repo_id: 2
|
||||
owner_id: 0
|
||||
workflow_id: "test.yaml"
|
||||
index: 192
|
||||
trigger_user_id: 1
|
||||
ref: "refs/heads/test"
|
||||
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
|
||||
event: "push"
|
||||
trigger_event: "push"
|
||||
is_fork_pull_request: 0
|
||||
status: 1
|
||||
started: 1683636528
|
||||
|
|
@ -86,6 +130,7 @@
|
|||
ref: "refs/heads/test"
|
||||
commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0"
|
||||
event: "push"
|
||||
trigger_event: "push"
|
||||
is_fork_pull_request: 0
|
||||
status: 2
|
||||
started: 1683636528
|
||||
|
|
|
|||
|
|
@ -99,3 +99,33 @@
|
|||
status: 2
|
||||
started: 1683636528
|
||||
stopped: 1683636626
|
||||
-
|
||||
id: 203
|
||||
run_id: 802
|
||||
repo_id: 5
|
||||
owner_id: 0
|
||||
commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
|
||||
is_fork_pull_request: 0
|
||||
name: job2
|
||||
attempt: 1
|
||||
job_id: job2
|
||||
needs: '["job1"]'
|
||||
task_id: 51
|
||||
status: 5
|
||||
started: 1683636528
|
||||
stopped: 1683636626
|
||||
-
|
||||
id: 204
|
||||
run_id: 803
|
||||
repo_id: 2
|
||||
owner_id: 0
|
||||
commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
|
||||
is_fork_pull_request: 0
|
||||
name: job2
|
||||
attempt: 1
|
||||
job_id: job2
|
||||
needs: '["job1"]'
|
||||
task_id: 51
|
||||
status: 5
|
||||
started: 1683636528
|
||||
stopped: 1683636626
|
||||
|
|
|
|||
|
|
@ -201,3 +201,15 @@
|
|||
is_deleted: false
|
||||
deleted_by_id: 0
|
||||
deleted_unix: 0
|
||||
|
||||
-
|
||||
id: 25
|
||||
repo_id: 54
|
||||
name: 'master'
|
||||
commit_id: '73cf03db6ece34e12bf91e8853dc58f678f2f82d'
|
||||
commit_message: 'Initial commit'
|
||||
commit_time: 1671663402
|
||||
pusher_id: 2
|
||||
is_deleted: false
|
||||
deleted_by_id: 0
|
||||
deleted_unix: 0
|
||||
|
|
|
|||
|
|
@ -112,7 +112,6 @@ type LFSMetaObject struct {
|
|||
ID int64 `xorm:"pk autoincr"`
|
||||
lfs.Pointer `xorm:"extends"`
|
||||
RepositoryID int64 `xorm:"UNIQUE(s) INDEX NOT NULL"`
|
||||
Existing bool `xorm:"-"`
|
||||
CreatedUnix timeutil.TimeStamp `xorm:"created"`
|
||||
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
|
||||
}
|
||||
|
|
@ -146,7 +145,6 @@ func NewLFSMetaObject(ctx context.Context, repoID int64, p lfs.Pointer) (*LFSMet
|
|||
if err != nil {
|
||||
return nil, err
|
||||
} else if exist {
|
||||
m.Existing = true
|
||||
return m, committer.Commit()
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@@ -246,7 +246,7 @@ func (protectBranch *ProtectedBranch) GetUnprotectedFilePatterns() []glob.Glob {

func getFilePatterns(filePatterns string) []glob.Glob {
    extarr := make([]glob.Glob, 0, 10)
    for _, expr := range strings.Split(strings.ToLower(filePatterns), ";") {
    for expr := range strings.SplitSeq(strings.ToLower(filePatterns), ";") {
        expr = strings.TrimSpace(expr)
        if expr != "" {
            if g, err := glob.Compile(expr, '.', '/'); err != nil {
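strings.SplitSeq (Go 1.24) yields the split substrings as an iterator instead of allocating a slice, which is why the loop above can range over it directly. A small sketch with an illustrative pattern string:

package main

import (
    "fmt"
    "strings"
)

func main() {
    patterns := "*.md; docs/**;  ; LICENSE"
    // SplitSeq returns an iterator over the pieces, avoiding the intermediate
    // slice that strings.Split would allocate.
    for expr := range strings.SplitSeq(strings.ToLower(patterns), ";") {
        expr = strings.TrimSpace(expr)
        if expr != "" {
            fmt.Println("pattern:", expr)
        }
    }
}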
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import (
|
|||
"context"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"slices"
|
||||
"strconv"
|
||||
"unicode/utf8"
|
||||
|
||||
|
|
@ -196,12 +197,7 @@ func (t CommentType) HasMailReplySupport() bool {
|
|||
}
|
||||
|
||||
func (t CommentType) CountedAsConversation() bool {
|
||||
for _, ct := range ConversationCountedCommentType() {
|
||||
if t == ct {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
return slices.Contains(ConversationCountedCommentType(), t)
|
||||
}
|
||||
|
||||
// ConversationCountedCommentType returns the comment types that are counted as a conversation
|
||||
|
|
@ -614,7 +610,7 @@ func UpdateCommentAttachments(ctx context.Context, c *Comment, uuids []string) e
|
|||
if err != nil {
|
||||
return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", uuids, err)
|
||||
}
|
||||
for i := 0; i < len(attachments); i++ {
|
||||
for i := range attachments {
|
||||
attachments[i].IssueID = c.IssueID
|
||||
attachments[i].CommentID = c.ID
|
||||
if err := repo_model.UpdateAttachment(ctx, attachments[i]); err != nil {
|
||||
|
|
|
|||
|
|
@ -57,10 +57,7 @@ func (comments CommentList) loadLabels(ctx context.Context) error {
|
|||
commentLabels := make(map[int64]*Label, len(labelIDs))
|
||||
left := len(labelIDs)
|
||||
for left > 0 {
|
||||
limit := db.DefaultMaxInSize
|
||||
if left < limit {
|
||||
limit = left
|
||||
}
|
||||
limit := min(left, db.DefaultMaxInSize)
|
||||
rows, err := db.GetEngine(ctx).
|
||||
In("id", labelIDs[:limit]).
|
||||
Rows(new(Label))
|
||||
|
|
@ -107,10 +104,7 @@ func (comments CommentList) loadMilestones(ctx context.Context) error {
|
|||
milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs))
|
||||
left := len(milestoneIDs)
|
||||
for left > 0 {
|
||||
limit := db.DefaultMaxInSize
|
||||
if left < limit {
|
||||
limit = left
|
||||
}
|
||||
limit := min(left, db.DefaultMaxInSize)
|
||||
err := db.GetEngine(ctx).
|
||||
In("id", milestoneIDs[:limit]).
|
||||
Find(&milestoneMaps)
|
||||
|
|
@ -146,10 +140,7 @@ func (comments CommentList) loadOldMilestones(ctx context.Context) error {
|
|||
milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs))
|
||||
left := len(milestoneIDs)
|
||||
for left > 0 {
|
||||
limit := db.DefaultMaxInSize
|
||||
if left < limit {
|
||||
limit = left
|
||||
}
|
||||
limit := min(left, db.DefaultMaxInSize)
|
||||
err := db.GetEngine(ctx).
|
||||
In("id", milestoneIDs[:limit]).
|
||||
Find(&milestoneMaps)
|
||||
|
|
@ -184,10 +175,7 @@ func (comments CommentList) loadAssignees(ctx context.Context) error {
|
|||
assignees := make(map[int64]*user_model.User, len(assigneeIDs))
|
||||
left := len(assigneeIDs)
|
||||
for left > 0 {
|
||||
limit := db.DefaultMaxInSize
|
||||
if left < limit {
|
||||
limit = left
|
||||
}
|
||||
limit := min(left, db.DefaultMaxInSize)
|
||||
rows, err := db.GetEngine(ctx).
|
||||
In("id", assigneeIDs[:limit]).
|
||||
Rows(new(user_model.User))
|
||||
|
|
@ -256,10 +244,7 @@ func (comments CommentList) LoadIssues(ctx context.Context) error {
|
|||
issues := make(map[int64]*Issue, len(issueIDs))
|
||||
left := len(issueIDs)
|
||||
for left > 0 {
|
||||
limit := db.DefaultMaxInSize
|
||||
if left < limit {
|
||||
limit = left
|
||||
}
|
||||
limit := min(left, db.DefaultMaxInSize)
|
||||
rows, err := db.GetEngine(ctx).
|
||||
In("id", issueIDs[:limit]).
|
||||
Rows(new(Issue))
|
||||
|
|
@ -313,10 +298,7 @@ func (comments CommentList) loadDependentIssues(ctx context.Context) error {
|
|||
issues := make(map[int64]*Issue, len(issueIDs))
|
||||
left := len(issueIDs)
|
||||
for left > 0 {
|
||||
limit := db.DefaultMaxInSize
|
||||
if left < limit {
|
||||
limit = left
|
||||
}
|
||||
limit := min(left, db.DefaultMaxInSize)
|
||||
rows, err := e.
|
||||
In("id", issueIDs[:limit]).
|
||||
Rows(new(Issue))
|
||||
|
|
@ -392,10 +374,7 @@ func (comments CommentList) LoadAttachments(ctx context.Context) (err error) {
|
|||
commentsIDs := comments.getAttachmentCommentIDs()
|
||||
left := len(commentsIDs)
|
||||
for left > 0 {
|
||||
limit := db.DefaultMaxInSize
|
||||
if left < limit {
|
||||
limit = left
|
||||
}
|
||||
limit := min(left, db.DefaultMaxInSize)
|
||||
rows, err := db.GetEngine(ctx).
|
||||
In("comment_id", commentsIDs[:limit]).
|
||||
			Rows(new(repo_model.Attachment))

@ -42,10 +42,7 @@ func (issues IssueList) LoadRepositories(ctx context.Context) (repo_model.Reposi
	repoMaps := make(map[int64]*repo_model.Repository, len(repoIDs))
	left := len(repoIDs)
	for left > 0 {
		limit := db.DefaultMaxInSize
		if left < limit {
			limit = left
		}
		limit := min(left, db.DefaultMaxInSize)
		err := db.GetEngine(ctx).
			In("id", repoIDs[:limit]).
			Find(&repoMaps)

@ -116,10 +113,7 @@ func (issues IssueList) LoadLabels(ctx context.Context) error {
	issueIDs := issues.getIssueIDs()
	left := len(issueIDs)
	for left > 0 {
		limit := db.DefaultMaxInSize
		if left < limit {
			limit = left
		}
		limit := min(left, db.DefaultMaxInSize)
		rows, err := db.GetEngine(ctx).Table("label").
			Join("LEFT", "issue_label", "issue_label.label_id = label.id").
			In("issue_label.issue_id", issueIDs[:limit]).

@ -171,10 +165,7 @@ func (issues IssueList) LoadMilestones(ctx context.Context) error {
	milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs))
	left := len(milestoneIDs)
	for left > 0 {
		limit := db.DefaultMaxInSize
		if left < limit {
			limit = left
		}
		limit := min(left, db.DefaultMaxInSize)
		err := db.GetEngine(ctx).
			In("id", milestoneIDs[:limit]).
			Find(&milestoneMaps)

@ -203,10 +194,7 @@ func (issues IssueList) LoadProjects(ctx context.Context) error {
	}

	for left > 0 {
		limit := db.DefaultMaxInSize
		if left < limit {
			limit = left
		}
		limit := min(left, db.DefaultMaxInSize)

		projects := make([]*projectWithIssueID, 0, limit)
		err := db.GetEngine(ctx).

@ -245,10 +233,7 @@ func (issues IssueList) LoadAssignees(ctx context.Context) error {
	issueIDs := issues.getIssueIDs()
	left := len(issueIDs)
	for left > 0 {
		limit := db.DefaultMaxInSize
		if left < limit {
			limit = left
		}
		limit := min(left, db.DefaultMaxInSize)
		rows, err := db.GetEngine(ctx).Table("issue_assignees").
			Join("INNER", "`user`", "`user`.id = `issue_assignees`.assignee_id").
			In("`issue_assignees`.issue_id", issueIDs[:limit]).OrderBy(user_model.GetOrderByName()).

@ -306,10 +291,7 @@ func (issues IssueList) LoadPullRequests(ctx context.Context) error {
	pullRequestMaps := make(map[int64]*PullRequest, len(issuesIDs))
	left := len(issuesIDs)
	for left > 0 {
		limit := db.DefaultMaxInSize
		if left < limit {
			limit = left
		}
		limit := min(left, db.DefaultMaxInSize)
		rows, err := db.GetEngine(ctx).
			In("issue_id", issuesIDs[:limit]).
			Rows(new(PullRequest))

@ -354,10 +336,7 @@ func (issues IssueList) LoadAttachments(ctx context.Context) (err error) {
	issuesIDs := issues.getIssueIDs()
	left := len(issuesIDs)
	for left > 0 {
		limit := db.DefaultMaxInSize
		if left < limit {
			limit = left
		}
		limit := min(left, db.DefaultMaxInSize)
		rows, err := db.GetEngine(ctx).
			In("issue_id", issuesIDs[:limit]).
			Rows(new(repo_model.Attachment))

@ -399,10 +378,7 @@ func (issues IssueList) loadComments(ctx context.Context, cond builder.Cond) (er
	issuesIDs := issues.getIssueIDs()
	left := len(issuesIDs)
	for left > 0 {
		limit := db.DefaultMaxInSize
		if left < limit {
			limit = left
		}
		limit := min(left, db.DefaultMaxInSize)
		rows, err := db.GetEngine(ctx).Table("comment").
			Join("INNER", "issue", "issue.id = comment.issue_id").
			In("issue.id", issuesIDs[:limit]).

@ -466,10 +442,7 @@ func (issues IssueList) loadTotalTrackedTimes(ctx context.Context) (err error) {

	left := len(ids)
	for left > 0 {
		limit := db.DefaultMaxInSize
		if left < limit {
			limit = left
		}
		limit := min(left, db.DefaultMaxInSize)

		// select issue_id, sum(time) from tracked_time where issue_id in (<issue ids in current page>) group by issue_id
		rows, err := db.GetEngine(ctx).Table("tracked_time").
@ -73,8 +73,8 @@ func (o *IssuesOptions) Copy(edit ...func(options *IssuesOptions)) *IssuesOption
// sortType string
func applySorts(sess *xorm.Session, sortType string, priorityRepoID int64) {
	// Since this sortType is dynamically created, it has to be treated specially.
	if strings.HasPrefix(sortType, ScopeSortPrefix) {
		scope := strings.TrimPrefix(sortType, ScopeSortPrefix)
	if after, ok := strings.CutPrefix(sortType, ScopeSortPrefix); ok {
		scope := after
		sess.Join("LEFT", "issue_label", "issue.id = issue_label.issue_id")
		// "exclusive_order=0" means "no order is set", so exclude it from the JOIN criteria and then "LEFT JOIN" result is also null
		sess.Join("LEFT", "label", "label.id = issue_label.label_id AND label.exclusive_order <> 0 AND label.name LIKE ?", scope+"/%")

@ -94,10 +94,7 @@ func GetIssueStats(ctx context.Context, opts *IssuesOptions) (*IssueStats, error
	// ids in a temporary table and join from them.
	accum := &IssueStats{}
	for i := 0; i < len(opts.IssueIDs); {
		chunk := i + MaxQueryParameters
		if chunk > len(opts.IssueIDs) {
			chunk = len(opts.IssueIDs)
		}
		chunk := min(i+MaxQueryParameters, len(opts.IssueIDs))
		stats, err := getIssueStatsChunk(ctx, opts, opts.IssueIDs[i:chunk])
		if err != nil {
			return nil, err
@ -5,6 +5,7 @@ package issues_test

import (
	"fmt"
	"slices"
	"sort"
	"sync"
	"testing"

@ -270,7 +271,7 @@ func TestIssue_ResolveMentions(t *testing.T) {
		for i, user := range resolved {
			ids[i] = user.ID
		}
		sort.Slice(ids, func(i, j int) bool { return ids[i] < ids[j] })
		slices.Sort(ids)
		assert.Equal(t, expected, ids)
	}

@ -292,7 +293,7 @@ func TestResourceIndex(t *testing.T) {
	assert.NoError(t, unittest.PrepareTestDatabase())

	var wg sync.WaitGroup
	for i := 0; i < 100; i++ {
	for i := range 100 {
		wg.Add(1)
		go func(i int) {
			testInsertIssue(t, fmt.Sprintf("issue %d", i+1), "my issue", 0)

@ -314,7 +315,7 @@ func TestCorrectIssueStats(t *testing.T) {
	issueAmount := issues_model.MaxQueryParameters + 10

	var wg sync.WaitGroup
	for i := 0; i < issueAmount; i++ {
	for i := range issueAmount {
		wg.Add(1)
		go func(i int) {
			testInsertIssue(t, fmt.Sprintf("Issue %d", i+1), "Bugs are nasty", 0)
@ -304,7 +304,7 @@ func UpdateIssueAttachments(ctx context.Context, issueID int64, uuids []string)
	if err != nil {
		return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", uuids, err)
	}
	for i := 0; i < len(attachments); i++ {
	for i := range attachments {
		attachments[i].IssueID = issueID
		if err := repo_model.UpdateAttachment(ctx, attachments[i]); err != nil {
			return fmt.Errorf("update attachment [id: %d]: %w", attachments[i].ID, err)
@ -649,12 +649,6 @@ func GetAllUnmergedAgitPullRequestByPoster(ctx context.Context, uid int64) ([]*P
	return pulls, err
}

// Update updates all fields of pull request.
func (pr *PullRequest) Update(ctx context.Context) error {
	_, err := db.GetEngine(ctx).ID(pr.ID).AllCols().Update(pr)
	return err
}

// UpdateCols updates specific fields of pull request.
func (pr *PullRequest) UpdateCols(ctx context.Context, cols ...string) error {
	_, err := db.GetEngine(ctx).ID(pr.ID).Cols(cols...).Update(pr)
@ -248,19 +248,6 @@ func TestGetPullRequestByIssueID(t *testing.T) {
	assert.True(t, issues_model.IsErrPullRequestNotExist(err))
}

func TestPullRequest_Update(t *testing.T) {
	assert.NoError(t, unittest.PrepareTestDatabase())
	pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
	pr.BaseBranch = "baseBranch"
	pr.HeadBranch = "headBranch"
	pr.Update(db.DefaultContext)

	pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: pr.ID})
	assert.Equal(t, "baseBranch", pr.BaseBranch)
	assert.Equal(t, "headBranch", pr.HeadBranch)
	unittest.CheckConsistencyFor(t, pr)
}

func TestPullRequest_UpdateCols(t *testing.T) {
	assert.NoError(t, unittest.PrepareTestDatabase())
	pr := &issues_model.PullRequest{
@ -22,7 +22,7 @@ type ReviewList []*Review
// LoadReviewers loads reviewers
func (reviews ReviewList) LoadReviewers(ctx context.Context) error {
	reviewerIDs := make([]int64, len(reviews))
	for i := 0; i < len(reviews); i++ {
	for i := range reviews {
		reviewerIDs[i] = reviews[i].ReviewerID
	}
	reviewers, err := user_model.GetPossibleUserByIDs(ctx, reviewerIDs)
@ -350,10 +350,7 @@ func GetIssueTotalTrackedTime(ctx context.Context, opts *IssuesOptions, isClosed
	// we get the statistics in smaller chunks and get accumulates
	var accum int64
	for i := 0; i < len(opts.IssueIDs); {
		chunk := i + MaxQueryParameters
		if chunk > len(opts.IssueIDs) {
			chunk = len(opts.IssueIDs)
		}
		chunk := min(i+MaxQueryParameters, len(opts.IssueIDs))
		time, err := getIssueTotalTrackedTimeChunk(ctx, opts, isClosed, opts.IssueIDs[i:chunk])
		if err != nil {
			return 0, err
@ -518,7 +518,7 @@ func ModifyColumn(x *xorm.Engine, tableName string, col *schemas.Column) error {

func removeAllWithRetry(dir string) error {
	var err error
	for i := 0; i < 20; i++ {
	for range 20 {
		err = os.RemoveAll(dir)
		if err == nil {
			break
@ -5,6 +5,7 @@ package v1_11 //nolint

import (
	"fmt"
	"slices"

	"xorm.io/xorm"
)

@ -344,10 +345,8 @@ func AddBranchProtectionCanPushAndEnableWhitelist(x *xorm.Engine) error {
		}
		return AccessModeWrite <= perm.UnitsMode[UnitTypeCode], nil
	}
	for _, id := range protectedBranch.ApprovalsWhitelistUserIDs {
		if id == reviewer.ID {
			return true, nil
		}
	if slices.Contains(protectedBranch.ApprovalsWhitelistUserIDs, reviewer.ID) {
		return true, nil
	}

	// isUserInTeams
@ -146,7 +146,7 @@ func copyOldAvatarToNewLocation(userID int64, oldAvatar string) (string, error)
		return "", fmt.Errorf("io.ReadAll: %w", err)
	}

	newAvatar := fmt.Sprintf("%x", md5.Sum([]byte(fmt.Sprintf("%d-%x", userID, md5.Sum(data)))))
	newAvatar := fmt.Sprintf("%x", md5.Sum(fmt.Appendf(nil, "%d-%x", userID, md5.Sum(data))))
	if newAvatar == oldAvatar {
		return newAvatar, nil
	}
@ -329,7 +329,7 @@ func ConvertScopedAccessTokens(x *xorm.Engine) error {
	for _, token := range tokens {
		var scopes []string
		allNewScopesMap := make(map[AccessTokenScope]bool)
		for _, oldScope := range strings.Split(token.Scope, ",") {
		for oldScope := range strings.SplitSeq(token.Scope, ",") {
			if newScopes, exists := accessTokenScopeMap[OldAccessTokenScope(oldScope)]; exists {
				for _, newScope := range newScopes {
					allNewScopesMap[newScope] = true
@ -4,7 +4,6 @@
package v1_22 //nolint

import (
	"slices"
	"testing"

	"code.gitea.io/gitea/models/migrations/base"

@ -44,7 +43,7 @@ func Test_AddUniqueIndexForProjectIssue(t *testing.T) {
	for _, index := range tables[0].Indexes {
		if index.Type == schemas.UniqueType {
			found = true
			slices.Equal(index.Cols, []string{"project_id", "issue_id"})
			assert.ElementsMatch(t, index.Cols, []string{"project_id", "issue_id"})
			break
		}
	}
@ -25,6 +25,7 @@ type BlobSearchOptions struct {
	Digest     string
	Tag        string
	IsManifest bool
	OnlyLead   bool
	Repository string
}

@ -43,7 +44,10 @@ func (opts *BlobSearchOptions) toConds() builder.Cond {
		cond = cond.And(builder.Eq{"package_version.lower_version": strings.ToLower(opts.Tag)})
	}
	if opts.IsManifest {
		cond = cond.And(builder.Eq{"package_file.lower_name": ManifestFilename})
		cond = cond.And(builder.Eq{"package_file.lower_name": container_module.ManifestFilename})
	}
	if opts.OnlyLead {
		cond = cond.And(builder.Eq{"package_file.is_lead": true})
	}
	if opts.Digest != "" {
		var propsCond builder.Cond = builder.Eq{

@ -73,11 +77,9 @@ func GetContainerBlob(ctx context.Context, opts *BlobSearchOptions) (*packages.P
	pfds, err := getContainerBlobsLimit(ctx, opts, 1)
	if err != nil {
		return nil, err
	}
	if len(pfds) != 1 {
	} else if len(pfds) == 0 {
		return nil, ErrContainerBlobNotExist
	}

	return pfds[0], nil
}

@ -233,7 +235,7 @@ func SearchImageTags(ctx context.Context, opts *ImageTagsSearchOptions) ([]*pack
func SearchExpiredUploadedBlobs(ctx context.Context, olderThan time.Duration) ([]*packages.PackageFile, error) {
	var cond builder.Cond = builder.Eq{
		"package_version.is_internal":   true,
		"package_version.lower_version": UploadVersion,
		"package_version.lower_version": container_module.UploadVersion,
		"package.type":                  packages.TypeContainer,
	}
	cond = cond.And(builder.Lt{"package_file.created_unix": time.Now().Add(-olderThan).Unix()})
@ -103,10 +103,10 @@ func (pd *PackageDescriptor) CalculateBlobSize() int64 {

// GetPackageDescriptor gets the package description for a version
func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDescriptor, error) {
	return getPackageDescriptor(ctx, pv, cache.NewEphemeralCache())
	return GetPackageDescriptorWithCache(ctx, pv, cache.NewEphemeralCache())
}

func getPackageDescriptor(ctx context.Context, pv *PackageVersion, c *cache.EphemeralCache) (*PackageDescriptor, error) {
func GetPackageDescriptorWithCache(ctx context.Context, pv *PackageVersion, c *cache.EphemeralCache) (*PackageDescriptor, error) {
	p, err := cache.GetWithEphemeralCache(ctx, c, "package", pv.PackageID, GetPackageByID)
	if err != nil {
		return nil, err

@ -270,7 +270,7 @@ func GetPackageDescriptors(ctx context.Context, pvs []*PackageVersion) ([]*Packa
func getPackageDescriptors(ctx context.Context, pvs []*PackageVersion, c *cache.EphemeralCache) ([]*PackageDescriptor, error) {
	pds := make([]*PackageDescriptor, 0, len(pvs))
	for _, pv := range pvs {
		pd, err := getPackageDescriptor(ctx, pv, c)
		pd, err := GetPackageDescriptorWithCache(ctx, pv, c)
		if err != nil {
			return nil, err
		}
@ -33,7 +33,7 @@ func SearchVersions(ctx context.Context, opts *packages_model.PackageSearchOptio
		Where(cond).
		OrderBy("package.name ASC")
	if opts.Paginator != nil {
		skip, take := opts.GetSkipTake()
		skip, take := opts.Paginator.GetSkipTake()
		inner = inner.Limit(take, skip)
	}
@ -115,6 +115,11 @@ func DeleteFileByID(ctx context.Context, fileID int64) error {
	return err
}

func UpdateFile(ctx context.Context, pf *PackageFile, cols []string) error {
	_, err := db.GetEngine(ctx).ID(pf.ID).Cols(cols...).Update(pf)
	return err
}

// PackageFileSearchOptions are options for SearchXXX methods
type PackageFileSearchOptions struct {
	OwnerID int64
@ -66,6 +66,20 @@ func UpdateProperty(ctx context.Context, pp *PackageProperty) error {
	return err
}

func InsertOrUpdateProperty(ctx context.Context, refType PropertyType, refID int64, name, value string) error {
	pp := PackageProperty{RefType: refType, RefID: refID, Name: name}
	ok, err := db.GetEngine(ctx).Get(&pp)
	if err != nil {
		return err
	}
	if ok {
		_, err = db.GetEngine(ctx).Where("ref_type=? AND ref_id=? AND name=?", refType, refID, name).Cols("value").Update(&PackageProperty{Value: value})
		return err
	}
	_, err = InsertProperty(ctx, refType, refID, name, value)
	return err
}

// DeleteAllProperties deletes all properties of a ref
func DeleteAllProperties(ctx context.Context, refType PropertyType, refID int64) error {
	_, err := db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ?", refType, refID).Delete(&PackageProperty{})

@ -78,8 +92,8 @@ func DeletePropertyByID(ctx context.Context, propertyID int64) error {
	return err
}

// DeletePropertyByName deletes properties by name
func DeletePropertyByName(ctx context.Context, refType PropertyType, refID int64, name string) error {
// DeletePropertiesByName deletes properties by name
func DeletePropertiesByName(ctx context.Context, refType PropertyType, refID int64, name string) error {
	_, err := db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ? AND name = ?", refType, refID, name).Delete(&PackageProperty{})
	return err
}
@ -14,6 +14,7 @@ import (
	"code.gitea.io/gitea/modules/util"

	"xorm.io/builder"
	"xorm.io/xorm"
)

// ErrDuplicatePackageVersion indicates a duplicated package version error

@ -187,7 +188,7 @@ type PackageSearchOptions struct {
	HasFileWithName string                // only results are found which are associated with a file with the specific name
	HasFiles        optional.Option[bool] // only results are found which have associated files
	Sort            VersionSort
	db.Paginator
	Paginator db.Paginator
}

func (opts *PackageSearchOptions) ToConds() builder.Cond {

@ -282,6 +283,18 @@ func (opts *PackageSearchOptions) configureOrderBy(e db.Engine) {
	e.Desc("package_version.id") // Sort by id for stable order with duplicates in the other field
}

func searchVersionsBySession(sess *xorm.Session, opts *PackageSearchOptions) ([]*PackageVersion, int64, error) {
	opts.configureOrderBy(sess)
	pvs := make([]*PackageVersion, 0, 10)
	if opts.Paginator != nil {
		sess = db.SetSessionPagination(sess, opts.Paginator)
		count, err := sess.FindAndCount(&pvs)
		return pvs, count, err
	}
	err := sess.Find(&pvs)
	return pvs, int64(len(pvs)), err
}

// SearchVersions gets all versions of packages matching the search options
func SearchVersions(ctx context.Context, opts *PackageSearchOptions) ([]*PackageVersion, int64, error) {
	sess := db.GetEngine(ctx).

@ -289,16 +302,7 @@ func SearchVersions(ctx context.Context, opts *PackageSearchOptions) ([]*Package
		Table("package_version").
		Join("INNER", "package", "package.id = package_version.package_id").
		Where(opts.ToConds())

	opts.configureOrderBy(sess)

	if opts.Paginator != nil {
		sess = db.SetSessionPagination(sess, opts)
	}

	pvs := make([]*PackageVersion, 0, 10)
	count, err := sess.FindAndCount(&pvs)
	return pvs, count, err
	return searchVersionsBySession(sess, opts)
}

// SearchLatestVersions gets the latest version of every package matching the search options

@ -316,15 +320,7 @@ func SearchLatestVersions(ctx context.Context, opts *PackageSearchOptions) ([]*P
		Join("INNER", "package", "package.id = package_version.package_id").
		Where(builder.In("package_version.id", in))

	opts.configureOrderBy(sess)

	if opts.Paginator != nil {
		sess = db.SetSessionPagination(sess, opts)
	}

	pvs := make([]*PackageVersion, 0, 10)
	count, err := sess.FindAndCount(&pvs)
	return pvs, count, err
	return searchVersionsBySession(sess, opts)
}

// ExistVersion checks if a version matching the search options exist
@ -110,7 +110,7 @@ func Test_NewColumn(t *testing.T) {
	assert.NoError(t, err)
	assert.Len(t, columns, 3)

	for i := 0; i < maxProjectColumns-3; i++ {
	for i := range maxProjectColumns - 3 {
		err := NewColumn(db.DefaultContext, &Column{
			Title:     fmt.Sprintf("column-%d", i+4),
			ProjectID: project1.ID,
@ -6,6 +6,7 @@ package pull
import (
	"context"
	"fmt"
	"maps"

	"code.gitea.io/gitea/models/db"
	"code.gitea.io/gitea/modules/log"

@ -100,9 +101,7 @@ func mergeFiles(oldFiles, newFiles map[string]ViewedState) map[string]ViewedStat
		return oldFiles
	}

	for file, viewed := range newFiles {
		oldFiles[file] = viewed
	}
	maps.Copy(oldFiles, newFiles)
	return oldFiles
}
@ -180,7 +180,7 @@ func AddReleaseAttachments(ctx context.Context, releaseID int64, attachmentUUIDs
		}
		attachments[i].ReleaseID = releaseID
		// No assign value could be 0, so ignore AllCols().
		if _, err = db.GetEngine(ctx).ID(attachments[i].ID).Update(attachments[i]); err != nil {
		if _, err = db.GetEngine(ctx).ID(attachments[i].ID).Cols("release_id").Update(attachments[i]); err != nil {
			return fmt.Errorf("update attachment [%d]: %w", attachments[i].ID, err)
		}
	}
@ -653,7 +653,7 @@ func (repo *Repository) AllowsPulls(ctx context.Context) bool {

// CanEnableEditor returns true if repository meets the requirements of web editor.
func (repo *Repository) CanEnableEditor() bool {
	return !repo.IsMirror
	return !repo.IsMirror && !repo.IsArchived
}

// DescriptionHTML does special handles to description and return HTML string.
@ -449,7 +449,7 @@ func SearchRepositoryCondition(opts SearchRepoOptions) builder.Cond {
|
|||
if opts.Keyword != "" {
|
||||
// separate keyword
|
||||
subQueryCond := builder.NewCond()
|
||||
for _, v := range strings.Split(opts.Keyword, ",") {
|
||||
for v := range strings.SplitSeq(opts.Keyword, ",") {
|
||||
if opts.TopicOnly {
|
||||
subQueryCond = subQueryCond.Or(builder.Eq{"topic.name": strings.ToLower(v)})
|
||||
} else {
|
||||
|
|
@ -464,7 +464,7 @@ func SearchRepositoryCondition(opts SearchRepoOptions) builder.Cond {
|
|||
keywordCond := builder.In("id", subQuery)
|
||||
if !opts.TopicOnly {
|
||||
likes := builder.NewCond()
|
||||
for _, v := range strings.Split(opts.Keyword, ",") {
|
||||
for v := range strings.SplitSeq(opts.Keyword, ",") {
|
||||
likes = likes.Or(builder.Like{"lower_name", strings.ToLower(v)})
|
||||
|
||||
// If the string looks like "org/repo", match against that pattern too
|
||||
|
|
|
|||
|
|
@ -185,10 +185,8 @@ func (cfg *ActionsConfig) IsWorkflowDisabled(file string) bool {
|
|||
}
|
||||
|
||||
func (cfg *ActionsConfig) DisableWorkflow(file string) {
|
||||
for _, workflow := range cfg.DisabledWorkflows {
|
||||
if file == workflow {
|
||||
return
|
||||
}
|
||||
if slices.Contains(cfg.DisabledWorkflows, file) {
|
||||
return
|
||||
}
|
||||
|
||||
cfg.DisabledWorkflows = append(cfg.DisabledWorkflows, file)
|
||||
|
|
|
|||
|
|
@ -42,12 +42,18 @@ func UpdateRepositoryUpdatedTime(ctx context.Context, repoID int64, updateTime t
|
|||
|
||||
// UpdateRepositoryColsWithAutoTime updates repository's columns
|
||||
func UpdateRepositoryColsWithAutoTime(ctx context.Context, repo *Repository, cols ...string) error {
|
||||
if len(cols) == 0 {
|
||||
return nil
|
||||
}
|
||||
_, err := db.GetEngine(ctx).ID(repo.ID).Cols(cols...).Update(repo)
|
||||
return err
|
||||
}
|
||||
|
||||
// UpdateRepositoryColsNoAutoTime updates repository's columns and but applies time change automatically
|
||||
func UpdateRepositoryColsNoAutoTime(ctx context.Context, repo *Repository, cols ...string) error {
|
||||
if len(cols) == 0 {
|
||||
return nil
|
||||
}
|
||||
_, err := db.GetEngine(ctx).ID(repo.ID).Cols(cols...).NoAutoTime().Update(repo)
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -124,7 +124,7 @@ func DeleteUploads(ctx context.Context, uploads ...*Upload) (err error) {
|
|||
defer committer.Close()
|
||||
|
||||
ids := make([]int64, len(uploads))
|
||||
for i := 0; i < len(uploads); i++ {
|
||||
for i := range uploads {
|
||||
ids[i] = uploads[i].ID
|
||||
}
|
||||
if err = db.DeleteByIDs[Upload](ctx, ids...); err != nil {
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ package unit
|
|||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
"sync/atomic"
|
||||
|
||||
|
|
@ -204,22 +205,12 @@ func LoadUnitConfig() error {
|
|||
|
||||
// UnitGlobalDisabled checks if unit type is global disabled
|
||||
func (u Type) UnitGlobalDisabled() bool {
|
||||
for _, ud := range DisabledRepoUnitsGet() {
|
||||
if u == ud {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
return slices.Contains(DisabledRepoUnitsGet(), u)
|
||||
}
|
||||
|
||||
// CanBeDefault checks if the unit type can be a default repo unit
|
||||
func (u *Type) CanBeDefault() bool {
|
||||
for _, nadU := range NotAllowedDefaultRepoUnits {
|
||||
if *u == nadU {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
return !slices.Contains(NotAllowedDefaultRepoUnits, *u)
|
||||
}
|
||||
|
||||
// Unit is a section of one repository
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
package user_test
|
||||
|
||||
import (
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/models/db"
|
||||
|
|
@ -100,12 +101,7 @@ func TestListEmails(t *testing.T) {
|
|||
assert.Greater(t, count, int64(5))
|
||||
|
||||
contains := func(match func(s *user_model.SearchEmailResult) bool) bool {
|
||||
for _, v := range emails {
|
||||
if match(v) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
return slices.ContainsFunc(emails, match)
|
||||
}
|
||||
|
||||
assert.True(t, contains(func(s *user_model.SearchEmailResult) bool { return s.UID == 18 }))
|
||||
|
|
|
|||
|
|
@ -831,6 +831,20 @@ type CountUserFilter struct {
|
|||
IsActive optional.Option[bool]
|
||||
}
|
||||
|
||||
// HasUsers checks whether there are any users in the database, or only one user exists.
|
||||
func HasUsers(ctx context.Context) (ret struct {
|
||||
HasAnyUser, HasOnlyOneUser bool
|
||||
}, err error,
|
||||
) {
|
||||
res, err := db.GetEngine(ctx).Table(&User{}).Cols("id").Limit(2).Query()
|
||||
if err != nil {
|
||||
return ret, fmt.Errorf("error checking user existence: %w", err)
|
||||
}
|
||||
ret.HasAnyUser = len(res) != 0
|
||||
ret.HasOnlyOneUser = len(res) == 1
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
// CountUsers returns number of users.
|
||||
func CountUsers(ctx context.Context, opts *CountUserFilter) int64 {
|
||||
return countUsers(ctx, opts)
|
||||
|
|
|
|||
|
|
@ -17,10 +17,7 @@ func GetUsersMapByIDs(ctx context.Context, userIDs []int64) (map[int64]*User, er
|
|||
|
||||
left := len(userIDs)
|
||||
for left > 0 {
|
||||
limit := db.DefaultMaxInSize
|
||||
if left < limit {
|
||||
limit = left
|
||||
}
|
||||
limit := min(left, db.DefaultMaxInSize)
|
||||
err := db.GetEngine(ctx).
|
||||
In("id", userIDs[:limit]).
|
||||
Find(&userMaps)
|
||||
|
|
|
|||
|
|
@ -204,9 +204,9 @@ func TestHashPasswordDeterministic(t *testing.T) {
|
|||
b := make([]byte, 16)
|
||||
u := &user_model.User{}
|
||||
algos := hash.RecommendedHashAlgorithms
|
||||
for j := 0; j < len(algos); j++ {
|
||||
for j := range algos {
|
||||
u.PasswdHashAlgo = algos[j]
|
||||
for i := 0; i < 50; i++ {
|
||||
for range 50 {
|
||||
// generate a random password
|
||||
rand.Read(b)
|
||||
pass := string(b)
|
||||
|
|
|
|||
|
|
@ -240,7 +240,7 @@ func CreateWebhooks(ctx context.Context, ws []*Webhook) error {
|
|||
if len(ws) == 0 {
|
||||
return nil
|
||||
}
|
||||
for i := 0; i < len(ws); i++ {
|
||||
for i := range ws {
|
||||
ws[i].Type = strings.TrimSpace(ws[i].Type)
|
||||
}
|
||||
return db.Insert(ctx, ws)
|
||||
|
|
|
|||
|
|
@ -73,7 +73,7 @@ func TestWebhook_EventsArray(t *testing.T) {
|
|||
"pull_request", "pull_request_assign", "pull_request_label", "pull_request_milestone",
|
||||
"pull_request_comment", "pull_request_review_approved", "pull_request_review_rejected",
|
||||
"pull_request_review_comment", "pull_request_sync", "pull_request_review_request", "wiki", "repository", "release",
|
||||
"package", "status", "workflow_job",
|
||||
"package", "status", "workflow_run", "workflow_job",
|
||||
},
|
||||
(&Webhook{
|
||||
HookEvent: &webhook_module.HookEvent{SendEverything: true},
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ package actions
|
|||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
|
|
@ -245,6 +246,10 @@ func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent web
|
|||
webhook_module.HookEventPackage:
|
||||
return matchPackageEvent(payload.(*api.PackagePayload), evt)
|
||||
|
||||
case // workflow_run
|
||||
webhook_module.HookEventWorkflowRun:
|
||||
return matchWorkflowRunEvent(payload.(*api.WorkflowRunPayload), evt)
|
||||
|
||||
default:
|
||||
log.Warn("unsupported event %q", triggedEvent)
|
||||
return false
|
||||
|
|
@ -564,21 +569,12 @@ func matchPullRequestReviewEvent(prPayload *api.PullRequestPayload, evt *jobpars
|
|||
actions = append(actions, "submitted", "edited")
|
||||
}
|
||||
|
||||
matched := false
|
||||
for _, val := range vals {
|
||||
for _, action := range actions {
|
||||
if glob.MustCompile(val, '/').Match(action) {
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if matched {
|
||||
if slices.ContainsFunc(actions, glob.MustCompile(val, '/').Match) {
|
||||
matchTimes++
|
||||
break
|
||||
}
|
||||
}
|
||||
if matched {
|
||||
matchTimes++
|
||||
}
|
||||
default:
|
||||
log.Warn("pull request review event unsupported condition %q", cond)
|
||||
}
|
||||
|
|
@ -613,21 +609,12 @@ func matchPullRequestReviewCommentEvent(prPayload *api.PullRequestPayload, evt *
|
|||
actions = append(actions, "created", "edited")
|
||||
}
|
||||
|
||||
matched := false
|
||||
for _, val := range vals {
|
||||
for _, action := range actions {
|
||||
if glob.MustCompile(val, '/').Match(action) {
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if matched {
|
||||
if slices.ContainsFunc(actions, glob.MustCompile(val, '/').Match) {
|
||||
matchTimes++
|
||||
break
|
||||
}
|
||||
}
|
||||
if matched {
|
||||
matchTimes++
|
||||
}
|
||||
default:
|
||||
log.Warn("pull request review comment event unsupported condition %q", cond)
|
||||
}
|
||||
|
|
@ -708,3 +695,53 @@ func matchPackageEvent(payload *api.PackagePayload, evt *jobparser.Event) bool {
|
|||
}
|
||||
return matchTimes == len(evt.Acts())
|
||||
}
|
||||
|
||||
func matchWorkflowRunEvent(payload *api.WorkflowRunPayload, evt *jobparser.Event) bool {
|
||||
// with no special filter parameters
|
||||
if len(evt.Acts()) == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
matchTimes := 0
|
||||
// all acts conditions should be satisfied
|
||||
for cond, vals := range evt.Acts() {
|
||||
switch cond {
|
||||
case "types":
|
||||
action := payload.Action
|
||||
for _, val := range vals {
|
||||
if glob.MustCompile(val, '/').Match(action) {
|
||||
matchTimes++
|
||||
break
|
||||
}
|
||||
}
|
||||
case "workflows":
|
||||
workflow := payload.Workflow
|
||||
patterns, err := workflowpattern.CompilePatterns(vals...)
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
if !workflowpattern.Skip(patterns, []string{workflow.Name}, &workflowpattern.EmptyTraceWriter{}) {
|
||||
matchTimes++
|
||||
}
|
||||
case "branches":
|
||||
patterns, err := workflowpattern.CompilePatterns(vals...)
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
if !workflowpattern.Skip(patterns, []string{payload.WorkflowRun.HeadBranch}, &workflowpattern.EmptyTraceWriter{}) {
|
||||
matchTimes++
|
||||
}
|
||||
case "branches-ignore":
|
||||
patterns, err := workflowpattern.CompilePatterns(vals...)
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
if !workflowpattern.Filter(patterns, []string{payload.WorkflowRun.HeadBranch}, &workflowpattern.EmptyTraceWriter{}) {
|
||||
matchTimes++
|
||||
}
|
||||
default:
|
||||
log.Warn("workflow run event unsupported condition %q", cond)
|
||||
}
|
||||
}
|
||||
return matchTimes == len(evt.Acts())
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,375 @@
|
|||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package assetfs
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"io"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/json"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
)
|
||||
|
||||
type EmbeddedFile interface {
|
||||
io.ReadSeeker
|
||||
fs.ReadDirFile
|
||||
ReadDir(n int) ([]fs.DirEntry, error)
|
||||
}
|
||||
|
||||
type EmbeddedFileInfo interface {
|
||||
fs.FileInfo
|
||||
fs.DirEntry
|
||||
GetGzipContent() ([]byte, bool)
|
||||
}
|
||||
|
||||
type decompressor interface {
|
||||
io.Reader
|
||||
Close() error
|
||||
Reset(io.Reader) error
|
||||
}
|
||||
|
||||
type embeddedFileInfo struct {
|
||||
fs *embeddedFS
|
||||
fullName string
|
||||
data []byte
|
||||
|
||||
BaseName string `json:"n"`
|
||||
OriginSize int64 `json:"s,omitempty"`
|
||||
DataBegin int64 `json:"b,omitempty"`
|
||||
DataLen int64 `json:"l,omitempty"`
|
||||
Children []*embeddedFileInfo `json:"c,omitempty"`
|
||||
}
|
||||
|
||||
func (fi *embeddedFileInfo) GetGzipContent() ([]byte, bool) {
|
||||
// when generating the bindata, if the compressed data equals or is larger than the original data, we store the original data
|
||||
if fi.DataLen == fi.OriginSize {
|
||||
return nil, false
|
||||
}
|
||||
return fi.data, true
|
||||
}
|
||||
|
||||
type EmbeddedFileBase struct {
|
||||
info *embeddedFileInfo
|
||||
dataReader io.ReadSeeker
|
||||
seekPos int64
|
||||
}
|
||||
|
||||
func (f *EmbeddedFileBase) ReadDir(n int) ([]fs.DirEntry, error) {
|
||||
// this method is used to satisfy the "func (f ioFile) ReadDir(...)" in httpfs
|
||||
l, err := f.info.fs.ReadDir(f.info.fullName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if n < 0 || n > len(l) {
|
||||
return l, nil
|
||||
}
|
||||
return l[:n], nil
|
||||
}
|
||||
|
||||
type EmbeddedOriginFile struct {
|
||||
EmbeddedFileBase
|
||||
}
|
||||
|
||||
type EmbeddedCompressedFile struct {
|
||||
EmbeddedFileBase
|
||||
decompressor decompressor
|
||||
decompressorPos int64
|
||||
}
|
||||
|
||||
type embeddedFS struct {
|
||||
meta func() *EmbeddedMeta
|
||||
|
||||
files map[string]*embeddedFileInfo
|
||||
filesMu sync.RWMutex
|
||||
|
||||
data []byte
|
||||
}
|
||||
|
||||
type EmbeddedMeta struct {
|
||||
Root *embeddedFileInfo
|
||||
}
|
||||
|
||||
func NewEmbeddedFS(data []byte) fs.ReadDirFS {
|
||||
efs := &embeddedFS{data: data, files: make(map[string]*embeddedFileInfo)}
|
||||
efs.meta = sync.OnceValue(func() *EmbeddedMeta {
|
||||
var meta EmbeddedMeta
|
||||
p := bytes.LastIndexByte(data, '\n')
|
||||
if p < 0 {
|
||||
return &meta
|
||||
}
|
||||
if err := json.Unmarshal(data[p+1:], &meta); err != nil {
|
||||
panic("embedded file is not valid")
|
||||
}
|
||||
return &meta
|
||||
})
|
||||
return efs
|
||||
}
|
||||
|
||||
var _ fs.ReadDirFS = (*embeddedFS)(nil)
|
||||
|
||||
func (e *embeddedFS) ReadDir(name string) (l []fs.DirEntry, err error) {
|
||||
fi, err := e.getFileInfo(name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !fi.IsDir() {
|
||||
return nil, fs.ErrNotExist
|
||||
}
|
||||
l = make([]fs.DirEntry, len(fi.Children))
|
||||
for i, child := range fi.Children {
|
||||
l[i], err = e.getFileInfo(name + "/" + child.BaseName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return l, nil
|
||||
}
|
||||
|
||||
func (e *embeddedFS) getFileInfo(fullName string) (*embeddedFileInfo, error) {
|
||||
// no need to do heavy "path.Clean()" because we don't want to support "foo/../bar" or absolute paths
|
||||
fullName = strings.TrimPrefix(fullName, "./")
|
||||
if fullName == "" {
|
||||
fullName = "."
|
||||
}
|
||||
|
||||
e.filesMu.RLock()
|
||||
fi := e.files[fullName]
|
||||
e.filesMu.RUnlock()
|
||||
if fi != nil {
|
||||
return fi, nil
|
||||
}
|
||||
|
||||
fields := strings.Split(fullName, "/")
|
||||
fi = e.meta().Root
|
||||
if fullName != "." {
|
||||
found := true
|
||||
for _, field := range fields {
|
||||
for _, child := range fi.Children {
|
||||
if found = child.BaseName == field; found {
|
||||
fi = child
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
return nil, fs.ErrNotExist
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
e.filesMu.Lock()
|
||||
defer e.filesMu.Unlock()
|
||||
if fi != nil {
|
||||
fi.fs = e
|
||||
fi.fullName = fullName
|
||||
fi.data = e.data[fi.DataBegin : fi.DataBegin+fi.DataLen]
|
||||
e.files[fullName] = fi // do not cache nil, otherwise keeping accessing random non-existing file will cause OOM
|
||||
return fi, nil
|
||||
}
|
||||
return nil, fs.ErrNotExist
|
||||
}
|
||||
|
||||
func (e *embeddedFS) Open(name string) (fs.File, error) {
|
||||
info, err := e.getFileInfo(name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
base := EmbeddedFileBase{info: info}
|
||||
base.dataReader = bytes.NewReader(base.info.data)
|
||||
if info.DataLen != info.OriginSize {
|
||||
decomp, err := gzip.NewReader(base.dataReader)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &EmbeddedCompressedFile{EmbeddedFileBase: base, decompressor: decomp}, nil
|
||||
}
|
||||
return &EmbeddedOriginFile{base}, nil
|
||||
}
|
||||
|
||||
var (
|
||||
_ EmbeddedFileInfo = (*embeddedFileInfo)(nil)
|
||||
_ EmbeddedFile = (*EmbeddedOriginFile)(nil)
|
||||
_ EmbeddedFile = (*EmbeddedCompressedFile)(nil)
|
||||
)
|
||||
|
||||
func (f *EmbeddedOriginFile) Read(p []byte) (n int, err error) {
|
||||
return f.dataReader.Read(p)
|
||||
}
|
||||
|
||||
func (f *EmbeddedCompressedFile) Read(p []byte) (n int, err error) {
|
||||
if f.decompressorPos > f.seekPos {
|
||||
if err = f.decompressor.Reset(bytes.NewReader(f.info.data)); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
f.decompressorPos = 0
|
||||
}
|
||||
if f.decompressorPos < f.seekPos {
|
||||
if _, err = io.CopyN(io.Discard, f.decompressor, f.seekPos-f.decompressorPos); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
f.decompressorPos = f.seekPos
|
||||
}
|
||||
n, err = f.decompressor.Read(p)
|
||||
f.decompressorPos += int64(n)
|
||||
f.seekPos = f.decompressorPos
|
||||
return n, err
|
||||
}
|
||||
|
||||
func (f *EmbeddedFileBase) Seek(offset int64, whence int) (int64, error) {
|
||||
switch whence {
|
||||
case io.SeekStart:
|
||||
f.seekPos = offset
|
||||
case io.SeekCurrent:
|
||||
f.seekPos += offset
|
||||
case io.SeekEnd:
|
||||
f.seekPos = f.info.OriginSize + offset
|
||||
}
|
||||
return f.seekPos, nil
|
||||
}
|
||||
|
||||
func (f *EmbeddedFileBase) Stat() (fs.FileInfo, error) {
|
||||
return f.info, nil
|
||||
}
|
||||
|
||||
func (f *EmbeddedOriginFile) Close() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *EmbeddedCompressedFile) Close() error {
|
||||
return f.decompressor.Close()
|
||||
}
|
||||
|
||||
func (fi *embeddedFileInfo) Name() string {
|
||||
return fi.BaseName
|
||||
}
|
||||
|
||||
func (fi *embeddedFileInfo) Size() int64 {
|
||||
return fi.OriginSize
|
||||
}
|
||||
|
||||
func (fi *embeddedFileInfo) Mode() fs.FileMode {
|
||||
return util.Iif(fi.IsDir(), fs.ModeDir|0o555, 0o444)
|
||||
}
|
||||
|
||||
func (fi *embeddedFileInfo) ModTime() time.Time {
|
||||
return getExecutableModTime()
|
||||
}
|
||||
|
||||
func (fi *embeddedFileInfo) IsDir() bool {
|
||||
return fi.Children != nil
|
||||
}
|
||||
|
||||
func (fi *embeddedFileInfo) Sys() any {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (fi *embeddedFileInfo) Type() fs.FileMode {
|
||||
return util.Iif(fi.IsDir(), fs.ModeDir, 0)
|
||||
}
|
||||
|
||||
func (fi *embeddedFileInfo) Info() (fs.FileInfo, error) {
|
||||
return fi, nil
|
||||
}
|
||||
|
||||
// getExecutableModTime returns the modification time of the executable file.
|
||||
// In bindata, we can't use the ModTime of the files because we need to make the build reproducible
|
||||
var getExecutableModTime = sync.OnceValue(func() (modTime time.Time) {
|
||||
exePath, err := os.Executable()
|
||||
if err != nil {
|
||||
return modTime
|
||||
}
|
||||
exePath, err = filepath.Abs(exePath)
|
||||
if err != nil {
|
||||
return modTime
|
||||
}
|
||||
exePath, err = filepath.EvalSymlinks(exePath)
|
||||
if err != nil {
|
||||
return modTime
|
||||
}
|
||||
st, err := os.Stat(exePath)
|
||||
if err != nil {
|
||||
return modTime
|
||||
}
|
||||
return st.ModTime()
|
||||
})
|
||||
|
||||
func GenerateEmbedBindata(fsRootPath, outputFile string) error {
|
||||
output, err := os.OpenFile(outputFile, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer output.Close()
|
||||
|
||||
meta := &EmbeddedMeta{}
|
||||
meta.Root = &embeddedFileInfo{}
|
||||
var outputOffset int64
|
||||
var embedFiles func(parent *embeddedFileInfo, fsPath, embedPath string) error
|
||||
embedFiles = func(parent *embeddedFileInfo, fsPath, embedPath string) error {
|
||||
dirEntries, err := os.ReadDir(fsPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, dirEntry := range dirEntries {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if dirEntry.IsDir() {
|
||||
child := &embeddedFileInfo{
|
||||
BaseName: dirEntry.Name(),
|
||||
Children: []*embeddedFileInfo{}, // non-nil means it's a directory
|
||||
}
|
||||
parent.Children = append(parent.Children, child)
|
||||
if err = embedFiles(child, filepath.Join(fsPath, dirEntry.Name()), path.Join(embedPath, dirEntry.Name())); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
data, err := os.ReadFile(filepath.Join(fsPath, dirEntry.Name()))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var compressed bytes.Buffer
|
||||
gz, _ := gzip.NewWriterLevel(&compressed, gzip.BestCompression)
|
||||
if _, err = gz.Write(data); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = gz.Close(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// only use the compressed data if it is smaller than the original data
|
||||
outputBytes := util.Iif(len(compressed.Bytes()) < len(data), compressed.Bytes(), data)
|
||||
child := &embeddedFileInfo{
|
||||
BaseName: dirEntry.Name(),
|
||||
OriginSize: int64(len(data)),
|
||||
DataBegin: outputOffset,
|
||||
DataLen: int64(len(outputBytes)),
|
||||
}
|
||||
if _, err = output.Write(outputBytes); err != nil {
|
||||
return err
|
||||
}
|
||||
outputOffset += child.DataLen
|
||||
parent.Children = append(parent.Children, child)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
if err = embedFiles(meta.Root, fsRootPath, ""); err != nil {
|
||||
return err
|
||||
}
|
||||
jsonBuf, err := json.Marshal(meta) // can't use json.NewEncoder here because it writes extra EOL
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, _ = output.Write([]byte{'\n'})
|
||||
_, err = output.Write(jsonBuf)
|
||||
return err
|
||||
}
|
||||
|
|
@ -0,0 +1,98 @@
|
|||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package assetfs
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io/fs"
|
||||
"net/http"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestEmbed(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
tmpDataDir := tmpDir + "/data"
|
||||
_ = os.MkdirAll(tmpDataDir+"/foo/bar", 0o755)
|
||||
_ = os.WriteFile(tmpDataDir+"/a.txt", []byte("a"), 0o644)
|
||||
_ = os.WriteFile(tmpDataDir+"/foo/bar/b.txt", bytes.Repeat([]byte("a"), 1000), 0o644)
|
||||
_ = os.WriteFile(tmpDataDir+"/foo/c.txt", []byte("c"), 0o644)
|
||||
require.NoError(t, GenerateEmbedBindata(tmpDataDir, tmpDir+"/out.dat"))
|
||||
|
||||
data, err := os.ReadFile(tmpDir + "/out.dat")
|
||||
require.NoError(t, err)
|
||||
efs := NewEmbeddedFS(data)
|
||||
|
||||
// test a non-existing file
|
||||
_, err = fs.ReadFile(efs, "not exist")
|
||||
assert.ErrorIs(t, err, fs.ErrNotExist)
|
||||
|
||||
// test a normal file (no compression)
|
||||
content, err := fs.ReadFile(efs, "a.txt")
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "a", string(content))
|
||||
fi, err := fs.Stat(efs, "a.txt")
|
||||
require.NoError(t, err)
|
||||
_, ok := fi.(EmbeddedFileInfo).GetGzipContent()
|
||||
assert.False(t, ok)
|
||||
|
||||
// test a compressed file
|
||||
content, err = fs.ReadFile(efs, "foo/bar/b.txt")
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, bytes.Repeat([]byte("a"), 1000), content)
|
||||
fi, err = fs.Stat(efs, "foo/bar/b.txt")
|
||||
require.NoError(t, err)
|
||||
assert.False(t, fi.Mode().IsDir())
|
||||
assert.True(t, fi.Mode().IsRegular())
|
||||
gzipContent, ok := fi.(EmbeddedFileInfo).GetGzipContent()
|
||||
assert.True(t, ok)
|
||||
assert.Greater(t, len(gzipContent), 1)
|
||||
assert.Less(t, len(gzipContent), 1000)
|
||||
|
||||
// test list root directory
|
||||
entries, err := fs.ReadDir(efs, ".")
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, entries, 2)
|
||||
assert.Equal(t, "a.txt", entries[0].Name())
|
||||
assert.False(t, entries[0].IsDir())
|
||||
|
||||
// test list subdirectory
|
||||
entries, err = fs.ReadDir(efs, "foo")
|
||||
require.NoError(t, err)
|
||||
require.Len(t, entries, 2)
|
||||
assert.Equal(t, "bar", entries[0].Name())
|
||||
assert.True(t, entries[0].IsDir())
|
||||
assert.Equal(t, "c.txt", entries[1].Name())
|
||||
assert.False(t, entries[1].IsDir())
|
||||
|
||||
// test directory mode
|
||||
fi, err = fs.Stat(efs, "foo")
|
||||
require.NoError(t, err)
|
||||
assert.True(t, fi.IsDir())
|
||||
assert.True(t, fi.Mode().IsDir())
|
||||
assert.False(t, fi.Mode().IsRegular())
|
||||
|
||||
// test httpfs
|
||||
hfs := http.FS(efs)
|
||||
hf, err := hfs.Open("foo/bar/b.txt")
|
||||
require.NoError(t, err)
|
||||
hi, err := hf.Stat()
|
||||
require.NoError(t, err)
|
||||
fiEmbedded, ok := hi.(EmbeddedFileInfo)
|
||||
require.True(t, ok)
|
||||
gzipContent, ok = fiEmbedded.GetGzipContent()
|
||||
assert.True(t, ok)
|
||||
assert.Greater(t, len(gzipContent), 1)
|
||||
assert.Less(t, len(gzipContent), 1000)
|
||||
|
||||
// test httpfs directory listing
|
||||
hf, err = hfs.Open("foo")
|
||||
require.NoError(t, err)
|
||||
dirs, err := hf.Readdir(1)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, dirs, 1)
|
||||
}
|
||||
|
|
@ -52,8 +52,8 @@ func Local(name, base string, sub ...string) *Layer {
|
|||
}
|
||||
|
||||
// Bindata returns a new Layer with the given name, it serves files from the given bindata asset.
|
||||
func Bindata(name string, fs http.FileSystem) *Layer {
|
||||
return &Layer{name: name, fs: fs}
|
||||
func Bindata(name string, fs fs.FS) *Layer {
|
||||
return &Layer{name: name, fs: http.FS(fs)}
|
||||
}
|
||||
|
||||
// LayeredFS is a layered asset file-system. It works like http.FileSystem, but it can have multiple layers.
|
||||
|
|
|
|||
|
|
@ -101,7 +101,7 @@ func Generate(n int) (string, error) {
|
|||
buffer := make([]byte, n)
|
||||
maxInt := big.NewInt(int64(len(validChars)))
|
||||
for {
|
||||
for j := 0; j < n; j++ {
|
||||
for j := range n {
|
||||
rnd, err := rand.Int(rand.Reader, maxInt)
|
||||
if err != nil {
|
||||
return "", err
|
||||
|
|
|
|||
|
|
@ -50,7 +50,7 @@ func TestComplexity_Generate(t *testing.T) {
|
|||
|
||||
test := func(t *testing.T, modes []string) {
|
||||
testComplextity(modes)
|
||||
for i := 0; i < maxCount; i++ {
|
||||
for range maxCount {
|
||||
pwd, err := Generate(pwdLen)
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, pwd, pwdLen)
|
||||
|
|
|
|||
|
|
@ -101,7 +101,7 @@ func (c *Client) CheckPassword(pw string, padding bool) (int, error) {
|
|||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
for _, pair := range strings.Split(string(body), "\n") {
|
||||
for pair := range strings.SplitSeq(string(body), "\n") {
|
||||
parts := strings.Split(pair, ":")
|
||||
if len(parts) != 2 {
|
||||
continue
|
||||
|
|
|
|||
|
|
@ -24,8 +24,8 @@ func drawBlock(img *image.Paletted, x, y, size, angle int, points []int) {
|
|||
rotate(points, m, m, angle)
|
||||
}
|
||||
|
||||
for i := 0; i < size; i++ {
|
||||
for j := 0; j < size; j++ {
|
||||
for i := range size {
|
||||
for j := range size {
|
||||
if pointInPolygon(i, j, points) {
|
||||
img.SetColorIndex(x+i, y+j, 1)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -134,7 +134,7 @@ func drawBlocks(p *image.Paletted, size int, c, b1, b2 blockFunc, b1Angle, b2Ang
|
|||
|
||||
// then we make it left-right mirror, so we didn't draw 3/6/9 before
|
||||
for x := 0; x < size/2; x++ {
|
||||
for y := 0; y < size; y++ {
|
||||
for y := range size {
|
||||
p.SetColorIndex(size-x, y, p.ColorIndexAt(x, y))
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ func Init() error {
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for i := 0; i < 10; i++ {
|
||||
for range 10 {
|
||||
if err = c.Ping(); err == nil {
|
||||
break
|
||||
}
|
||||
|
|
|
|||
|
|
@ -164,7 +164,7 @@ func DetectEncoding(content []byte) (string, error) {
|
|||
}
|
||||
times := 1024 / len(content)
|
||||
detectContent = make([]byte, 0, times*len(content))
|
||||
for i := 0; i < times; i++ {
|
||||
for range times {
|
||||
detectContent = append(detectContent, content...)
|
||||
}
|
||||
} else {
|
||||
|
|
|
|||
|
|
@ -242,7 +242,7 @@ func stringMustEndWith(t *testing.T, expected, value string) {
|
|||
func TestToUTF8WithFallbackReader(t *testing.T) {
|
||||
resetDefaultCharsetsOrder()
|
||||
|
||||
for testLen := 0; testLen < 2048; testLen++ {
|
||||
for testLen := range 2048 {
|
||||
pattern := " test { () }\n"
|
||||
input := ""
|
||||
for len(input) < testLen {
|
||||
|
|
|
|||
|
|
@ -47,6 +47,7 @@ type Command struct {
|
|||
globalArgsLength int
|
||||
brokenArgs []string
|
||||
cmd *exec.Cmd // for debug purpose only
|
||||
configArgs []string
|
||||
}
|
||||
|
||||
func logArgSanitize(arg string) string {
|
||||
|
|
@ -196,6 +197,16 @@ func (c *Command) AddDashesAndList(list ...string) *Command {
|
|||
return c
|
||||
}
|
||||
|
||||
func (c *Command) AddConfig(key, value string) *Command {
|
||||
kv := key + "=" + value
|
||||
if !isSafeArgumentValue(kv) {
|
||||
c.brokenArgs = append(c.brokenArgs, key)
|
||||
} else {
|
||||
c.configArgs = append(c.configArgs, "-c", kv)
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
// ToTrustedCmdArgs converts a list of strings (trusted as argument) to TrustedCmdArgs
|
||||
// In most cases, it shouldn't be used. Use NewCommand().AddXxx() function instead
|
||||
func ToTrustedCmdArgs(args []string) TrustedCmdArgs {
|
||||
|
|
@ -321,7 +332,7 @@ func (c *Command) run(ctx context.Context, skip int, opts *RunOpts) error {
|
|||
|
||||
startTime := time.Now()
|
||||
|
||||
cmd := exec.CommandContext(ctx, c.prog, c.args...)
|
||||
cmd := exec.CommandContext(ctx, c.prog, append(c.configArgs, c.args...)...)
|
||||
c.cmd = cmd // for debug purpose only
|
||||
if opts.Env == nil {
|
||||
cmd.Env = os.Environ()
|
||||
|
|
|
|||
|
|
@ -162,17 +162,25 @@ func AllCommitsCount(ctx context.Context, repoPath string, hidePRRefs bool, file
|
|||
|
||||
// CommitsCountOptions the options when counting commits
|
||||
type CommitsCountOptions struct {
|
||||
RepoPath string
|
||||
Not string
|
||||
Revision []string
|
||||
RelPath []string
|
||||
Since string
|
||||
Until string
|
||||
RepoPath string
|
||||
Not string
|
||||
Revision []string
|
||||
RelPath []string
|
||||
Since string
|
||||
Until string
|
||||
FollowRename bool
|
||||
}
|
||||
|
||||
// CommitsCount returns number of total commits of until given revision.
|
||||
func CommitsCount(ctx context.Context, opts CommitsCountOptions) (int64, error) {
|
||||
cmd := NewCommand("rev-list", "--count")
|
||||
var cmd *Command
|
||||
followRename := len(opts.RelPath) > 0 && opts.FollowRename
|
||||
|
||||
if followRename {
|
||||
cmd = NewCommand("--no-pager", "log", "--pretty=format:%H")
|
||||
} else {
|
||||
cmd = NewCommand("rev-list", "--count")
|
||||
}
|
||||
|
||||
cmd.AddDynamicArguments(opts.Revision...)
|
||||
|
||||
|
|
@ -181,6 +189,9 @@ func CommitsCount(ctx context.Context, opts CommitsCountOptions) (int64, error)
|
|||
}
|
||||
|
||||
if len(opts.RelPath) > 0 {
|
||||
if opts.FollowRename {
|
||||
cmd.AddOptionValues("--follow")
|
||||
}
|
||||
cmd.AddDashesAndList(opts.RelPath...)
|
||||
}
|
||||
|
||||
|
|
@ -188,7 +199,9 @@ func CommitsCount(ctx context.Context, opts CommitsCountOptions) (int64, error)
|
|||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
if followRename {
|
||||
return int64(len(strings.Split(stdout, "\n"))), nil
|
||||
}
|
||||
return strconv.ParseInt(strings.TrimSpace(stdout), 10, 64)
|
||||
}
|
||||
|
||||
|
|
@ -277,8 +290,8 @@ func NewSearchCommitsOptions(searchString string, forAllRefs bool) SearchCommits
|
|||
var keywords, authors, committers []string
|
||||
var after, before string
|
||||
|
||||
fields := strings.Fields(searchString)
|
||||
for _, k := range fields {
|
||||
fields := strings.FieldsSeq(searchString)
|
||||
for k := range fields {
|
||||
switch {
|
||||
case strings.HasPrefix(k, "author:"):
|
||||
authors = append(authors, strings.TrimPrefix(k, "author:"))
|
||||
|
|
|
|||
|
|
@ -7,8 +7,7 @@ package git
|
|||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"maps"
|
||||
"path"
|
||||
"sort"
|
||||
|
||||
|
|
@ -40,9 +39,7 @@ func (tes Entries) GetCommitsInfo(ctx context.Context, commit *Commit, treePath
|
|||
return nil, nil, err
|
||||
}
|
||||
|
||||
for pth, found := range commits {
|
||||
revs[pth] = found
|
||||
}
|
||||
maps.Copy(revs, commits)
|
||||
}
|
||||
} else {
|
||||
sort.Strings(entryPaths)
|
||||
|
|
@ -124,48 +121,25 @@ func GetLastCommitForPaths(ctx context.Context, commit *Commit, treePath string,
|
|||
return nil, err
|
||||
}
|
||||
|
||||
batchStdinWriter, batchReader, cancel, err := commit.repo.CatFileBatch(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer cancel()
|
||||
|
||||
commitsMap := map[string]*Commit{}
|
||||
commitsMap[commit.ID.String()] = commit
|
||||
|
||||
commitCommits := map[string]*Commit{}
|
||||
for path, commitID := range revs {
|
||||
if len(commitID) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
c, ok := commitsMap[commitID]
|
||||
if ok {
|
||||
commitCommits[path] = c
|
||||
continue
|
||||
}
|
||||
|
||||
if len(commitID) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
_, err := batchStdinWriter.Write([]byte(commitID + "\n"))
|
||||
c, err := commit.repo.GetCommit(commitID) // Ensure the commit exists in the repository
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_, typ, size, err := ReadBatchLine(batchReader)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if typ != "commit" {
|
||||
if err := DiscardFull(batchReader, size+1); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected type: %s for commit id: %s", typ, commitID)
|
||||
}
|
||||
c, err = CommitFromReader(commit.repo, MustIDFromString(commitID), io.LimitReader(batchReader, size))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if _, err := batchReader.Discard(1); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
commitCommits[path] = c
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -154,7 +154,7 @@ func TestCutDiffAroundLine(t *testing.T) {
|
|||
}
|
||||
|
||||
func BenchmarkCutDiffAroundLine(b *testing.B) {
|
||||
for n := 0; n < b.N; n++ {
|
||||
for b.Loop() {
|
||||
CutDiffAroundLine(strings.NewReader(exampleDiff), 3, true, 3)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -76,7 +76,7 @@ func (f Format) Parser(r io.Reader) *Parser {
|
|||
// would turn into "%0a%00".
|
||||
func (f Format) hexEscaped(delim []byte) string {
|
||||
escaped := ""
|
||||
for i := 0; i < len(delim); i++ {
|
||||
for i := range delim {
|
||||
escaped += "%" + hex.EncodeToString([]byte{delim[i]})
|
||||
}
|
||||
return escaped
|
||||