Merge branch 'main' into lunny/fix_move_column

commit b8a8f41af6
Author: Lunny Xiao
Date: 2024-05-01 23:53:12 +08:00 (committed by GitHub)
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
35 changed files with 487 additions and 176 deletions

View File

@@ -1456,7 +1456,7 @@ LEVEL = Info
 ;; Batch size to send for batched queues
 ;BATCH_LENGTH = 20
 ;;
-;; Connection string for redis queues this will store the redis or redis-cluster connection string.
+;; Connection string for redis queues this will store the redis (or Redis cluster) connection string.
 ;; When `TYPE` is `persistable-channel`, this provides a directory for the underlying leveldb
 ;; or additional options of the form `leveldb://path/to/db?option=value&....`, and will override `DATADIR`.
 ;CONN_STR = "redis://127.0.0.1:6379/0"
@@ -1740,9 +1740,8 @@ LEVEL = Info
 ;; For "memory" only, GC interval in seconds, default is 60
 ;INTERVAL = 60
 ;;
-;; For "redis", "redis-cluster" and "memcache", connection host address
-;; redis: `redis://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s`
-;; redis-cluster: `redis+cluster://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s`
+;; For "redis" and "memcache", connection host address
+;; redis: `redis://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` (or `redis+cluster://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` for a Redis cluster)
 ;; memcache: `127.0.0.1:11211`
 ;; twoqueue: `{"size":50000,"recent_ratio":0.25,"ghost_ratio":0.5}` or `50000`
 ;HOST =
@@ -1772,15 +1771,14 @@ LEVEL = Info
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;;
-;; Either "memory", "file", "redis", "redis-cluster", "db", "mysql", "couchbase", "memcache" or "postgres"
+;; Either "memory", "file", "redis", "db", "mysql", "couchbase", "memcache" or "postgres"
 ;; Default is "memory". "db" will reuse the configuration in [database]
 ;PROVIDER = memory
 ;;
 ;; Provider config options
 ;; memory: doesn't have any config yet
 ;; file: session file path, e.g. `data/sessions`
-;; redis: `redis://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s`
-;; redis-cluster: `redis+cluster://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s`
+;; redis: `redis://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` (or `redis+cluster://127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` for a Redis cluster)
 ;; mysql: go-sql-driver/mysql dsn config string, e.g. `root:password@/session_table`
 ;PROVIDER_CONFIG = data/sessions ; Relative paths will be made absolute against _`AppWorkPath`_.
 ;;

View File

@@ -492,7 +492,7 @@ Configuration at `[queue]` will set defaults for queues with overrides for indiv
 - `DATADIR`: **queues/common**: Base DataDir for storing level queues. `DATADIR` for individual queues can be set in `queue.name` sections. Relative paths will be made absolute against `%(APP_DATA_PATH)s`.
 - `LENGTH`: **100000**: Maximal queue size before channel queues block
 - `BATCH_LENGTH`: **20**: Batch data before passing to the handler
-- `CONN_STR`: **redis://127.0.0.1:6379/0**: Connection string for the redis queue type. For `redis-cluster` use `redis+cluster://127.0.0.1:6379/0`. Options can be set using query params. Similarly, LevelDB options can also be set using: **leveldb://relative/path?option=value** or **leveldb:///absolute/path?option=value**, and will override `DATADIR`
+- `CONN_STR`: **redis://127.0.0.1:6379/0**: Connection string for the redis queue type. If you're running a Redis cluster, use `redis+cluster://127.0.0.1:6379/0`. Options can be set using query params. Similarly, LevelDB options can also be set using: **leveldb://relative/path?option=value** or **leveldb:///absolute/path?option=value**, and will override `DATADIR`
 - `QUEUE_NAME`: **_queue**: The suffix for default redis and disk queue name. Individual queues will default to **`name`**`QUEUE_NAME` but can be overridden in the specific `queue.name` section.
 - `SET_NAME`: **_unique**: The suffix that will be added to the default redis and disk queue `set` name for unique queues. Individual queues will default to **`name`**`QUEUE_NAME`_`SET_NAME`_ but can be overridden in the specific `queue.name` section.
 - `MAX_WORKERS`: **(dynamic)**: Maximum number of worker go-routines for the queue. Default value is "CpuNum/2" clipped to between 1 and 10.
@@ -777,11 +777,11 @@ and
 ## Cache (`cache`)
-- `ADAPTER`: **memory**: Cache engine adapter, either `memory`, `redis`, `redis-cluster`, `twoqueue` or `memcache`. (`twoqueue` represents a size limited LRU cache.)
+- `ADAPTER`: **memory**: Cache engine adapter, either `memory`, `redis`, `twoqueue` or `memcache`. (`twoqueue` represents a size limited LRU cache.)
 - `INTERVAL`: **60**: Garbage Collection interval (sec), for memory and twoqueue cache only.
-- `HOST`: **_empty_**: Connection string for `redis`, `redis-cluster` and `memcache`. For `twoqueue` sets configuration for the queue.
+- `HOST`: **_empty_**: Connection string for `redis` and `memcache`. For `twoqueue` sets configuration for the queue.
   - Redis: `redis://:macaron@127.0.0.1:6379/0?pool_size=100&idle_timeout=180s`
-  - Redis-cluster `redis+cluster://:macaron@127.0.0.1:6379/0?pool_size=100&idle_timeout=180s`
+  - For a Redis cluster: `redis+cluster://:macaron@127.0.0.1:6379/0?pool_size=100&idle_timeout=180s`
   - Memcache: `127.0.0.1:9090;127.0.0.1:9091`
   - TwoQueue LRU cache: `{"size":50000,"recent_ratio":0.25,"ghost_ratio":0.5}` or `50000` representing the maximum number of objects stored in the cache.
 - `ITEM_TTL`: **16h**: Time to keep items in cache if not used, Setting it to -1 disables caching.
@@ -793,7 +793,7 @@ and
 ## Session (`session`)
-- `PROVIDER`: **memory**: Session engine provider \[memory, file, redis, redis-cluster, db, mysql, couchbase, memcache, postgres\]. Setting `db` will reuse the configuration in `[database]`
+- `PROVIDER`: **memory**: Session engine provider \[memory, file, redis, db, mysql, couchbase, memcache, postgres\]. Setting `db` will reuse the configuration in `[database]`
 - `PROVIDER_CONFIG`: **data/sessions**: For file, the root path; for db, empty (database config will be used); for others, the connection string. Relative paths will be made absolute against _`AppWorkPath`_.
 - `COOKIE_SECURE`:**_empty_**: `true` or `false`. Enable this to force using HTTPS for all session access. If not set, it defaults to `true` if the ROOT_URL is an HTTPS URL.
 - `COOKIE_NAME`: **i\_like\_gitea**: The name of the cookie used for the session ID.
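The cache and session connection strings documented above are plain URIs whose options ride along as query parameters. As a small, generic sketch (standard library only; this is not Gitea's own parsing code), such a string breaks down like this:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Same format as the `redis+cluster://` example in the cheat sheet above.
	u, err := url.Parse("redis+cluster://:macaron@127.0.0.1:6379/0?pool_size=100&idle_timeout=180s")
	if err != nil {
		panic(err)
	}
	fmt.Println(u.Scheme)                      // redis+cluster
	fmt.Println(u.Host)                        // 127.0.0.1:6379
	fmt.Println(u.Path)                        // /0 (database number)
	fmt.Println(u.Query().Get("pool_size"))    // 100
	fmt.Println(u.Query().Get("idle_timeout")) // 180s
}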

View File

@@ -130,7 +130,10 @@ func GetRepoAssignees(ctx context.Context, repo *Repository) (_ []*user_model.Us
 	// and just waste 1 unit is cheaper than re-allocate memory once.
 	users := make([]*user_model.User, 0, len(uniqueUserIDs)+1)
 	if len(userIDs) > 0 {
-		if err = e.In("id", uniqueUserIDs.Values()).OrderBy(user_model.GetOrderByName()).Find(&users); err != nil {
+		if err = e.In("id", uniqueUserIDs.Values()).
+			Where(builder.Eq{"`user`.is_active": true}).
+			OrderBy(user_model.GetOrderByName()).
+			Find(&users); err != nil {
 			return nil, err
 		}
 	}
@@ -152,7 +155,8 @@ func GetReviewers(ctx context.Context, repo *Repository, doerID, posterID int64)
 		return nil, err
 	}

-	cond := builder.And(builder.Neq{"`user`.id": posterID})
+	cond := builder.And(builder.Neq{"`user`.id": posterID}).
+		And(builder.Eq{"`user`.is_active": true})

 	if repo.IsPrivate || repo.Owner.Visibility == api.VisibleTypePrivate {
 		// This a private repository:
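As a rough, self-contained sketch (not part of the patch) of what the chained xorm builder condition above expands to, with 2 standing in as a placeholder poster ID:

package main

import (
	"fmt"

	"xorm.io/builder"
)

func main() {
	// Same shape as the condition built in GetReviewers above.
	cond := builder.And(builder.Neq{"`user`.id": 2}).
		And(builder.Eq{"`user`.is_active": true})

	sql, args, err := builder.ToSQL(cond)
	if err != nil {
		panic(err)
	}
	fmt.Println(sql, args)
	// Prints something like: `user`.id<>? AND `user`.is_active=? [2 true]
}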

View File

@@ -9,6 +9,7 @@ import (
 	"code.gitea.io/gitea/models/db"
 	repo_model "code.gitea.io/gitea/models/repo"
 	"code.gitea.io/gitea/models/unittest"
+	user_model "code.gitea.io/gitea/models/user"

 	"github.com/stretchr/testify/assert"
 )
@@ -25,8 +26,17 @@ func TestRepoAssignees(t *testing.T) {
 	repo21 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 21})
 	users, err = repo_model.GetRepoAssignees(db.DefaultContext, repo21)
 	assert.NoError(t, err)
-	assert.Len(t, users, 4)
-	assert.ElementsMatch(t, []int64{10, 15, 16, 18}, []int64{users[0].ID, users[1].ID, users[2].ID, users[3].ID})
+	if assert.Len(t, users, 4) {
+		assert.ElementsMatch(t, []int64{10, 15, 16, 18}, []int64{users[0].ID, users[1].ID, users[2].ID, users[3].ID})
+	}
+
+	// do not return deactivated users
+	assert.NoError(t, user_model.UpdateUserCols(db.DefaultContext, &user_model.User{ID: 15, IsActive: false}, "is_active"))
+	users, err = repo_model.GetRepoAssignees(db.DefaultContext, repo21)
+	assert.NoError(t, err)
+	if assert.Len(t, users, 3) {
+		assert.NotContains(t, []int64{users[0].ID, users[1].ID, users[2].ID}, 15)
+	}
 }

 func TestRepoGetReviewers(t *testing.T) {
@@ -38,17 +48,19 @@ func TestRepoGetReviewers(t *testing.T) {
 	ctx := db.DefaultContext
 	reviewers, err := repo_model.GetReviewers(ctx, repo1, 2, 2)
 	assert.NoError(t, err)
-	assert.Len(t, reviewers, 4)
+	if assert.Len(t, reviewers, 3) {
+		assert.ElementsMatch(t, []int64{1, 4, 11}, []int64{reviewers[0].ID, reviewers[1].ID, reviewers[2].ID})
+	}

 	// should include doer if doer is not PR poster.
 	reviewers, err = repo_model.GetReviewers(ctx, repo1, 11, 2)
 	assert.NoError(t, err)
-	assert.Len(t, reviewers, 4)
+	assert.Len(t, reviewers, 3)

 	// should not include PR poster, if PR poster would be otherwise eligible
 	reviewers, err = repo_model.GetReviewers(ctx, repo1, 11, 4)
 	assert.NoError(t, err)
-	assert.Len(t, reviewers, 3)
+	assert.Len(t, reviewers, 2)

 	// test private user repo
 	repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})

View File

@@ -39,8 +39,6 @@ import (
 const (
 	unicodeNormalizeName = "unicodeNormalize"
 	maxBatchSize = 16
-	// fuzzyDenominator determines the levenshtein distance per each character of a keyword
-	fuzzyDenominator = 4
 )

 func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error {
@@ -245,7 +243,7 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 		phraseQuery.Analyzer = repoIndexerAnalyzer
 		keywordQuery = phraseQuery
 		if opts.IsKeywordFuzzy {
-			phraseQuery.Fuzziness = len(opts.Keyword) / fuzzyDenominator
+			phraseQuery.Fuzziness = inner_bleve.GuessFuzzinessByKeyword(opts.Keyword)
 		}
 	}
 	if len(opts.RepoIDs) > 0 {

View File

@@ -178,12 +178,6 @@ func Init() {
 		}()

 		rIndexer = elasticsearch.NewIndexer(setting.Indexer.RepoConnStr, setting.Indexer.RepoIndexerName)
-		if err != nil {
-			cancel()
-			(*globalIndexer.Load()).Close()
-			close(waitChannel)
-			log.Fatal("PID: %d Unable to create the elasticsearch Repository Indexer connstr: %s Error: %v", os.Getpid(), setting.Indexer.RepoConnStr, err)
-		}
 		existed, err = rIndexer.Init(ctx)
 		if err != nil {
 			cancel()

View File

@@ -47,3 +47,15 @@ func openIndexer(path string, latestVersion int) (bleve.Index, int, error) {
 	return index, 0, nil
 }
+
+func GuessFuzzinessByKeyword(s string) int {
+	// according to https://github.com/blevesearch/bleve/issues/1563, the supported max fuzziness is 2
+	// magic number 4 was chosen to determine the levenshtein distance per each character of a keyword
+	// BUT, when using CJK (eg: `갃갃갃` `啊啊啊`), it mismatches a lot.
+	for _, r := range s {
+		if r >= 128 {
+			return 0
+		}
+	}
+	return min(2, len(s)/4)
+}
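For reference, a standalone sketch of what the helper above yields for a few keywords; the helper is restated locally only so the snippet compiles on its own (the built-in min needs Go 1.21+):

package main

import "fmt"

// guessFuzzinessByKeyword mirrors the GuessFuzzinessByKeyword helper added above.
func guessFuzzinessByKeyword(s string) int {
	for _, r := range s {
		if r >= 128 { // any non-ASCII rune (e.g. CJK) disables fuzzy matching
			return 0
		}
	}
	return min(2, len(s)/4) // bleve supports a maximum fuzziness of 2
}

func main() {
	for _, k := range []string{"db", "search", "levenshtein", "啊啊啊"} {
		fmt.Printf("%q -> %d\n", k, guessFuzzinessByKeyword(k))
	}
	// Output: "db" -> 0, "search" -> 1, "levenshtein" -> 2, "啊啊啊" -> 0
}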

View File

@@ -35,11 +35,7 @@ func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error {
 	})
 }

-const (
-	maxBatchSize = 16
-	// fuzzyDenominator determines the levenshtein distance per each character of a keyword
-	fuzzyDenominator = 4
-)
+const maxBatchSize = 16

 // IndexerData an update to the issue indexer
 type IndexerData internal.IndexerData
@@ -162,7 +158,7 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
 	if options.Keyword != "" {
 		fuzziness := 0
 		if options.IsFuzzyKeyword {
-			fuzziness = len(options.Keyword) / fuzzyDenominator
+			fuzziness = inner_bleve.GuessFuzzinessByKeyword(options.Keyword)
 		}

 		queries = append(queries, bleve.NewDisjunctionQuery([]query.Query{

View File

@@ -0,0 +1,34 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package structs
import (
"time"
)
// ActionTask represents a ActionTask
type ActionTask struct {
ID int64 `json:"id"`
Name string `json:"name"`
HeadBranch string `json:"head_branch"`
HeadSHA string `json:"head_sha"`
RunNumber int64 `json:"run_number"`
Event string `json:"event"`
DisplayTitle string `json:"display_title"`
Status string `json:"status"`
WorkflowID string `json:"workflow_id"`
URL string `json:"url"`
// swagger:strfmt date-time
CreatedAt time.Time `json:"created_at"`
// swagger:strfmt date-time
UpdatedAt time.Time `json:"updated_at"`
// swagger:strfmt date-time
RunStartedAt time.Time `json:"run_started_at"`
}
// ActionTaskResponse returns a ActionTask
type ActionTaskResponse struct {
Entries []*ActionTask `json:"workflow_runs"`
TotalCount int64 `json:"total_count"`
}

View File

@@ -3495,6 +3495,7 @@ npm.install=Para instalar o pacote usando o npm, execute o seguinte comando:
 npm.install2=ou adicione-o ao ficheiro <code>package.json</code>:
 npm.dependencies=Dependências
 npm.dependencies.development=Dependências de desenvolvimento
+npm.dependencies.bundle=Dependências agregadas
 npm.dependencies.peer=Dependências de pares
 npm.dependencies.optional=Dependências opcionais
 npm.details.tag=Etiqueta

package-lock.json (generated): 26 changed lines
View File

@@ -42,7 +42,6 @@
         "postcss": "8.4.38",
         "postcss-loader": "8.1.1",
         "postcss-nesting": "12.1.2",
-        "pretty-ms": "9.0.0",
         "sortablejs": "1.15.2",
         "swagger-ui-dist": "5.17.2",
         "tailwindcss": "3.4.3",
@@ -9170,17 +9169,6 @@
         "url": "https://github.com/sponsors/sindresorhus"
       }
     },
-    "node_modules/parse-ms": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-4.0.0.tgz",
-      "integrity": "sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==",
-      "engines": {
-        "node": ">=18"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/sindresorhus"
-      }
-    },
     "node_modules/path-exists": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
@@ -9772,20 +9760,6 @@
         "url": "https://github.com/chalk/ansi-styles?sponsor=1"
       }
     },
-    "node_modules/pretty-ms": {
-      "version": "9.0.0",
-      "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-9.0.0.tgz",
-      "integrity": "sha512-E9e9HJ9R9NasGOgPaPE8VMeiPKAyWR5jcFpNnwIejslIhWqdqOrb2wShBsncMPUb+BcCd2OPYfh7p2W6oemTng==",
-      "dependencies": {
-        "parse-ms": "^4.0.0"
-      },
-      "engines": {
-        "node": ">=18"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/sindresorhus"
-      }
-    },
     "node_modules/printable-characters": {
       "version": "1.0.42",
       "resolved": "https://registry.npmjs.org/printable-characters/-/printable-characters-1.0.42.tgz",

View File

@@ -41,7 +41,6 @@
     "postcss": "8.4.38",
     "postcss-loader": "8.1.1",
     "postcss-nesting": "12.1.2",
-    "pretty-ms": "9.0.0",
     "sortablejs": "1.15.2",
     "swagger-ui-dist": "5.17.2",
     "tailwindcss": "3.4.3",

View File

@@ -1168,6 +1168,9 @@ func Routes() *web.Route {
 			m.Post("", reqToken(), reqRepoWriter(unit.TypeCode), mustNotBeArchived, bind(api.CreateTagOption{}), repo.CreateTag)
 			m.Delete("/*", reqToken(), reqRepoWriter(unit.TypeCode), mustNotBeArchived, repo.DeleteTag)
 		}, reqRepoReader(unit.TypeCode), context.ReferencesGitRepo(true))
+		m.Group("/actions", func() {
+			m.Get("/tasks", repo.ListActionTasks)
+		}, reqRepoReader(unit.TypeActions), context.ReferencesGitRepo(true))
 		m.Group("/keys", func() {
 			m.Combo("").Get(repo.ListDeployKeys).
 				Post(bind(api.CreateKeyOption{}), repo.CreateDeployKey)

View File

@@ -0,0 +1,80 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package repo
import (
"net/http"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/routers/api/v1/utils"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/convert"
)
// ListActionTasks list all the actions of a repository
func ListActionTasks(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/actions/tasks repository ListActionTasks
// ---
// summary: List a repository's action tasks
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: owner of the repo
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repo
// type: string
// required: true
// - name: page
// in: query
// description: page number of results to return (1-based)
// type: integer
// - name: limit
// in: query
// description: page size of results, default maximum page size is 50
// type: integer
// responses:
// "200":
// "$ref": "#/responses/TasksList"
// "400":
// "$ref": "#/responses/error"
// "403":
// "$ref": "#/responses/forbidden"
// "404":
// "$ref": "#/responses/notFound"
// "409":
// "$ref": "#/responses/conflict"
// "422":
// "$ref": "#/responses/validationError"
tasks, total, err := db.FindAndCount[actions_model.ActionTask](ctx, &actions_model.FindTaskOptions{
ListOptions: utils.GetListOptions(ctx),
RepoID: ctx.Repo.Repository.ID,
})
if err != nil {
ctx.Error(http.StatusInternalServerError, "ListActionTasks", err)
return
}
res := new(api.ActionTaskResponse)
res.TotalCount = total
res.Entries = make([]*api.ActionTask, len(tasks))
for i := range tasks {
convertedTask, err := convert.ToActionTask(ctx, tasks[i])
if err != nil {
ctx.Error(http.StatusInternalServerError, "ToActionTask", err)
return
}
res.Entries[i] = convertedTask
}
ctx.JSON(http.StatusOK, &res)
}
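A hypothetical client-side sketch of calling the new endpoint; the host, owner/repo and token are placeholders, and the response fields mirror a subset of the ActionTaskResponse struct introduced in this change:

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// actionTaskResponse mirrors part of the JSON shape documented above.
type actionTaskResponse struct {
	TotalCount int64 `json:"total_count"`
	Entries    []struct {
		ID     int64  `json:"id"`
		Name   string `json:"name"`
		Status string `json:"status"`
	} `json:"workflow_runs"`
}

func main() {
	// Placeholder host, repository and token.
	req, err := http.NewRequest("GET", "https://gitea.example.com/api/v1/repos/some-owner/some-repo/actions/tasks?page=1&limit=20", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "token YOUR_TOKEN")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out actionTaskResponse
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Printf("%d tasks total, %d on this page\n", out.TotalCount, len(out.Entries))
}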

View File

@@ -415,6 +415,13 @@ type swaggerRepoNewIssuePinsAllowed struct {
 	Body api.NewIssuePinsAllowed `json:"body"`
 }

+// TasksList
+// swagger:response TasksList
+type swaggerRepoTasksList struct {
+	// in:body
+	Body api.ActionTaskResponse `json:"body"`
+}
+
 // swagger:response Compare
 type swaggerCompare struct {
 	// in:body

View File

@@ -117,16 +117,14 @@ func HookPostReceive(ctx *gitea_context.PrivateContext) {
 		}
 	}

 	if len(branchesToSync) > 0 {
-		if gitRepo == nil {
-			var err error
-			gitRepo, err = gitrepo.OpenRepository(ctx, repo)
-			if err != nil {
-				log.Error("Failed to open repository: %s/%s Error: %v", ownerName, repoName, err)
-				ctx.JSON(http.StatusInternalServerError, private.HookPostReceiveResult{
-					Err: fmt.Sprintf("Failed to open repository: %s/%s Error: %v", ownerName, repoName, err),
-				})
-				return
-			}
+		var err error
+		gitRepo, err = gitrepo.OpenRepository(ctx, repo)
+		if err != nil {
+			log.Error("Failed to open repository: %s/%s Error: %v", ownerName, repoName, err)
+			ctx.JSON(http.StatusInternalServerError, private.HookPostReceiveResult{
+				Err: fmt.Sprintf("Failed to open repository: %s/%s Error: %v", ownerName, repoName, err),
+			})
+			return
 		}

 		var (

View File

@@ -28,6 +28,7 @@ func Search(ctx *context.Context) {
 	ctx.Data["Language"] = language
 	ctx.Data["IsFuzzy"] = isFuzzy
 	ctx.Data["PageIsViewCode"] = true
+	ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled

 	if keyword == "" {
 		ctx.HTML(http.StatusOK, tplSearch)
@@ -86,7 +87,6 @@ func Search(ctx *context.Context) {
 		}
 	}

-	ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
 	ctx.Data["Repo"] = ctx.Repo.Repository
 	ctx.Data["SearchResults"] = searchResults
 	ctx.Data["SearchResultLanguages"] = searchResultLanguages

View File

@@ -182,7 +182,7 @@ func createProvider(providerName string, source *Source) (goth.Provider, error)
 	}

 	// always set the name if provider is created so we can support multiple setups of 1 provider
-	if err == nil && provider != nil {
+	if provider != nil {
 		provider.SetName(providerName)
 	}

View File

@@ -11,6 +11,7 @@ import (
 	"strings"
 	"time"

+	actions_model "code.gitea.io/gitea/models/actions"
 	asymkey_model "code.gitea.io/gitea/models/asymkey"
 	"code.gitea.io/gitea/models/auth"
 	git_model "code.gitea.io/gitea/models/git"
@@ -24,6 +25,7 @@ import (
 	"code.gitea.io/gitea/modules/container"
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/log"
+	"code.gitea.io/gitea/modules/setting"
 	api "code.gitea.io/gitea/modules/structs"
 	"code.gitea.io/gitea/modules/util"
 	"code.gitea.io/gitea/services/gitdiff"
@@ -193,6 +195,31 @@ func ToTag(repo *repo_model.Repository, t *git.Tag) *api.Tag {
 	}
 }

+// ToActionTask convert a actions_model.ActionTask to an api.ActionTask
+func ToActionTask(ctx context.Context, t *actions_model.ActionTask) (*api.ActionTask, error) {
+	if err := t.LoadAttributes(ctx); err != nil {
+		return nil, err
+	}
+
+	url := strings.TrimSuffix(setting.AppURL, "/") + t.GetRunLink()
+
+	return &api.ActionTask{
+		ID:           t.ID,
+		Name:         t.Job.Name,
+		HeadBranch:   t.Job.Run.PrettyRef(),
+		HeadSHA:      t.Job.CommitSHA,
+		RunNumber:    t.Job.Run.Index,
+		Event:        t.Job.Run.TriggerEvent,
+		DisplayTitle: t.Job.Run.Title,
+		Status:       t.Status.String(),
+		WorkflowID:   t.Job.Run.WorkflowID,
+		URL:          url,
+		CreatedAt:    t.Created.AsLocalTime(),
+		UpdatedAt:    t.Updated.AsLocalTime(),
+		RunStartedAt: t.Started.AsLocalTime(),
+	}, nil
+}
+
 // ToVerification convert a git.Commit.Signature to an api.PayloadCommitVerification
 func ToVerification(ctx context.Context, c *git.Commit) *api.PayloadCommitVerification {
 	verif := asymkey_model.ParseCommitWithSignature(ctx, c)

View File

@@ -211,13 +211,11 @@ func ToLabel(label *issues_model.Label, repo *repo_model.Repository, org *user_m
 		IsArchived: label.IsArchived(),
 	}

+	labelBelongsToRepo := label.BelongsToRepo()
+
 	// calculate URL
-	if label.BelongsToRepo() && repo != nil {
-		if repo != nil {
-			result.URL = fmt.Sprintf("%s/labels/%d", repo.APIURL(), label.ID)
-		} else {
-			log.Error("ToLabel did not get repo to calculate url for label with id '%d'", label.ID)
-		}
+	if labelBelongsToRepo && repo != nil {
+		result.URL = fmt.Sprintf("%s/labels/%d", repo.APIURL(), label.ID)
 	} else { // BelongsToOrg
 		if org != nil {
 			result.URL = fmt.Sprintf("%sapi/v1/orgs/%s/labels/%d", setting.AppURL, url.PathEscape(org.Name), label.ID)
@@ -226,6 +224,10 @@ func ToLabel(label *issues_model.Label, repo *repo_model.Repository, org *user_m
 		}
 	}

+	if labelBelongsToRepo && repo == nil {
+		log.Error("ToLabel did not get repo to calculate url for label with id '%d'", label.ID)
+	}
+
 	return result
 }

View File

@@ -12,6 +12,14 @@
 	<!-- mobile right menu, it must be here because in mobile view, each item is a flex column, the first item is a full row column -->
 	<div class="ui secondary menu item navbar-mobile-right only-mobile">
+		{{if and .IsSigned EnableTimetracking .ActiveStopwatch}}
+		<a id="mobile-stopwatch-icon" class="active-stopwatch item tw-mx-0" href="{{.ActiveStopwatch.IssueLink}}" title="{{ctx.Locale.Tr "active_stopwatch"}}" data-seconds="{{.ActiveStopwatch.Seconds}}">
+			<div class="tw-relative">
+				{{svg "octicon-stopwatch"}}
+				<span class="header-stopwatch-dot"></span>
+			</div>
+		</a>
+		{{end}}
 		{{if .IsSigned}}
 		<a id="mobile-notifications-icon" class="item tw-w-auto tw-p-2" href="{{AppSubUrl}}/notifications" data-tooltip-content="{{ctx.Locale.Tr "notifications"}}" aria-label="{{ctx.Locale.Tr "notifications"}}">
 			<div class="tw-relative">
@@ -74,41 +82,13 @@
 			</div><!-- end content avatar menu -->
 		</div><!-- end dropdown avatar menu -->
 	{{else if .IsSigned}}
-		{{if EnableTimetracking}}
-		<a class="active-stopwatch-trigger item tw-mx-0{{if not .ActiveStopwatch}} tw-hidden{{end}}" href="{{.ActiveStopwatch.IssueLink}}" title="{{ctx.Locale.Tr "active_stopwatch"}}">
+		{{if and EnableTimetracking .ActiveStopwatch}}
+		<a class="item not-mobile active-stopwatch tw-mx-0" href="{{.ActiveStopwatch.IssueLink}}" title="{{ctx.Locale.Tr "active_stopwatch"}}" data-seconds="{{.ActiveStopwatch.Seconds}}">
 			<div class="tw-relative">
 				{{svg "octicon-stopwatch"}}
 				<span class="header-stopwatch-dot"></span>
 			</div>
-			<span class="only-mobile tw-ml-2">{{ctx.Locale.Tr "active_stopwatch"}}</span>
 		</a>
-		<div class="active-stopwatch-popup item tippy-target tw-p-2">
-			<div class="tw-flex tw-items-center">
-				<a class="stopwatch-link tw-flex tw-items-center" href="{{.ActiveStopwatch.IssueLink}}">
-					{{svg "octicon-issue-opened" 16 "tw-mr-2"}}
-					<span class="stopwatch-issue">{{.ActiveStopwatch.RepoSlug}}#{{.ActiveStopwatch.IssueIndex}}</span>
-					<span class="ui primary label stopwatch-time tw-my-0 tw-mx-4" data-seconds="{{.ActiveStopwatch.Seconds}}">
-						{{if .ActiveStopwatch}}{{Sec2Time .ActiveStopwatch.Seconds}}{{end}}
-					</span>
-				</a>
-				<form class="stopwatch-commit" method="post" action="{{.ActiveStopwatch.IssueLink}}/times/stopwatch/toggle">
-					{{.CsrfTokenHtml}}
-					<button
-						type="submit"
-						class="ui button mini compact basic icon"
-						data-tooltip-content="{{ctx.Locale.Tr "repo.issues.stop_tracking"}}"
-					>{{svg "octicon-square-fill"}}</button>
-				</form>
-				<form class="stopwatch-cancel" method="post" action="{{.ActiveStopwatch.IssueLink}}/times/stopwatch/cancel">
-					{{.CsrfTokenHtml}}
-					<button
-						type="submit"
-						class="ui button mini compact basic icon"
-						data-tooltip-content="{{ctx.Locale.Tr "repo.issues.cancel_tracking"}}"
-					>{{svg "octicon-trash"}}</button>
-				</form>
-			</div>
-		</div>
 		{{end}}

 		<a class="item not-mobile tw-mx-0" href="{{AppSubUrl}}/notifications" data-tooltip-content="{{ctx.Locale.Tr "notifications"}}" aria-label="{{ctx.Locale.Tr "notifications"}}">
@@ -202,4 +182,33 @@
 		</a>
 		{{end}}
 	</div><!-- end full right menu -->
+
+	{{if and .IsSigned EnableTimetracking .ActiveStopwatch}}
+	<div class="active-stopwatch-popup tippy-target">
+		<div class="tw-flex tw-items-center tw-gap-2 tw-p-3">
+			<a class="stopwatch-link tw-flex tw-items-center tw-gap-2 muted" href="{{.ActiveStopwatch.IssueLink}}">
+				{{svg "octicon-issue-opened" 16}}
+				<span class="stopwatch-issue">{{.ActiveStopwatch.RepoSlug}}#{{.ActiveStopwatch.IssueIndex}}</span>
+			</a>
+			<div class="tw-flex tw-gap-1">
+				<form class="stopwatch-commit" method="post" action="{{.ActiveStopwatch.IssueLink}}/times/stopwatch/toggle">
+					{{.CsrfTokenHtml}}
+					<button
+						type="submit"
+						class="ui button mini compact basic icon tw-mr-0"
+						data-tooltip-content="{{ctx.Locale.Tr "repo.issues.stop_tracking"}}"
+					>{{svg "octicon-square-fill"}}</button>
+				</form>
+				<form class="stopwatch-cancel" method="post" action="{{.ActiveStopwatch.IssueLink}}/times/stopwatch/cancel">
+					{{.CsrfTokenHtml}}
+					<button
+						type="submit"
+						class="ui button mini compact basic icon tw-mr-0"
+						data-tooltip-content="{{ctx.Locale.Tr "repo.issues.cancel_tracking"}}"
+					>{{svg "octicon-trash"}}</button>
+				</form>
+			</div>
+		</div>
+	</div>
+	{{end}}
 </nav>

View File

@@ -3997,6 +3997,66 @@
}
}
},
"/repos/{owner}/{repo}/actions/tasks": {
"get": {
"produces": [
"application/json"
],
"tags": [
"repository"
],
"summary": "List a repository's action tasks",
"operationId": "ListActionTasks",
"parameters": [
{
"type": "string",
"description": "owner of the repo",
"name": "owner",
"in": "path",
"required": true
},
{
"type": "string",
"description": "name of the repo",
"name": "repo",
"in": "path",
"required": true
},
{
"type": "integer",
"description": "page number of results to return (1-based)",
"name": "page",
"in": "query"
},
{
"type": "integer",
"description": "page size of results, default maximum page size is 50",
"name": "limit",
"in": "query"
}
],
"responses": {
"200": {
"$ref": "#/responses/TasksList"
},
"400": {
"$ref": "#/responses/error"
},
"403": {
"$ref": "#/responses/forbidden"
},
"404": {
"$ref": "#/responses/notFound"
},
"409": {
"$ref": "#/responses/conflict"
},
"422": {
"$ref": "#/responses/validationError"
}
}
}
},
"/repos/{owner}/{repo}/actions/variables": { "/repos/{owner}/{repo}/actions/variables": {
"get": { "get": {
"produces": [ "produces": [
@ -17953,6 +18013,89 @@
}, },
"x-go-package": "code.gitea.io/gitea/modules/structs" "x-go-package": "code.gitea.io/gitea/modules/structs"
}, },
"ActionTask": {
"description": "ActionTask represents a ActionTask",
"type": "object",
"properties": {
"created_at": {
"type": "string",
"format": "date-time",
"x-go-name": "CreatedAt"
},
"display_title": {
"type": "string",
"x-go-name": "DisplayTitle"
},
"event": {
"type": "string",
"x-go-name": "Event"
},
"head_branch": {
"type": "string",
"x-go-name": "HeadBranch"
},
"head_sha": {
"type": "string",
"x-go-name": "HeadSHA"
},
"id": {
"type": "integer",
"format": "int64",
"x-go-name": "ID"
},
"name": {
"type": "string",
"x-go-name": "Name"
},
"run_number": {
"type": "integer",
"format": "int64",
"x-go-name": "RunNumber"
},
"run_started_at": {
"type": "string",
"format": "date-time",
"x-go-name": "RunStartedAt"
},
"status": {
"type": "string",
"x-go-name": "Status"
},
"updated_at": {
"type": "string",
"format": "date-time",
"x-go-name": "UpdatedAt"
},
"url": {
"type": "string",
"x-go-name": "URL"
},
"workflow_id": {
"type": "string",
"x-go-name": "WorkflowID"
}
},
"x-go-package": "code.gitea.io/gitea/modules/structs"
},
"ActionTaskResponse": {
"description": "ActionTaskResponse returns a ActionTask",
"type": "object",
"properties": {
"total_count": {
"type": "integer",
"format": "int64",
"x-go-name": "TotalCount"
},
"workflow_runs": {
"type": "array",
"items": {
"$ref": "#/definitions/ActionTask"
},
"x-go-name": "Entries"
}
},
"x-go-package": "code.gitea.io/gitea/modules/structs"
},
"ActionVariable": { "ActionVariable": {
"description": "ActionVariable return value of the query API", "description": "ActionVariable return value of the query API",
"type": "object", "type": "object",
@ -25409,6 +25552,12 @@
} }
} }
}, },
"TasksList": {
"description": "TasksList",
"schema": {
"$ref": "#/definitions/ActionTaskResponse"
}
},
"Team": { "Team": {
"description": "Team", "description": "Team",
"schema": { "schema": {

View File

@@ -684,7 +684,9 @@ func TestAPIRepoGetReviewers(t *testing.T) {
 	resp := MakeRequest(t, req, http.StatusOK)
 	var reviewers []*api.User
 	DecodeJSON(t, resp, &reviewers)
-	assert.Len(t, reviewers, 4)
+	if assert.Len(t, reviewers, 3) {
+		assert.ElementsMatch(t, []int64{1, 4, 11}, []int64{reviewers[0].ID, reviewers[1].ID, reviewers[2].ID})
+	}
 }

 func TestAPIRepoGetAssignees(t *testing.T) {

View File

@@ -81,7 +81,7 @@ func testGit(t *testing.T, u *url.URL) {
 		rawTest(t, &httpContext, little, big, littleLFS, bigLFS)
 		mediaTest(t, &httpContext, little, big, littleLFS, bigLFS)

-		t.Run("CreateAgitFlowPull", doCreateAgitFlowPull(dstPath, &httpContext, "master", "test/head"))
+		t.Run("CreateAgitFlowPull", doCreateAgitFlowPull(dstPath, &httpContext, "test/head"))
 		t.Run("BranchProtectMerge", doBranchProtectPRMerge(&httpContext, dstPath))
 		t.Run("AutoMerge", doAutoPRMerge(&httpContext, dstPath))
 		t.Run("CreatePRAndSetManuallyMerged", doCreatePRAndSetManuallyMerged(httpContext, httpContext, dstPath, "master", "test-manually-merge"))
@@ -122,7 +122,7 @@ func testGit(t *testing.T, u *url.URL) {
 		rawTest(t, &sshContext, little, big, littleLFS, bigLFS)
 		mediaTest(t, &sshContext, little, big, littleLFS, bigLFS)

-		t.Run("CreateAgitFlowPull", doCreateAgitFlowPull(dstPath, &sshContext, "master", "test/head2"))
+		t.Run("CreateAgitFlowPull", doCreateAgitFlowPull(dstPath, &sshContext, "test/head2"))
 		t.Run("BranchProtectMerge", doBranchProtectPRMerge(&sshContext, dstPath))
 		t.Run("MergeFork", func(t *testing.T) {
 			defer tests.PrintCurrentTest(t)()
@@ -329,9 +329,6 @@ func generateCommitWithNewData(size int, repoPath, email, fullName, prefix strin
 		}
 		written += n
 	}
-	if err != nil {
-		return "", err
-	}

 	// Commit
 	// Now here we should explicitly allow lfs filters to run
@@ -693,7 +690,7 @@ func doAutoPRMerge(baseCtx *APITestContext, dstPath string) func(t *testing.T) {
 	}
 }

-func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, baseBranch, headBranch string) func(t *testing.T) {
+func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, headBranch string) func(t *testing.T) {
 	return func(t *testing.T) {
 		defer tests.PrintCurrentTest(t)()

View File

@@ -103,19 +103,12 @@
     width: 50%;
     min-height: 48px;
   }

+  #navbar #mobile-stopwatch-icon,
   #navbar #mobile-notifications-icon {
     margin-right: 6px !important;
   }
 }

-#navbar a.item .notification_count {
-  color: var(--color-nav-bg);
-  padding: 0 3.75px;
-  font-size: 12px;
-  line-height: 12px;
-  font-weight: var(--font-weight-bold);
-}
-
 #navbar a.item:hover .notification_count,
 #navbar a.item:hover .header-stopwatch-dot {
   border-color: var(--color-nav-hover-bg);
@@ -123,6 +116,11 @@
 #navbar a.item .notification_count,
 #navbar a.item .header-stopwatch-dot {
+  color: var(--color-nav-bg);
+  padding: 0 3.75px;
+  font-size: 12px;
+  line-height: 12px;
+  font-weight: var(--font-weight-bold);
   background: var(--color-primary);
   border: 2px solid var(--color-nav-bg);
   position: absolute;
@@ -135,6 +133,8 @@
   align-items: center;
   justify-content: center;
   z-index: 1; /* prevent menu button background from overlaying icon */
+  user-select: none;
+  white-space: nowrap;
 }

 .secondary-nav {

View File

@@ -16,8 +16,8 @@
 .tippy-box {
   position: relative;
-  background-color: var(--color-body);
-  color: var(--color-secondary-dark-6);
+  background-color: var(--color-menu);
+  color: var(--color-text);
   border: 1px solid var(--color-secondary);
   border-radius: var(--border-radius);
   font-size: 1rem;
@@ -25,7 +25,6 @@
 .tippy-content {
   position: relative;
-  padding: 1rem; /* if you need different padding, use different data-theme */
   z-index: 1;
 }
@@ -166,5 +165,5 @@
 }

 .tippy-svg-arrow-inner {
-  fill: var(--color-body);
+  fill: var(--color-menu);
 }

View File

@@ -251,9 +251,9 @@ const sfc = {
       this.repos = json.data.map((webSearchRepo) => {
         return {
           ...webSearchRepo.repository,
-          latest_commit_status_state: webSearchRepo.latest_commit_status.State,
+          latest_commit_status_state: webSearchRepo.latest_commit_status?.State, // if latest_commit_status is null, it means there is no commit status
+          latest_commit_status_state_link: webSearchRepo.latest_commit_status?.TargetURL,
           locale_latest_commit_status_state: webSearchRepo.locale_latest_commit_status,
-          latest_commit_status_state_link: webSearchRepo.latest_commit_status.TargetURL,
         };
       });
       const count = response.headers.get('X-Total-Count');

View File

@@ -18,6 +18,7 @@ export function attachRefIssueContextPopup(refIssues) {
     if (!owner) return;

     const el = document.createElement('div');
+    el.classList.add('tw-p-3');
     refIssue.parentNode.insertBefore(el, refIssue.nextSibling);

     const view = createApp(ContextPopup);
@@ -30,6 +31,7 @@ export function attachRefIssueContextPopup(refIssues) {
     }

     createTippy(refIssue, {
+      theme: 'default',
       content: el,
       placement: 'top-start',
       interactive: true,

View File

@@ -1,6 +1,7 @@
 import $ from 'jquery';
 import {GET} from '../modules/fetch.js';
 import {toggleElem} from '../utils/dom.js';
+import {logoutFromWorker} from '../modules/worker.js';

 const {appSubUrl, notificationSettings, assetVersionEncoded} = window.config;

 let notificationSequenceNumber = 0;
@@ -95,7 +96,7 @@ export function initNotificationCount() {
           type: 'close',
         });
         worker.port.close();
-        window.location.href = `${appSubUrl}/`;
+        logoutFromWorker();
       } else if (event.data.type === 'close') {
         worker.port.postMessage({
           type: 'close',

View File

@@ -113,6 +113,7 @@ function showLineButton() {
   btn.closest('.code-view').append(menu.cloneNode(true));

   createTippy(btn, {
+    theme: 'menu',
     trigger: 'click',
     hideOnClick: true,
     content: menu,

View File

@@ -502,6 +502,7 @@ export function initRepoPullRequestReview() {
   if ($reviewBtn.length && $panel.length) {
     const tippy = createTippy($reviewBtn[0], {
       content: $panel[0],
+      theme: 'default',
       placement: 'bottom',
       trigger: 'click',
       maxWidth: 'none',

View File

@@ -1,7 +1,7 @@
-import prettyMilliseconds from 'pretty-ms';
 import {createTippy} from '../modules/tippy.js';
 import {GET} from '../modules/fetch.js';
 import {hideElem, showElem} from '../utils/dom.js';
+import {logoutFromWorker} from '../modules/worker.js';

 const {appSubUrl, notificationSettings, enableTimeTracking, assetVersionEncoded} = window.config;

@@ -10,28 +10,31 @@ export function initStopwatch() {
     return;
   }

-  const stopwatchEl = document.querySelector('.active-stopwatch-trigger');
+  const stopwatchEls = document.querySelectorAll('.active-stopwatch');
   const stopwatchPopup = document.querySelector('.active-stopwatch-popup');
-  if (!stopwatchEl || !stopwatchPopup) {
+  if (!stopwatchEls.length || !stopwatchPopup) {
     return;
   }

-  stopwatchEl.removeAttribute('href'); // intended for noscript mode only
-  createTippy(stopwatchEl, {
-    content: stopwatchPopup,
-    placement: 'bottom-end',
-    trigger: 'click',
-    maxWidth: 'none',
-    interactive: true,
-    hideOnClick: true,
-  });
-
   // global stop watch (in the head_navbar), it should always work in any case either the EventSource or the PeriodicPoller is used.
-  const currSeconds = document.querySelector('.stopwatch-time')?.getAttribute('data-seconds');
-  if (currSeconds) {
-    updateStopwatchTime(currSeconds);
+  const seconds = stopwatchEls[0]?.getAttribute('data-seconds');
+  if (seconds) {
+    updateStopwatchTime(parseInt(seconds));
+  }
+
+  for (const stopwatchEl of stopwatchEls) {
+    stopwatchEl.removeAttribute('href'); // intended for noscript mode only
+    createTippy(stopwatchEl, {
+      content: stopwatchPopup.cloneNode(true),
+      placement: 'bottom-end',
+      trigger: 'click',
+      maxWidth: 'none',
+      interactive: true,
+      hideOnClick: true,
+      theme: 'default',
+    });
   }

   let usingPeriodicPoller = false;
@@ -75,7 +78,7 @@ export function initStopwatch() {
           type: 'close',
         });
         worker.port.close();
-        window.location.href = `${appSubUrl}/`;
+        logoutFromWorker();
       } else if (event.data.type === 'close') {
         worker.port.postMessage({
           type: 'close',
@@ -124,10 +127,9 @@ async function updateStopwatch() {

 function updateStopwatchData(data) {
   const watch = data[0];
-  const btnEl = document.querySelector('.active-stopwatch-trigger');
+  const btnEls = document.querySelectorAll('.active-stopwatch');
   if (!watch) {
-    clearStopwatchTimer();
-    hideElem(btnEl);
+    hideElem(btnEls);
   } else {
     const {repo_owner_name, repo_name, issue_index, seconds} = watch;
     const issueUrl = `${appSubUrl}/${repo_owner_name}/${repo_name}/issues/${issue_index}`;
@@ -137,31 +139,28 @@ function updateStopwatchData(data) {
     const stopwatchIssue = document.querySelector('.stopwatch-issue');
     if (stopwatchIssue) stopwatchIssue.textContent = `${repo_owner_name}/${repo_name}#${issue_index}`;

     updateStopwatchTime(seconds);
-    showElem(btnEl);
+    showElem(btnEls);
   }

   return Boolean(data.length);
 }

-let updateTimeIntervalId = null; // holds setInterval id when active
-function clearStopwatchTimer() {
-  if (updateTimeIntervalId !== null) {
-    clearInterval(updateTimeIntervalId);
-    updateTimeIntervalId = null;
-  }
-}
-
-function updateStopwatchTime(seconds) {
-  const secs = parseInt(seconds);
-  if (!Number.isFinite(secs)) return;
-
-  clearStopwatchTimer();
-  const stopwatch = document.querySelector('.stopwatch-time');
-  // TODO: replace with <relative-time> similar to how system status up time is shown
-  const start = Date.now();
-  const updateUi = () => {
-    const delta = Date.now() - start;
-    const dur = prettyMilliseconds(secs * 1000 + delta, {compact: true});
-    if (stopwatch) stopwatch.textContent = dur;
-  };
-  updateUi();
-  updateTimeIntervalId = setInterval(updateUi, 1000);
-}
+// TODO: This flickers on page load, we could avoid this by making a custom
+// element to render time periods. Feeding a datetime in backend does not work
+// when time zone between server and client differs.
+function updateStopwatchTime(seconds) {
+  if (!Number.isFinite(seconds)) return;
+  const datetime = (new Date(Date.now() - seconds * 1000)).toISOString();
+  for (const parent of document.querySelectorAll('.header-stopwatch-dot')) {
+    const existing = parent.querySelector(':scope > relative-time');
+    if (existing) {
+      existing.setAttribute('datetime', datetime);
+    } else {
+      const el = document.createElement('relative-time');
+      el.setAttribute('format', 'micro');
+      el.setAttribute('datetime', datetime);
+      el.setAttribute('lang', 'en-US');
+      el.setAttribute('title', ''); // make <relative-time> show no title and therefor no tooltip
+      parent.append(el);
+    }
+  }
+}

View File

@@ -37,8 +37,10 @@ export function createTippy(target, opts = {}) {
       return onShow?.(instance);
     },
     arrow: arrow || (theme === 'bare' ? false : arrowSvg),
-    role: role || 'menu', // HTML role attribute
-    theme: theme || role || 'menu', // CSS theme, either "tooltip", "menu", "box-with-header" or "bare"
+    // HTML role attribute, ideally the default role would be "popover" but it does not exist
+    role: role || 'menu',
+    // CSS theme, either "default", "tooltip", "menu", "box-with-header" or "bare"
+    theme: theme || role || 'default',
     plugins: [followCursor],
     ...other,
   });

View File

@@ -0,0 +1,9 @@
import {sleep} from '../utils.js';
const {appSubUrl} = window.config;
export async function logoutFromWorker() {
// wait for a while because other requests (eg: logout) may be in the flight
await sleep(5000);
window.location.href = `${appSubUrl}/`;
}

View File

@@ -131,6 +131,7 @@ window.customElements.define('overflow-menu', class extends HTMLElement {
       interactive: true,
       placement: 'bottom-end',
       role: 'menu',
+      theme: 'menu',
      content: this.tippyContent,
      onShow: () => { // FIXME: onShown doesn't work (never be called)
        setTimeout(() => {