Mirror of https://github.com/go-gitea/gitea.git (synced 2025-07-21 17:41:16 +02:00)

Compare commits - 19 commits:

5e62137fe3
6a081f95c0
c3c246cffc
85be939c2a
a680c911e4
d9c18cbba0
3daedb3877
2bf987229a
f984a7e6c6
c96da610c2
e46dbec294
8f64017058
d737eaa63a
058ee52333
47b1fc5149
20c2bdf86b
df13fc8818
445992d929
d059156c3a

@@ -25,7 +25,7 @@ globals:
  Tribute: false

overrides:
-  - files: ["web_src/**/*.worker.js", "web_src/js/serviceworker.js"]
+  - files: ["web_src/**/*worker.js"]
    env:
      worker: true
    rules:

CHANGELOG.md (20 changed lines)
@@ -4,6 +4,26 @@ This changelog goes through all the changes that have been made in each release
without substantial changes to our git log; to see the highlights of what has
been added to each release, please refer to the [blog](https://blog.gitea.io).

+## [1.12.2](https://github.com/go-gitea/gitea/releases/tag/v1.12.2) - 2020-07-11
+
+* BUGFIXES
+  * When deleting repository decrease user repository count in cache (#11954) (#12188)
+  * Return full commit message instead of summary in commits API (#12186) (#12187)
+  * Properly set HEAD when a repo is created with a default branch that is not named 'master' (#12135) (#12182)
+  * Ensure GPG Subkeys are verified (#12155) (#12168)
+  * Fix failing to cache last commit with key being too long (#12151) (#12161)
+  * Multiple small admin dashboard fixes (#12153) (#12156)
+  * Remove spurious logging of " Delete all repository archives" at startup (#12139) (#12148)
+  * Fix repository setup instructions when default branch is not named 'master' (#12122) (#12147)
+  * Move EventSource to SharedWorker (#12095) (#12130)
+  * Fix ui bug in wiki commit page (#12089) (#12125)
+  * Fix gitgraph branch continues after merge (#12044) (#12105)
+  * Set the base url when migrating from Gitlab using access token or username without password (#11852) (#12104)
+  * Ensure BlameReaders close at end of request (#12102) (#12103)
+  * Fix panic when adding review comment (#12058)
+* ENHANCEMENTS
+  * Disable dropzone's timeout for file uploads (#12024) (#12032)
+
## [1.12.1](https://github.com/go-gitea/gitea/releases/tag/v1.12.1) - 2020-06-21

* BUGFIXES

Makefile (12 changed lines)
@@ -253,7 +253,7 @@ swagger-validate:
.PHONY: errcheck
errcheck:
	@hash errcheck > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
-		$(GO) get -u github.com/kisielk/errcheck; \
+		GO111MODULE=off $(GO) get -u github.com/kisielk/errcheck; \
	fi
	errcheck $(GO_PACKAGES)

@@ -264,14 +264,14 @@ revive:
.PHONY: misspell-check
misspell-check:
	@hash misspell > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
-		$(GO) get -u github.com/client9/misspell/cmd/misspell; \
+		GO111MODULE=off $(GO) get -u github.com/client9/misspell/cmd/misspell; \
	fi
	misspell -error -i unknwon,destory $(GO_SOURCES_OWN)

.PHONY: misspell
misspell:
	@hash misspell > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
-		$(GO) get -u github.com/client9/misspell/cmd/misspell; \
+		GO111MODULE=off $(GO) get -u github.com/client9/misspell/cmd/misspell; \
	fi
	misspell -w -i unknwon $(GO_SOURCES_OWN)

@@ -529,7 +529,7 @@ $(DIST_DIRS):
.PHONY: release-windows
release-windows: | $(DIST_DIRS)
	@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
-		$(GO) get -u src.techknowlogick.com/xgo; \
+		GO111MODULE=off $(GO) get -u src.techknowlogick.com/xgo; \
	fi
	CGO_CFLAGS="$(CGO_CFLAGS)" GO111MODULE=off xgo -go $(XGO_VERSION) -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION) .
ifeq ($(CI),drone)

@@ -539,7 +539,7 @@ endif
.PHONY: release-linux
release-linux: | $(DIST_DIRS)
	@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
-		$(GO) get -u src.techknowlogick.com/xgo; \
+		GO111MODULE=off $(GO) get -u src.techknowlogick.com/xgo; \
	fi
	CGO_CFLAGS="$(CGO_CFLAGS)" GO111MODULE=off xgo -go $(XGO_VERSION) -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'linux/amd64,linux/386,linux/arm-5,linux/arm-6,linux/arm64,linux/mips64le,linux/mips,linux/mipsle' -out gitea-$(VERSION) .
ifeq ($(CI),drone)

@@ -549,7 +549,7 @@ endif
.PHONY: release-darwin
release-darwin: | $(DIST_DIRS)
	@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
-		$(GO) get -u src.techknowlogick.com/xgo; \
+		GO111MODULE=off $(GO) get -u src.techknowlogick.com/xgo; \
	fi
	CGO_CFLAGS="$(CGO_CFLAGS)" GO111MODULE=off xgo -go $(XGO_VERSION) -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '$(LDFLAGS)' -targets 'darwin/*' -out gitea-$(VERSION) .
ifeq ($(CI),drone)

@@ -120,6 +120,12 @@ var checklist = []check{
		isDefault: false,
		f:         runDoctorPRMergeBase,
	},
+	{
+		title:     "Recalculate Stars number for all user",
+		name:      "recalculate_stars_number",
+		isDefault: false,
+		f:         runDoctorUserStarNum,
+	},
	// more checks please append here
}

@@ -494,6 +500,10 @@ func runDoctorPRMergeBase(ctx *cli.Context) ([]string, error) {
	return results, err
}

+func runDoctorUserStarNum(ctx *cli.Context) ([]string, error) {
+	return nil, models.DoctorUserStarNum()
+}
+
func runDoctorScriptType(ctx *cli.Context) ([]string, error) {
	path, err := exec.LookPath(setting.ScriptType)
	if err != nil {

@@ -211,7 +211,7 @@ MIN_TIMEOUT = 10s
MAX_TIMEOUT = 60s
TIMEOUT_STEP = 10s
; This setting determines how often the db is queried to get the latest notification counts.
-; If the browser client supports EventSource, it will be used in preference to polling notification.
+; If the browser client supports EventSource and SharedWorker, a SharedWorker will be used in preference to polling notification. Set to -1 to disable the EventSource
EVENT_SOURCE_UPDATE_TIME = 10s

[markdown]

@@ -148,8 +148,7 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`.
 - `MIN_TIMEOUT`: **10s**: These options control how often notification endpoint is polled to update the notification count. On page load the notification count will be checked after `MIN_TIMEOUT`. The timeout will increase to `MAX_TIMEOUT` by `TIMEOUT_STEP` if the notification count is unchanged. Set MIN_TIMEOUT to 0 to turn off.
 - `MAX_TIMEOUT`: **60s**.
 - `TIMEOUT_STEP`: **10s**.
-- `EVENT_SOURCE_UPDATE_TIME`: **10s**: This setting determines how often the database is queried to update notification counts. If the browser client supports `EventSource`, it will be used in preference to polling notification endpoint.
-
+- `EVENT_SOURCE_UPDATE_TIME`: **10s**: This setting determines how often the database is queried to update notification counts. If the browser client supports `EventSource` and `SharedWorker`, a `SharedWorker` will be used in preference to polling notification endpoint. Set to **-1** to disable the `EventSource`.

 ## Markdown (`markdown`)

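The settings above describe a simple client-side backoff: the notification endpoint is polled after `MIN_TIMEOUT`, and while the count is unchanged the delay grows by `TIMEOUT_STEP` until it reaches `MAX_TIMEOUT`. The Go sketch below only illustrates that documented backoff; the real logic lives in the frontend JavaScript, the helper name is made up, and resetting to the minimum when the count changes is an assumption.

package main

import (
	"fmt"
	"time"
)

// nextPollDelay sketches the documented backoff: while the notification count
// is unchanged the delay grows by step, capped at maxT; on a change it is
// assumed to reset to minT. Hypothetical helper, not Gitea code.
func nextPollDelay(current, minT, maxT, step time.Duration, countChanged bool) time.Duration {
	if countChanged {
		return minT
	}
	if next := current + step; next < maxT {
		return next
	}
	return maxT
}

func main() {
	delay := 10 * time.Second // MIN_TIMEOUT
	for i := 0; i < 7; i++ {
		fmt.Println(delay) // 10s, 20s, ... then capped at 60s (MAX_TIMEOUT)
		delay = nextPollDelay(delay, 10*time.Second, 60*time.Second, 10*time.Second, false)
	}
}
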
@@ -509,6 +509,18 @@ func hashAndVerifyForKeyID(sig *packet.Signature, payload string, committer *Use
		return nil
	}
	for _, key := range keys {
+		var primaryKeys []*GPGKey
+		if key.PrimaryKeyID != "" {
+			primaryKeys, err = GetGPGKeysByKeyID(key.PrimaryKeyID)
+			if err != nil {
+				log.Error("GetGPGKeysByKeyID: %v", err)
+				return &CommitVerification{
+					CommittingUser: committer,
+					Verified:       false,
+					Reason:         "gpg.error.failed_retrieval_gpg_keys",
+				}
+			}
+		}
		activated := false
		if len(email) != 0 {
			for _, e := range key.Emails {

@@ -518,6 +530,20 @@ func hashAndVerifyForKeyID(sig *packet.Signature, payload string, committer *Use
					break
				}
			}
+			if !activated {
+				for _, pkey := range primaryKeys {
+					for _, e := range pkey.Emails {
+						if e.IsActivated && strings.EqualFold(e.Email, email) {
+							activated = true
+							email = e.Email
+							break
+						}
+					}
+					if activated {
+						break
+					}
+				}
+			}
		} else {
			for _, e := range key.Emails {
				if e.IsActivated {

@@ -526,7 +552,22 @@ func hashAndVerifyForKeyID(sig *packet.Signature, payload string, committer *Use
					break
				}
			}
+			if !activated {
+				for _, pkey := range primaryKeys {
+					for _, e := range pkey.Emails {
+						if e.IsActivated {
+							activated = true
+							email = e.Email
+							break
+						}
+					}
+					if activated {
+						break
+					}
+				}
+			}
		}

		if !activated {
			continue
		}

@@ -614,7 +655,6 @@ func ParseCommitWithSignature(c *git.Commit) *CommitVerification {
	if keyID == "" && sig.IssuerFingerprint != nil && len(sig.IssuerFingerprint) > 0 {
		keyID = fmt.Sprintf("%X", sig.IssuerFingerprint[12:20])
	}
-
	defaultReason := NoKeyFound

	// First check if the sig has a keyID and if so just look at that

@@ -11,6 +11,7 @@ import (
	"errors"
	"fmt"
	"html/template"
+	"unicode/utf8"

	// Needed for jpeg support
	_ "image/jpeg"

@@ -1384,11 +1385,11 @@ func GetRepositoriesByForkID(forkID int64) ([]*Repository, error) {
func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err error) {
	repo.LowerName = strings.ToLower(repo.Name)

-	if len(repo.Description) > 255 {
-		repo.Description = repo.Description[:255]
+	if utf8.RuneCountInString(repo.Description) > 255 {
+		repo.Description = string([]rune(repo.Description)[:255])
	}
-	if len(repo.Website) > 255 {
-		repo.Website = repo.Website[:255]
+	if utf8.RuneCountInString(repo.Website) > 255 {
+		repo.Website = string([]rune(repo.Website)[:255])
	}

	if _, err = e.ID(repo.ID).AllCols().Update(repo); err != nil {

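The hunk above swaps byte-based truncation (len and a byte slice) for rune-based truncation, so a 255-character limit can no longer cut a multi-byte UTF-8 character in half and leave an invalid string. A standalone sketch of the difference; the example string and the cut length are made up, and this is not Gitea code:

package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	s := "héllo wörld" // contains multi-byte UTF-8 characters

	// Byte-based truncation can split a multi-byte character,
	// producing an invalid UTF-8 string.
	byteCut := s[:2]

	// Rune-based truncation keeps whole characters.
	runeCut := string([]rune(s)[:2])

	fmt.Printf("bytes=%d runes=%d\n", len(s), utf8.RuneCountInString(s))
	fmt.Printf("byteCut=%q valid=%v\n", byteCut, utf8.ValidString(byteCut)) // invalid
	fmt.Printf("runeCut=%q valid=%v\n", runeCut, utf8.ValidString(runeCut)) // valid
}
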
@@ -1566,6 +1567,10 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
		releaseAttachments = append(releaseAttachments, attachments[i].LocalPath())
	}

+	if _, err = sess.Exec("UPDATE `user` SET num_stars=num_stars-1 WHERE id IN (SELECT `uid` FROM `star` WHERE repo_id = ?)", repo.ID); err != nil {
+		return err
+	}
+
	if err = deleteBeans(sess,
		&Access{RepoID: repo.ID},
		&Action{RepoID: repo.ID},

@@ -2331,3 +2336,38 @@ func updateRepositoryCols(e Engine, repo *Repository, cols ...string) error {
func UpdateRepositoryCols(repo *Repository, cols ...string) error {
	return updateRepositoryCols(x, repo, cols...)
}
+
+// DoctorUserStarNum recalculate Stars number for all user
+func DoctorUserStarNum() (err error) {
+	const batchSize = 100
+	sess := x.NewSession()
+	defer sess.Close()
+
+	for start := 0; ; start += batchSize {
+		users := make([]User, 0, batchSize)
+		if err = sess.Limit(batchSize, start).Where("type = ?", 0).Cols("id").Find(&users); err != nil {
+			return
+		}
+		if len(users) == 0 {
+			break
+		}
+
+		if err = sess.Begin(); err != nil {
+			return
+		}
+
+		for _, user := range users {
+			if _, err = sess.Exec("UPDATE `user` SET num_stars=(SELECT COUNT(*) FROM `star` WHERE uid=?) WHERE id=?", user.ID, user.ID); err != nil {
+				return
+			}
+		}
+
+		if err = sess.Commit(); err != nil {
+			return
+		}
+	}
+
+	log.Debug("recalculate Stars number for all user finished")
+
+	return
+}

@@ -187,3 +187,9 @@ func TestDeleteAvatar(t *testing.T) {

	assert.Equal(t, "", repo.Avatar)
}
+
+func TestDoctorUserStarNum(t *testing.T) {
+	assert.NoError(t, PrepareTestDatabase())
+
+	assert.NoError(t, DoctorUserStarNum())
+}

@@ -51,7 +51,8 @@ func (f *AdminEditUserForm) Validate(ctx *macaron.Context, errs binding.Errors)

// AdminDashboardForm form for admin dashboard operations
type AdminDashboardForm struct {
-	Op string `binding:"required"`
+	Op   string `binding:"required"`
+	From string
}

// Validate validates form fields

modules/cache/last_commit.go (10 changed lines, vendored)
@@ -5,6 +5,7 @@
package cache

import (
+	"crypto/sha256"
	"fmt"

	"code.gitea.io/gitea/modules/git"

@@ -34,9 +35,14 @@ func NewLastCommitCache(repoPath string, gitRepo *git.Repository, ttl int64) *La
	}
}

+func (c LastCommitCache) getCacheKey(repoPath, ref, entryPath string) string {
+	hashBytes := sha256.Sum256([]byte(fmt.Sprintf("%s:%s:%s", repoPath, ref, entryPath)))
+	return fmt.Sprintf("last_commit:%x", hashBytes)
+}
+
// Get get the last commit information by commit id and entry path
func (c LastCommitCache) Get(ref, entryPath string) (*object.Commit, error) {
-	v := c.Cache.Get(fmt.Sprintf("last_commit:%s:%s:%s", c.repoPath, ref, entryPath))
+	v := c.Cache.Get(c.getCacheKey(c.repoPath, ref, entryPath))
	if vs, ok := v.(string); ok {
		log.Trace("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs)
		if commit, ok := c.commitCache[vs]; ok {

@@ -60,5 +66,5 @@ func (c LastCommitCache) Get(ref, entryPath string) (*object.Commit, error) {
// Put put the last commit id with commit and entry path
func (c LastCommitCache) Put(ref, entryPath, commitID string) error {
	log.Trace("LastCommitCache save: [%s:%s:%s]", ref, entryPath, commitID)
-	return c.Cache.Put(fmt.Sprintf("last_commit:%s:%s:%s", c.repoPath, ref, entryPath), commitID, c.ttl)
+	return c.Cache.Put(c.getCacheKey(c.repoPath, ref, entryPath), commitID, c.ttl)
}

@@ -17,6 +17,9 @@ import (

// Init starts this eventsource
func (m *Manager) Init() {
+	if setting.UI.Notification.EventSourceUpdateTime <= 0 {
+		return
+	}
	go graceful.GetManager().RunWithShutdownContext(m.Run)
}

@@ -79,7 +79,9 @@ func (r *BlameReader) NextPart() (*BlamePart, error) {
// Close BlameReader - don't run NextPart after invoking that
func (r *BlameReader) Close() error {
	defer process.GetManager().Remove(r.pid)
-	defer r.cancel()
+	r.cancel()
+
+	_ = r.output.Close()

	if err := r.cmd.Wait(); err != nil {
		return fmt.Errorf("Wait: %v", err)

@@ -89,19 +91,19 @@ func (r *BlameReader) Close() error {
}

// CreateBlameReader creates reader for given repository, commit and file
-func CreateBlameReader(repoPath, commitID, file string) (*BlameReader, error) {
+func CreateBlameReader(ctx context.Context, repoPath, commitID, file string) (*BlameReader, error) {
	gitRepo, err := OpenRepository(repoPath)
	if err != nil {
		return nil, err
	}
	gitRepo.Close()

-	return createBlameReader(repoPath, GitExecutable, "blame", commitID, "--porcelain", "--", file)
+	return createBlameReader(ctx, repoPath, GitExecutable, "blame", commitID, "--porcelain", "--", file)
}

-func createBlameReader(dir string, command ...string) (*BlameReader, error) {
-	// FIXME: graceful: This should have a timeout
-	ctx, cancel := context.WithCancel(DefaultContext)
+func createBlameReader(ctx context.Context, dir string, command ...string) (*BlameReader, error) {
+	// Here we use the provided context - this should be tied to the request performing the blame so that it does not hang around.
+	ctx, cancel := context.WithCancel(ctx)
	cmd := exec.CommandContext(ctx, command[0], command[1:]...)
	cmd.Dir = dir
	cmd.Stderr = os.Stderr

@@ -5,6 +5,7 @@
package git

import (
+	"context"
	"io/ioutil"
	"testing"

@@ -93,8 +94,10 @@ func TestReadingBlameOutput(t *testing.T) {
	if _, err = tempFile.WriteString(exampleBlame); err != nil {
		panic(err)
	}
+	ctx, cancel := context.WithCancel(context.Background())
+	defer cancel()

-	blameReader, err := createBlameReader("", "cat", tempFile.Name())
+	blameReader, err := createBlameReader(ctx, "", "cat", tempFile.Name())
	if err != nil {
		panic(err)
	}

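The blame changes above thread a context.Context (the HTTP request's context, in the router change further below) down to the spawned git process, so ending or cancelling the request also terminates git blame instead of leaving it running. A minimal, self-contained sketch of that pattern using exec.CommandContext; the helper name and the sleep command are only illustrative, not the BlameReader implementation:

package main

import (
	"context"
	"fmt"
	"os/exec"
	"time"
)

// runWithRequestContext starts the command with exec.CommandContext, so
// cancelling ctx (for example when an HTTP request ends) kills the child
// process. Hypothetical helper, not Gitea code.
func runWithRequestContext(ctx context.Context, name string, args ...string) error {
	ctx, cancel := context.WithCancel(ctx)
	defer cancel() // the process cannot outlive this call

	cmd := exec.CommandContext(ctx, name, args...)
	out, err := cmd.Output()
	if err != nil {
		return fmt.Errorf("run %s: %w", name, err)
	}
	fmt.Printf("%s", out)
	return nil
}

func main() {
	// Simulate a request-scoped context that is cancelled after one second.
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	// "sleep 5" is killed when the context expires rather than hanging around.
	if err := runWithRequestContext(ctx, "sleep", "5"); err != nil {
		fmt.Println("command ended early:", err)
	}
}
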
@@ -90,7 +90,7 @@ func NewGitlabDownloader(baseURL, repoPath, username, password string) *GitlabDo
	var err error
	if username != "" {
		if password == "" {
-			gitlabClient, err = gitlab.NewClient(username)
+			gitlabClient, err = gitlab.NewClient(username, gitlab.WithBaseURL(baseURL))
		} else {
			gitlabClient, err = gitlab.NewBasicAuthClient(username, password, gitlab.WithBaseURL(baseURL))
		}

@@ -75,6 +75,7 @@ func CreateQueue(name string, handle HandlerFunc, exemplar interface{}) Queue {
			MaxAttempts: q.MaxAttempts,
			Config:      cfg,
			QueueLength: q.Length,
+			Name:        name,
		}, exemplar)
	}
	if err != nil {

@@ -214,6 +214,13 @@ func initRepository(ctx models.DBContext, repoPath string, u *models.User, repo
	repo.DefaultBranch = "master"
	if len(opts.DefaultBranch) > 0 {
		repo.DefaultBranch = opts.DefaultBranch
+		gitRepo, err := git.OpenRepository(repo.RepoPath())
+		if err != nil {
+			return fmt.Errorf("openRepository: %v", err)
+		}
+		if err = gitRepo.SetDefaultBranch(repo.DefaultBranch); err != nil {
+			return fmt.Errorf("setDefaultBranch: %v", err)
+		}
	}

	if err = models.UpdateRepositoryCtx(ctx, repo, false); err != nil {

@@ -164,9 +164,16 @@ func NewFuncMap() []template.FuncMap {
			mimeType := mime.TypeByExtension(filepath.Ext(filename))
			return strings.HasPrefix(mimeType, "image/")
		},
-		"TabSizeClass": func(ec *editorconfig.Editorconfig, filename string) string {
+		"TabSizeClass": func(ec interface{}, filename string) string {
+			var (
+				value *editorconfig.Editorconfig
+				ok    bool
+			)
			if ec != nil {
-				def, err := ec.GetDefinitionForFilename(filename)
+				if value, ok = ec.(*editorconfig.Editorconfig); !ok || value == nil {
+					return "tab-size-8"
+				}
+				def, err := value.GetDefinitionForFilename(filename)
				if err != nil {
					log.Error("tab size class: getting definition for filename: %v", err)
					return "tab-size-8"

@@ -282,8 +289,8 @@ func NewFuncMap() []template.FuncMap {
				return ""
			}
		},
-		"NotificationSettings": func() map[string]int {
-			return map[string]int{
+		"NotificationSettings": func() map[string]interface{} {
+			return map[string]interface{}{
				"MinTimeout":  int(setting.UI.Notification.MinTimeout / time.Millisecond),
				"TimeoutStep": int(setting.UI.Notification.TimeoutStep / time.Millisecond),
				"MaxTimeout":  int(setting.UI.Notification.MaxTimeout / time.Millisecond),

@@ -264,7 +264,11 @@ func GetDingtalkPayload(p api.Payloader, event models.HookEventType, meta string
	case models.HookEventIssues, models.HookEventIssueAssign, models.HookEventIssueLabel, models.HookEventIssueMilestone:
		return getDingtalkIssuesPayload(p.(*api.IssuePayload))
	case models.HookEventIssueComment, models.HookEventPullRequestComment:
-		return getDingtalkIssueCommentPayload(p.(*api.IssueCommentPayload))
+		pl, ok := p.(*api.IssueCommentPayload)
+		if ok {
+			return getDingtalkIssueCommentPayload(pl)
+		}
+		return getDingtalkPullRequestPayload(p.(*api.PullRequestPayload))
	case models.HookEventPush:
		return getDingtalkPushPayload(p.(*api.PushPayload))
	case models.HookEventPullRequest, models.HookEventPullRequestAssign, models.HookEventPullRequestLabel,

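This hunk, and the matching Discord, Feishu, Matrix, MS Teams, Slack and Telegram hunks that follow, replace an unchecked type assertion with the two-value form: the comment events can carry either an IssueCommentPayload or a PullRequestPayload, and the old p.(*api.IssueCommentPayload) panicked on the latter (the "Fix panic when adding review comment" changelog entry). A generic sketch of the pattern with hypothetical payload types, not the actual webhook code:

package main

import "fmt"

// Hypothetical stand-ins for the webhook payload types.
type IssueCommentPayload struct{ Comment string }
type PullRequestPayload struct{ Title string }

// describe dispatches on the payload's concrete type. The two-value form of
// the type assertion never panics; it reports whether the cast succeeded.
func describe(p interface{}) string {
	if pl, ok := p.(*IssueCommentPayload); ok {
		return fmt.Sprintf("issue comment: %s", pl.Comment)
	}
	if pl, ok := p.(*PullRequestPayload); ok {
		return fmt.Sprintf("pull request: %s", pl.Title)
	}
	return "unknown payload"
}

func main() {
	fmt.Println(describe(&IssueCommentPayload{Comment: "LGTM"}))
	fmt.Println(describe(&PullRequestPayload{Title: "Fix panic"}))
}
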
@@ -408,7 +408,11 @@ func GetDiscordPayload(p api.Payloader, event models.HookEventType, meta string)
	case models.HookEventIssues, models.HookEventIssueAssign, models.HookEventIssueLabel, models.HookEventIssueMilestone:
		return getDiscordIssuesPayload(p.(*api.IssuePayload), discord)
	case models.HookEventIssueComment, models.HookEventPullRequestComment:
-		return getDiscordIssueCommentPayload(p.(*api.IssueCommentPayload), discord)
+		pl, ok := p.(*api.IssueCommentPayload)
+		if ok {
+			return getDiscordIssueCommentPayload(pl, discord)
+		}
+		return getDiscordPullRequestPayload(p.(*api.PullRequestPayload), discord)
	case models.HookEventPush:
		return getDiscordPushPayload(p.(*api.PushPayload), discord)
	case models.HookEventPullRequest, models.HookEventPullRequestAssign, models.HookEventPullRequestLabel,

@@ -183,13 +183,17 @@ func GetFeishuPayload(p api.Payloader, event models.HookEventType, meta string)
		return getFeishuForkPayload(p.(*api.ForkPayload))
	case models.HookEventIssues:
		return getFeishuIssuesPayload(p.(*api.IssuePayload))
-	case models.HookEventIssueComment:
-		return getFeishuIssueCommentPayload(p.(*api.IssueCommentPayload))
+	case models.HookEventIssueComment, models.HookEventPullRequestComment:
+		pl, ok := p.(*api.IssueCommentPayload)
+		if ok {
+			return getFeishuIssueCommentPayload(pl)
+		}
+		return getFeishuPullRequestPayload(p.(*api.PullRequestPayload))
	case models.HookEventPush:
		return getFeishuPushPayload(p.(*api.PushPayload))
	case models.HookEventPullRequest:
		return getFeishuPullRequestPayload(p.(*api.PullRequestPayload))
-	case models.HookEventPullRequestReviewApproved, models.HookEventPullRequestReviewRejected, models.HookEventPullRequestComment:
+	case models.HookEventPullRequestReviewApproved, models.HookEventPullRequestReviewRejected:
		return getFeishuPullRequestApprovalPayload(p.(*api.PullRequestPayload), event)
	case models.HookEventRepository:
		return getFeishuRepositoryPayload(p.(*api.RepositoryPayload))

@@ -119,6 +119,8 @@ func getPullRequestPayloadInfo(p *api.PullRequestPayload, linkFormatter linkForm
			linkFormatter(mileStoneLink, p.PullRequest.Milestone.Title), titleLink)
	case api.HookIssueDemilestoned:
		text = fmt.Sprintf("[%s] Pull request milestone cleared: %s", repoLink, titleLink)
+	case api.HookIssueReviewed:
+		text = fmt.Sprintf("[%s] Pull request reviewed: %s", repoLink, titleLink)
	}
	if withSender {
		text += fmt.Sprintf(" by %s", linkFormatter(setting.AppURL+p.Sender.UserName, p.Sender.UserName))

@@ -230,7 +230,11 @@ func GetMatrixPayload(p api.Payloader, event models.HookEventType, meta string)
	case models.HookEventIssues, models.HookEventIssueAssign, models.HookEventIssueLabel, models.HookEventIssueMilestone:
		return getMatrixIssuesPayload(p.(*api.IssuePayload), matrix)
	case models.HookEventIssueComment, models.HookEventPullRequestComment:
-		return getMatrixIssueCommentPayload(p.(*api.IssueCommentPayload), matrix)
+		pl, ok := p.(*api.IssueCommentPayload)
+		if ok {
+			return getMatrixIssueCommentPayload(pl, matrix)
+		}
+		return getMatrixPullRequestPayload(p.(*api.PullRequestPayload), matrix)
	case models.HookEventPush:
		return getMatrixPushPayload(p.(*api.PushPayload), matrix)
	case models.HookEventPullRequest, models.HookEventPullRequestAssign, models.HookEventPullRequestLabel,

@@ -558,7 +558,11 @@ func GetMSTeamsPayload(p api.Payloader, event models.HookEventType, meta string)
	case models.HookEventIssues, models.HookEventIssueAssign, models.HookEventIssueLabel, models.HookEventIssueMilestone:
		return getMSTeamsIssuesPayload(p.(*api.IssuePayload))
	case models.HookEventIssueComment, models.HookEventPullRequestComment:
-		return getMSTeamsIssueCommentPayload(p.(*api.IssueCommentPayload))
+		pl, ok := p.(*api.IssueCommentPayload)
+		if ok {
+			return getMSTeamsIssueCommentPayload(pl)
+		}
+		return getMSTeamsPullRequestPayload(p.(*api.PullRequestPayload))
	case models.HookEventPush:
		return getMSTeamsPushPayload(p.(*api.PushPayload))
	case models.HookEventPullRequest, models.HookEventPullRequestAssign, models.HookEventPullRequestLabel,

@@ -321,7 +321,11 @@ func GetSlackPayload(p api.Payloader, event models.HookEventType, meta string) (
	case models.HookEventIssues, models.HookEventIssueAssign, models.HookEventIssueLabel, models.HookEventIssueMilestone:
		return getSlackIssuesPayload(p.(*api.IssuePayload), slack)
	case models.HookEventIssueComment, models.HookEventPullRequestComment:
-		return getSlackIssueCommentPayload(p.(*api.IssueCommentPayload), slack)
+		pl, ok := p.(*api.IssueCommentPayload)
+		if ok {
+			return getSlackIssueCommentPayload(pl, slack)
+		}
+		return getSlackPullRequestPayload(p.(*api.PullRequestPayload), slack)
	case models.HookEventPush:
		return getSlackPushPayload(p.(*api.PushPayload), slack)
	case models.HookEventPullRequest, models.HookEventPullRequestAssign, models.HookEventPullRequestLabel,

@@ -206,7 +206,11 @@ func GetTelegramPayload(p api.Payloader, event models.HookEventType, meta string
	case models.HookEventIssues, models.HookEventIssueAssign, models.HookEventIssueLabel, models.HookEventIssueMilestone:
		return getTelegramIssuesPayload(p.(*api.IssuePayload))
	case models.HookEventIssueComment, models.HookEventPullRequestComment:
-		return getTelegramIssueCommentPayload(p.(*api.IssueCommentPayload))
+		pl, ok := p.(*api.IssueCommentPayload)
+		if ok {
+			return getTelegramIssueCommentPayload(pl)
+		}
+		return getTelegramPullRequestPayload(p.(*api.PullRequestPayload))
	case models.HookEventPush:
		return getTelegramPushPayload(p.(*api.PushPayload))
	case models.HookEventPullRequest, models.HookEventPullRequestAssign, models.HookEventPullRequestLabel,

@@ -1843,12 +1843,12 @@ dashboard.operation_switch = Switch
dashboard.operation_run = Run
dashboard.clean_unbind_oauth = Clean unbound OAuth connections
dashboard.clean_unbind_oauth_success = All unbound OAuth connections have been deleted.
-dashboard.task.started=Started Task: %s
-dashboard.task.process=Task: %s
-dashboard.task.cancelled=Task: %s cancelled: %[3]s
-dashboard.task.error=Error in Task: %s: %[3]s
-dashboard.task.finished=Task: %s started by %s has finished
-dashboard.task.unknown=Unknown task: %s
+dashboard.task.started=Started Task: %[1]s
+dashboard.task.process=Task: %[1]s
+dashboard.task.cancelled=Task: %[1]s cancelled: %[3]s
+dashboard.task.error=Error in Task: %[1]s: %[3]s
+dashboard.task.finished=Task: %[1]s started by %[2]s has finished
+dashboard.task.unknown=Unknown task: %[1]s
dashboard.cron.started=Started Cron: %[1]s
dashboard.cron.process=Cron: %[1]s
dashboard.cron.cancelled=Cron: %s cancelled: %[3]s

@@ -153,8 +153,11 @@ func DashboardPost(ctx *context.Context, form auth.AdminDashboardForm) {
			ctx.Flash.Error(ctx.Tr("admin.dashboard.task.unknown", form.Op))
		}
	}
-
-	ctx.Redirect(setting.AppSubURL + "/admin")
+	if form.From == "monitor" {
+		ctx.Redirect(setting.AppSubURL + "/admin/monitor")
+	} else {
+		ctx.Redirect(setting.AppSubURL + "/admin")
+	}
}

// SendTestMail send test mail to confirm mail service is OK

@@ -331,7 +334,7 @@ func MonitorCancel(ctx *context.Context) {
	pid := ctx.ParamsInt64("pid")
	process.GetManager().Cancel(pid)
	ctx.JSON(200, map[string]interface{}{
-		"redirect": ctx.Repo.RepoLink + "/admin/monitor",
+		"redirect": setting.AppSubURL + "/admin/monitor",
	})
}

@@ -296,7 +296,7 @@ func toCommit(ctx *context.APIContext, repo *models.Repository, commit *git.Comm
			},
			Date: commit.Committer.When.Format(time.RFC3339),
		},
-		Message: commit.Summary(),
+		Message: commit.Message(),
		Tree: &api.CommitMeta{
			URL: repo.APIURL() + "/git/trees/" + commit.ID.String(),
			SHA: commit.ID.String(),

@@ -36,7 +36,6 @@ import (

	"gitea.com/macaron/i18n"
	"gitea.com/macaron/macaron"
-	unknwoni18n "github.com/unknwon/i18n"
)

func checkRunMode() {

@@ -124,8 +123,6 @@ func GlobalInit(ctx context.Context) {
	// Setup i18n
	InitLocales()

-	log.Info("%s", unknwoni18n.Tr("en-US", "admin.dashboard.delete_repo_archives"))
-
	NewServices()

	if setting.InstallLock {

@@ -141,7 +141,13 @@ func RefBlame(ctx *context.Context) {
	ctx.Data["FileSize"] = blob.Size()
	ctx.Data["FileName"] = blob.Name()

-	blameReader, err := git.CreateBlameReader(models.RepoPath(userName, repoName), commitID, fileName)
+	ctx.Data["NumLines"], err = blob.GetBlobLineCount()
+	if err != nil {
+		ctx.NotFound("GetBlobLineCount", err)
+		return
+	}
+
+	blameReader, err := git.CreateBlameReader(ctx.Req.Context(), models.RepoPath(userName, repoName), commitID, fileName)
	if err != nil {
		ctx.NotFound("CreateBlameReader", err)
		return

@@ -8,6 +8,7 @@
	</h4>
	<div class="ui attached table segment">
		<form method="post" action="{{AppSubUrl}}/admin">
+			<input type="hidden" name="from" value="monitor"/>
			{{.CsrfTokenHtml}}
			<table class="ui very basic striped table">
				<thead>

@@ -49,7 +49,7 @@
		<div class="markdown">
			<pre><code>touch README.md
git init
-{{if ne .Repository.DefaultBranch "master"}}git branch -m master {{.Repository.DefaultBranch}}{{end}}
+{{if ne .Repository.DefaultBranch "master"}}git checkout -b {{.Repository.DefaultBranch}}{{end}}
git add README.md
git commit -m "first commit"
git remote add origin <span class="clone-url">{{if $.DisableSSH}}{{$.CloneLink.HTTPS}}{{else}}{{$.CloneLink.SSH}}{{end}}</span>

web_src/js/features/eventsource.sharedworker.js (new file, 135 lines)
@@ -0,0 +1,135 @@
self.name = 'eventsource.sharedworker.js';

const sourcesByUrl = {};
const sourcesByPort = {};

class Source {
  constructor(url) {
    this.url = url;
    this.eventSource = new EventSource(url);
    this.listening = {};
    this.clients = [];
    this.listen('open');
    this.listen('logout');
    this.listen('notification-count');
    this.listen('error');
  }

  register(port) {
    if (this.clients.includes(port)) return;

    this.clients.push(port);

    port.postMessage({
      type: 'status',
      message: `registered to ${this.url}`,
    });
  }

  deregister(port) {
    const portIdx = this.clients.indexOf(port);
    if (portIdx < 0) {
      return this.clients.length;
    }
    this.clients.splice(portIdx, 1);
    return this.clients.length;
  }

  close() {
    if (!this.eventSource) return;

    this.eventSource.close();
    this.eventSource = null;
  }

  listen(eventType) {
    if (this.listening[eventType]) return;
    this.listening[eventType] = true;
    const self = this;
    this.eventSource.addEventListener(eventType, (event) => {
      self.notifyClients({
        type: eventType,
        data: event.data
      });
    });
  }

  notifyClients(event) {
    for (const client of this.clients) {
      client.postMessage(event);
    }
  }

  status(port) {
    port.postMessage({
      type: 'status',
      message: `url: ${this.url} readyState: ${this.eventSource.readyState}`,
    });
  }
}

self.onconnect = (e) => {
  for (const port of e.ports) {
    port.addEventListener('message', (event) => {
      if (event.data.type === 'start') {
        const url = event.data.url;
        if (sourcesByUrl[url]) {
          // we have a Source registered to this url
          const source = sourcesByUrl[url];
          source.register(port);
          sourcesByPort[port] = source;
          return;
        }
        let source = sourcesByPort[port];
        if (source) {
          if (source.eventSource && source.url === url) return;

          // How this has happened I don't understand...
          // deregister from that source
          const count = source.deregister(port);
          // Clean-up
          if (count === 0) {
            source.close();
            sourcesByUrl[source.url] = null;
          }
        }
        // Create a new Source
        source = new Source(url);
        source.register(port);
        sourcesByUrl[url] = source;
        sourcesByPort[port] = source;
      } else if (event.data.type === 'listen') {
        const source = sourcesByPort[port];
        source.listen(event.data.eventType);
      } else if (event.data.type === 'close') {
        const source = sourcesByPort[port];

        if (!source) return;

        const count = source.deregister(port);
        if (count === 0) {
          source.close();
          sourcesByUrl[source.url] = null;
          sourcesByPort[port] = null;
        }
      } else if (event.data.type === 'status') {
        const source = sourcesByPort[port];
        if (!source) {
          port.postMessage({
            type: 'status',
            message: 'not connected',
          });
          return;
        }
        source.status(port);
      } else {
        // just send it back
        port.postMessage({
          type: 'error',
          message: `received but don't know how to handle: ${event.data}`,
        });
      }
    });
    port.start();
  }
};

@@ -18,7 +18,25 @@ export function initNotificationsTable() {
  });
}

-export function initNotificationCount() {
+async function receiveUpdateCount(event) {
+  try {
+    const data = JSON.parse(event.data);
+
+    const notificationCount = document.querySelector('.notification_count');
+    if (data.Count > 0) {
+      notificationCount.classList.remove('hidden');
+    } else {
+      notificationCount.classList.add('hidden');
+    }
+
+    notificationCount.textContent = `${data.Count}`;
+    await updateNotificationTable();
+  } catch (error) {
+    console.error(error, event);
+  }
+}
+
+export async function initNotificationCount() {
  const notificationCount = $('.notification_count');

  if (!notificationCount.length) {

@@ -26,36 +44,52 @@ export function initNotificationCount() {
  }

  if (NotificationSettings.EventSourceUpdateTime > 0 && !!window.EventSource) {
-    // Try to connect to the event source first
-    const source = new EventSource(`${AppSubUrl}/user/events`);
-    source.addEventListener('notification-count', async (e) => {
-      try {
-        const data = JSON.parse(e.data);
-
-        const notificationCount = $('.notification_count');
-        if (data.Count === 0) {
-          notificationCount.addClass('hidden');
-        } else {
-          notificationCount.removeClass('hidden');
+    // Try to connect to the event source via the shared worker first
+    if (window.SharedWorker) {
+      const worker = new SharedWorker(`${__webpack_public_path__}js/eventsource.sharedworker.js`, 'notification-worker');
+      worker.addEventListener('error', (event) => {
+        console.error(event);
+      });
+      worker.port.onmessageerror = () => {
+        console.error('Unable to deserialize message');
+      };
+      worker.port.postMessage({
+        type: 'start',
+        url: `${window.location.origin}${AppSubUrl}/user/events`,
+      });
+      worker.port.addEventListener('message', (event) => {
+        if (!event.data || !event.data.type) {
+          console.error(event);
+          return;
+        }
+        if (event.data.type === 'notification-count') {
+          receiveUpdateCount(event.data);
+        } else if (event.data.type === 'error') {
+          console.error(event.data);
+        } else if (event.data.type === 'logout') {
+          if (event.data !== 'here') {
+            return;
+          }
+          worker.port.postMessage({
+            type: 'close',
+          });
+          worker.port.close();
+          window.location.href = AppSubUrl;
+        }
+      });
+      worker.port.addEventListener('error', (e) => {
+        console.error(e);
+      });
+      worker.port.start();
+      window.addEventListener('beforeunload', () => {
+        worker.port.postMessage({
+          type: 'close',
+        });
+        worker.port.close();
+      });

-        notificationCount.text(`${data.Count}`);
-        await updateNotificationTable();
-      } catch (error) {
-        console.error(error);
-      }
-    });
-    source.addEventListener('logout', async (e) => {
-      if (e.data !== 'here') {
-        return;
-      }
-      source.close();
-      window.location.href = AppSubUrl;
-    });
-    window.addEventListener('beforeunload', () => {
-      source.close();
-    });
-    return;
+      return;
+    }
  }

  if (NotificationSettings.MinTimeout <= 0) {

@@ -915,6 +915,7 @@ async function initRepository() {
      dictInvalidFileType: $dropzone.data('invalid-input-type'),
      dictFileTooBig: $dropzone.data('file-too-big'),
      dictRemoveFile: $dropzone.data('remove-file'),
+      timeout: 0,
      init() {
        this.on('success', (file, data) => {
          filenameDict[file.name] = {

@@ -2308,6 +2309,7 @@ $(document).ready(async () => {
      dictInvalidFileType: $dropzone.data('invalid-input-type'),
      dictFileTooBig: $dropzone.data('file-too-big'),
      dictRemoveFile: $dropzone.data('remove-file'),
+      timeout: 0,
      init() {
        this.on('success', (file, data) => {
          filenameDict[file.name] = data.uuid;

@@ -2453,7 +2455,6 @@ $(document).ready(async () => {
  initTemplateSearch();
  initContextPopups();
  initNotificationsTable();
-  initNotificationCount();
  initTribute();

  // Repo clone url.

@@ -2500,6 +2501,7 @@ $(document).ready(async () => {
    initClipboard(),
    initUserHeatmap(),
    initServiceWorker(),
+    initNotificationCount(),
  ]);
});

web_src/js/vendor/gitgraph.js (7 changed lines, vendored)
@@ -372,6 +372,10 @@ export default function gitGraph(canvas, rawGraphList, config) {
      inlineIntersect = false;
    }

+    if (colomn === '|' && currentRow[colomnIndex - 1] && currentRow[colomnIndex - 1] === '\\') {
+      flows.splice(colomnIndex, 0, genNewFlow());
+    }
+
    color = flows[colomnIndex].color;

    switch (colomn) {

@@ -387,6 +391,9 @@ export default function gitGraph(canvas, rawGraphList, config) {
        break;

      case '|':
+        if (prevColomn && prevColomn === '\\') {
+          x += config.unitSize;
+        }
        drawLineUp(x, y, color);
        break;

@@ -200,11 +200,6 @@ a:hover {
  color: #ffffff;
}

-.repository.view.issue .comment-list .comment .content > .bottom.segment a {
-  border: solid 1px #353945;
-  background-color: #353945;
-}
-
.ui.attached.header {
  background: #404552;
  border: 1px solid #404552;

@@ -641,21 +636,6 @@ a.ui.basic.green.label:hover {
  border: 1px solid #404552;
}

-.repository.view.issue .comment-list .comment .content > .bottom.segment {
-  background: #353945;
-}
-
-.repository.view.issue .comment-list .comment .content .header {
-  color: #dbdbdb;
-  background-color: #404552;
-  border-bottom: 1px solid #353944;
-}
-
-.repository.view.issue .comment-list .comment .content .merge-section {
-  background-color: #404552;
-  border-top: 1px solid #353944;
-}
-
.repository.view.issue .comment-list .event > .svg.issue-symbol {
  background: #3b4954;
}

@@ -677,18 +657,41 @@ a.ui.basic.green.label:hover {
  color: #fff !important;
}

-.repository.view.issue .comment-list .comment .content .header:after {
-  border-right-color: #404552;
+.repository.view.issue .comment-list .comment .content {
+  > .bottom.segment {
+    background: #353945;
+    a {
+      border: solid 1px #353945;
+      background-color: #353945;
+    }
+  }
+
+  .header {
+    color: #dbdbdb;
+    background-color: #404552;
+    border-bottom: 1px solid #353944;
+  }
+
+  .merge-section {
+    background-color: #404552;
+    border-top: 1px solid #353944;
+  }
+
+  .header:after,
+  > .merge-section.no-header:after {
+    border-right-color: #404552;
+  }
+
+  .header:before,
+  > .merge-section.no-header:before {
+    border-right-color: #404552;
+  }
}

.repository.new.issue .comment.form .content:after {
  border-right-color: #353945;
}

-.repository.view.issue .comment-list .comment .content .header:before {
-  border-right-color: #404552;
-}
-
.repository.new.issue .comment.form .content:before {
  border-right-color: #353945;
}

@@ -38,6 +38,9 @@ module.exports = {
    serviceworker: [
      resolve(__dirname, 'web_src/js/serviceworker.js'),
    ],
+    'eventsource.sharedworker': [
+      resolve(__dirname, 'web_src/js/features/eventsource.sharedworker.js'),
+    ],
    icons: glob('node_modules/@primer/octicons/build/svg/**/*.svg'),
    ...themes,
  },