aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorRunxi Yu <me@runxiyu.org>2025-03-19 11:39:54 +0800
committerRunxi Yu <me@runxiyu.org>2025-03-19 12:03:39 +0800
commit90ce7b1faf976d76329a8c02008cd84c78a753f5 (patch)
tree9374618661e2521fec73ab9fc6f1f0f8f68f0c2b
parentRemove underscores from Go code, pt 3 (diff)
downloadforge-90ce7b1faf976d76329a8c02008cd84c78a753f5.tar.gz
forge-90ce7b1faf976d76329a8c02008cd84c78a753f5.tar.zst
forge-90ce7b1faf976d76329a8c02008cd84c78a753f5.zip
Remove underscores from Go code, pt 4
-rw-r--r--acl.go4
-rw-r--r--git_hooks_handle.go273
-rw-r--r--git_misc.go6
-rw-r--r--git_ref.go18
-rw-r--r--http_auth.go8
-rw-r--r--http_handle_gc.go2
-rw-r--r--http_handle_group_index.go60
-rw-r--r--http_handle_index.go4
-rw-r--r--http_handle_login.go36
-rw-r--r--http_handle_repo_commit.go84
-rw-r--r--http_handle_repo_contrib_index.go4
-rw-r--r--http_handle_repo_contrib_one.go58
-rw-r--r--http_handle_repo_index.go10
-rw-r--r--http_handle_repo_info.go22
-rw-r--r--http_handle_repo_log.go4
-rw-r--r--http_handle_repo_raw.go4
-rw-r--r--http_handle_repo_tree.go72
-rw-r--r--http_handle_repo_upload_pack.go2
-rw-r--r--http_handle_users.go2
-rw-r--r--http_server.go140
-rw-r--r--http_template.go6
-rw-r--r--http_template_funcs.go8
-rw-r--r--readme_to_html.go44
-rw-r--r--remote_url.go8
-rw-r--r--resources.go20
-rw-r--r--ssh_handle_receive_pack.go94
-rw-r--r--ssh_handle_upload_pack.go10
-rw-r--r--ssh_server.go4
-rw-r--r--ssh_utils.go2
-rw-r--r--url.go79
-rw-r--r--users.go2
31 files changed, 544 insertions, 546 deletions
diff --git a/acl.go b/acl.go
index 8d23dc3..bbb03e9 100644
--- a/acl.go
+++ b/acl.go
@@ -11,7 +11,7 @@ import (
// getRepoInfo returns the filesystem path and direct
// access permission for a given repo and a provided ssh public key.
-func getRepoInfo(ctx context.Context, group_path []string, repoName, sshPubkey string) (repoID int, fsPath string, access bool, contribReq, userType string, userID int, err error) {
+func getRepoInfo(ctx context.Context, groupPath []string, repoName, sshPubkey string) (repoID int, fsPath string, access bool, contribReq, userType string, userID int, err error) {
err = database.QueryRow(ctx, `
WITH RECURSIVE group_path_cte AS (
-- Start: match the first name in the path where parent_group IS NULL
@@ -51,7 +51,7 @@ LEFT JOIN users u ON u.id = s.user_id
LEFT JOIN user_group_roles ugr ON ugr.group_id = g.id AND ugr.user_id = u.id
WHERE g.depth = cardinality($1::text[])
AND r.name = $2
-`, pgtype.FlatArray[string](group_path), repoName, sshPubkey,
+`, pgtype.FlatArray[string](groupPath), repoName, sshPubkey,
).Scan(&repoID, &fsPath, &access, &contribReq, &userType, &userID)
return
}
diff --git a/git_hooks_handle.go b/git_hooks_handle.go
index b931387..a244a84 100644
--- a/git_hooks_handle.go
+++ b/git_hooks_handle.go
@@ -144,169 +144,168 @@ func hooksHandler(conn net.Conn) {
case "pre-receive":
if packPass.directAccess {
return 0
- } else {
- allOK := true
- for {
- var line, oldOID, rest, newIOID, refName string
- var found bool
- var oldHash, newHash plumbing.Hash
- var oldCommit, newCommit *object.Commit
- var pushOptCount int
-
- pushOptCount, err = strconv.Atoi(gitEnv["GIT_PUSH_OPTION_COUNT"])
- if err != nil {
- writeRedError(sshStderr, "Failed to parse GIT_PUSH_OPTION_COUNT: %v", err)
+ }
+ allOK := true
+ for {
+ var line, oldOID, rest, newIOID, refName string
+ var found bool
+ var oldHash, newHash plumbing.Hash
+ var oldCommit, newCommit *object.Commit
+ var pushOptCount int
+
+ pushOptCount, err = strconv.Atoi(gitEnv["GIT_PUSH_OPTION_COUNT"])
+ if err != nil {
+ writeRedError(sshStderr, "Failed to parse GIT_PUSH_OPTION_COUNT: %v", err)
+ return 1
+ }
+
+ // TODO: Allow existing users (even if they are already federated or registered) to add a federated user ID... though perhaps this should be in the normal SSH interface instead of the git push interface?
+ // Also it'd be nice to be able to combine users or whatever
+ if packPass.contribReq == "federated" && packPass.userType != "federated" && packPass.userType != "registered" {
+ if pushOptCount == 0 {
+ writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
return 1
}
-
- // TODO: Allow existing users (even if they are already federated or registered) to add a federated user ID... though perhaps this should be in the normal SSH interface instead of the git push interface?
- // Also it'd be nice to be able to combine users or whatever
- if packPass.contribReq == "federated" && packPass.userType != "federated" && packPass.userType != "registered" {
- if pushOptCount == 0 {
- writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
+ for i := 0; i < pushOptCount; i++ {
+ pushOpt, ok := gitEnv[fmt.Sprintf("GIT_PUSH_OPTION_%d", i)]
+ if !ok {
+ writeRedError(sshStderr, "Failed to get push option %d", i)
return 1
}
- for i := 0; i < pushOptCount; i++ {
- pushOpt, ok := gitEnv[fmt.Sprintf("GIT_PUSH_OPTION_%d", i)]
- if !ok {
- writeRedError(sshStderr, "Failed to get push option %d", i)
+ if strings.HasPrefix(pushOpt, "fedid=") {
+ fedUserID := strings.TrimPrefix(pushOpt, "fedid=")
+ service, username, found := strings.Cut(fedUserID, ":")
+ if !found {
+ writeRedError(sshStderr, "Invalid federated user identifier %#v does not contain a colon", fedUserID)
return 1
}
- if strings.HasPrefix(pushOpt, "fedid=") {
- fedUserID := strings.TrimPrefix(pushOpt, "fedid=")
- service, username, found := strings.Cut(fedUserID, ":")
- if !found {
- writeRedError(sshStderr, "Invalid federated user identifier %#v does not contain a colon", fedUserID)
- return 1
- }
-
- ok, err := fedauth(ctx, packPass.userID, service, username, packPass.pubkey)
- if err != nil {
- writeRedError(sshStderr, "Failed to verify federated user identifier %#v: %v", fedUserID, err)
- return 1
- }
- if !ok {
- writeRedError(sshStderr, "Failed to verify federated user identifier %#v: you don't seem to be on the list", fedUserID)
- return 1
- }
-
- break
+
+ ok, err := fedauth(ctx, packPass.userID, service, username, packPass.pubkey)
+ if err != nil {
+ writeRedError(sshStderr, "Failed to verify federated user identifier %#v: %v", fedUserID, err)
+ return 1
}
- if i == pushOptCount-1 {
- writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
+ if !ok {
+ writeRedError(sshStderr, "Failed to verify federated user identifier %#v: you don't seem to be on the list", fedUserID)
return 1
}
- }
- }
- line, err = stdin.ReadString('\n')
- if errors.Is(err, io.EOF) {
- break
- } else if err != nil {
- writeRedError(sshStderr, "Failed to read pre-receive line: %v", err)
- return 1
- }
- line = line[:len(line)-1]
-
- oldOID, rest, found = strings.Cut(line, " ")
- if !found {
- writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
- return 1
+ break
+ }
+ if i == pushOptCount-1 {
+ writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
+ return 1
+ }
}
+ }
- newIOID, refName, found = strings.Cut(rest, " ")
- if !found {
- writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
- return 1
- }
+ line, err = stdin.ReadString('\n')
+ if errors.Is(err, io.EOF) {
+ break
+ } else if err != nil {
+ writeRedError(sshStderr, "Failed to read pre-receive line: %v", err)
+ return 1
+ }
+ line = line[:len(line)-1]
- if strings.HasPrefix(refName, "refs/heads/contrib/") {
- if allZero(oldOID) { // New branch
- fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
- var newMRID int
+ oldOID, rest, found = strings.Cut(line, " ")
+ if !found {
+ writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
+ return 1
+ }
- err = database.QueryRow(ctx,
- "INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open') RETURNING id",
- packPass.repoID, packPass.userID, strings.TrimPrefix(refName, "refs/heads/"),
- ).Scan(&newMRID)
- if err != nil {
- writeRedError(sshStderr, "Error creating merge request: %v", err)
- return 1
- }
- fmt.Fprintln(sshStderr, ansiec.Blue+"Created merge request at", generate_http_remote_url(packPass.group_path, packPass.repo_name)+"/contrib/"+strconv.FormatUint(uint64(newMRID), 10)+"/"+ansiec.Reset)
- } else { // Existing contrib branch
- var existingMRUser int
- var isAncestor bool
-
- err = database.QueryRow(ctx,
- "SELECT COALESCE(creator, 0) FROM merge_requests WHERE source_ref = $1 AND repo_id = $2",
- strings.TrimPrefix(refName, "refs/heads/"), packPass.repoID,
- ).Scan(&existingMRUser)
- if err != nil {
- if errors.Is(err, pgx.ErrNoRows) {
- writeRedError(sshStderr, "No existing merge request for existing contrib branch: %v", err)
- } else {
- writeRedError(sshStderr, "Error querying for existing merge request: %v", err)
- }
- return 1
- }
- if existingMRUser == 0 {
- allOK = false
- fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs to unowned MR)")
- continue
- }
+ newIOID, refName, found = strings.Cut(rest, " ")
+ if !found {
+ writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
+ return 1
+ }
- if existingMRUser != packPass.userID {
- allOK = false
- fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs another user's MR)")
- continue
+ if strings.HasPrefix(refName, "refs/heads/contrib/") {
+ if allZero(oldOID) { // New branch
+ fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
+ var newMRID int
+
+ err = database.QueryRow(ctx,
+ "INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open') RETURNING id",
+ packPass.repoID, packPass.userID, strings.TrimPrefix(refName, "refs/heads/"),
+ ).Scan(&newMRID)
+ if err != nil {
+ writeRedError(sshStderr, "Error creating merge request: %v", err)
+ return 1
+ }
+ fmt.Fprintln(sshStderr, ansiec.Blue+"Created merge request at", genHTTPRemoteURL(packPass.groupPath, packPass.repoName)+"/contrib/"+strconv.FormatUint(uint64(newMRID), 10)+"/"+ansiec.Reset)
+ } else { // Existing contrib branch
+ var existingMRUser int
+ var isAncestor bool
+
+ err = database.QueryRow(ctx,
+ "SELECT COALESCE(creator, 0) FROM merge_requests WHERE source_ref = $1 AND repo_id = $2",
+ strings.TrimPrefix(refName, "refs/heads/"), packPass.repoID,
+ ).Scan(&existingMRUser)
+ if err != nil {
+ if errors.Is(err, pgx.ErrNoRows) {
+ writeRedError(sshStderr, "No existing merge request for existing contrib branch: %v", err)
+ } else {
+ writeRedError(sshStderr, "Error querying for existing merge request: %v", err)
}
+ return 1
+ }
+ if existingMRUser == 0 {
+ allOK = false
+ fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs to unowned MR)")
+ continue
+ }
- oldHash = plumbing.NewHash(oldOID)
+ if existingMRUser != packPass.userID {
+ allOK = false
+ fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs another user's MR)")
+ continue
+ }
- if oldCommit, err = packPass.repo.CommitObject(oldHash); err != nil {
- writeRedError(sshStderr, "Daemon failed to get old commit: %v", err)
- return 1
- }
+ oldHash = plumbing.NewHash(oldOID)
- // Potential BUG: I'm not sure if new_commit is guaranteed to be
- // detectable as they haven't been merged into the main repo's
- // objects yet. But it seems to work, and I don't think there's
- // any reason for this to only work intermitently.
- newHash = plumbing.NewHash(newIOID)
- if newCommit, err = packPass.repo.CommitObject(newHash); err != nil {
- writeRedError(sshStderr, "Daemon failed to get new commit: %v", err)
- return 1
- }
+ if oldCommit, err = packPass.repo.CommitObject(oldHash); err != nil {
+ writeRedError(sshStderr, "Daemon failed to get old commit: %v", err)
+ return 1
+ }
- if isAncestor, err = oldCommit.IsAncestor(newCommit); err != nil {
- writeRedError(sshStderr, "Daemon failed to check if old commit is ancestor: %v", err)
- return 1
- }
+ // Potential BUG: I'm not sure if new_commit is guaranteed to be
+ // detectable as they haven't been merged into the main repo's
+ // objects yet. But it seems to work, and I don't think there's
+	// any reason for this to only work intermittently.
+ newHash = plumbing.NewHash(newIOID)
+ if newCommit, err = packPass.repo.CommitObject(newHash); err != nil {
+ writeRedError(sshStderr, "Daemon failed to get new commit: %v", err)
+ return 1
+ }
- if !isAncestor {
- // TODO: Create MR snapshot ref instead
- allOK = false
- fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(force pushes are not supported yet)")
- continue
- }
+ if isAncestor, err = oldCommit.IsAncestor(newCommit); err != nil {
+ writeRedError(sshStderr, "Daemon failed to check if old commit is ancestor: %v", err)
+ return 1
+ }
- fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
+ if !isAncestor {
+ // TODO: Create MR snapshot ref instead
+ allOK = false
+ fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(force pushes are not supported yet)")
+ continue
}
- } else { // Non-contrib branch
- allOK = false
- fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(you cannot push to branches outside of contrib/*)")
+
+ fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
}
+ } else { // Non-contrib branch
+ allOK = false
+ fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(you cannot push to branches outside of contrib/*)")
}
+ }
- fmt.Fprintln(sshStderr)
- if allOK {
- fmt.Fprintln(sshStderr, "Overall "+ansiec.Green+"ACK"+ansiec.Reset+" (all checks passed)")
- return 0
- } else {
- fmt.Fprintln(sshStderr, "Overall "+ansiec.Red+"NAK"+ansiec.Reset+" (one or more branches failed checks)")
- return 1
- }
+ fmt.Fprintln(sshStderr)
+ if allOK {
+ fmt.Fprintln(sshStderr, "Overall "+ansiec.Green+"ACK"+ansiec.Reset+" (all checks passed)")
+ return 0
+ } else {
+ fmt.Fprintln(sshStderr, "Overall "+ansiec.Red+"NAK"+ansiec.Reset+" (one or more branches failed checks)")
+ return 1
}
default:
fmt.Fprintln(sshStderr, ansiec.Red+"Invalid hook:", args[0]+ansiec.Reset)
diff --git a/git_misc.go b/git_misc.go
index 3f170d0..cd2dc4d 100644
--- a/git_misc.go
+++ b/git_misc.go
@@ -64,9 +64,9 @@ WHERE g.depth = cardinality($1::text[])
// go-git's tree entries are not friendly for use in HTML templates.
type displayTreeEntry struct {
- Name string
- Mode string
- Size int64
+ Name string
+ Mode string
+ Size int64
IsFile bool
IsSubtree bool
}
diff --git a/git_ref.go b/git_ref.go
index ab8e35d..f073d37 100644
--- a/git_ref.go
+++ b/git_ref.go
@@ -10,28 +10,28 @@ import (
// getRefHash returns the hash of a reference given its
// type and name as supplied in URL queries.
-func getRefHash(repo *git.Repository, ref_type, ref_name string) (ref_hash plumbing.Hash, err error) {
+func getRefHash(repo *git.Repository, refType, refName string) (refHash plumbing.Hash, err error) {
var ref *plumbing.Reference
- switch ref_type {
+ switch refType {
case "":
if ref, err = repo.Head(); err != nil {
return
}
- ref_hash = ref.Hash()
+ refHash = ref.Hash()
case "commit":
- ref_hash = plumbing.NewHash(ref_name)
+ refHash = plumbing.NewHash(refName)
case "branch":
- if ref, err = repo.Reference(plumbing.NewBranchReferenceName(ref_name), true); err != nil {
+ if ref, err = repo.Reference(plumbing.NewBranchReferenceName(refName), true); err != nil {
return
}
- ref_hash = ref.Hash()
+ refHash = ref.Hash()
case "tag":
- if ref, err = repo.Reference(plumbing.NewTagReferenceName(ref_name), true); err != nil {
+ if ref, err = repo.Reference(plumbing.NewTagReferenceName(refName), true); err != nil {
return
}
- ref_hash = ref.Hash()
+ refHash = ref.Hash()
default:
- panic("Invalid ref type " + ref_type)
+ panic("Invalid ref type " + refType)
}
return
}
diff --git a/http_auth.go b/http_auth.go
index 4868aa4..db7df09 100644
--- a/http_auth.go
+++ b/http_auth.go
@@ -7,17 +7,17 @@ import (
"net/http"
)
-func get_user_info_from_request(r *http.Request) (id int, username string, err error) {
- var session_cookie *http.Cookie
+func getUserFromRequest(r *http.Request) (id int, username string, err error) {
+ var sessionCookie *http.Cookie
- if session_cookie, err = r.Cookie("session"); err != nil {
+ if sessionCookie, err = r.Cookie("session"); err != nil {
return
}
err = database.QueryRow(
r.Context(),
"SELECT user_id, COALESCE(username, '') FROM users u JOIN sessions s ON u.id = s.user_id WHERE s.session_id = $1;",
- session_cookie.Value,
+ sessionCookie.Value,
).Scan(&id, &username)
return
diff --git a/http_handle_gc.go b/http_handle_gc.go
index 3f7717c..b00ef8a 100644
--- a/http_handle_gc.go
+++ b/http_handle_gc.go
@@ -8,7 +8,7 @@ import (
"runtime"
)
-func handle_gc(w http.ResponseWriter, r *http.Request, params map[string]any) {
+func httpHandleGC(w http.ResponseWriter, r *http.Request, params map[string]any) {
runtime.GC()
http.Redirect(w, r, "/", http.StatusSeeOther)
}
diff --git a/http_handle_group_index.go b/http_handle_group_index.go
index 3ed3a85..51e4773 100644
--- a/http_handle_group_index.go
+++ b/http_handle_group_index.go
@@ -12,15 +12,15 @@ import (
"github.com/jackc/pgx/v5/pgtype"
)
-func handle_group_index(w http.ResponseWriter, r *http.Request, params map[string]any) {
- var group_path []string
+func httpHandleGroupIndex(w http.ResponseWriter, r *http.Request, params map[string]any) {
+ var groupPath []string
var repos []nameDesc
var subgroups []nameDesc
var err error
- var group_id int
- var group_description string
+ var groupID int
+ var groupDesc string
- group_path = params["group_path"].([]string)
+ groupPath = params["group_path"].([]string)
// The group itself
err = database.QueryRow(r.Context(), `
@@ -51,8 +51,8 @@ func handle_group_index(w http.ResponseWriter, r *http.Request, params map[strin
JOIN groups g ON g.id = c.id
WHERE c.depth = cardinality($1::text[])
`,
- pgtype.FlatArray[string](group_path),
- ).Scan(&group_id, &group_description)
+ pgtype.FlatArray[string](groupPath),
+ ).Scan(&groupID, &groupDesc)
if err == pgx.ErrNoRows {
http.Error(w, "Group not found", http.StatusNotFound)
@@ -69,64 +69,64 @@ func handle_group_index(w http.ResponseWriter, r *http.Request, params map[strin
FROM user_group_roles
WHERE user_id = $1
AND group_id = $2
- `, params["user_id"].(int), group_id).Scan(&count)
+ `, params["user_id"].(int), groupID).Scan(&count)
if err != nil {
http.Error(w, "Error checking access: "+err.Error(), http.StatusInternalServerError)
return
}
- direct_access := (count > 0)
+ directAccess := (count > 0)
if r.Method == "POST" {
- if !direct_access {
+ if !directAccess {
http.Error(w, "You do not have direct access to this group", http.StatusForbidden)
return
}
- repo_name := r.FormValue("repo_name")
- repo_description := r.FormValue("repo_desc")
- contrib_requirements := r.FormValue("repo_contrib")
- if repo_name == "" {
+ repoName := r.FormValue("repo_name")
+ repoDesc := r.FormValue("repo_desc")
+ contribReq := r.FormValue("repo_contrib")
+ if repoName == "" {
http.Error(w, "Repo name is required", http.StatusBadRequest)
return
}
- var new_repo_id int
+ var newRepoID int
err := database.QueryRow(
r.Context(),
`INSERT INTO repos (name, description, group_id, contrib_requirements)
VALUES ($1, $2, $3, $4)
RETURNING id`,
- repo_name,
- repo_description,
- group_id,
- contrib_requirements,
- ).Scan(&new_repo_id)
+ repoName,
+ repoDesc,
+ groupID,
+ contribReq,
+ ).Scan(&newRepoID)
if err != nil {
http.Error(w, "Error creating repo: "+err.Error(), http.StatusInternalServerError)
return
}
- file_path := filepath.Join(config.Git.RepoDir, strconv.Itoa(new_repo_id)+".git")
+ filePath := filepath.Join(config.Git.RepoDir, strconv.Itoa(newRepoID)+".git")
_, err = database.Exec(
r.Context(),
`UPDATE repos
SET filesystem_path = $1
WHERE id = $2`,
- file_path,
- new_repo_id,
+ filePath,
+ newRepoID,
)
if err != nil {
http.Error(w, "Error updating repo path: "+err.Error(), http.StatusInternalServerError)
return
}
- if err = gitInit(file_path); err != nil {
+ if err = gitInit(filePath); err != nil {
http.Error(w, "Error initializing repo: "+err.Error(), http.StatusInternalServerError)
return
}
- redirect_unconditionally(w, r)
+ redirectUnconditionally(w, r)
return
}
@@ -136,7 +136,7 @@ func handle_group_index(w http.ResponseWriter, r *http.Request, params map[strin
SELECT name, COALESCE(description, '')
FROM repos
WHERE group_id = $1
- `, group_id)
+ `, groupID)
if err != nil {
http.Error(w, "Error getting repos: "+err.Error(), http.StatusInternalServerError)
return
@@ -161,7 +161,7 @@ func handle_group_index(w http.ResponseWriter, r *http.Request, params map[strin
SELECT name, COALESCE(description, '')
FROM groups
WHERE parent_group = $1
- `, group_id)
+ `, groupID)
if err != nil {
http.Error(w, "Error getting subgroups: "+err.Error(), http.StatusInternalServerError)
return
@@ -183,8 +183,8 @@ func handle_group_index(w http.ResponseWriter, r *http.Request, params map[strin
params["repos"] = repos
params["subgroups"] = subgroups
- params["description"] = group_description
- params["direct_access"] = direct_access
+ params["description"] = groupDesc
+ params["direct_access"] = directAccess
- render_template(w, "group", params)
+ renderTemplate(w, "group", params)
}
diff --git a/http_handle_index.go b/http_handle_index.go
index 40bea7c..2fc99a6 100644
--- a/http_handle_index.go
+++ b/http_handle_index.go
@@ -10,7 +10,7 @@ import (
"github.com/dustin/go-humanize"
)
-func handle_index(w http.ResponseWriter, r *http.Request, params map[string]any) {
+func httpHandleIndex(w http.ResponseWriter, r *http.Request, params map[string]any) {
var err error
var groups []nameDesc
@@ -25,5 +25,5 @@ func handle_index(w http.ResponseWriter, r *http.Request, params map[string]any)
memstats := runtime.MemStats{}
runtime.ReadMemStats(&memstats)
params["mem"] = humanize.IBytes(memstats.Alloc)
- render_template(w, "index", params)
+ renderTemplate(w, "index", params)
}
diff --git a/http_handle_login.go b/http_handle_login.go
index 1d23d3c..1c5c066 100644
--- a/http_handle_login.go
+++ b/http_handle_login.go
@@ -15,19 +15,19 @@ import (
"github.com/jackc/pgx/v5"
)
-func handle_login(w http.ResponseWriter, r *http.Request, params map[string]any) {
+func httpHandleLogin(w http.ResponseWriter, r *http.Request, params map[string]any) {
var username, password string
- var user_id int
- var password_hash string
+ var userID int
+ var passwordHash string
var err error
- var password_matches bool
- var cookie_value string
+ var passwordMatches bool
+ var cookieValue string
var now time.Time
var expiry time.Time
var cookie http.Cookie
if r.Method != "POST" {
- render_template(w, "login", params)
+ renderTemplate(w, "login", params)
return
}
@@ -37,34 +37,34 @@ func handle_login(w http.ResponseWriter, r *http.Request, params map[string]any)
err = database.QueryRow(r.Context(),
"SELECT id, COALESCE(password, '') FROM users WHERE username = $1",
username,
- ).Scan(&user_id, &password_hash)
+ ).Scan(&userID, &passwordHash)
if err != nil {
if errors.Is(err, pgx.ErrNoRows) {
params["login_error"] = "Unknown username"
- render_template(w, "login", params)
+ renderTemplate(w, "login", params)
return
}
http.Error(w, "Error querying user information: "+err.Error(), http.StatusInternalServerError)
return
}
- if password_hash == "" {
+ if passwordHash == "" {
params["login_error"] = "User has no password"
- render_template(w, "login", params)
+ renderTemplate(w, "login", params)
return
}
- if password_matches, err = argon2id.ComparePasswordAndHash(password, password_hash); err != nil {
+ if passwordMatches, err = argon2id.ComparePasswordAndHash(password, passwordHash); err != nil {
http.Error(w, "Error comparing password and hash: "+err.Error(), http.StatusInternalServerError)
return
}
- if !password_matches {
+ if !passwordMatches {
params["login_error"] = "Invalid password"
- render_template(w, "login", params)
+ renderTemplate(w, "login", params)
return
}
- if cookie_value, err = random_urlsafe_string(16); err != nil {
+ if cookieValue, err = randomUrlsafeStr(16); err != nil {
http.Error(w, "Error getting random string: "+err.Error(), http.StatusInternalServerError)
return
}
@@ -74,7 +74,7 @@ func handle_login(w http.ResponseWriter, r *http.Request, params map[string]any)
cookie = http.Cookie{
Name: "session",
- Value: cookie_value,
+ Value: cookieValue,
SameSite: http.SameSiteLaxMode,
HttpOnly: true,
Secure: false, // TODO
@@ -85,7 +85,7 @@ func handle_login(w http.ResponseWriter, r *http.Request, params map[string]any)
http.SetCookie(w, &cookie)
- _, err = database.Exec(r.Context(), "INSERT INTO sessions (user_id, session_id) VALUES ($1, $2)", user_id, cookie_value)
+ _, err = database.Exec(r.Context(), "INSERT INTO sessions (user_id, session_id) VALUES ($1, $2)", userID, cookieValue)
if err != nil {
http.Error(w, "Error inserting session: "+err.Error(), http.StatusInternalServerError)
return
@@ -94,10 +94,10 @@ func handle_login(w http.ResponseWriter, r *http.Request, params map[string]any)
http.Redirect(w, r, "/", http.StatusSeeOther)
}
-// random_urlsafe_string generates a random string of the given entropic size
+// randomUrlsafeStr generates a random string of the given entropic size
// using the URL-safe base64 encoding. The actual size of the string returned
// will be 4*sz.
-func random_urlsafe_string(sz int) (string, error) {
+func randomUrlsafeStr(sz int) (string, error) {
r := make([]byte, 3*sz)
_, err := rand.Read(r)
if err != nil {
diff --git a/http_handle_repo_commit.go b/http_handle_repo_commit.go
index 07f24fb..f6ef4c1 100644
--- a/http_handle_repo_commit.go
+++ b/http_handle_repo_commit.go
@@ -18,125 +18,125 @@ import (
// The file patch type from go-git isn't really usable in HTML templates
// either.
-type usable_file_patch_t struct {
+type usableFilePatch struct {
From diff.File
To diff.File
- Chunks []usable_chunk
+ Chunks []usableChunk
}
-type usable_chunk struct {
+type usableChunk struct {
Operation diff.Operation
Content string
}
-func handle_repo_commit(w http.ResponseWriter, r *http.Request, params map[string]any) {
+func httpHandleRepoCommit(w http.ResponseWriter, r *http.Request, params map[string]any) {
var repo *git.Repository
- var commit_id_specified_string, commit_id_specified_string_without_suffix string
- var commit_id plumbing.Hash
- var parent_commit_hash plumbing.Hash
- var commit_object *object.Commit
- var commit_id_string string
+ var commitIDStrSpec, commitIDStrSpecNoSuffix string
+ var commitID plumbing.Hash
+ var parentCommitHash plumbing.Hash
+ var commitObj *object.Commit
+ var commitIDStr string
var err error
var patch *object.Patch
- repo, commit_id_specified_string = params["repo"].(*git.Repository), params["commit_id"].(string)
+ repo, commitIDStrSpec = params["repo"].(*git.Repository), params["commit_id"].(string)
- commit_id_specified_string_without_suffix = strings.TrimSuffix(commit_id_specified_string, ".patch")
- commit_id = plumbing.NewHash(commit_id_specified_string_without_suffix)
- if commit_object, err = repo.CommitObject(commit_id); err != nil {
+ commitIDStrSpecNoSuffix = strings.TrimSuffix(commitIDStrSpec, ".patch")
+ commitID = plumbing.NewHash(commitIDStrSpecNoSuffix)
+ if commitObj, err = repo.CommitObject(commitID); err != nil {
http.Error(w, "Error getting commit object: "+err.Error(), http.StatusInternalServerError)
return
}
- if commit_id_specified_string_without_suffix != commit_id_specified_string {
- var formatted_patch string
- if formatted_patch, err = fmtCommitPatch(commit_object); err != nil {
+ if commitIDStrSpecNoSuffix != commitIDStrSpec {
+ var patchStr string
+ if patchStr, err = fmtCommitPatch(commitObj); err != nil {
http.Error(w, "Error formatting patch: "+err.Error(), http.StatusInternalServerError)
return
}
- fmt.Fprintln(w, formatted_patch)
+ fmt.Fprintln(w, patchStr)
return
}
- commit_id_string = commit_object.Hash.String()
+ commitIDStr = commitObj.Hash.String()
- if commit_id_string != commit_id_specified_string {
- http.Redirect(w, r, commit_id_string, http.StatusSeeOther)
+ if commitIDStr != commitIDStrSpec {
+ http.Redirect(w, r, commitIDStr, http.StatusSeeOther)
return
}
- params["commit_object"] = commit_object
- params["commit_id"] = commit_id_string
+ params["commit_object"] = commitObj
+ params["commit_id"] = commitIDStr
- parent_commit_hash, patch, err = fmtCommitAsPatch(commit_object)
+ parentCommitHash, patch, err = fmtCommitAsPatch(commitObj)
if err != nil {
http.Error(w, "Error getting patch from commit: "+err.Error(), http.StatusInternalServerError)
return
}
- params["parent_commit_hash"] = parent_commit_hash.String()
+ params["parent_commit_hash"] = parentCommitHash.String()
params["patch"] = patch
- params["file_patches"] = make_usable_file_patches(patch)
+ params["file_patches"] = makeUsableFilePatches(patch)
- render_template(w, "repo_commit", params)
+ renderTemplate(w, "repo_commit", params)
}
-type fake_diff_file struct {
+type fakeDiffFile struct {
hash plumbing.Hash
mode filemode.FileMode
path string
}
-func (f fake_diff_file) Hash() plumbing.Hash {
+func (f fakeDiffFile) Hash() plumbing.Hash {
return f.hash
}
-func (f fake_diff_file) Mode() filemode.FileMode {
+func (f fakeDiffFile) Mode() filemode.FileMode {
return f.mode
}
-func (f fake_diff_file) Path() string {
+func (f fakeDiffFile) Path() string {
return f.path
}
-var fake_diff_file_null = fake_diff_file{
+var nullFakeDiffFile = fakeDiffFile{
hash: plumbing.NewHash("0000000000000000000000000000000000000000"),
mode: misc.First_or_panic(filemode.New("100644")),
path: "",
}
-func make_usable_file_patches(patch diff.Patch) (usable_file_patches []usable_file_patch_t) {
+func makeUsableFilePatches(patch diff.Patch) (usableFilePatches []usableFilePatch) {
// TODO: Remove unnecessary context
// TODO: Prepend "+"/"-"/" " instead of solely distinguishing based on color
- for _, file_patch := range patch.FilePatches() {
+ for _, filePatch := range patch.FilePatches() {
var from, to diff.File
- var usable_file_patch usable_file_patch_t
- chunks := []usable_chunk{}
+ var ufp usableFilePatch
+ chunks := []usableChunk{}
- from, to = file_patch.Files()
+ from, to = filePatch.Files()
if from == nil {
- from = fake_diff_file_null
+ from = nullFakeDiffFile
}
if to == nil {
- to = fake_diff_file_null
+ to = nullFakeDiffFile
}
- for _, chunk := range file_patch.Chunks() {
+ for _, chunk := range filePatch.Chunks() {
var content string
content = chunk.Content()
if len(content) > 0 && content[0] == '\n' {
content = "\n" + content
} // Horrible hack to fix how browsers handle newlines that immediately follow <pre>
- chunks = append(chunks, usable_chunk{
+ chunks = append(chunks, usableChunk{
Operation: chunk.Type(),
Content: content,
})
}
- usable_file_patch = usable_file_patch_t{
+ ufp = usableFilePatch{
Chunks: chunks,
From: from,
To: to,
}
- usable_file_patches = append(usable_file_patches, usable_file_patch)
+ usableFilePatches = append(usableFilePatches, ufp)
}
return
}
diff --git a/http_handle_repo_contrib_index.go b/http_handle_repo_contrib_index.go
index f352a3f..152c8cd 100644
--- a/http_handle_repo_contrib_index.go
+++ b/http_handle_repo_contrib_index.go
@@ -15,7 +15,7 @@ type id_title_status_t struct {
Status string
}
-func handle_repo_contrib_index(w http.ResponseWriter, r *http.Request, params map[string]any) {
+func httpHandleRepoContribIndex(w http.ResponseWriter, r *http.Request, params map[string]any) {
var rows pgx.Rows
var result []id_title_status_t
var err error
@@ -44,5 +44,5 @@ func handle_repo_contrib_index(w http.ResponseWriter, r *http.Request, params ma
}
params["merge_requests"] = result
- render_template(w, "repo_contrib_index", params)
+ renderTemplate(w, "repo_contrib_index", params)
}
diff --git a/http_handle_repo_contrib_one.go b/http_handle_repo_contrib_one.go
index cc17a67..74bd2ca 100644
--- a/http_handle_repo_contrib_one.go
+++ b/http_handle_repo_contrib_one.go
@@ -12,77 +12,77 @@ import (
"github.com/go-git/go-git/v5/plumbing/object"
)
-func handle_repo_contrib_one(w http.ResponseWriter, r *http.Request, params map[string]any) {
- var mr_id_string string
- var mr_id int
+func httpHandleRepoContribOne(w http.ResponseWriter, r *http.Request, params map[string]any) {
+ var mrIDStr string
+ var mrIDInt int
var err error
- var title, status, source_ref, destination_branch string
+ var title, status, srcRefStr, dstBranchStr string
var repo *git.Repository
- var source_ref_hash plumbing.Hash
- var source_commit, destination_commit, merge_base *object.Commit
- var merge_bases []*object.Commit
+ var srcRefHash plumbing.Hash
+ var dstBranchHash plumbing.Hash
+ var srcCommit, dstCommit, mergeBaseCommit *object.Commit
+ var mergeBases []*object.Commit
- mr_id_string = params["mr_id"].(string)
- mr_id_int64, err := strconv.ParseInt(mr_id_string, 10, strconv.IntSize)
+ mrIDStr = params["mr_id"].(string)
+ mrIDInt64, err := strconv.ParseInt(mrIDStr, 10, strconv.IntSize)
if err != nil {
http.Error(w, "Merge request ID not an integer: "+err.Error(), http.StatusBadRequest)
return
}
- mr_id = int(mr_id_int64)
+ mrIDInt = int(mrIDInt64)
if err = database.QueryRow(r.Context(),
"SELECT COALESCE(title, ''), status, source_ref, COALESCE(destination_branch, '') FROM merge_requests WHERE id = $1",
- mr_id,
- ).Scan(&title, &status, &source_ref, &destination_branch); err != nil {
+ mrIDInt,
+ ).Scan(&title, &status, &srcRefStr, &dstBranchStr); err != nil {
http.Error(w, "Error querying merge request: "+err.Error(), http.StatusInternalServerError)
return
}
repo = params["repo"].(*git.Repository)
- if source_ref_hash, err = getRefHash(repo, "branch", source_ref); err != nil {
+ if srcRefHash, err = getRefHash(repo, "branch", srcRefStr); err != nil {
http.Error(w, "Error getting source ref hash: "+err.Error(), http.StatusInternalServerError)
return
}
- if source_commit, err = repo.CommitObject(source_ref_hash); err != nil {
+ if srcCommit, err = repo.CommitObject(srcRefHash); err != nil {
http.Error(w, "Error getting source commit: "+err.Error(), http.StatusInternalServerError)
return
}
- params["source_commit"] = source_commit
+ params["source_commit"] = srcCommit
- var destination_branch_hash plumbing.Hash
- if destination_branch == "" {
- destination_branch = "HEAD"
- destination_branch_hash, err = getRefHash(repo, "", "")
+ if dstBranchStr == "" {
+ dstBranchStr = "HEAD"
+ dstBranchHash, err = getRefHash(repo, "", "")
} else {
- destination_branch_hash, err = getRefHash(repo, "branch", destination_branch)
+ dstBranchHash, err = getRefHash(repo, "branch", dstBranchStr)
}
if err != nil {
http.Error(w, "Error getting destination branch hash: "+err.Error(), http.StatusInternalServerError)
return
}
- if destination_commit, err = repo.CommitObject(destination_branch_hash); err != nil {
+ if dstCommit, err = repo.CommitObject(dstBranchHash); err != nil {
http.Error(w, "Error getting destination commit: "+err.Error(), http.StatusInternalServerError)
return
}
- params["destination_commit"] = destination_commit
+ params["destination_commit"] = dstCommit
- if merge_bases, err = source_commit.MergeBase(destination_commit); err != nil {
+ if mergeBases, err = srcCommit.MergeBase(dstCommit); err != nil {
http.Error(w, "Error getting merge base: "+err.Error(), http.StatusInternalServerError)
return
}
- merge_base = merge_bases[0]
- params["merge_base"] = merge_base
+ mergeBaseCommit = mergeBases[0]
+ params["merge_base"] = mergeBaseCommit
- patch, err := merge_base.Patch(source_commit)
+ patch, err := mergeBaseCommit.Patch(srcCommit)
if err != nil {
http.Error(w, "Error getting patch: "+err.Error(), http.StatusInternalServerError)
return
}
- params["file_patches"] = make_usable_file_patches(patch)
+ params["file_patches"] = makeUsableFilePatches(patch)
- params["mr_title"], params["mr_status"], params["mr_source_ref"], params["mr_destination_branch"] = title, status, source_ref, destination_branch
+ params["mr_title"], params["mr_status"], params["mr_source_ref"], params["mr_destination_branch"] = title, status, srcRefStr, dstBranchStr
- render_template(w, "repo_contrib_one", params)
+ renderTemplate(w, "repo_contrib_one", params)
}
diff --git a/http_handle_repo_index.go b/http_handle_repo_index.go
index b38863c..e274867 100644
--- a/http_handle_repo_index.go
+++ b/http_handle_repo_index.go
@@ -13,7 +13,7 @@ import (
"github.com/go-git/go-git/v5/plumbing/storer"
)
-func handle_repo_index(w http.ResponseWriter, r *http.Request, params map[string]any) {
+func httpHandleRepoIndex(w http.ResponseWriter, r *http.Request, params map[string]any) {
var repo *git.Repository
var repo_name string
var group_path []string
@@ -62,13 +62,13 @@ func handle_repo_index(w http.ResponseWriter, r *http.Request, params map[string
}
params["files"] = makeDisplayTree(tree)
- params["readme_filename"], params["readme"] = render_readme_at_tree(tree)
+ params["readme_filename"], params["readme"] = renderReadmeAtTree(tree)
no_ref:
- params["http_clone_url"] = generate_http_remote_url(group_path, repo_name)
- params["ssh_clone_url"] = generate_ssh_remote_url(group_path, repo_name)
+ params["http_clone_url"] = genHTTPRemoteURL(group_path, repo_name)
+ params["ssh_clone_url"] = genSSHRemoteURL(group_path, repo_name)
params["notes"] = notes
- render_template(w, "repo_index", params)
+ renderTemplate(w, "repo_index", params)
}
diff --git a/http_handle_repo_info.go b/http_handle_repo_info.go
index 466e0bb..44689c7 100644
--- a/http_handle_repo_info.go
+++ b/http_handle_repo_info.go
@@ -12,9 +12,9 @@ import (
"github.com/jackc/pgx/v5/pgtype"
)
-func handle_repo_info(w http.ResponseWriter, r *http.Request, params map[string]any) (err error) {
- var group_path []string
- var repo_name, repo_path string
+func httpHandleRepoInfo(w http.ResponseWriter, r *http.Request, params map[string]any) (err error) {
+ var groupPath []string
+ var repoName, repoPath string
if err := database.QueryRow(r.Context(), `
WITH RECURSIVE group_path_cte AS (
@@ -47,16 +47,16 @@ func handle_repo_info(w http.ResponseWriter, r *http.Request, params map[string]
WHERE c.depth = cardinality($1::text[])
AND r.name = $2
`,
- pgtype.FlatArray[string](group_path),
- repo_name,
- ).Scan(&repo_path); err != nil {
+ pgtype.FlatArray[string](groupPath),
+ repoName,
+ ).Scan(&repoPath); err != nil {
return err
}
w.Header().Set("Content-Type", "application/x-git-upload-pack-advertisement")
w.WriteHeader(http.StatusOK)
- cmd := exec.Command("git", "upload-pack", "--stateless-rpc", "--advertise-refs", repo_path)
+ cmd := exec.Command("git", "upload-pack", "--stateless-rpc", "--advertise-refs", repoPath)
stdout, err := cmd.StdoutPipe()
if err != nil {
return err
@@ -70,11 +70,11 @@ func handle_repo_info(w http.ResponseWriter, r *http.Request, params map[string]
return err
}
- if err = pack_line(w, "# service=git-upload-pack\n"); err != nil {
+ if err = packLine(w, "# service=git-upload-pack\n"); err != nil {
return err
}
- if err = pack_flush(w); err != nil {
+ if err = packFlush(w); err != nil {
return
}
@@ -90,13 +90,13 @@ func handle_repo_info(w http.ResponseWriter, r *http.Request, params map[string]
}
// Taken from https://github.com/icyphox/legit, MIT license
-func pack_line(w io.Writer, s string) error {
+func packLine(w io.Writer, s string) error {
_, err := fmt.Fprintf(w, "%04x%s", len(s)+4, s)
return err
}
// Taken from https://github.com/icyphox/legit, MIT license
-func pack_flush(w io.Writer) error {
+func packFlush(w io.Writer) error {
_, err := fmt.Fprint(w, "0000")
return err
}
diff --git a/http_handle_repo_log.go b/http_handle_repo_log.go
index c7dde82..2c27b03 100644
--- a/http_handle_repo_log.go
+++ b/http_handle_repo_log.go
@@ -12,7 +12,7 @@ import (
)
// TODO: I probably shouldn't include *all* commits here...
-func handle_repo_log(w http.ResponseWriter, r *http.Request, params map[string]any) {
+func httpHandleRepoLog(w http.ResponseWriter, r *http.Request, params map[string]any) {
var repo *git.Repository
var ref_hash plumbing.Hash
var err error
@@ -31,5 +31,5 @@ func handle_repo_log(w http.ResponseWriter, r *http.Request, params map[string]a
}
params["commits"] = commits
- render_template(w, "repo_log", params)
+ renderTemplate(w, "repo_log", params)
}
diff --git a/http_handle_repo_raw.go b/http_handle_repo_raw.go
index cfe03b6..a2e3536 100644
--- a/http_handle_repo_raw.go
+++ b/http_handle_repo_raw.go
@@ -14,7 +14,7 @@ import (
"github.com/go-git/go-git/v5/plumbing/object"
)
-func handle_repo_raw(w http.ResponseWriter, r *http.Request, params map[string]any) {
+func httpHandleRepoRaw(w http.ResponseWriter, r *http.Request, params map[string]any) {
var raw_path_spec, path_spec string
var repo *git.Repository
var ref_hash plumbing.Hash
@@ -71,5 +71,5 @@ func handle_repo_raw(w http.ResponseWriter, r *http.Request, params map[string]a
params["files"] = makeDisplayTree(target)
- render_template(w, "repo_raw_dir", params)
+ renderTemplate(w, "repo_raw_dir", params)
}
diff --git a/http_handle_repo_tree.go b/http_handle_repo_tree.go
index 121e76a..a1a3ca4 100644
--- a/http_handle_repo_tree.go
+++ b/http_handle_repo_tree.go
@@ -11,94 +11,94 @@ import (
"strings"
"github.com/alecthomas/chroma/v2"
- chroma_formatters_html "github.com/alecthomas/chroma/v2/formatters/html"
- chroma_lexers "github.com/alecthomas/chroma/v2/lexers"
- chroma_styles "github.com/alecthomas/chroma/v2/styles"
+ chromaHTML "github.com/alecthomas/chroma/v2/formatters/html"
+ chromaLexers "github.com/alecthomas/chroma/v2/lexers"
+ chromaStyles "github.com/alecthomas/chroma/v2/styles"
"github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/object"
)
-func handle_repo_tree(w http.ResponseWriter, r *http.Request, params map[string]any) {
- var raw_path_spec, path_spec string
+func httpHandleRepoTree(w http.ResponseWriter, r *http.Request, params map[string]any) {
+ var rawPathSpec, pathSpec string
var repo *git.Repository
- var ref_hash plumbing.Hash
- var commit_object *object.Commit
+ var refHash plumbing.Hash
+ var commitObject *object.Commit
var tree *object.Tree
var err error
- raw_path_spec = params["rest"].(string)
- repo, path_spec = params["repo"].(*git.Repository), strings.TrimSuffix(raw_path_spec, "/")
- params["path_spec"] = path_spec
+ rawPathSpec = params["rest"].(string)
+ repo, pathSpec = params["repo"].(*git.Repository), strings.TrimSuffix(rawPathSpec, "/")
+ params["path_spec"] = pathSpec
- if ref_hash, err = getRefHash(repo, params["ref_type"].(string), params["ref_name"].(string)); err != nil {
+ if refHash, err = getRefHash(repo, params["ref_type"].(string), params["ref_name"].(string)); err != nil {
http.Error(w, "Error getting ref hash: "+err.Error(), http.StatusInternalServerError)
return
}
- if commit_object, err = repo.CommitObject(ref_hash); err != nil {
+ if commitObject, err = repo.CommitObject(refHash); err != nil {
http.Error(w, "Error getting commit object: "+err.Error(), http.StatusInternalServerError)
return
}
- if tree, err = commit_object.Tree(); err != nil {
+ if tree, err = commitObject.Tree(); err != nil {
http.Error(w, "Error getting file tree: "+err.Error(), http.StatusInternalServerError)
return
}
var target *object.Tree
- if path_spec == "" {
+ if pathSpec == "" {
target = tree
} else {
- if target, err = tree.Tree(path_spec); err != nil {
+ if target, err = tree.Tree(pathSpec); err != nil {
var file *object.File
- var file_contents string
+ var fileContent string
var lexer chroma.Lexer
var iterator chroma.Iterator
var style *chroma.Style
- var formatter *chroma_formatters_html.Formatter
- var formatted_encapsulated template.HTML
+ var formatter *chromaHTML.Formatter
+ var formattedHTML template.HTML
- if file, err = tree.File(path_spec); err != nil {
+ if file, err = tree.File(pathSpec); err != nil {
http.Error(w, "Error retrieving path: "+err.Error(), http.StatusInternalServerError)
return
}
- if len(raw_path_spec) != 0 && raw_path_spec[len(raw_path_spec)-1] == '/' {
- http.Redirect(w, r, "../"+path_spec, http.StatusSeeOther)
+ if len(rawPathSpec) != 0 && rawPathSpec[len(rawPathSpec)-1] == '/' {
+ http.Redirect(w, r, "../"+pathSpec, http.StatusSeeOther)
return
}
- if file_contents, err = file.Contents(); err != nil {
+ if fileContent, err = file.Contents(); err != nil {
http.Error(w, "Error reading file: "+err.Error(), http.StatusInternalServerError)
return
}
- lexer = chroma_lexers.Match(path_spec)
+ lexer = chromaLexers.Match(pathSpec)
if lexer == nil {
- lexer = chroma_lexers.Fallback
+ lexer = chromaLexers.Fallback
}
- if iterator, err = lexer.Tokenise(nil, file_contents); err != nil {
+ if iterator, err = lexer.Tokenise(nil, fileContent); err != nil {
http.Error(w, "Error tokenizing code: "+err.Error(), http.StatusInternalServerError)
return
}
- var formatted_unencapsulated bytes.Buffer
- style = chroma_styles.Get("autumn")
- formatter = chroma_formatters_html.New(chroma_formatters_html.WithClasses(true), chroma_formatters_html.TabWidth(8))
- if err = formatter.Format(&formatted_unencapsulated, style, iterator); err != nil {
+ var formattedHTMLStr bytes.Buffer
+ style = chromaStyles.Get("autumn")
+ formatter = chromaHTML.New(chromaHTML.WithClasses(true), chromaHTML.TabWidth(8))
+ if err = formatter.Format(&formattedHTMLStr, style, iterator); err != nil {
http.Error(w, "Error formatting code: "+err.Error(), http.StatusInternalServerError)
return
}
- formatted_encapsulated = template.HTML(formatted_unencapsulated.Bytes()) //#nosec G203
- params["file_contents"] = formatted_encapsulated
+ formattedHTML = template.HTML(formattedHTMLStr.Bytes()) //#nosec G203
+ params["file_contents"] = formattedHTML
- render_template(w, "repo_tree_file", params)
+ renderTemplate(w, "repo_tree_file", params)
return
}
}
- if len(raw_path_spec) != 0 && raw_path_spec[len(raw_path_spec)-1] != '/' {
- http.Redirect(w, r, path.Base(path_spec)+"/", http.StatusSeeOther)
+ if len(rawPathSpec) != 0 && rawPathSpec[len(rawPathSpec)-1] != '/' {
+ http.Redirect(w, r, path.Base(pathSpec)+"/", http.StatusSeeOther)
return
}
- params["readme_filename"], params["readme"] = render_readme_at_tree(target)
+ params["readme_filename"], params["readme"] = renderReadmeAtTree(target)
params["files"] = makeDisplayTree(target)
- render_template(w, "repo_tree_dir", params)
+ renderTemplate(w, "repo_tree_dir", params)
}
diff --git a/http_handle_repo_upload_pack.go b/http_handle_repo_upload_pack.go
index 04ffe57..86f1ab3 100644
--- a/http_handle_repo_upload_pack.go
+++ b/http_handle_repo_upload_pack.go
@@ -12,7 +12,7 @@ import (
"github.com/jackc/pgx/v5/pgtype"
)
-func handle_upload_pack(w http.ResponseWriter, r *http.Request, params map[string]any) (err error) {
+func httpHandleUploadPack(w http.ResponseWriter, r *http.Request, params map[string]any) (err error) {
var group_path []string
var repo_name string
var repo_path string
diff --git a/http_handle_users.go b/http_handle_users.go
index 8a66d56..c657bad 100644
--- a/http_handle_users.go
+++ b/http_handle_users.go
@@ -7,6 +7,6 @@ import (
"net/http"
)
-func handle_users(w http.ResponseWriter, r *http.Request, params map[string]any) {
+func httpHandleUsers(w http.ResponseWriter, r *http.Request, params map[string]any) {
http.Error(w, "Not implemented", http.StatusNotImplemented)
}
diff --git a/http_server.go b/http_server.go
index d61dc8f..afe8000 100644
--- a/http_server.go
+++ b/http_server.go
@@ -21,42 +21,42 @@ func (router *httpRouter) ServeHTTP(w http.ResponseWriter, r *http.Request) {
var segments []string
var err error
- var non_empty_last_segments_len int
- var separator_index int
+ var contentfulSegmentsLen int
+ var sepIndex int
params := make(map[string]any)
- if segments, _, err = parse_request_uri(r.RequestURI); err != nil {
+ if segments, _, err = parseReqURI(r.RequestURI); err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
- non_empty_last_segments_len = len(segments)
+ contentfulSegmentsLen = len(segments)
if segments[len(segments)-1] == "" {
- non_empty_last_segments_len--
+ contentfulSegmentsLen--
}
if segments[0] == ":" {
if len(segments) < 2 {
http.Error(w, "Blank system endpoint", http.StatusNotFound)
return
- } else if len(segments) == 2 && redirect_with_slash(w, r) {
+ } else if len(segments) == 2 && redirectDir(w, r) {
return
}
switch segments[1] {
case "static":
- static_handler.ServeHTTP(w, r)
+ staticHandler.ServeHTTP(w, r)
return
case "source":
- source_handler.ServeHTTP(w, r)
+ sourceHandler.ServeHTTP(w, r)
return
}
}
params["url_segments"] = segments
params["global"] = globalData
- var _user_id int // 0 for none
- _user_id, params["username"], err = get_user_info_from_request(r)
- params["user_id"] = _user_id
+ var userID int // 0 for none
+ userID, params["username"], err = getUserFromRequest(r)
+ params["user_id"] = userID
if errors.Is(err, http.ErrNoCookie) {
} else if errors.Is(err, pgx.ErrNoRows) {
} else if err != nil {
@@ -64,22 +64,22 @@ func (router *httpRouter) ServeHTTP(w http.ResponseWriter, r *http.Request) {
return
}
- if _user_id == 0 {
+ if userID == 0 {
params["user_id_string"] = ""
} else {
- params["user_id_string"] = strconv.Itoa(_user_id)
+ params["user_id_string"] = strconv.Itoa(userID)
}
if segments[0] == ":" {
switch segments[1] {
case "login":
- handle_login(w, r, params)
+ httpHandleLogin(w, r, params)
return
case "users":
- handle_users(w, r, params)
+ httpHandleUsers(w, r, params)
return
case "gc":
- handle_gc(w, r, params)
+ httpHandleGC(w, r, params)
return
default:
http.Error(w, fmt.Sprintf("Unknown system module type: %s", segments[1]), http.StatusNotFound)
@@ -87,65 +87,65 @@ func (router *httpRouter) ServeHTTP(w http.ResponseWriter, r *http.Request) {
}
}
- separator_index = -1
+ sepIndex = -1
for i, part := range segments {
if part == ":" {
- separator_index = i
+ sepIndex = i
break
}
}
- params["separator_index"] = separator_index
+ params["separator_index"] = sepIndex
- var group_path []string
- var module_type string
- var module_name string
+ var groupPath []string
+ var moduleType string
+ var moduleName string
- if separator_index > 0 {
- group_path = segments[:separator_index]
+ if sepIndex > 0 {
+ groupPath = segments[:sepIndex]
} else {
- group_path = segments[:len(segments)-1]
+ groupPath = segments[:len(segments)-1]
}
- params["group_path"] = group_path
+ params["group_path"] = groupPath
switch {
- case non_empty_last_segments_len == 0:
- handle_index(w, r, params)
- case separator_index == -1:
- if redirect_with_slash(w, r) {
+ case contentfulSegmentsLen == 0:
+ httpHandleIndex(w, r, params)
+ case sepIndex == -1:
+ if redirectDir(w, r) {
return
}
- handle_group_index(w, r, params)
- case non_empty_last_segments_len == separator_index+1:
+ httpHandleGroupIndex(w, r, params)
+ case contentfulSegmentsLen == sepIndex+1:
http.Error(w, "Illegal path 1", http.StatusNotImplemented)
return
- case non_empty_last_segments_len == separator_index+2:
+ case contentfulSegmentsLen == sepIndex+2:
http.Error(w, "Illegal path 2", http.StatusNotImplemented)
return
default:
- module_type = segments[separator_index+1]
- module_name = segments[separator_index+2]
- switch module_type {
+ moduleType = segments[sepIndex+1]
+ moduleName = segments[sepIndex+2]
+ switch moduleType {
case "repos":
- params["repo_name"] = module_name
+ params["repo_name"] = moduleName
- if non_empty_last_segments_len > separator_index+3 {
- switch segments[separator_index+3] {
+ if contentfulSegmentsLen > sepIndex+3 {
+ switch segments[sepIndex+3] {
case "info":
- if err = handle_repo_info(w, r, params); err != nil {
+ if err = httpHandleRepoInfo(w, r, params); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
return
case "git-upload-pack":
- if err = handle_upload_pack(w, r, params); err != nil {
+ if err = httpHandleUploadPack(w, r, params); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
return
}
}
- if params["ref_type"], params["ref_name"], err = get_param_ref_and_type(r); err != nil {
- if errors.Is(err, err_no_ref_spec) {
+ if params["ref_type"], params["ref_name"], err = getParamRefTypeName(r); err != nil {
+ if errors.Is(err, errNoRefSpec) {
params["ref_type"] = ""
} else {
http.Error(w, "Error querying ref type: "+err.Error(), http.StatusInternalServerError)
@@ -155,66 +155,66 @@ func (router *httpRouter) ServeHTTP(w http.ResponseWriter, r *http.Request) {
// TODO: subgroups
- if params["repo"], params["repo_description"], params["repo_id"], err = openRepo(r.Context(), group_path, module_name); err != nil {
+ if params["repo"], params["repo_description"], params["repo_id"], err = openRepo(r.Context(), groupPath, moduleName); err != nil {
http.Error(w, "Error opening repo: "+err.Error(), http.StatusInternalServerError)
return
}
- if non_empty_last_segments_len == separator_index+3 {
- if redirect_with_slash(w, r) {
+ if contentfulSegmentsLen == sepIndex+3 {
+ if redirectDir(w, r) {
return
}
- handle_repo_index(w, r, params)
+ httpHandleRepoIndex(w, r, params)
return
}
- repo_feature := segments[separator_index+3]
- switch repo_feature {
+ repoFeature := segments[sepIndex+3]
+ switch repoFeature {
case "tree":
- params["rest"] = strings.Join(segments[separator_index+4:], "/")
- if len(segments) < separator_index+5 && redirect_with_slash(w, r) {
+ params["rest"] = strings.Join(segments[sepIndex+4:], "/")
+ if len(segments) < sepIndex+5 && redirectDir(w, r) {
return
}
- handle_repo_tree(w, r, params)
+ httpHandleRepoTree(w, r, params)
case "raw":
- params["rest"] = strings.Join(segments[separator_index+4:], "/")
- if len(segments) < separator_index+5 && redirect_with_slash(w, r) {
+ params["rest"] = strings.Join(segments[sepIndex+4:], "/")
+ if len(segments) < sepIndex+5 && redirectDir(w, r) {
return
}
- handle_repo_raw(w, r, params)
+ httpHandleRepoRaw(w, r, params)
case "log":
- if non_empty_last_segments_len > separator_index+4 {
+ if contentfulSegmentsLen > sepIndex+4 {
http.Error(w, "Too many parameters", http.StatusBadRequest)
return
}
- if redirect_with_slash(w, r) {
+ if redirectDir(w, r) {
return
}
- handle_repo_log(w, r, params)
+ httpHandleRepoLog(w, r, params)
case "commit":
- if redirect_without_slash(w, r) {
+ if redirectNoDir(w, r) {
return
}
- params["commit_id"] = segments[separator_index+4]
- handle_repo_commit(w, r, params)
+ params["commit_id"] = segments[sepIndex+4]
+ httpHandleRepoCommit(w, r, params)
case "contrib":
- if redirect_with_slash(w, r) {
+ if redirectDir(w, r) {
return
}
- switch non_empty_last_segments_len {
- case separator_index + 4:
- handle_repo_contrib_index(w, r, params)
- case separator_index + 5:
- params["mr_id"] = segments[separator_index+4]
- handle_repo_contrib_one(w, r, params)
+ switch contentfulSegmentsLen {
+ case sepIndex + 4:
+ httpHandleRepoContribIndex(w, r, params)
+ case sepIndex + 5:
+ params["mr_id"] = segments[sepIndex+4]
+ httpHandleRepoContribOne(w, r, params)
default:
http.Error(w, "Too many parameters", http.StatusBadRequest)
}
default:
- http.Error(w, fmt.Sprintf("Unknown repo feature: %s", repo_feature), http.StatusNotFound)
+ http.Error(w, fmt.Sprintf("Unknown repo feature: %s", repoFeature), http.StatusNotFound)
}
default:
- http.Error(w, fmt.Sprintf("Unknown module type: %s", module_type), http.StatusNotFound)
+ http.Error(w, fmt.Sprintf("Unknown module type: %s", moduleType), http.StatusNotFound)
}
}
}
diff --git a/http_template.go b/http_template.go
index 377ad4c..e8520a9 100644
--- a/http_template.go
+++ b/http_template.go
@@ -5,10 +5,10 @@ package main
import "net/http"
-// render_template abstracts out the annoyances of reporting template rendering
+// renderTemplate abstracts out the annoyances of reporting template rendering
// errors.
-func render_template(w http.ResponseWriter, template_name string, params map[string]any) {
- if err := templates.ExecuteTemplate(w, template_name, params); err != nil {
+func renderTemplate(w http.ResponseWriter, templateName string, params map[string]any) {
+ if err := templates.ExecuteTemplate(w, templateName, params); err != nil {
http.Error(w, "Error rendering template: "+err.Error(), http.StatusInternalServerError)
}
}
diff --git a/http_template_funcs.go b/http_template_funcs.go
index 016d268..9609e61 100644
--- a/http_template_funcs.go
+++ b/http_template_funcs.go
@@ -9,19 +9,19 @@ import (
"strings"
)
-func first_line(s string) string {
+func firstLine(s string) string {
before, _, _ := strings.Cut(s, "\n")
return before
}
-func base_name(s string) string {
+func baseName(s string) string {
return path.Base(s)
}
-func path_escape(s string) string {
+func pathEscape(s string) string {
return url.PathEscape(s)
}
-func query_escape(s string) string {
+func queryEscape(s string) string {
return url.QueryEscape(s)
}
diff --git a/readme_to_html.go b/readme_to_html.go
index 29c61ca..a7c7cb6 100644
--- a/readme_to_html.go
+++ b/readme_to_html.go
@@ -16,50 +16,50 @@ import (
"github.com/yuin/goldmark/extension"
)
-var markdown_converter = goldmark.New(goldmark.WithExtensions(extension.GFM))
+var markdownConverter = goldmark.New(goldmark.WithExtensions(extension.GFM))
-func render_readme_at_tree(tree *object.Tree) (readme_filename string, readme_content template.HTML) {
- var readme_rendered_unsafe bytes.Buffer
- var readme_file *object.File
- var readme_file_contents string
+func renderReadmeAtTree(tree *object.Tree) (readmeFilename string, readmeRenderedSafeHTML template.HTML) {
+ var readmeRenderedUnsafe bytes.Buffer
+ var readmeFile *object.File
+ var readmeFileContents string
var err error
- if readme_file, err = tree.File("README"); err == nil {
- if readme_file_contents, err = readme_file.Contents(); err != nil {
- return "Error fetching README", string_escape_html("Unable to fetch contents of README: " + err.Error())
+ if readmeFile, err = tree.File("README"); err == nil {
+ if readmeFileContents, err = readmeFile.Contents(); err != nil {
+ return "Error fetching README", escapeHTML("Unable to fetch contents of README: " + err.Error())
}
- return "README", template.HTML("<pre>" + html.EscapeString(readme_file_contents) + "</pre>") //#nosec G203
+ return "README", template.HTML("<pre>" + html.EscapeString(readmeFileContents) + "</pre>") //#nosec G203
}
- if readme_file, err = tree.File("README.md"); err == nil {
- if readme_file_contents, err = readme_file.Contents(); err != nil {
- return "Error fetching README", string_escape_html("Unable to fetch contents of README: " + err.Error())
+ if readmeFile, err = tree.File("README.md"); err == nil {
+ if readmeFileContents, err = readmeFile.Contents(); err != nil {
+ return "Error fetching README", escapeHTML("Unable to fetch contents of README: " + err.Error())
}
- if err = markdown_converter.Convert([]byte(readme_file_contents), &readme_rendered_unsafe); err != nil {
- return "Error fetching README", string_escape_html("Unable to render README: " + err.Error())
+ if err = markdownConverter.Convert([]byte(readmeFileContents), &readmeRenderedUnsafe); err != nil {
+ return "Error fetching README", escapeHTML("Unable to render README: " + err.Error())
}
- return "README.md", template.HTML(bluemonday.UGCPolicy().SanitizeBytes(readme_rendered_unsafe.Bytes())) //#nosec G203
+ return "README.md", template.HTML(bluemonday.UGCPolicy().SanitizeBytes(readmeRenderedUnsafe.Bytes())) //#nosec G203
}
- if readme_file, err = tree.File("README.org"); err == nil {
- if readme_file_contents, err = readme_file.Contents(); err != nil {
- return "Error fetching README", string_escape_html("Unable to fetch contents of README: " + err.Error())
+ if readmeFile, err = tree.File("README.org"); err == nil {
+ if readmeFileContents, err = readmeFile.Contents(); err != nil {
+ return "Error fetching README", escapeHTML("Unable to fetch contents of README: " + err.Error())
}
- org_html, err := org.New().Parse(strings.NewReader(readme_file_contents), readme_filename).Write(org.NewHTMLWriter())
+ orgHTML, err := org.New().Parse(strings.NewReader(readmeFileContents), readmeFilename).Write(org.NewHTMLWriter())
if err != nil {
- return "Error fetching README", string_escape_html("Unable to render README: " + err.Error())
+ return "Error fetching README", escapeHTML("Unable to render README: " + err.Error())
}
- return "README.org", template.HTML(bluemonday.UGCPolicy().Sanitize(org_html)) //#nosec G203
+ return "README.org", template.HTML(bluemonday.UGCPolicy().Sanitize(orgHTML)) //#nosec G203
}
return "", ""
}
-func string_escape_html(s string) template.HTML {
+func escapeHTML(s string) template.HTML {
return template.HTML(html.EscapeString(s)) //#nosec G203
}
diff --git a/remote_url.go b/remote_url.go
index 506e35c..c0f4fc1 100644
--- a/remote_url.go
+++ b/remote_url.go
@@ -10,10 +10,10 @@ import (
// We don't use path.Join because it collapses multiple slashes into one.
-func generate_ssh_remote_url(group_path []string, repo_name string) string {
- return strings.TrimSuffix(config.SSH.Root, "/") + "/" + path_escape_cat_segments(group_path) + "/:/repos/" + url.PathEscape(repo_name)
+func genSSHRemoteURL(group_path []string, repo_name string) string {
+ return strings.TrimSuffix(config.SSH.Root, "/") + "/" + segmentsToURL(group_path) + "/:/repos/" + url.PathEscape(repo_name)
}
-func generate_http_remote_url(group_path []string, repo_name string) string {
- return strings.TrimSuffix(config.HTTP.Root, "/") + "/" + path_escape_cat_segments(group_path) + "/:/repos/" + url.PathEscape(repo_name)
+func genHTTPRemoteURL(group_path []string, repo_name string) string {
+ return strings.TrimSuffix(config.HTTP.Root, "/") + "/" + segmentsToURL(group_path) + "/:/repos/" + url.PathEscape(repo_name)
}
diff --git a/resources.go b/resources.go
index bbb03c0..c1b4a9b 100644
--- a/resources.go
+++ b/resources.go
@@ -23,11 +23,11 @@ import (
//go:embed static/* templates/* scripts/* sql/*
//go:embed hookc/*.c
//go:embed vendor/*
-var source_fs embed.FS
+var sourceFS embed.FS
-var source_handler = http.StripPrefix(
+var sourceHandler = http.StripPrefix(
"/:/source/",
- http.FileServer(http.FS(source_fs)),
+ http.FileServer(http.FS(sourceFS)),
)
//go:embed templates/* static/* hookc/hookc
@@ -40,10 +40,10 @@ func loadTemplates() (err error) {
m.Add("text/html", &html.Minifier{TemplateDelims: [2]string{"{{", "}}"}, KeepDefaultAttrVals: true})
templates = template.New("templates").Funcs(template.FuncMap{
- "first_line": first_line,
- "base_name": base_name,
- "path_escape": path_escape,
- "query_escape": query_escape,
+ "first_line": firstLine,
+ "base_name": baseName,
+ "path_escape": pathEscape,
+ "query_escape": queryEscape,
})
err = fs.WalkDir(resourcesFS, "templates", func(path string, d fs.DirEntry, err error) error {
@@ -71,12 +71,12 @@ func loadTemplates() (err error) {
return err
}
-var static_handler http.Handler
+var staticHandler http.Handler
func init() {
- static_fs, err := fs.Sub(resourcesFS, "static")
+ staticFS, err := fs.Sub(resourcesFS, "static")
if err != nil {
panic(err)
}
- static_handler = http.StripPrefix("/:/static/", http.FileServer(http.FS(static_fs)))
+ staticHandler = http.StripPrefix("/:/static/", http.FileServer(http.FS(staticFS)))
}
diff --git a/ssh_handle_receive_pack.go b/ssh_handle_receive_pack.go
index b930784..81e94bc 100644
--- a/ssh_handle_receive_pack.go
+++ b/ssh_handle_receive_pack.go
@@ -9,107 +9,107 @@ import (
"os"
"os/exec"
- glider_ssh "github.com/gliderlabs/ssh"
+ gliderSSH "github.com/gliderlabs/ssh"
"github.com/go-git/go-git/v5"
"go.lindenii.runxiyu.org/lindenii-common/cmap"
)
type packPass struct {
- session glider_ssh.Session
- repo *git.Repository
- pubkey string
- directAccess bool
- repo_path string
- userID int
- userType string
- repoID int
- group_path []string
- repo_name string
- contribReq string
+ session gliderSSH.Session
+ repo *git.Repository
+ pubkey string
+ directAccess bool
+ repoPath string
+ userID int
+ userType string
+ repoID int
+ groupPath []string
+ repoName string
+ contribReq string
}
var packPasses = cmap.Map[string, packPass]{}
-// ssh_handle_receive_pack handles attempts to push to repos.
-func ssh_handle_receive_pack(session glider_ssh.Session, pubkey, repo_identifier string) (err error) {
- group_path, repo_name, repo_id, repo_path, direct_access, contrib_requirements, user_type, user_id, err := get_repo_path_perms_from_ssh_path_pubkey(session.Context(), repo_identifier, pubkey)
+// sshHandleRecvPack handles attempts to push to repos.
+func sshHandleRecvPack(session gliderSSH.Session, pubkey, repoIdentifier string) (err error) {
+ groupPath, repoName, repoID, repoPath, directAccess, contribReq, userType, userID, err := getRepoInfo2(session.Context(), repoIdentifier, pubkey)
if err != nil {
return err
}
- repo, err := git.PlainOpen(repo_path)
+ repo, err := git.PlainOpen(repoPath)
if err != nil {
return err
}
- repo_config, err := repo.Config()
+ repoConf, err := repo.Config()
if err != nil {
return err
}
- repo_config_core := repo_config.Raw.Section("core")
- if repo_config_core == nil {
- return errors.New("Repository has no core section in config")
+ repoConfCore := repoConf.Raw.Section("core")
+ if repoConfCore == nil {
+ return errors.New("repository has no core section in config")
}
- hooksPath := repo_config_core.OptionAll("hooksPath")
+ hooksPath := repoConfCore.OptionAll("hooksPath")
if len(hooksPath) != 1 || hooksPath[0] != config.Hooks.Execs {
- return errors.New("Repository has hooksPath set to an unexpected value")
+ return errors.New("repository has hooksPath set to an unexpected value")
}
- if !direct_access {
- switch contrib_requirements {
+ if !directAccess {
+ switch contribReq {
case "closed":
- if !direct_access {
- return errors.New("You need direct access to push to this repo.")
+ if !directAccess {
+ return errors.New("you need direct access to push to this repo")
}
case "registered_user":
- if user_type != "registered" {
- return errors.New("You need to be a registered user to push to this repo.")
+ if userType != "registered" {
+ return errors.New("you need to be a registered user to push to this repo")
}
case "ssh_pubkey":
fallthrough
case "federated":
if pubkey == "" {
- return errors.New("You need to have an SSH public key to push to this repo.")
+ return errors.New("you need to have an SSH public key to push to this repo")
}
- if user_type == "" {
- user_id, err = add_user_ssh(session.Context(), pubkey)
+ if userType == "" {
+ userID, err = addUserSSH(session.Context(), pubkey)
if err != nil {
return err
}
- fmt.Fprintln(session.Stderr(), "You are now registered as user ID", user_id)
- user_type = "pubkey_only"
+ fmt.Fprintln(session.Stderr(), "You are now registered as user ID", userID)
+ userType = "pubkey_only"
}
case "public":
default:
- panic("unknown contrib_requirements value " + contrib_requirements)
+ panic("unknown contrib_requirements value " + contribReq)
}
}
- cookie, err := random_urlsafe_string(16)
+ cookie, err := randomUrlsafeStr(16)
if err != nil {
fmt.Fprintln(session.Stderr(), "Error while generating cookie:", err)
}
packPasses.Store(cookie, packPass{
- session: session,
- pubkey: pubkey,
- directAccess: direct_access,
- repo_path: repo_path,
- userID: user_id,
- repoID: repo_id,
- group_path: group_path,
- repo_name: repo_name,
- repo: repo,
- contribReq: contrib_requirements,
- userType: user_type,
+ session: session,
+ pubkey: pubkey,
+ directAccess: directAccess,
+ repoPath: repoPath,
+ userID: userID,
+ repoID: repoID,
+ groupPath: groupPath,
+ repoName: repoName,
+ repo: repo,
+ contribReq: contribReq,
+ userType: userType,
})
defer packPasses.Delete(cookie)
// The Delete won't execute until proc.Wait returns unless something
// horribly wrong such as a panic occurs.
- proc := exec.CommandContext(session.Context(), "git-receive-pack", repo_path)
+ proc := exec.CommandContext(session.Context(), "git-receive-pack", repoPath)
proc.Env = append(os.Environ(),
"LINDENII_FORGE_HOOKS_SOCKET_PATH="+config.Hooks.Socket,
"LINDENII_FORGE_HOOKS_COOKIE="+cookie,
diff --git a/ssh_handle_upload_pack.go b/ssh_handle_upload_pack.go
index 0aa2f17..ab62533 100644
--- a/ssh_handle_upload_pack.go
+++ b/ssh_handle_upload_pack.go
@@ -11,15 +11,15 @@ import (
glider_ssh "github.com/gliderlabs/ssh"
)
-// ssh_handle_upload_pack handles clones/fetches. It just uses git-upload-pack
+// sshHandleUploadPack handles clones/fetches. It just uses git-upload-pack
// and has no ACL checks.
-func ssh_handle_upload_pack(session glider_ssh.Session, pubkey, repo_identifier string) (err error) {
- var repo_path string
- if _, _, _, repo_path, _, _, _, _, err = get_repo_path_perms_from_ssh_path_pubkey(session.Context(), repo_identifier, pubkey); err != nil {
+func sshHandleUploadPack(session glider_ssh.Session, pubkey, repoIdentifier string) (err error) {
+ var repoPath string
+ if _, _, _, repoPath, _, _, _, _, err = getRepoInfo2(session.Context(), repoIdentifier, pubkey); err != nil {
return err
}
- proc := exec.CommandContext(session.Context(), "git-upload-pack", repo_path)
+ proc := exec.CommandContext(session.Context(), "git-upload-pack", repoPath)
proc.Env = append(os.Environ(), "LINDENII_FORGE_HOOKS_SOCKET_PATH="+config.Hooks.Socket)
proc.Stdin = session
proc.Stdout = session
diff --git a/ssh_server.go b/ssh_server.go
index 7fd31c0..42188fb 100644
--- a/ssh_server.go
+++ b/ssh_server.go
@@ -63,13 +63,13 @@ func serveSSH(listener net.Listener) error {
fmt.Fprintln(session.Stderr(), "Too many arguments\r")
return
}
- err = ssh_handle_upload_pack(session, client_public_key_string, cmd[1])
+ err = sshHandleUploadPack(session, client_public_key_string, cmd[1])
case "git-receive-pack":
if len(cmd) > 2 {
fmt.Fprintln(session.Stderr(), "Too many arguments\r")
return
}
- err = ssh_handle_receive_pack(session, client_public_key_string, cmd[1])
+ err = sshHandleRecvPack(session, client_public_key_string, cmd[1])
default:
fmt.Fprintln(session.Stderr(), "Unsupported command: "+cmd[0]+"\r")
return
diff --git a/ssh_utils.go b/ssh_utils.go
index 092cb6e..7fa6cb8 100644
--- a/ssh_utils.go
+++ b/ssh_utils.go
@@ -16,7 +16,7 @@ import (
var err_ssh_illegal_endpoint = errors.New("illegal endpoint during SSH access")
-func get_repo_path_perms_from_ssh_path_pubkey(ctx context.Context, ssh_path, ssh_pubkey string) (group_path []string, repo_name string, repo_id int, repo_path string, direct_access bool, contrib_requirements, user_type string, user_id int, err error) {
+func getRepoInfo2(ctx context.Context, ssh_path, ssh_pubkey string) (group_path []string, repo_name string, repo_id int, repo_path string, direct_access bool, contrib_requirements, user_type string, user_id int, err error) {
var segments []string
var separator_index int
var module_type, module_name string
diff --git a/url.go b/url.go
index 393913f..9809068 100644
--- a/url.go
+++ b/url.go
@@ -11,42 +11,41 @@ import (
)
var (
- err_duplicate_ref_spec = errors.New("duplicate ref spec")
- err_no_ref_spec = errors.New("no ref spec")
+ errDupRefSpec = errors.New("duplicate ref spec")
+ errNoRefSpec = errors.New("no ref spec")
)
-func get_param_ref_and_type(r *http.Request) (ref_type, ref string, err error) {
+func getParamRefTypeName(r *http.Request) (retRefType, retRefName string, err error) {
qr := r.URL.RawQuery
q, err := url.ParseQuery(qr)
if err != nil {
return
}
done := false
- for _, _ref_type := range []string{"commit", "branch", "tag"} {
- _ref, ok := q[_ref_type]
+ for _, refType := range []string{"commit", "branch", "tag"} {
+ refName, ok := q[refType]
if ok {
if done {
- err = err_duplicate_ref_spec
+ err = errDupRefSpec
return
- } else {
- done = true
- if len(_ref) != 1 {
- err = err_duplicate_ref_spec
- return
- }
- ref = _ref[0]
- ref_type = _ref_type
}
+ done = true
+ if len(refName) != 1 {
+ err = errDupRefSpec
+ return
+ }
+ retRefName = refName[0]
+ retRefType = refType
}
}
if !done {
- err = err_no_ref_spec
+ err = errNoRefSpec
}
return
}
-func parse_request_uri(request_uri string) (segments []string, params url.Values, err error) {
- path, params_string, _ := strings.Cut(request_uri, "?")
+func parseReqURI(requestURI string) (segments []string, params url.Values, err error) {
+ path, paramsStr, _ := strings.Cut(requestURI, "?")
segments = strings.Split(strings.TrimPrefix(path, "/"), "/")
@@ -57,20 +56,20 @@ func parse_request_uri(request_uri string) (segments []string, params url.Values
}
}
- params, err = url.ParseQuery(params_string)
+ params, err = url.ParseQuery(paramsStr)
return
}
-func redirect_with_slash(w http.ResponseWriter, r *http.Request) bool {
- request_uri := r.RequestURI
+func redirectDir(w http.ResponseWriter, r *http.Request) bool {
+ requestURI := r.RequestURI
- path_end := strings.IndexAny(request_uri, "?#")
+ pathEnd := strings.IndexAny(requestURI, "?#")
var path, rest string
- if path_end == -1 {
- path = request_uri
+ if pathEnd == -1 {
+ path = requestURI
} else {
- path = request_uri[:path_end]
- rest = request_uri[path_end:]
+ path = requestURI[:pathEnd]
+ rest = requestURI[pathEnd:]
}
if !strings.HasSuffix(path, "/") {
@@ -80,16 +79,16 @@ func redirect_with_slash(w http.ResponseWriter, r *http.Request) bool {
return false
}
-func redirect_without_slash(w http.ResponseWriter, r *http.Request) bool {
- request_uri := r.RequestURI
+func redirectNoDir(w http.ResponseWriter, r *http.Request) bool {
+ requestURI := r.RequestURI
- path_end := strings.IndexAny(request_uri, "?#")
+ pathEnd := strings.IndexAny(requestURI, "?#")
var path, rest string
- if path_end == -1 {
- path = request_uri
+ if pathEnd == -1 {
+ path = requestURI
} else {
- path = request_uri[:path_end]
- rest = request_uri[path_end:]
+ path = requestURI[:pathEnd]
+ rest = requestURI[pathEnd:]
}
if strings.HasSuffix(path, "/") {
@@ -99,22 +98,22 @@ func redirect_without_slash(w http.ResponseWriter, r *http.Request) bool {
return false
}
-func redirect_unconditionally(w http.ResponseWriter, r *http.Request) {
- request_uri := r.RequestURI
+func redirectUnconditionally(w http.ResponseWriter, r *http.Request) {
+ requestURI := r.RequestURI
- path_end := strings.IndexAny(request_uri, "?#")
+ pathEnd := strings.IndexAny(requestURI, "?#")
var path, rest string
- if path_end == -1 {
- path = request_uri
+ if pathEnd == -1 {
+ path = requestURI
} else {
- path = request_uri[:path_end]
- rest = request_uri[path_end:]
+ path = requestURI[:pathEnd]
+ rest = requestURI[pathEnd:]
}
http.Redirect(w, r, path+rest, http.StatusSeeOther)
}
-func path_escape_cat_segments(segments []string) string {
+func segmentsToURL(segments []string) string {
for i, segment := range segments {
segments[i] = url.PathEscape(segment)
}
diff --git a/users.go b/users.go
index 4c2f9a6..98a5efe 100644
--- a/users.go
+++ b/users.go
@@ -9,7 +9,7 @@ import (
"github.com/jackc/pgx/v5"
)
-func add_user_ssh(ctx context.Context, pubkey string) (user_id int, err error) {
+func addUserSSH(ctx context.Context, pubkey string) (user_id int, err error) {
var tx pgx.Tx
if tx, err = database.Begin(ctx); err != nil {