author    Runxi Yu <me@runxiyu.org>  2025-08-12 11:01:07 +0800
committer Runxi Yu <me@runxiyu.org>  2025-09-15 15:19:12 +0800
commit    eb82fdb2dc0903e6125014abd64aceab42c8eb35 (patch)
tree      c07276ba1595c415ebc28943163d88f3e3180254
parent    Remove forge-specific functions from misc (diff)
Refactor (HEAD, master)
-rw-r--r-- .gitignore | 5
-rw-r--r-- LICENSE.GPL | 675
-rw-r--r-- Makefile | 30
-rw-r--r-- README.md | 9
-rw-r--r-- forge.example.scfg (renamed from forge.scfg) | 20
-rw-r--r-- forged/.golangci.yaml | 42
-rw-r--r-- forged/internal/common/ansiec/colors.go (renamed from forged/internal/ansiec/colors.go) | 20
-rw-r--r-- forged/internal/common/ansiec/doc.go (renamed from forged/internal/ansiec/ansiec.go) | 0
-rw-r--r-- forged/internal/common/ansiec/reset.go (renamed from forged/internal/ansiec/reset.go) | 1
-rw-r--r-- forged/internal/common/ansiec/style.go (renamed from forged/internal/ansiec/style.go) | 1
-rw-r--r-- forged/internal/common/argon2id/LICENSE (renamed from LICENSE.MIT) | 0
-rw-r--r-- forged/internal/common/argon2id/argon2id.go (renamed from forged/internal/argon2id/argon2id.go) | 0
-rw-r--r-- forged/internal/common/bare/LICENSE (renamed from LICENSE.APACHE) | 0
-rw-r--r-- forged/internal/common/bare/doc.go (renamed from forged/internal/bare/package.go) | 0
-rw-r--r-- forged/internal/common/bare/errors.go (renamed from forged/internal/bare/errors.go) | 4
-rw-r--r-- forged/internal/common/bare/limit.go (renamed from forged/internal/bare/limit.go) | 2
-rw-r--r-- forged/internal/common/bare/marshal.go (renamed from forged/internal/bare/marshal.go) | 2
-rw-r--r-- forged/internal/common/bare/reader.go (renamed from forged/internal/bare/reader.go) | 4
-rw-r--r-- forged/internal/common/bare/unions.go (renamed from forged/internal/bare/unions.go) | 6
-rw-r--r-- forged/internal/common/bare/unmarshal.go (renamed from forged/internal/bare/unmarshal.go) | 0
-rw-r--r-- forged/internal/common/bare/varint.go (renamed from forged/internal/bare/varint.go) | 0
-rw-r--r-- forged/internal/common/bare/writer.go (renamed from forged/internal/bare/writer.go) | 6
-rw-r--r-- forged/internal/common/cmap/LICENSE (renamed from LICENSE.BSD) | 0
-rw-r--r-- forged/internal/common/cmap/comparable_map.go (renamed from forged/internal/cmap/comparable_map.go) | 2
-rw-r--r-- forged/internal/common/cmap/map.go (renamed from forged/internal/cmap/map.go) | 2
-rw-r--r-- forged/internal/common/humanize/bytes.go (renamed from forged/internal/humanize/bytes.go) | 0
-rw-r--r-- forged/internal/common/misc/back.go (renamed from forged/internal/misc/back.go) | 0
-rw-r--r-- forged/internal/common/misc/iter.go (renamed from forged/internal/misc/iter.go) | 0
-rw-r--r-- forged/internal/common/misc/misc.go (renamed from forged/internal/render/render.go) | 4
-rw-r--r-- forged/internal/common/misc/net.go | 42
-rw-r--r-- forged/internal/common/misc/slices.go (renamed from forged/internal/misc/misc.go) | 1
-rw-r--r-- forged/internal/common/misc/trivial.go (renamed from forged/internal/misc/trivial.go) | 4
-rw-r--r-- forged/internal/common/misc/unsafe.go (renamed from forged/internal/misc/unsafe.go) | 4
-rw-r--r-- forged/internal/common/misc/url.go (renamed from forged/internal/misc/url.go) | 0
-rw-r--r-- forged/internal/common/scfg/.golangci.yaml (renamed from forged/internal/scfg/.golangci.yaml) | 0
-rw-r--r-- forged/internal/common/scfg/LICENSE | 18
-rw-r--r-- forged/internal/common/scfg/reader.go (renamed from forged/internal/scfg/reader.go) | 8
-rw-r--r-- forged/internal/common/scfg/scfg.go (renamed from forged/internal/scfg/scfg.go) | 0
-rw-r--r-- forged/internal/common/scfg/struct.go (renamed from forged/internal/scfg/struct.go) | 0
-rw-r--r-- forged/internal/common/scfg/unmarshal.go (renamed from forged/internal/scfg/unmarshal.go) | 0
-rw-r--r-- forged/internal/common/scfg/writer.go (renamed from forged/internal/scfg/writer.go) | 0
-rw-r--r-- forged/internal/config/config.go | 111
-rw-r--r-- forged/internal/database/database.go | 13
-rw-r--r-- forged/internal/database/queries/.gitignore | 1
-rw-r--r-- forged/internal/embed/.gitignore | 6
-rw-r--r-- forged/internal/embed/embed.go | 20
-rw-r--r-- forged/internal/git2c/perror.go | 48
-rw-r--r-- forged/internal/global/global.go | 18
-rw-r--r-- forged/internal/incoming/hooks/hooks.go | 81
-rw-r--r-- forged/internal/incoming/lmtp/lmtp.go | 71
-rw-r--r-- forged/internal/incoming/ssh/ssh.go | 90
-rw-r--r-- forged/internal/incoming/web/authn.go | 33
-rw-r--r-- forged/internal/incoming/web/handler.go | 69
-rw-r--r-- forged/internal/incoming/web/handlers/group.go | 156
-rw-r--r-- forged/internal/incoming/web/handlers/index.go | 39
-rw-r--r-- forged/internal/incoming/web/handlers/not_implemented.go | 22
-rw-r--r-- forged/internal/incoming/web/handlers/repo/handler.go | 15
-rw-r--r-- forged/internal/incoming/web/handlers/repo/index.go | 132
-rw-r--r-- forged/internal/incoming/web/handlers/repo/raw.go | 19
-rw-r--r-- forged/internal/incoming/web/handlers/repo/tree.go | 19
-rw-r--r-- forged/internal/incoming/web/handlers/special/login.go | 119
-rw-r--r-- forged/internal/incoming/web/router.go | 419
-rw-r--r-- forged/internal/incoming/web/server.go | 70
-rw-r--r-- forged/internal/incoming/web/templates/load.go | 31
-rw-r--r-- forged/internal/incoming/web/templates/renderer.go | 35
-rw-r--r-- forged/internal/incoming/web/types/types.go | 37
-rw-r--r-- forged/internal/ipc/git2c/build.go | 119
-rw-r--r-- forged/internal/ipc/git2c/client.go (renamed from forged/internal/git2c/client.go) | 16
-rw-r--r-- forged/internal/ipc/git2c/cmd_index.go (renamed from forged/internal/git2c/cmd_index.go) | 6
-rw-r--r-- forged/internal/ipc/git2c/cmd_init_repo.go | 26
-rw-r--r-- forged/internal/ipc/git2c/cmd_treeraw.go (renamed from forged/internal/git2c/cmd_treeraw.go) | 9
-rw-r--r-- forged/internal/ipc/git2c/doc.go | 2
-rw-r--r-- forged/internal/ipc/git2c/extra.go | 286
-rw-r--r-- forged/internal/ipc/git2c/git_types.go (renamed from forged/internal/git2c/git_types.go) | 0
-rw-r--r-- forged/internal/ipc/git2c/perror.go | 87
-rw-r--r-- forged/internal/ipc/git2c/tree.go | 118
-rw-r--r-- forged/internal/irc/bot.go | 176
-rw-r--r-- forged/internal/irc/conn.go | 49
-rw-r--r-- forged/internal/irc/errors.go | 8
-rw-r--r-- forged/internal/irc/message.go | 126
-rw-r--r-- forged/internal/irc/source.go | 50
-rw-r--r-- forged/internal/misc/deploy.go | 22
-rw-r--r-- forged/internal/misc/panic.go | 19
-rw-r--r-- forged/internal/oldgit/fmtpatch.go | 56
-rw-r--r-- forged/internal/oldgit/oldgit.go | 5
-rw-r--r-- forged/internal/oldgit/patch.go | 43
-rw-r--r-- forged/internal/render/chroma.go | 41
-rw-r--r-- forged/internal/render/escape.go | 14
-rw-r--r-- forged/internal/render/readme.go | 34
-rw-r--r-- forged/internal/server/server.go | 87
-rw-r--r-- forged/internal/unsorted/acl.go | 59
-rw-r--r-- forged/internal/unsorted/config.go | 94
-rw-r--r-- forged/internal/unsorted/database.go | 43
-rw-r--r-- forged/internal/unsorted/fedauth.go | 97
-rw-r--r-- forged/internal/unsorted/git_hooks_handle_linux.go | 377
-rw-r--r-- forged/internal/unsorted/git_hooks_handle_other.go | 336
-rw-r--r-- forged/internal/unsorted/git_init.go | 34
-rw-r--r-- forged/internal/unsorted/git_misc.go | 95
-rw-r--r-- forged/internal/unsorted/git_plumbing.go | 188
-rw-r--r-- forged/internal/unsorted/git_ref.go | 37
-rw-r--r-- forged/internal/unsorted/http_auth.go | 26
-rw-r--r-- forged/internal/unsorted/http_handle_branches.go | 46
-rw-r--r-- forged/internal/unsorted/http_handle_group_index.go | 196
-rw-r--r-- forged/internal/unsorted/http_handle_index.go | 26
-rw-r--r-- forged/internal/unsorted/http_handle_login.go | 108
-rw-r--r-- forged/internal/unsorted/http_handle_repo_commit.go | 146
-rw-r--r-- forged/internal/unsorted/http_handle_repo_contrib_index.go | 52
-rw-r--r-- forged/internal/unsorted/http_handle_repo_contrib_one.go | 98
-rw-r--r-- forged/internal/unsorted/http_handle_repo_index.go | 41
-rw-r--r-- forged/internal/unsorted/http_handle_repo_info.go | 107
-rw-r--r-- forged/internal/unsorted/http_handle_repo_log.go | 39
-rw-r--r-- forged/internal/unsorted/http_handle_repo_raw.go | 56
-rw-r--r-- forged/internal/unsorted/http_handle_repo_tree.go | 55
-rw-r--r-- forged/internal/unsorted/http_handle_repo_upload_pack.go | 120
-rw-r--r-- forged/internal/unsorted/http_handle_users.go | 15
-rw-r--r-- forged/internal/unsorted/http_server.go | 276
-rw-r--r-- forged/internal/unsorted/http_template.go | 18
-rw-r--r-- forged/internal/unsorted/lmtp_handle_patch.go | 133
-rw-r--r-- forged/internal/unsorted/lmtp_server.go | 204
-rw-r--r-- forged/internal/unsorted/remote_url.go | 25
-rw-r--r-- forged/internal/unsorted/resources.go | 56
-rw-r--r-- forged/internal/unsorted/server.go | 236
-rw-r--r-- forged/internal/unsorted/ssh_handle_receive_pack.go | 131
-rw-r--r-- forged/internal/unsorted/ssh_handle_upload_pack.go | 39
-rw-r--r-- forged/internal/unsorted/ssh_server.go | 96
-rw-r--r-- forged/internal/unsorted/ssh_utils.go | 79
-rw-r--r-- forged/internal/unsorted/unsorted.go | 5
-rw-r--r-- forged/internal/unsorted/users.go | 35
-rw-r--r-- forged/internal/unsorted/version.go | 6
-rw-r--r-- forged/internal/web/error_pages.go | 60
-rw-r--r-- forged/internal/web/web.go | 5
-rw-r--r-- forged/main.go | 7
-rw-r--r-- forged/sql/queries/groups.sql | 47
-rw-r--r-- forged/sql/queries/login.sql | 8
-rw-r--r-- forged/sql/queries/repos.sql | 9
-rw-r--r-- forged/sql/schema.sql | 221
-rw-r--r-- forged/sqlc.yaml | 15
-rw-r--r-- forged/static/style.css | 44
-rw-r--r-- forged/templates/_footer.tmpl | 2
-rw-r--r-- forged/templates/_group_view.tmpl | 8
-rw-r--r-- forged/templates/_header.tmpl | 16
-rw-r--r-- forged/templates/group.tmpl | 12
-rw-r--r-- forged/templates/index.tmpl | 8
-rw-r--r-- forged/templates/login.tmpl | 4
-rw-r--r-- git2d/.gitignore | 1
-rw-r--r-- git2d/bare.c | 17
-rw-r--r-- git2d/bare.h | 38
-rw-r--r-- git2d/cmd1.c | 15
-rw-r--r-- git2d/cmd2.c | 13
-rw-r--r-- git2d/cmd_commit.c | 188
-rw-r--r-- git2d/cmd_diff.c | 366
-rw-r--r-- git2d/cmd_init.c | 65
-rw-r--r-- git2d/cmd_ref.c | 113
-rw-r--r-- git2d/cmd_tree.c | 120
-rw-r--r-- git2d/main.c | 9
-rw-r--r-- git2d/session.c | 91
-rw-r--r-- git2d/x.h | 26
-rw-r--r-- go.mod | 33
-rw-r--r-- go.sum | 136
-rw-r--r-- hookc/.gitignore | 1
-rwxr-xr-x scripts/update_deps | 9
-rw-r--r-- sql/schema.sql | 195
162 files changed, 3822 insertions, 5895 deletions
diff --git a/.gitignore b/.gitignore
index 95c0847..9b1c8b1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1 @@
-/forge
-/source.tar.gz
-*.c.BAK
-*.o
+/dist
diff --git a/LICENSE.GPL b/LICENSE.GPL
deleted file mode 100644
index 53d1f3d..0000000
--- a/LICENSE.GPL
+++ /dev/null
@@ -1,675 +0,0 @@
- GNU GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU General Public License is a free, copyleft license for
-software and other kinds of works.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-the GNU General Public License is intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users. We, the Free Software Foundation, use the
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors. You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- To protect your rights, we need to prevent others from denying you
-these rights or asking you to surrender the rights. Therefore, you have
-certain responsibilities if you distribute copies of the software, or if
-you modify it: responsibilities to respect the freedom of others.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must pass on to the recipients the same
-freedoms that you received. You must make sure that they, too, receive
-or can get the source code. And you must show them these terms so they
-know their rights.
-
- Developers that use the GNU GPL protect your rights with two steps:
-(1) assert copyright on the software, and (2) offer you this License
-giving you legal permission to copy, distribute and/or modify it.
-
- For the developers' and authors' protection, the GPL clearly explains
-that there is no warranty for this free software. For both users' and
-authors' sake, the GPL requires that modified versions be marked as
-changed, so that their problems will not be attributed erroneously to
-authors of previous versions.
-
- Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so. This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software. The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable. Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
-
- Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Use with the GNU Affero General Public License.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
-section 13, concerning interaction through a network will apply to the
-combination as such.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
- If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
-
- <program> Copyright (C) <year> <name of author>
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
-<https://www.gnu.org/licenses/>.
-
- The GNU General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License. But first, please read
-<https://www.gnu.org/licenses/why-not-lgpl.html>.
-
diff --git a/Makefile b/Makefile
index 2dc95f5..a894e2b 100644
--- a/Makefile
+++ b/Makefile
@@ -7,29 +7,25 @@
# some other build system).
#
-.PHONY: clean
+.PHONY: clean all
CFLAGS = -Wall -Wextra -pedantic -std=c99 -D_GNU_SOURCE
-VERSION = $(shell git describe --tags --always --dirty)
-SOURCE_FILES = $(shell git ls-files)
-EMBED = git2d/git2d hookc/hookc $(wildcard LICENSE*) $(wildcard forged/static/*) $(wildcard forged/templates/*)
-EMBED_ = $(EMBED:%=forged/internal/embed/%)
+all: dist/forged dist/git2d dist/hookc
-forge: $(EMBED_) $(SOURCE_FILES)
- CGO_ENABLED=0 go build -o forge -ldflags '-extldflags "-f no-PIC -static" -X "go.lindenii.runxiyu.org/forge/forged/internal/unsorted.version=$(VERSION)"' -tags 'osusergo netgo static_build' ./forged
+dist/forged: $(shell git ls-files forged)
+ mkdir -p dist
+ sqlc -f forged/sqlc.yaml generate
+ CGO_ENABLED=0 go build -o dist/forged -ldflags '-extldflags "-f no-PIC -static"' -tags 'osusergo netgo static_build' ./forged
-hookc/hookc:
+dist/git2d: $(wildcard git2d/*.c)
+ mkdir -p dist
+ $(CC) $(CFLAGS) -o dist/git2d $^ $(shell pkg-config --cflags --libs libgit2) -lpthread
-git2d/git2d: $(wildcard git2d/*.c)
- $(CC) $(CFLAGS) -o git2d/git2d $^ $(shell pkg-config --cflags --libs libgit2) -lpthread
+dist/hookc: $(wildcard hookc/*.c)
+ mkdir -p dist
+ $(CC) $(CFLAGS) -o dist/hookc $^
clean:
- rm -rf forge hookc/hookc git2d/git2d */*.o
+ rm -rf dist
-forged/internal/embed/%: %
- @mkdir -p $(shell dirname $@)
- @cp $^ $@
-
-forged/internal/embed/.gitignore:
- @touch $@
diff --git a/README.md b/README.md
index 94442dd..db8e65e 100644
--- a/README.md
+++ b/README.md
@@ -26,11 +26,13 @@ primarily designed for self-hosting by small organizations and individuals.
## Planned features
* Further Integration with mailing list workflows
-* Ticket trackers and discussions
+* Further federated authentication
+* Ticket trackers, discussions, RFCs
* Web interface
* Email integration with IMAP archives
* SSH API
* Email access
+* CI system similar to builds.sr.ht
## License
@@ -51,8 +53,7 @@ Note that emailing patches is still experimental.
We have several repo mirrors:
-* [Official repo on our own instance of Lindenii Forge](https://forge.lindenii.runxiyu.org/forge/-/repos/server/)
-* [The Lindenii Project's backup cgit](https://git.lindenii.runxiyu.org/forge.git/)
+* [Official repo on our own instance of Lindenii Forge](https://forge.lindenii.org/forge/-/repos/server/)
+* [The Lindenii Project's backup cgit](https://git.lindenii.org/forge.git/)
* [SourceHut](https://git.sr.ht/~runxiyu/forge/)
-* [Codeberg](https://codeberg.org/lindenii/forge/)
* [GitHub](https://github.com/runxiyu/forge/)
diff --git a/forge.scfg b/forge.example.scfg
index 1c8eeb9..9ef39a6 100644
--- a/forge.scfg
+++ b/forge.example.scfg
@@ -1,4 +1,4 @@
-http {
+web {
# What network transport should we listen on?
# Examples: tcp tcp4 tcp6 unix
net tcp
@@ -20,10 +20,16 @@ http {
read_timeout 120
write_timeout 1800
idle_timeout 120
+ max_header_bytes 20000
# Are we running behind a reverse proxy? If so, we will trust
# X-Forwarded-For headers.
reverse_proxy true
+
+ templates_path /usr/share/lindenii/forge/templates
+ static_path /usr/share/lindenii/forge/static
+
+ shutdown_timeout 10
}
irc {
@@ -40,11 +46,8 @@ git {
# Where should newly-created Git repositories be stored?
repo_dir /var/lib/lindenii/forge/repos
- # Where should git2d listen on?
+ # Where is git2d listening on?
socket /var/run/lindenii/forge/git2d.sock
-
- # Where should we put git2d?
- daemon_path /usr/libexec/lindenii/forge/git2d
}
ssh {
@@ -61,6 +64,8 @@ ssh {
# What is the canonical SSH URL?
root ssh://forge.example.org
+
+ shutdown_timeout 10
}
general {
@@ -68,10 +73,6 @@ general {
}
db {
- # What type of database are we connecting to?
- # Currently only "postgres" is supported.
- type postgres
-
# What is the connection string?
conn postgresql:///lindenii-forge?host=/var/run/postgresql
}
@@ -97,6 +98,7 @@ lmtp {
# General timeouts
read_timeout 300
write_timeout 300
+ shutdown_timeout 10
}
pprof {
diff --git a/forged/.golangci.yaml b/forged/.golangci.yaml
index e475c41..499136b 100644
--- a/forged/.golangci.yaml
+++ b/forged/.golangci.yaml
@@ -4,32 +4,22 @@ linters:
default: all
disable:
- depguard
- - err113 # dynamically defined errors are fine for our purposes
- - forcetypeassert # type assertion failures are usually programming errors
- - gochecknoinits # we use inits sparingly for good reasons
- - godox # they're just used as markers for where needs improvements
- - ireturn # doesn't work well with how we use generics
- - lll # long lines are acceptable
- - mnd # it's a bit ridiculous to replace all of them
- - nakedret # patterns should be consistent
- - nonamedreturns # i like named returns
- - wrapcheck # wrapping all errors is just not necessary
- - varnamelen # "from" and "to" are very valid
- - containedctx
- - godot
- - dogsled
- - maintidx # e
- - nestif # e
- - gocognit # e
- - gocyclo # e
- - dupl # e
- - cyclop # e
- - goconst # e
- - funlen # e
- - wsl # e
- - nlreturn # e
- - unused # e
- - exhaustruct # e
+ - wsl_v5 # tmp
+ - wsl # tmp
+ - unused # tmp
+ - nonamedreturns
+ - err113 # tmp
+ - gochecknoinits # tmp
+ - nlreturn # tmp
+ - cyclop # tmp
+ - gocognit # tmp
+ - varnamelen # tmp
+ - funlen # tmp
+ - lll
+ - mnd # tmp
+ - revive # tmp
+ - godox # tmp
+ - nestif # tmp
linters-settings:
revive:
diff --git a/forged/internal/ansiec/colors.go b/forged/internal/common/ansiec/colors.go
index 8e5f54b..8be2a0c 100644
--- a/forged/internal/ansiec/colors.go
+++ b/forged/internal/common/ansiec/colors.go
@@ -3,18 +3,16 @@
package ansiec
+// ANSI color codes
const (
- Black = "\x1b[30m"
- Red = "\x1b[31m"
- Green = "\x1b[32m"
- Yellow = "\x1b[33m"
- Blue = "\x1b[34m"
- Magenta = "\x1b[35m"
- Cyan = "\x1b[36m"
- White = "\x1b[37m"
-)
-
-const (
+ Black = "\x1b[30m"
+ Red = "\x1b[31m"
+ Green = "\x1b[32m"
+ Yellow = "\x1b[33m"
+ Blue = "\x1b[34m"
+ Magenta = "\x1b[35m"
+ Cyan = "\x1b[36m"
+ White = "\x1b[37m"
BrightBlack = "\x1b[30;1m"
BrightRed = "\x1b[31;1m"
BrightGreen = "\x1b[32;1m"
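Note: these constants are plain escape-sequence strings, so callers just concatenate them into output and terminate with Reset. A minimal sketch of using the renamed package (the call site is illustrative; the import path follows the new forged/internal/common/ansiec location):

	package main

	import (
		"fmt"

		"go.lindenii.runxiyu.org/forge/forged/internal/common/ansiec"
	)

	func main() {
		// Wrap text in a color code and reset afterwards so later
		// output is unaffected.
		fmt.Println(ansiec.Red + "error:" + ansiec.Reset + " something failed")
		fmt.Println(ansiec.Bold + ansiec.BrightGreen + "ok" + ansiec.Reset)
	}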
diff --git a/forged/internal/ansiec/ansiec.go b/forged/internal/common/ansiec/doc.go
index 542c564..542c564 100644
--- a/forged/internal/ansiec/ansiec.go
+++ b/forged/internal/common/ansiec/doc.go
diff --git a/forged/internal/ansiec/reset.go b/forged/internal/common/ansiec/reset.go
index c5b6ba6..51bb312 100644
--- a/forged/internal/ansiec/reset.go
+++ b/forged/internal/common/ansiec/reset.go
@@ -3,4 +3,5 @@
package ansiec
+// Reset the colors and styles
const Reset = "\x1b[0m"
diff --git a/forged/internal/ansiec/style.go b/forged/internal/common/ansiec/style.go
index dd37344..95edbbe 100644
--- a/forged/internal/ansiec/style.go
+++ b/forged/internal/common/ansiec/style.go
@@ -3,6 +3,7 @@
package ansiec
+// ANSI text styles
const (
Bold = "\x1b[1m"
Underline = "\x1b[4m"
diff --git a/LICENSE.MIT b/forged/internal/common/argon2id/LICENSE
index 3649823..3649823 100644
--- a/LICENSE.MIT
+++ b/forged/internal/common/argon2id/LICENSE
diff --git a/forged/internal/argon2id/argon2id.go b/forged/internal/common/argon2id/argon2id.go
index 88df8f6..88df8f6 100644
--- a/forged/internal/argon2id/argon2id.go
+++ b/forged/internal/common/argon2id/argon2id.go
diff --git a/LICENSE.APACHE b/forged/internal/common/bare/LICENSE
index 6b0b127..6b0b127 100644
--- a/LICENSE.APACHE
+++ b/forged/internal/common/bare/LICENSE
diff --git a/forged/internal/bare/package.go b/forged/internal/common/bare/doc.go
index 2f12f55..2f12f55 100644
--- a/forged/internal/bare/package.go
+++ b/forged/internal/common/bare/doc.go
diff --git a/forged/internal/bare/errors.go b/forged/internal/common/bare/errors.go
index 39c951a..4634f0c 100644
--- a/forged/internal/bare/errors.go
+++ b/forged/internal/common/bare/errors.go
@@ -9,12 +9,12 @@ import (
"reflect"
)
-var ErrInvalidStr = errors.New("String contains invalid UTF-8 sequences")
+var ErrInvalidStr = errors.New("string contains invalid UTF-8 sequences")
type UnsupportedTypeError struct {
Type reflect.Type
}
func (e *UnsupportedTypeError) Error() string {
- return fmt.Sprintf("Unsupported type for marshaling: %s\n", e.Type.String())
+ return fmt.Sprintf("unsupported type for marshaling: %s\n", e.Type.String())
}
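Note: lowercasing these messages follows the Go convention that error strings are uncapitalized and unpunctuated, because they are usually embedded inside wrapped errors. A small self-contained illustration (the decode helper is hypothetical, not from this repository):

	package main

	import (
		"errors"
		"fmt"
	)

	// errInvalidStr mirrors bare.ErrInvalidStr: lowercase, no trailing period.
	var errInvalidStr = errors.New("string contains invalid UTF-8 sequences")

	func decode() error {
		// Wrapped the way callers typically wrap: the combined message
		// reads as one sentence only if the inner string is lowercase.
		return fmt.Errorf("decode request: %w", errInvalidStr)
	}

	func main() {
		// Prints: decode request: string contains invalid UTF-8 sequences
		fmt.Println(decode())
	}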
diff --git a/forged/internal/bare/limit.go b/forged/internal/common/bare/limit.go
index 212bc05..7eece8c 100644
--- a/forged/internal/bare/limit.go
+++ b/forged/internal/common/bare/limit.go
@@ -32,7 +32,7 @@ func MaxMapSize(size uint64) {
// Use MaxUnmarshalBytes to prevent this error from occuring on messages which
// are large by design.
-var ErrLimitExceeded = errors.New("Maximum message size exceeded")
+var ErrLimitExceeded = errors.New("maximum message size exceeded")
// Identical to io.LimitedReader, except it returns our custom error instead of
// EOF if the limit is reached.
diff --git a/forged/internal/bare/marshal.go b/forged/internal/common/bare/marshal.go
index 1ce942d..d4c338e 100644
--- a/forged/internal/bare/marshal.go
+++ b/forged/internal/common/bare/marshal.go
@@ -54,7 +54,7 @@ func MarshalWriter(w *Writer, val interface{}) error {
t := reflect.TypeOf(val)
v := reflect.ValueOf(val)
if t.Kind() != reflect.Ptr {
- return errors.New("Expected val to be pointer type")
+ return errors.New("expected val to be pointer type")
}
return getEncoder(t.Elem())(w, v.Elem())
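Note: MarshalWriter only accepts a pointer, as the check above enforces. A rough usage sketch under that constraint (the bare.NewWriter constructor and the ping struct are assumptions for illustration, not taken from this diff):

	package main

	import (
		"bytes"
		"fmt"

		"go.lindenii.runxiyu.org/forge/forged/internal/common/bare"
	)

	type ping struct {
		Seq uint64
	}

	func main() {
		var buf bytes.Buffer
		w := bare.NewWriter(&buf) // assumed constructor wrapping an io.Writer
		msg := ping{Seq: 7}
		// Pass &msg, not msg: MarshalWriter rejects non-pointer values.
		if err := bare.MarshalWriter(w, &msg); err != nil {
			panic(err)
		}
		fmt.Printf("%d bytes encoded\n", buf.Len())
	}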
diff --git a/forged/internal/bare/reader.go b/forged/internal/common/bare/reader.go
index 58325e3..7e872f4 100644
--- a/forged/internal/bare/reader.go
+++ b/forged/internal/common/bare/reader.go
@@ -10,7 +10,7 @@ import (
"math"
"unicode/utf8"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/misc"
)
type byteReader interface {
@@ -157,7 +157,7 @@ func (r *Reader) ReadString() (string, error) {
// Reads a fixed amount of arbitrary data, defined by the length of the slice.
func (r *Reader) ReadDataFixed(dest []byte) error {
- var amt int = 0
+ var amt int
for amt < len(dest) {
n, err := r.base.Read(dest[amt:])
if err != nil {
diff --git a/forged/internal/bare/unions.go b/forged/internal/common/bare/unions.go
index 0270a5f..1020fa0 100644
--- a/forged/internal/bare/unions.go
+++ b/forged/internal/common/bare/unions.go
@@ -21,8 +21,10 @@ type UnionTags struct {
types map[uint64]reflect.Type
}
-var unionInterface = reflect.TypeOf((*Union)(nil)).Elem()
-var unionRegistry map[reflect.Type]*UnionTags
+var (
+ unionInterface = reflect.TypeOf((*Union)(nil)).Elem()
+ unionRegistry map[reflect.Type]*UnionTags
+)
func init() {
unionRegistry = make(map[reflect.Type]*UnionTags)
diff --git a/forged/internal/bare/unmarshal.go b/forged/internal/common/bare/unmarshal.go
index d55f32c..d55f32c 100644
--- a/forged/internal/bare/unmarshal.go
+++ b/forged/internal/common/bare/unmarshal.go
diff --git a/forged/internal/bare/varint.go b/forged/internal/common/bare/varint.go
index a185ac8..a185ac8 100644
--- a/forged/internal/bare/varint.go
+++ b/forged/internal/common/bare/varint.go
diff --git a/forged/internal/bare/writer.go b/forged/internal/common/bare/writer.go
index bada045..1b23c9f 100644
--- a/forged/internal/bare/writer.go
+++ b/forged/internal/common/bare/writer.go
@@ -9,7 +9,7 @@ import (
"io"
"math"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/misc"
)
// A Writer for BARE primitive types.
@@ -92,7 +92,7 @@ func (w *Writer) WriteString(str string) error {
// Writes a fixed amount of arbitrary data, defined by the length of the slice.
func (w *Writer) WriteDataFixed(data []byte) error {
- var amt int = 0
+ var amt int
for amt < len(data) {
n, err := w.base.Write(data[amt:])
if err != nil {
@@ -109,7 +109,7 @@ func (w *Writer) WriteData(data []byte) error {
if err != nil {
return err
}
- var amt int = 0
+ var amt int
for amt < len(data) {
n, err := w.base.Write(data[amt:])
if err != nil {
diff --git a/LICENSE.BSD b/forged/internal/common/cmap/LICENSE
index d5dfee8..d5dfee8 100644
--- a/LICENSE.BSD
+++ b/forged/internal/common/cmap/LICENSE
diff --git a/forged/internal/cmap/comparable_map.go b/forged/internal/common/cmap/comparable_map.go
index cd9d4ce..e89175c 100644
--- a/forged/internal/cmap/comparable_map.go
+++ b/forged/internal/common/cmap/comparable_map.go
@@ -3,7 +3,7 @@
// Copyright 2024 Runxi Yu (porting it to generics)
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.BSD file.
+// license that can be found in the LICENSE file.
package cmap
diff --git a/forged/internal/cmap/map.go b/forged/internal/common/cmap/map.go
index 4f43627..7a1fe5b 100644
--- a/forged/internal/cmap/map.go
+++ b/forged/internal/common/cmap/map.go
@@ -3,7 +3,7 @@
// Copyright 2024 Runxi Yu (porting it to generics)
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.BSD file.
+// license that can be found in the LICENSE file.
// Package cmap provides a generic Map safe for concurrent use.
package cmap
diff --git a/forged/internal/humanize/bytes.go b/forged/internal/common/humanize/bytes.go
index bea504c..bea504c 100644
--- a/forged/internal/humanize/bytes.go
+++ b/forged/internal/common/humanize/bytes.go
diff --git a/forged/internal/misc/back.go b/forged/internal/common/misc/back.go
index 5351359..5351359 100644
--- a/forged/internal/misc/back.go
+++ b/forged/internal/common/misc/back.go
diff --git a/forged/internal/misc/iter.go b/forged/internal/common/misc/iter.go
index 61a96f4..61a96f4 100644
--- a/forged/internal/misc/iter.go
+++ b/forged/internal/common/misc/iter.go
diff --git a/forged/internal/render/render.go b/forged/internal/common/misc/misc.go
index 465e410..e9e10ab 100644
--- a/forged/internal/render/render.go
+++ b/forged/internal/common/misc/misc.go
@@ -1,5 +1,5 @@
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-// Package render provides functions to render code and READMEs.
-package render
+// Package misc provides miscellaneous functions and other definitions.
+package misc
diff --git a/forged/internal/common/misc/net.go b/forged/internal/common/misc/net.go
new file mode 100644
index 0000000..967ea77
--- /dev/null
+++ b/forged/internal/common/misc/net.go
@@ -0,0 +1,42 @@
+package misc
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net"
+ "syscall"
+)
+
+func ListenUnixSocket(ctx context.Context, path string) (listener net.Listener, replaced bool, err error) {
+ listenConfig := net.ListenConfig{} //exhaustruct:ignore
+ listener, err = listenConfig.Listen(ctx, "unix", path)
+ if errors.Is(err, syscall.EADDRINUSE) {
+ replaced = true
+ unlinkErr := syscall.Unlink(path)
+ if unlinkErr != nil {
+ return listener, false, fmt.Errorf("remove existing socket %q: %w", path, unlinkErr)
+ }
+ listener, err = listenConfig.Listen(ctx, "unix", path)
+ }
+ if err != nil {
+ return listener, replaced, fmt.Errorf("listen on unix socket %q: %w", path, err)
+ }
+ return listener, replaced, nil
+}
+
+func Listen(ctx context.Context, net_, addr string) (listener net.Listener, err error) {
+ if net_ == "unix" {
+ listener, _, err = ListenUnixSocket(ctx, addr)
+ if err != nil {
+ return listener, fmt.Errorf("listen unix socket for web: %w", err)
+ }
+ } else {
+ listenConfig := net.ListenConfig{} //exhaustruct:ignore
+ listener, err = listenConfig.Listen(ctx, net_, addr)
+ if err != nil {
+ return listener, fmt.Errorf("listen %s for web: %w", net_, err)
+ }
+ }
+ return listener, nil
+}
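
The two listeners above are what every incoming server added later in this commit (hooks, LMTP, SSH, web) builds on. A minimal sketch of the call pattern; the socket path and the surrounding function are assumptions for illustration only:

    package main

    import (
        "context"
        "fmt"

        "go.lindenii.runxiyu.org/forge/forged/internal/common/misc"
    )

    func serveUnix(ctx context.Context) error {
        // ListenUnixSocket unlinks a stale socket and retries once when the
        // address is already in use; the bool reports whether that happened.
        listener, replaced, err := misc.ListenUnixSocket(ctx, "/run/forge/hooks.sock")
        if err != nil {
            return fmt.Errorf("listen: %w", err)
        }
        if replaced {
            fmt.Println("replaced a stale socket")
        }
        defer func() { _ = listener.Close() }()
        // accept loop would go here
        return nil
    }
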
diff --git a/forged/internal/misc/misc.go b/forged/internal/common/misc/slices.go
index 398020a..3ad0211 100644
--- a/forged/internal/misc/misc.go
+++ b/forged/internal/common/misc/slices.go
@@ -1,7 +1,6 @@
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-// Package misc provides miscellaneous functions and other definitions.
package misc
import "strings"
diff --git a/forged/internal/misc/trivial.go b/forged/internal/common/misc/trivial.go
index e59c17e..83901e0 100644
--- a/forged/internal/misc/trivial.go
+++ b/forged/internal/common/misc/trivial.go
@@ -28,13 +28,13 @@ func QueryEscape(s string) string {
}
// Dereference dereferences a pointer.
-func Dereference[T any](p *T) T {
+func Dereference[T any](p *T) T { //nolint:ireturn
return *p
}
// DereferenceOrZero dereferences a pointer. If the pointer is nil, the zero
// value of its associated type is returned instead.
-func DereferenceOrZero[T any](p *T) T {
+func DereferenceOrZero[T any](p *T) T { //nolint:ireturn
if p != nil {
return *p
}
diff --git a/forged/internal/misc/unsafe.go b/forged/internal/common/misc/unsafe.go
index 6c2192f..d827e7f 100644
--- a/forged/internal/misc/unsafe.go
+++ b/forged/internal/common/misc/unsafe.go
@@ -9,12 +9,12 @@ import "unsafe"
// Memory is borrowed from the string.
// The resulting byte slice must not be modified in any form.
func StringToBytes(s string) (bytes []byte) {
- return unsafe.Slice(unsafe.StringData(s), len(s))
+ return unsafe.Slice(unsafe.StringData(s), len(s)) //#nosec G103
}
// BytesToString converts a byte slice to a string without copying the bytes.
// Memory is borrowed from the byte slice.
// The source byte slice must not be modified.
func BytesToString(b []byte) string {
- return unsafe.String(unsafe.SliceData(b), len(b))
+ return unsafe.String(unsafe.SliceData(b), len(b)) //#nosec G103
}
diff --git a/forged/internal/misc/url.go b/forged/internal/common/misc/url.go
index 346ff76..346ff76 100644
--- a/forged/internal/misc/url.go
+++ b/forged/internal/common/misc/url.go
diff --git a/forged/internal/scfg/.golangci.yaml b/forged/internal/common/scfg/.golangci.yaml
index 59f1970..59f1970 100644
--- a/forged/internal/scfg/.golangci.yaml
+++ b/forged/internal/common/scfg/.golangci.yaml
diff --git a/forged/internal/common/scfg/LICENSE b/forged/internal/common/scfg/LICENSE
new file mode 100644
index 0000000..3649823
--- /dev/null
+++ b/forged/internal/common/scfg/LICENSE
@@ -0,0 +1,18 @@
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/forged/internal/scfg/reader.go b/forged/internal/common/scfg/reader.go
index 6a2bedc..b0e2cc0 100644
--- a/forged/internal/scfg/reader.go
+++ b/forged/internal/common/scfg/reader.go
@@ -15,12 +15,16 @@ import (
const maxNestingDepth = 1000
// Load loads a configuration file.
-func Load(path string) (Block, error) {
+func Load(path string) (block Block, err error) {
f, err := os.Open(path)
if err != nil {
return nil, err
}
- defer f.Close()
+ defer func() {
+ if cerr := f.Close(); err == nil && cerr != nil {
+ err = cerr
+ }
+ }()
return Read(f)
}
diff --git a/forged/internal/scfg/scfg.go b/forged/internal/common/scfg/scfg.go
index 4533e63..4533e63 100644
--- a/forged/internal/scfg/scfg.go
+++ b/forged/internal/common/scfg/scfg.go
diff --git a/forged/internal/scfg/struct.go b/forged/internal/common/scfg/struct.go
index 98ec943..98ec943 100644
--- a/forged/internal/scfg/struct.go
+++ b/forged/internal/common/scfg/struct.go
diff --git a/forged/internal/scfg/unmarshal.go b/forged/internal/common/scfg/unmarshal.go
index 8befc10..8befc10 100644
--- a/forged/internal/scfg/unmarshal.go
+++ b/forged/internal/common/scfg/unmarshal.go
diff --git a/forged/internal/scfg/writer.go b/forged/internal/common/scfg/writer.go
index 02a07fe..02a07fe 100644
--- a/forged/internal/scfg/writer.go
+++ b/forged/internal/common/scfg/writer.go
diff --git a/forged/internal/config/config.go b/forged/internal/config/config.go
new file mode 100644
index 0000000..1825882
--- /dev/null
+++ b/forged/internal/config/config.go
@@ -0,0 +1,111 @@
+package config
+
+import (
+ "bufio"
+ "fmt"
+ "log/slog"
+ "os"
+
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/scfg"
+)
+
+type Config struct {
+ DB DB `scfg:"db"`
+ Web Web `scfg:"web"`
+ Hooks Hooks `scfg:"hooks"`
+ LMTP LMTP `scfg:"lmtp"`
+ SSH SSH `scfg:"ssh"`
+ IRC IRC `scfg:"irc"`
+ Git Git `scfg:"git"`
+ General General `scfg:"general"`
+ Pprof Pprof `scfg:"pprof"`
+}
+
+type DB struct {
+ Conn string `scfg:"conn"`
+}
+
+type Web struct {
+ Net string `scfg:"net"`
+ Addr string `scfg:"addr"`
+ Root string `scfg:"root"`
+ CookieExpiry int `scfg:"cookie_expiry"`
+ ReadTimeout uint32 `scfg:"read_timeout"`
+ WriteTimeout uint32 `scfg:"write_timeout"`
+ IdleTimeout uint32 `scfg:"idle_timeout"`
+ MaxHeaderBytes int `scfg:"max_header_bytes"`
+ ReverseProxy bool `scfg:"reverse_proxy"`
+ ShutdownTimeout uint32 `scfg:"shutdown_timeout"`
+ TemplatesPath string `scfg:"templates_path"`
+ StaticPath string `scfg:"static_path"`
+}
+
+type Hooks struct {
+ Socket string `scfg:"socket"`
+ Execs string `scfg:"execs"`
+}
+
+type LMTP struct {
+ Socket string `scfg:"socket"`
+ Domain string `scfg:"domain"`
+ MaxSize int64 `scfg:"max_size"`
+ WriteTimeout uint32 `scfg:"write_timeout"`
+ ReadTimeout uint32 `scfg:"read_timeout"`
+}
+
+type SSH struct {
+ Net string `scfg:"net"`
+ Addr string `scfg:"addr"`
+ Key string `scfg:"key"`
+ Root string `scfg:"root"`
+ ShutdownTimeout uint32 `scfg:"shutdown_timeout"`
+}
+
+type IRC struct {
+ Net string `scfg:"net"`
+ Addr string `scfg:"addr"`
+ TLS bool `scfg:"tls"`
+ SendQ uint `scfg:"sendq"`
+ Nick string `scfg:"nick"`
+ User string `scfg:"user"`
+ Gecos string `scfg:"gecos"`
+}
+
+type Git struct {
+ RepoDir string `scfg:"repo_dir"`
+ Socket string `scfg:"socket"`
+}
+
+type General struct {
+ Title string `scfg:"title"`
+}
+
+type Pprof struct {
+ Net string `scfg:"net"`
+ Addr string `scfg:"addr"`
+}
+
+func Open(path string) (config Config, err error) {
+ var configFile *os.File
+
+ configFile, err = os.Open(path) //#nosec G304
+ if err != nil {
+ err = fmt.Errorf("open config file: %w", err)
+ return config, err
+ }
+ defer func() {
+ _ = configFile.Close()
+ }()
+
+ decoder := scfg.NewDecoder(bufio.NewReader(configFile))
+ err = decoder.Decode(&config)
+ if err != nil {
+ err = fmt.Errorf("decode config file: %w", err)
+ return config, err
+ }
+ for _, u := range decoder.UnknownDirectives() {
+ slog.Warn("unknown configuration directive", "directive", u)
+ }
+
+ return config, err
+}
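
Each struct above maps one scfg block, and the directive names come from the scfg struct tags; unknown directives are only warned about, not rejected. An illustrative configuration fragment under those assumptions (all values are made up for this sketch):

    db {
        conn postgresql:///forge?host=/var/run/postgresql
    }
    web {
        net tcp
        addr [::1]:8080
        root https://forge.example.org
        cookie_expiry 604800
    }
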
diff --git a/forged/internal/database/database.go b/forged/internal/database/database.go
index b995adc..d96af6b 100644
--- a/forged/internal/database/database.go
+++ b/forged/internal/database/database.go
@@ -6,20 +6,19 @@ package database
import (
"context"
+ "fmt"
"github.com/jackc/pgx/v5/pgxpool"
)
-// Database is a wrapper around pgxpool.Pool to provide a common interface for
-// other packages in the forge.
type Database struct {
*pgxpool.Pool
}
-// Open opens a new database connection pool using the provided connection
-// string. It returns a Database instance and an error if any occurs.
-// It is run indefinitely in the background.
-func Open(connString string) (Database, error) {
- db, err := pgxpool.New(context.Background(), connString)
+func Open(ctx context.Context, conn string) (Database, error) {
+ db, err := pgxpool.New(ctx, conn)
+ if err != nil {
+ err = fmt.Errorf("create pgxpool: %w", err)
+ }
return Database{db}, err
}
diff --git a/forged/internal/database/queries/.gitignore b/forged/internal/database/queries/.gitignore
new file mode 100644
index 0000000..1307f6d
--- /dev/null
+++ b/forged/internal/database/queries/.gitignore
@@ -0,0 +1 @@
+/*.go
diff --git a/forged/internal/embed/.gitignore b/forged/internal/embed/.gitignore
deleted file mode 100644
index 36bd410..0000000
--- a/forged/internal/embed/.gitignore
+++ /dev/null
@@ -1,6 +0,0 @@
-/hookc/hookc
-/git2d/git2d
-/static
-/templates
-/LICENSE*
-/forged
diff --git a/forged/internal/embed/embed.go b/forged/internal/embed/embed.go
deleted file mode 100644
index f731538..0000000
--- a/forged/internal/embed/embed.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-// Package embed provides embedded filesystems created in build-time.
-package embed
-
-import "embed"
-
-// Source contains the licenses collected at build time.
-// It is intended to be served to the user.
-//
-//go:embed LICENSE*
-var Source embed.FS
-
-// Resources contains the templates and static files used by the web interface,
-// as well as the git backend daemon and the hookc helper.
-//
-//go:embed forged/templates/* forged/static/*
-//go:embed hookc/hookc git2d/git2d
-var Resources embed.FS
diff --git a/forged/internal/git2c/perror.go b/forged/internal/git2c/perror.go
deleted file mode 100644
index 96bffd5..0000000
--- a/forged/internal/git2c/perror.go
+++ /dev/null
@@ -1,48 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-// TODO: Make the C part report detailed error messages too
-
-package git2c
-
-import "errors"
-
-var (
- Success error
- ErrUnknown = errors.New("git2c: unknown error")
- ErrPath = errors.New("git2c: get tree entry by path failed")
- ErrRevparse = errors.New("git2c: revparse failed")
- ErrReadme = errors.New("git2c: no readme")
- ErrBlobExpected = errors.New("git2c: blob expected")
- ErrEntryToObject = errors.New("git2c: tree entry to object conversion failed")
- ErrBlobRawContent = errors.New("git2c: get blob raw content failed")
- ErrRevwalk = errors.New("git2c: revwalk failed")
- ErrRevwalkPushHead = errors.New("git2c: revwalk push head failed")
- ErrBareProto = errors.New("git2c: bare protocol error")
-)
-
-func Perror(errno uint) error {
- switch errno {
- case 0:
- return Success
- case 3:
- return ErrPath
- case 4:
- return ErrRevparse
- case 5:
- return ErrReadme
- case 6:
- return ErrBlobExpected
- case 7:
- return ErrEntryToObject
- case 8:
- return ErrBlobRawContent
- case 9:
- return ErrRevwalk
- case 10:
- return ErrRevwalkPushHead
- case 11:
- return ErrBareProto
- }
- return ErrUnknown
-}
diff --git a/forged/internal/global/global.go b/forged/internal/global/global.go
new file mode 100644
index 0000000..99f85e7
--- /dev/null
+++ b/forged/internal/global/global.go
@@ -0,0 +1,18 @@
+package global
+
+import (
+ "go.lindenii.runxiyu.org/forge/forged/internal/config"
+ "go.lindenii.runxiyu.org/forge/forged/internal/database"
+ "go.lindenii.runxiyu.org/forge/forged/internal/database/queries"
+)
+
+type Global struct {
+ ForgeTitle string // should be removed since it's in Config
+ ForgeVersion string
+ SSHPubkey string
+ SSHFingerprint string
+
+ Config *config.Config
+ Queries *queries.Queries
+ DB *database.Database
+}
diff --git a/forged/internal/incoming/hooks/hooks.go b/forged/internal/incoming/hooks/hooks.go
new file mode 100644
index 0000000..effd104
--- /dev/null
+++ b/forged/internal/incoming/hooks/hooks.go
@@ -0,0 +1,81 @@
+package hooks
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net"
+ "time"
+
+ "github.com/gliderlabs/ssh"
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/cmap"
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/misc"
+ "go.lindenii.runxiyu.org/forge/forged/internal/global"
+)
+
+type Server struct {
+ hookMap cmap.Map[string, hookInfo]
+ socketPath string
+ executablesPath string
+ global *global.Global
+}
+type hookInfo struct {
+ session ssh.Session
+ pubkey string
+ directAccess bool
+ repoPath string
+ userID int
+ userType string
+ repoID int
+ groupPath []string
+ repoName string
+ contribReq string
+}
+
+func New(global *global.Global) (server *Server) {
+ cfg := global.Config.Hooks
+ return &Server{
+ socketPath: cfg.Socket,
+ executablesPath: cfg.Execs,
+ hookMap: cmap.Map[string, hookInfo]{},
+ global: global,
+ }
+}
+
+func (server *Server) Run(ctx context.Context) error {
+ listener, _, err := misc.ListenUnixSocket(ctx, server.socketPath)
+ if err != nil {
+ return fmt.Errorf("listen unix socket for hooks: %w", err)
+ }
+ defer func() {
+ _ = listener.Close()
+ }()
+
+ stop := context.AfterFunc(ctx, func() {
+ _ = listener.Close()
+ })
+ defer stop()
+
+ for {
+ conn, err := listener.Accept()
+ if err != nil {
+ if errors.Is(err, net.ErrClosed) || ctx.Err() != nil {
+ return nil
+ }
+ return fmt.Errorf("accept conn: %w", err)
+ }
+
+ go server.handleConn(ctx, conn)
+ }
+}
+
+func (server *Server) handleConn(ctx context.Context, conn net.Conn) {
+ defer func() {
+ _ = conn.Close()
+ }()
+ unblock := context.AfterFunc(ctx, func() {
+ _ = conn.SetDeadline(time.Now())
+ _ = conn.Close()
+ })
+ defer unblock()
+}
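
The Run/handleConn shape above (close the listener from a context.AfterFunc so Accept unblocks, and treat net.ErrClosed after cancellation as a clean exit) is repeated by the LMTP, SSH, and web servers below. A sketch of driving it from a signal-aware context; the main function, the config path, and the partially populated Global are assumptions, since the real wiring is not part of this diff:

    package main

    import (
        "context"
        "log/slog"
        "os"
        "os/signal"
        "syscall"

        "go.lindenii.runxiyu.org/forge/forged/internal/config"
        "go.lindenii.runxiyu.org/forge/forged/internal/global"
        "go.lindenii.runxiyu.org/forge/forged/internal/incoming/hooks"
    )

    func main() {
        ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
        defer stop()

        cfg, err := config.Open("/etc/lindenii/forge.scfg") // path is an assumption
        if err != nil {
            slog.Error("open config", "error", err)
            os.Exit(1)
        }
        g := &global.Global{Config: &cfg} // Queries and DB omitted in this sketch
        if err := hooks.New(g).Run(ctx); err != nil {
            slog.Error("hooks server exited", "error", err)
            os.Exit(1)
        }
    }
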
diff --git a/forged/internal/incoming/lmtp/lmtp.go b/forged/internal/incoming/lmtp/lmtp.go
new file mode 100644
index 0000000..c8918f8
--- /dev/null
+++ b/forged/internal/incoming/lmtp/lmtp.go
@@ -0,0 +1,71 @@
+package lmtp
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net"
+ "time"
+
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/misc"
+ "go.lindenii.runxiyu.org/forge/forged/internal/global"
+)
+
+type Server struct {
+ socket string
+ domain string
+ maxSize int64
+ writeTimeout uint32
+ readTimeout uint32
+ global *global.Global
+}
+
+func New(global *global.Global) (server *Server) {
+ cfg := global.Config.LMTP
+ return &Server{
+ socket: cfg.Socket,
+ domain: cfg.Domain,
+ maxSize: cfg.MaxSize,
+ writeTimeout: cfg.WriteTimeout,
+ readTimeout: cfg.ReadTimeout,
+ global: global,
+ }
+}
+
+func (server *Server) Run(ctx context.Context) error {
+ listener, _, err := misc.ListenUnixSocket(ctx, server.socket)
+ if err != nil {
+ return fmt.Errorf("listen unix socket for LMTP: %w", err)
+ }
+ defer func() {
+ _ = listener.Close()
+ }()
+
+ stop := context.AfterFunc(ctx, func() {
+ _ = listener.Close()
+ })
+ defer stop()
+
+ for {
+ conn, err := listener.Accept()
+ if err != nil {
+ if errors.Is(err, net.ErrClosed) || ctx.Err() != nil {
+ return nil
+ }
+ return fmt.Errorf("accept conn: %w", err)
+ }
+
+ go server.handleConn(ctx, conn)
+ }
+}
+
+func (server *Server) handleConn(ctx context.Context, conn net.Conn) {
+ defer func() {
+ _ = conn.Close()
+ }()
+ unblock := context.AfterFunc(ctx, func() {
+ _ = conn.SetDeadline(time.Now())
+ _ = conn.Close()
+ })
+ defer unblock()
+}
diff --git a/forged/internal/incoming/ssh/ssh.go b/forged/internal/incoming/ssh/ssh.go
new file mode 100644
index 0000000..1f27be2
--- /dev/null
+++ b/forged/internal/incoming/ssh/ssh.go
@@ -0,0 +1,90 @@
+package ssh
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "os"
+ "time"
+
+ gliderssh "github.com/gliderlabs/ssh"
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/misc"
+ "go.lindenii.runxiyu.org/forge/forged/internal/global"
+ gossh "golang.org/x/crypto/ssh"
+)
+
+type Server struct {
+ gliderServer *gliderssh.Server
+ privkey gossh.Signer
+ net string
+ addr string
+ root string
+ shutdownTimeout uint32
+ global *global.Global
+}
+
+func New(global *global.Global) (server *Server, err error) {
+ cfg := global.Config.SSH
+ server = &Server{
+ net: cfg.Net,
+ addr: cfg.Addr,
+ root: cfg.Root,
+ shutdownTimeout: cfg.ShutdownTimeout,
+ global: global,
+ } //exhaustruct:ignore
+
+ var privkeyBytes []byte
+
+ privkeyBytes, err = os.ReadFile(cfg.Key)
+ if err != nil {
+ return server, fmt.Errorf("read SSH private key: %w", err)
+ }
+
+ server.privkey, err = gossh.ParsePrivateKey(privkeyBytes)
+ if err != nil {
+ return server, fmt.Errorf("parse SSH private key: %w", err)
+ }
+
+ server.global.SSHPubkey = misc.BytesToString(gossh.MarshalAuthorizedKey(server.privkey.PublicKey()))
+ server.global.SSHFingerprint = gossh.FingerprintSHA256(server.privkey.PublicKey())
+
+ server.gliderServer = &gliderssh.Server{
+ Handler: handle,
+ PublicKeyHandler: func(ctx gliderssh.Context, key gliderssh.PublicKey) bool { return true },
+ KeyboardInteractiveHandler: func(ctx gliderssh.Context, challenge gossh.KeyboardInteractiveChallenge) bool { return true },
+ } //exhaustruct:ignore
+ server.gliderServer.AddHostKey(server.privkey)
+
+ return server, nil
+}
+
+func (server *Server) Run(ctx context.Context) (err error) {
+ listener, err := misc.Listen(ctx, server.net, server.addr)
+ if err != nil {
+ return fmt.Errorf("listen for SSH: %w", err)
+ }
+ defer func() {
+ _ = listener.Close()
+ }()
+
+ stop := context.AfterFunc(ctx, func() {
+ shCtx, cancel := context.WithTimeout(context.WithoutCancel(ctx), time.Duration(server.shutdownTimeout)*time.Second)
+ defer cancel()
+ _ = server.gliderServer.Shutdown(shCtx)
+ _ = listener.Close()
+ })
+ defer stop()
+
+ err = server.gliderServer.Serve(listener)
+ if err != nil {
+ if errors.Is(err, gliderssh.ErrServerClosed) || ctx.Err() != nil {
+ return nil
+ }
+ return fmt.Errorf("serve SSH: %w", err)
+ }
+ panic("unreachable")
+}
+
+func handle(session gliderssh.Session) {
+ panic("SSH server handler not implemented yet")
+}
diff --git a/forged/internal/incoming/web/authn.go b/forged/internal/incoming/web/authn.go
new file mode 100644
index 0000000..9754eb1
--- /dev/null
+++ b/forged/internal/incoming/web/authn.go
@@ -0,0 +1,33 @@
+package web
+
+import (
+ "crypto/sha256"
+ "errors"
+ "fmt"
+ "net/http"
+
+ "github.com/jackc/pgx/v5"
+ "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/types"
+)
+
+func userResolver(r *http.Request) (string, string, error) {
+ cookie, err := r.Cookie("session")
+ if err != nil {
+ if errors.Is(err, http.ErrNoCookie) {
+ return "", "", nil
+ }
+ return "", "", err
+ }
+
+ tokenHash := sha256.Sum256([]byte(cookie.Value))
+
+ session, err := types.Base(r).Global.Queries.GetUserFromSession(r.Context(), tokenHash[:])
+ if err != nil {
+ if errors.Is(err, pgx.ErrNoRows) {
+ return "", "", nil
+ }
+ return "", "", err
+ }
+
+ return fmt.Sprint(session.UserID), session.Username, nil
+}
diff --git a/forged/internal/incoming/web/handler.go b/forged/internal/incoming/web/handler.go
new file mode 100644
index 0000000..e0e6ced
--- /dev/null
+++ b/forged/internal/incoming/web/handler.go
@@ -0,0 +1,69 @@
+package web
+
+import (
+ "html/template"
+ "net/http"
+
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/misc"
+ "go.lindenii.runxiyu.org/forge/forged/internal/global"
+ handlers "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/handlers"
+ repoHandlers "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/handlers/repo"
+ specialHandlers "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/handlers/special"
+ "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/templates"
+)
+
+type handler struct {
+ r *Router
+}
+
+func NewHandler(global *global.Global) *handler {
+ cfg := global.Config.Web
+ h := &handler{r: NewRouter().ReverseProxy(cfg.ReverseProxy).Global(global).UserResolver(userResolver)}
+
+ staticFS := http.FileServer(http.Dir(cfg.StaticPath))
+ h.r.ANYHTTP("-/static/*rest",
+ http.StripPrefix("/-/static/", staticFS),
+ WithDirIfEmpty("rest"),
+ )
+
+ funcs := template.FuncMap{
+ "path_escape": misc.PathEscape,
+ "query_escape": misc.QueryEscape,
+ "minus": misc.Minus,
+ "first_line": misc.FirstLine,
+ "dereference_error": misc.DereferenceOrZero[error],
+ }
+ t := templates.MustParseDir(cfg.TemplatesPath, funcs)
+ renderer := templates.New(t)
+
+ indexHTTP := handlers.NewIndexHTTP(renderer)
+ loginHTTP := specialHandlers.NewLoginHTTP(renderer, cfg.CookieExpiry)
+ groupHTTP := handlers.NewGroupHTTP(renderer)
+ repoHTTP := repoHandlers.NewHTTP(renderer)
+ notImpl := handlers.NewNotImplementedHTTP(renderer)
+
+ h.r.GET("/", indexHTTP.Index)
+
+ h.r.ANY("-/login", loginHTTP.Login)
+ h.r.ANY("-/users", notImpl.Handle)
+
+ h.r.GET("@group/", groupHTTP.Index)
+ h.r.POST("@group/", groupHTTP.Post)
+
+ h.r.GET("@group/-/repos/:repo/", repoHTTP.Index)
+ h.r.ANY("@group/-/repos/:repo/info", notImpl.Handle)
+ h.r.ANY("@group/-/repos/:repo/git-upload-pack", notImpl.Handle)
+ h.r.GET("@group/-/repos/:repo/branches/", notImpl.Handle)
+ h.r.GET("@group/-/repos/:repo/log/", notImpl.Handle)
+ h.r.GET("@group/-/repos/:repo/commit/:commit", notImpl.Handle)
+ h.r.GET("@group/-/repos/:repo/tree/*rest", repoHTTP.Tree, WithDirIfEmpty("rest"))
+ h.r.GET("@group/-/repos/:repo/raw/*rest", repoHTTP.Raw, WithDirIfEmpty("rest"))
+ h.r.GET("@group/-/repos/:repo/contrib/", notImpl.Handle)
+ h.r.GET("@group/-/repos/:repo/contrib/:mr", notImpl.Handle)
+
+ return h
+}
+
+func (h *handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+ h.r.ServeHTTP(w, r)
+}
diff --git a/forged/internal/incoming/web/handlers/group.go b/forged/internal/incoming/web/handlers/group.go
new file mode 100644
index 0000000..4823cb7
--- /dev/null
+++ b/forged/internal/incoming/web/handlers/group.go
@@ -0,0 +1,156 @@
+package handlers
+
+import (
+ "fmt"
+ "log/slog"
+ "net/http"
+ "path/filepath"
+ "strconv"
+
+ "github.com/jackc/pgx/v5"
+ "go.lindenii.runxiyu.org/forge/forged/internal/database/queries"
+ "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/templates"
+ wtypes "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/types"
+ "go.lindenii.runxiyu.org/forge/forged/internal/ipc/git2c"
+)
+
+type GroupHTTP struct {
+ r templates.Renderer
+}
+
+func NewGroupHTTP(r templates.Renderer) *GroupHTTP {
+ return &GroupHTTP{
+ r: r,
+ }
+}
+
+func (h *GroupHTTP) Index(w http.ResponseWriter, r *http.Request, _ wtypes.Vars) {
+ base := wtypes.Base(r)
+ userID, err := strconv.ParseInt(base.UserID, 10, 64)
+ if err != nil {
+ userID = 0
+ }
+
+ queryParams := queries.GetGroupByPathParams{
+ Column1: base.URLSegments,
+ UserID: userID,
+ }
+ p, err := base.Global.Queries.GetGroupByPath(r.Context(), queryParams)
+ if err != nil {
+ slog.Error("failed to get group ID by path", "error", err)
+ http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+ return
+ }
+ subgroups, err := base.Global.Queries.GetSubgroups(r.Context(), &p.ID)
+ if err != nil {
+ slog.Error("failed to get subgroups", "error", err)
+ http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+ // TODO: gracefully fail this part of the page
+ }
+ repos, err := base.Global.Queries.GetReposInGroup(r.Context(), p.ID)
+ if err != nil {
+ slog.Error("failed to get repos in group", "error", err)
+ http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+ // TODO: gracefully fail this part of the page
+ }
+ err = h.r.Render(w, "group", struct {
+ BaseData *wtypes.BaseData
+ Subgroups []queries.GetSubgroupsRow
+ Repos []queries.GetReposInGroupRow
+ Description string
+ DirectAccess bool
+ }{
+ BaseData: base,
+ Subgroups: subgroups,
+ Repos: repos,
+ Description: p.Description,
+ DirectAccess: p.HasRole,
+ })
+ if err != nil {
+ slog.Error("failed to render index page", "error", err)
+ }
+}
+
+func (h *GroupHTTP) Post(w http.ResponseWriter, r *http.Request, _ wtypes.Vars) {
+ base := wtypes.Base(r)
+ userID, err := strconv.ParseInt(base.UserID, 10, 64)
+ if err != nil {
+ userID = 0
+ }
+
+ queryParams := queries.GetGroupByPathParams{
+ Column1: base.URLSegments,
+ UserID: userID,
+ }
+ p, err := base.Global.Queries.GetGroupByPath(r.Context(), queryParams)
+ if err != nil {
+ slog.Error("failed to get group ID by path", "error", err)
+ http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+ return
+ }
+
+ if !p.HasRole {
+ http.Error(w, "You do not have the necessary permissions to create repositories in this group.", http.StatusForbidden)
+ return
+ }
+
+ name := r.PostFormValue("repo_name")
+ desc := r.PostFormValue("repo_desc")
+ contrib := r.PostFormValue("repo_contrib")
+ if name == "" {
+ http.Error(w, "Repo name is required", http.StatusBadRequest)
+ return
+ }
+
+ if contrib == "" || contrib == "public" {
+ contrib = "open"
+ }
+
+ tx, err := base.Global.DB.BeginTx(r.Context(), pgx.TxOptions{})
+ if err != nil {
+ slog.Error("begin tx failed", "error", err)
+ http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+ return
+ }
+ defer func() { _ = tx.Rollback(r.Context()) }()
+
+ txq := base.Global.Queries.WithTx(tx)
+ var descPtr *string
+ if desc != "" {
+ descPtr = &desc
+ }
+ repoID, err := txq.InsertRepo(r.Context(), queries.InsertRepoParams{
+ GroupID: p.ID,
+ Name: name,
+ Description: descPtr,
+ ContribRequirements: contrib,
+ })
+ if err != nil {
+ slog.Error("insert repo failed", "error", err)
+ http.Error(w, "Failed to create repository", http.StatusInternalServerError)
+ return
+ }
+
+ repoPath := filepath.Join(base.Global.Config.Git.RepoDir, fmt.Sprintf("%d.git", repoID))
+
+ gitc, err := git2c.NewClient(r.Context(), base.Global.Config.Git.Socket)
+ if err != nil {
+ slog.Error("git2d connect failed", "error", err)
+ http.Error(w, "Failed to initialize repository (backend)", http.StatusInternalServerError)
+ return
+ }
+ defer func() { _ = gitc.Close() }()
+ if err = gitc.InitRepo(repoPath, base.Global.Config.Hooks.Execs); err != nil {
+ slog.Error("git2d init failed", "error", err)
+ http.Error(w, "Failed to initialize repository", http.StatusInternalServerError)
+ return
+ }
+
+ if err = tx.Commit(r.Context()); err != nil {
+ slog.Error("commit tx failed", "error", err)
+ http.Error(w, "Failed to finalize repository creation", http.StatusInternalServerError)
+ return
+ }
+
+ http.Redirect(w, r, r.URL.Path, http.StatusSeeOther)
+}
diff --git a/forged/internal/incoming/web/handlers/index.go b/forged/internal/incoming/web/handlers/index.go
new file mode 100644
index 0000000..a758b07
--- /dev/null
+++ b/forged/internal/incoming/web/handlers/index.go
@@ -0,0 +1,39 @@
+package handlers
+
+import (
+ "log"
+ "net/http"
+
+ "go.lindenii.runxiyu.org/forge/forged/internal/database/queries"
+ "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/templates"
+ wtypes "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/types"
+)
+
+type IndexHTTP struct {
+ r templates.Renderer
+}
+
+func NewIndexHTTP(r templates.Renderer) *IndexHTTP {
+ return &IndexHTTP{
+ r: r,
+ }
+}
+
+func (h *IndexHTTP) Index(w http.ResponseWriter, r *http.Request, _ wtypes.Vars) {
+ groups, err := wtypes.Base(r).Global.Queries.GetRootGroups(r.Context())
+ if err != nil {
+ http.Error(w, "failed to get root groups", http.StatusInternalServerError)
+ log.Println("failed to get root groups", "error", err)
+ return
+ }
+ err = h.r.Render(w, "index", struct {
+ BaseData *wtypes.BaseData
+ Groups []queries.GetRootGroupsRow
+ }{
+ BaseData: wtypes.Base(r),
+ Groups: groups,
+ })
+ if err != nil {
+ log.Println("failed to render index page", "error", err)
+ }
+}
diff --git a/forged/internal/incoming/web/handlers/not_implemented.go b/forged/internal/incoming/web/handlers/not_implemented.go
new file mode 100644
index 0000000..6813c88
--- /dev/null
+++ b/forged/internal/incoming/web/handlers/not_implemented.go
@@ -0,0 +1,22 @@
+package handlers
+
+import (
+ "net/http"
+
+ "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/templates"
+ wtypes "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/types"
+)
+
+type NotImplementedHTTP struct {
+ r templates.Renderer
+}
+
+func NewNotImplementedHTTP(r templates.Renderer) *NotImplementedHTTP {
+ return &NotImplementedHTTP{
+ r: r,
+ }
+}
+
+func (h *NotImplementedHTTP) Handle(w http.ResponseWriter, _ *http.Request, _ wtypes.Vars) {
+ http.Error(w, "not implemented", http.StatusNotImplemented)
+}
diff --git a/forged/internal/incoming/web/handlers/repo/handler.go b/forged/internal/incoming/web/handlers/repo/handler.go
new file mode 100644
index 0000000..2881d7d
--- /dev/null
+++ b/forged/internal/incoming/web/handlers/repo/handler.go
@@ -0,0 +1,15 @@
+package repo
+
+import (
+ "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/templates"
+)
+
+type HTTP struct {
+ r templates.Renderer
+}
+
+func NewHTTP(r templates.Renderer) *HTTP {
+ return &HTTP{
+ r: r,
+ }
+}
diff --git a/forged/internal/incoming/web/handlers/repo/index.go b/forged/internal/incoming/web/handlers/repo/index.go
new file mode 100644
index 0000000..c2cb24a
--- /dev/null
+++ b/forged/internal/incoming/web/handlers/repo/index.go
@@ -0,0 +1,132 @@
+package repo
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "log/slog"
+ "net/http"
+ "net/url"
+ "path/filepath"
+ "strings"
+
+ "github.com/yuin/goldmark"
+ "github.com/yuin/goldmark/extension"
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/misc"
+ "go.lindenii.runxiyu.org/forge/forged/internal/database/queries"
+ wtypes "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/types"
+ "go.lindenii.runxiyu.org/forge/forged/internal/ipc/git2c"
+)
+
+func (h *HTTP) Index(w http.ResponseWriter, r *http.Request, v wtypes.Vars) {
+ base := wtypes.Base(r)
+ repoName := v["repo"]
+ slog.Info("repo index", "group_path", base.GroupPath, "repo", repoName)
+
+ var userID int64
+ if base.UserID != "" {
+ _, _ = fmt.Sscan(base.UserID, &userID)
+ }
+ grp, err := base.Global.Queries.GetGroupByPath(r.Context(), queries.GetGroupByPathParams{
+ Column1: base.GroupPath,
+ UserID: userID,
+ })
+ if err != nil {
+ slog.Error("get group by path", "error", err)
+ http.Error(w, "Group not found", http.StatusNotFound)
+ return
+ }
+
+ repoRow, err := base.Global.Queries.GetRepoByGroupAndName(r.Context(), queries.GetRepoByGroupAndNameParams{
+ GroupID: grp.ID,
+ Name: repoName,
+ })
+ if err != nil {
+ slog.Error("get repo by name", "error", err)
+ http.Error(w, "Repository not found", http.StatusNotFound)
+ return
+ }
+
+ repoPath := filepath.Join(base.Global.Config.Git.RepoDir, fmt.Sprintf("%d.git", repoRow.ID))
+
+ var commits []git2c.Commit
+ var readme template.HTML
+ var commitsErr error
+ var readmeFile *git2c.FilenameContents
+ var cerr error
+ client, err := git2c.NewClient(r.Context(), base.Global.Config.Git.Socket)
+ if err == nil {
+ defer func() { _ = client.Close() }()
+ commits, readmeFile, cerr = client.CmdIndex(repoPath)
+ if cerr != nil {
+ commitsErr = cerr
+ slog.Error("git2d CmdIndex failed", "error", cerr, "path", repoPath)
+ } else if readmeFile != nil {
+ nameLower := strings.ToLower(readmeFile.Filename)
+ if strings.HasSuffix(nameLower, ".md") || strings.HasSuffix(nameLower, ".markdown") || nameLower == "readme" {
+ md := goldmark.New(
+ goldmark.WithExtensions(extension.GFM),
+ )
+ var buf bytes.Buffer
+ if err := md.Convert(readmeFile.Content, &buf); err == nil {
+ readme = template.HTML(buf.String())
+ } else {
+ readme = template.HTML(template.HTMLEscapeString(string(readmeFile.Content)))
+ }
+ } else {
+ readme = template.HTML(template.HTMLEscapeString(string(readmeFile.Content)))
+ }
+ }
+ } else {
+ commitsErr = err
+ slog.Error("git2d connect failed", "error", err)
+ }
+
+ sshRoot := strings.TrimSuffix(base.Global.Config.SSH.Root, "/")
+ httpRoot := strings.TrimSuffix(base.Global.Config.Web.Root, "/")
+ pathPart := misc.SegmentsToURL(base.GroupPath) + "/-/repos/" + url.PathEscape(repoRow.Name)
+ sshURL := ""
+ httpURL := ""
+ if sshRoot != "" {
+ sshURL = sshRoot + "/" + pathPart
+ }
+ if httpRoot != "" {
+ httpURL = httpRoot + "/" + pathPart
+ }
+
+ var notes []string
+ if len(commits) == 0 && commitsErr == nil {
+ notes = append(notes, "This repository has no commits yet.")
+ }
+ if readme == template.HTML("") {
+ notes = append(notes, "No README found in the default branch.")
+ }
+ if sshURL == "" && httpURL == "" {
+ notes = append(notes, "Clone URLs not configured (missing SSH root and HTTP root).")
+ }
+
+ cloneURL := sshURL
+ if cloneURL == "" {
+ cloneURL = httpURL
+ }
+
+ data := map[string]any{
+ "BaseData": base,
+ "group_path": base.GroupPath,
+ "repo_name": repoRow.Name,
+ "repo_description": repoRow.Description,
+ "ssh_clone_url": cloneURL,
+ "ref_name": base.RefName,
+ "commits": commits,
+ "commits_err": &commitsErr,
+ "readme": readme,
+ "notes": notes,
+ "global": map[string]any{
+ "forge_title": base.Global.ForgeTitle,
+ },
+ }
+ if err := h.r.Render(w, "repo_index", data); err != nil {
+ slog.Error("render repo index", "error", err)
+ http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+ }
+}
diff --git a/forged/internal/incoming/web/handlers/repo/raw.go b/forged/internal/incoming/web/handlers/repo/raw.go
new file mode 100644
index 0000000..8bdfae3
--- /dev/null
+++ b/forged/internal/incoming/web/handlers/repo/raw.go
@@ -0,0 +1,19 @@
+package repo
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ wtypes "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/types"
+)
+
+func (h *HTTP) Raw(w http.ResponseWriter, r *http.Request, v wtypes.Vars) {
+ base := wtypes.Base(r)
+ repo := v["repo"]
+ rest := v["rest"]
+ if base.DirMode && rest != "" && !strings.HasSuffix(rest, "/") {
+ rest += "/"
+ }
+ _, _ = fmt.Fprintf(w, "raw: repo=%q path=%q", repo, rest)
+}
diff --git a/forged/internal/incoming/web/handlers/repo/tree.go b/forged/internal/incoming/web/handlers/repo/tree.go
new file mode 100644
index 0000000..236dd48
--- /dev/null
+++ b/forged/internal/incoming/web/handlers/repo/tree.go
@@ -0,0 +1,19 @@
+package repo
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ wtypes "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/types"
+)
+
+func (h *HTTP) Tree(w http.ResponseWriter, r *http.Request, v wtypes.Vars) {
+ base := wtypes.Base(r)
+ repo := v["repo"]
+ rest := v["rest"] // may be ""
+ if base.DirMode && rest != "" && !strings.HasSuffix(rest, "/") {
+ rest += "/"
+ }
+ _, _ = fmt.Fprintf(w, "tree: repo=%q path=%q", repo, rest)
+}
diff --git a/forged/internal/incoming/web/handlers/special/login.go b/forged/internal/incoming/web/handlers/special/login.go
new file mode 100644
index 0000000..5672f1f
--- /dev/null
+++ b/forged/internal/incoming/web/handlers/special/login.go
@@ -0,0 +1,119 @@
+package handlers
+
+import (
+ "crypto/rand"
+ "crypto/sha256"
+ "errors"
+ "log"
+ "net/http"
+ "time"
+
+ "github.com/jackc/pgx/v5"
+ "github.com/jackc/pgx/v5/pgtype"
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/argon2id"
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/misc"
+ "go.lindenii.runxiyu.org/forge/forged/internal/database/queries"
+ "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/templates"
+ wtypes "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/types"
+)
+
+type LoginHTTP struct {
+ r templates.Renderer
+ cookieExpiry int
+}
+
+func NewLoginHTTP(r templates.Renderer, cookieExpiry int) *LoginHTTP {
+ return &LoginHTTP{
+ r: r,
+ cookieExpiry: cookieExpiry,
+ }
+}
+
+func (h *LoginHTTP) Login(w http.ResponseWriter, r *http.Request, _ wtypes.Vars) {
+ renderLoginPage := func(loginError string) bool {
+ err := h.r.Render(w, "login", struct {
+ BaseData *wtypes.BaseData
+ LoginError string
+ }{
+ BaseData: wtypes.Base(r),
+ LoginError: loginError,
+ })
+ if err != nil {
+ log.Println("failed to render login page", "error", err)
+ http.Error(w, "Failed to render login page", http.StatusInternalServerError)
+ return true
+ }
+ return false
+ }
+
+ if r.Method == http.MethodGet {
+ renderLoginPage("")
+ return
+ }
+
+ username := r.PostFormValue("username")
+ password := r.PostFormValue("password")
+
+ userCreds, err := wtypes.Base(r).Global.Queries.GetUserCreds(r.Context(), &username)
+ if err != nil {
+ if errors.Is(err, pgx.ErrNoRows) {
+ renderLoginPage("User not found")
+ return
+ }
+ log.Println("failed to get user credentials", "error", err)
+ http.Error(w, "Failed to get user credentials", http.StatusInternalServerError)
+ return
+ }
+
+ if userCreds.PasswordHash == "" {
+ renderLoginPage("No password set for this user")
+ return
+ }
+
+ passwordMatches, err := argon2id.ComparePasswordAndHash(password, userCreds.PasswordHash)
+ if err != nil {
+ log.Println("failed to compare password and hash", "error", err)
+ http.Error(w, "Failed to verify password", http.StatusInternalServerError)
+ return
+ }
+
+ if !passwordMatches {
+ renderLoginPage("Invalid password")
+ return
+ }
+
+ cookieValue := rand.Text()
+
+ now := time.Now()
+ expiry := now.Add(time.Duration(h.cookieExpiry) * time.Second)
+
+ cookie := &http.Cookie{
+ Name: "session",
+ Value: cookieValue,
+ SameSite: http.SameSiteLaxMode,
+ HttpOnly: true,
+ Secure: false, // TODO
+ Expires: expiry,
+ Path: "/",
+ } //exhaustruct:ignore
+
+ http.SetCookie(w, cookie)
+
+ tokenHash := sha256.Sum256(misc.StringToBytes(cookieValue))
+
+ err = wtypes.Base(r).Global.Queries.InsertSession(r.Context(), queries.InsertSessionParams{
+ UserID: userCreds.ID,
+ TokenHash: tokenHash[:],
+ ExpiresAt: pgtype.Timestamptz{
+ Time: expiry,
+ Valid: true,
+ },
+ })
+ if err != nil {
+ log.Println("failed to insert session", "error", err)
+ http.Error(w, "Failed to create session", http.StatusInternalServerError)
+ return
+ }
+
+ http.Redirect(w, r, "/", http.StatusSeeOther)
+}
diff --git a/forged/internal/incoming/web/router.go b/forged/internal/incoming/web/router.go
new file mode 100644
index 0000000..3809afb
--- /dev/null
+++ b/forged/internal/incoming/web/router.go
@@ -0,0 +1,419 @@
+package web
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "sort"
+ "strings"
+
+ "go.lindenii.runxiyu.org/forge/forged/internal/global"
+ wtypes "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web/types"
+)
+
+type UserResolver func(*http.Request) (id string, username string, err error)
+
+type ErrorRenderers struct {
+ BadRequest func(http.ResponseWriter, *wtypes.BaseData, string)
+ BadRequestColon func(http.ResponseWriter, *wtypes.BaseData)
+ NotFound func(http.ResponseWriter, *wtypes.BaseData)
+ ServerError func(http.ResponseWriter, *wtypes.BaseData, string)
+}
+
+type dirPolicy int
+
+const (
+ dirIgnore dirPolicy = iota
+ dirRequire
+ dirForbid
+ dirRequireIfEmpty
+)
+
+type patKind uint8
+
+const (
+ lit patKind = iota
+ param
+ splat
+ group // @group, must be first token
+)
+
+type patSeg struct {
+ kind patKind
+ lit string
+ key string
+}
+
+type route struct {
+ method string
+ rawPattern string
+ wantDir dirPolicy
+ ifEmptyKey string
+ segs []patSeg
+ h wtypes.HandlerFunc
+ hh http.Handler
+ priority int
+}
+
+type Router struct {
+ routes []route
+ errors ErrorRenderers
+ user UserResolver
+ global *global.Global
+ reverseProxy bool
+}
+
+func NewRouter() *Router { return &Router{} }
+
+func (r *Router) Global(g *global.Global) *Router {
+ r.global = g
+ return r
+}
+func (r *Router) ReverseProxy(enabled bool) *Router { r.reverseProxy = enabled; return r }
+func (r *Router) Errors(e ErrorRenderers) *Router { r.errors = e; return r }
+func (r *Router) UserResolver(u UserResolver) *Router { r.user = u; return r }
+
+type RouteOption func(*route)
+
+func WithDir() RouteOption { return func(rt *route) { rt.wantDir = dirRequire } }
+func WithoutDir() RouteOption { return func(rt *route) { rt.wantDir = dirForbid } }
+func WithDirIfEmpty(param string) RouteOption {
+ return func(rt *route) { rt.wantDir = dirRequireIfEmpty; rt.ifEmptyKey = param }
+}
+
+func (r *Router) GET(pattern string, f wtypes.HandlerFunc, opts ...RouteOption) {
+ r.handle("GET", pattern, f, nil, opts...)
+}
+
+func (r *Router) POST(pattern string, f wtypes.HandlerFunc, opts ...RouteOption) {
+ r.handle("POST", pattern, f, nil, opts...)
+}
+
+func (r *Router) ANY(pattern string, f wtypes.HandlerFunc, opts ...RouteOption) {
+ r.handle("", pattern, f, nil, opts...)
+}
+
+func (r *Router) ANYHTTP(pattern string, hh http.Handler, opts ...RouteOption) {
+ r.handle("", pattern, nil, hh, opts...)
+}
+
+func (r *Router) handle(method, pattern string, f wtypes.HandlerFunc, hh http.Handler, opts ...RouteOption) {
+ want := dirIgnore
+ if strings.HasSuffix(pattern, "/") {
+ want = dirRequire
+ pattern = strings.TrimSuffix(pattern, "/")
+ } else if pattern != "" {
+ want = dirForbid
+ }
+ segs, prio := compilePattern(pattern)
+ rt := route{
+ method: method,
+ rawPattern: pattern,
+ wantDir: want,
+ segs: segs,
+ h: f,
+ hh: hh,
+ priority: prio,
+ }
+ for _, o := range opts {
+ o(&rt)
+ }
+ r.routes = append(r.routes, rt)
+
+ sort.SliceStable(r.routes, func(i, j int) bool {
+ return r.routes[i].priority > r.routes[j].priority
+ })
+}
+
+func (r *Router) ServeHTTP(w http.ResponseWriter, req *http.Request) {
+ segments, dirMode, err := splitAndUnescapePath(req.URL.EscapedPath())
+ if err != nil {
+ r.err400(w, &wtypes.BaseData{Global: r.global}, "Error parsing request URI: "+err.Error())
+ return
+ }
+ for _, s := range segments {
+ if strings.Contains(s, ":") {
+ r.err400Colon(w, &wtypes.BaseData{Global: r.global})
+ return
+ }
+ }
+
+ bd := &wtypes.BaseData{
+ Global: r.global,
+ URLSegments: segments,
+ DirMode: dirMode,
+ }
+ req = req.WithContext(wtypes.WithBaseData(req.Context(), bd))
+
+ bd.RefType, bd.RefName, err = GetParamRefTypeName(req)
+ if err != nil {
+ r.err400(w, bd, "Error parsing ref query parameters: "+err.Error())
+ return
+ }
+
+ if r.user != nil {
+ uid, uname, uerr := r.user(req)
+ if uerr != nil {
+ r.err500(w, bd, "Error getting user info from request: "+uerr.Error())
+ return
+ }
+ bd.UserID = uid
+ bd.Username = uname
+ }
+
+ method := req.Method
+ var pathMatched bool
+ var matchedRaw string
+
+ for _, rt := range r.routes {
+ ok, vars, sepIdx := match(rt.segs, segments)
+ if !ok {
+ continue
+ }
+ pathMatched = true
+ matchedRaw = rt.rawPattern
+
+ switch rt.wantDir {
+ case dirRequire:
+ if !dirMode && redirectAddSlash(w, req) {
+ return
+ }
+ case dirForbid:
+ if dirMode && redirectDropSlash(w, req) {
+ return
+ }
+ case dirRequireIfEmpty:
+ if v := vars[rt.ifEmptyKey]; v == "" && !dirMode && redirectAddSlash(w, req) {
+ return
+ }
+ }
+
+ bd.SeparatorIndex = sepIdx
+ if g := vars["group"]; g == "" {
+ bd.GroupPath = []string{}
+ } else {
+ bd.GroupPath = strings.Split(g, "/")
+ }
+
+ if rt.method != "" && rt.method != method && (method != http.MethodHead || rt.method != http.MethodGet) {
+ continue
+ }
+
+ if rt.h != nil {
+ rt.h(w, req, wtypes.Vars(vars))
+ } else if rt.hh != nil {
+ rt.hh.ServeHTTP(w, req)
+ } else {
+ r.err500(w, bd, "route has no handler")
+ }
+ return
+ }
+
+ if pathMatched {
+ w.Header().Set("Allow", allowForPattern(r.routes, matchedRaw))
+ http.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed)
+ return
+ }
+ r.err404(w, bd)
+}
+
+func compilePattern(pat string) ([]patSeg, int) {
+ if pat == "" || pat == "/" {
+ return nil, 1000
+ }
+ pat = strings.Trim(pat, "/")
+ raw := strings.Split(pat, "/")
+
+ segs := make([]patSeg, 0, len(raw))
+ prio := 0
+ for i, t := range raw {
+ switch {
+ case t == "@group":
+ if i != 0 {
+ segs = append(segs, patSeg{kind: lit, lit: t})
+ prio += 10
+ continue
+ }
+ segs = append(segs, patSeg{kind: group})
+ prio += 1
+ case strings.HasPrefix(t, ":"):
+ segs = append(segs, patSeg{kind: param, key: t[1:]})
+ prio += 5
+ case strings.HasPrefix(t, "*"):
+ segs = append(segs, patSeg{kind: splat, key: t[1:]})
+ default:
+ segs = append(segs, patSeg{kind: lit, lit: t})
+ prio += 10
+ }
+ }
+ return segs, prio
+}
+
+func match(pat []patSeg, segs []string) (bool, map[string]string, int) {
+ vars := make(map[string]string)
+ i := 0
+ sepIdx := -1
+ for pi := 0; pi < len(pat); pi++ {
+ ps := pat[pi]
+ switch ps.kind {
+ case group:
+ start := i
+ for i < len(segs) && segs[i] != "-" {
+ i++
+ }
+ if start < i {
+ vars["group"] = strings.Join(segs[start:i], "/")
+ } else {
+ vars["group"] = ""
+ }
+ if i < len(segs) && segs[i] == "-" {
+ sepIdx = i
+ }
+ case lit:
+ if i >= len(segs) || segs[i] != ps.lit {
+ return false, nil, -1
+ }
+ i++
+ case param:
+ if i >= len(segs) {
+ return false, nil, -1
+ }
+ vars[ps.key] = segs[i]
+ i++
+ case splat:
+ if i < len(segs) {
+ vars[ps.key] = strings.Join(segs[i:], "/")
+ i = len(segs)
+ } else {
+ vars[ps.key] = ""
+ }
+ pi = len(pat)
+ }
+ }
+ if i != len(segs) {
+ return false, nil, -1
+ }
+ return true, vars, sepIdx
+}
+
+func splitAndUnescapePath(escaped string) ([]string, bool, error) {
+ if escaped == "" {
+ return nil, false, nil
+ }
+ dir := strings.HasSuffix(escaped, "/")
+ path := strings.Trim(escaped, "/")
+ if path == "" {
+ return []string{}, dir, nil
+ }
+ raw := strings.Split(path, "/")
+ out := make([]string, 0, len(raw))
+ for _, seg := range raw {
+ u, err := url.PathUnescape(seg)
+ if err != nil {
+ return nil, dir, err
+ }
+ if u != "" {
+ out = append(out, u)
+ }
+ }
+ return out, dir, nil
+}
+
+func redirectAddSlash(w http.ResponseWriter, r *http.Request) bool {
+ u := *r.URL
+ u.Path = u.EscapedPath() + "/"
+ http.Redirect(w, r, u.String(), http.StatusTemporaryRedirect)
+ return true
+}
+
+func redirectDropSlash(w http.ResponseWriter, r *http.Request) bool {
+ u := *r.URL
+ u.Path = strings.TrimRight(u.EscapedPath(), "/")
+ if u.Path == "" {
+ u.Path = "/"
+ }
+ http.Redirect(w, r, u.String(), http.StatusTemporaryRedirect)
+ return true
+}
+
+func allowForPattern(routes []route, raw string) string {
+ seen := map[string]struct{}{}
+ out := make([]string, 0, 4)
+ for _, rt := range routes {
+ if rt.rawPattern != raw || rt.method == "" {
+ continue
+ }
+ if _, ok := seen[rt.method]; ok {
+ continue
+ }
+ seen[rt.method] = struct{}{}
+ out = append(out, rt.method)
+ }
+ sort.Strings(out)
+ return strings.Join(out, ", ")
+}
+
+func (r *Router) err400(w http.ResponseWriter, b *wtypes.BaseData, msg string) {
+ if r.errors.BadRequest != nil {
+ r.errors.BadRequest(w, b, msg)
+ return
+ }
+ http.Error(w, msg, http.StatusBadRequest)
+}
+
+func (r *Router) err400Colon(w http.ResponseWriter, b *wtypes.BaseData) {
+ if r.errors.BadRequestColon != nil {
+ r.errors.BadRequestColon(w, b)
+ return
+ }
+ http.Error(w, "bad request", http.StatusBadRequest)
+}
+
+func (r *Router) err404(w http.ResponseWriter, b *wtypes.BaseData) {
+ if r.errors.NotFound != nil {
+ r.errors.NotFound(w, b)
+ return
+ }
+ http.NotFound(w, nil)
+}
+
+func (r *Router) err500(w http.ResponseWriter, b *wtypes.BaseData, msg string) {
+ if r.errors.ServerError != nil {
+ r.errors.ServerError(w, b, msg)
+ return
+ }
+ http.Error(w, msg, http.StatusInternalServerError)
+}
+
+func GetParamRefTypeName(request *http.Request) (retRefType, retRefName string, err error) {
+ rawQuery := request.URL.RawQuery
+ queryValues, err := url.ParseQuery(rawQuery)
+ if err != nil {
+ return
+ }
+ done := false
+ for _, refType := range []string{"commit", "branch", "tag"} {
+ refName, ok := queryValues[refType]
+ if ok {
+ if done {
+ err = errDupRefSpec
+ return
+ }
+ done = true
+ if len(refName) != 1 {
+ err = errDupRefSpec
+ return
+ }
+ retRefName = refName[0]
+ retRefType = refType
+ }
+ }
+ if !done {
+ retRefType = ""
+ retRefName = ""
+ err = nil
+ }
+ return
+}
+
+var errDupRefSpec = fmt.Errorf("duplicate ref specifications")
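
For concreteness, here is how the compile/match pair above resolves one of the patterns registered in handler.go. Both helpers are unexported, so this sketch would live inside package web (for example in a test); the request path is an example, not taken from this diff:

    // GET /lindenii/forge/-/repos/forged/tree/internal/ against the pattern
    // "@group/-/repos/:repo/tree/*rest":
    segs, _ := compilePattern("@group/-/repos/:repo/tree/*rest")
    ok, vars, sep := match(segs, []string{"lindenii", "forge", "-", "repos", "forged", "tree", "internal"})
    // ok  == true
    // sep == 2 (index of the "-" separator, stored as SeparatorIndex)
    // vars["group"] == "lindenii/forge", vars["repo"] == "forged", vars["rest"] == "internal"
    _, _, _ = ok, vars, sep
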
diff --git a/forged/internal/incoming/web/server.go b/forged/internal/incoming/web/server.go
new file mode 100644
index 0000000..ab70aec
--- /dev/null
+++ b/forged/internal/incoming/web/server.go
@@ -0,0 +1,70 @@
+package web
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net"
+ "net/http"
+ "time"
+
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/misc"
+ "go.lindenii.runxiyu.org/forge/forged/internal/global"
+)
+
+type Server struct {
+ net string
+ addr string
+ root string
+ httpServer *http.Server
+ shutdownTimeout uint32
+ global *global.Global
+}
+
+func New(global *global.Global) *Server {
+ cfg := global.Config.Web
+ httpServer := &http.Server{
+ Handler: NewHandler(global),
+ ReadTimeout: time.Duration(cfg.ReadTimeout) * time.Second,
+ WriteTimeout: time.Duration(cfg.WriteTimeout) * time.Second,
+ IdleTimeout: time.Duration(cfg.IdleTimeout) * time.Second,
+ MaxHeaderBytes: cfg.MaxHeaderBytes,
+ } //exhaustruct:ignore
+ return &Server{
+ net: cfg.Net,
+ addr: cfg.Addr,
+ root: cfg.Root,
+ shutdownTimeout: cfg.ShutdownTimeout,
+ httpServer: httpServer,
+ global: global,
+ }
+}
+
+func (server *Server) Run(ctx context.Context) (err error) {
+ server.httpServer.BaseContext = func(_ net.Listener) context.Context { return ctx }
+
+ listener, err := misc.Listen(ctx, server.net, server.addr)
+ if err != nil {
+ return fmt.Errorf("listen for web: %w", err)
+ }
+ defer func() {
+ _ = listener.Close()
+ }()
+
+ stop := context.AfterFunc(ctx, func() {
+ shCtx, cancel := context.WithTimeout(context.WithoutCancel(ctx), time.Duration(server.shutdownTimeout)*time.Second)
+ defer cancel()
+ _ = server.httpServer.Shutdown(shCtx)
+ _ = listener.Close()
+ })
+ defer stop()
+
+ err = server.httpServer.Serve(listener)
+ if err != nil {
+ if errors.Is(err, http.ErrServerClosed) || ctx.Err() != nil {
+ return nil
+ }
+ return fmt.Errorf("serve web: %w", err)
+ }
+ panic("unreachable")
+}
diff --git a/forged/internal/incoming/web/templates/load.go b/forged/internal/incoming/web/templates/load.go
new file mode 100644
index 0000000..4a6fc49
--- /dev/null
+++ b/forged/internal/incoming/web/templates/load.go
@@ -0,0 +1,31 @@
+package templates
+
+import (
+ "html/template"
+ "io/fs"
+ "os"
+ "path/filepath"
+)
+
+func MustParseDir(dir string, funcs template.FuncMap) *template.Template {
+ base := template.New("").Funcs(funcs)
+
+ err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error {
+ if err != nil {
+ return err
+ }
+ if d.IsDir() {
+ return nil
+ }
+ b, err := os.ReadFile(path)
+ if err != nil {
+ return err
+ }
+ _, err = base.Parse(string(b))
+ return err
+ })
+ if err != nil {
+ panic(err)
+ }
+ return base
+}
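
MustParseDir parses every file into a single template namespace, so each file is expected to define the page name that the handlers later pass to Render ("index", "group", "login", "repo_index"). A minimal illustration of what one parsed file would contribute; the content is an assumption, not taken from this diff:

    // Equivalent of a file under the templates directory (illustrative only):
    const exampleIndexTemplate = `{{define "index"}}<title>{{.BaseData.Global.ForgeTitle}}</title>{{end}}`
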
diff --git a/forged/internal/incoming/web/templates/renderer.go b/forged/internal/incoming/web/templates/renderer.go
new file mode 100644
index 0000000..350e9ec
--- /dev/null
+++ b/forged/internal/incoming/web/templates/renderer.go
@@ -0,0 +1,35 @@
+package templates
+
+import (
+ "bytes"
+ "html/template"
+ "log/slog"
+ "net/http"
+)
+
+type Renderer interface {
+ Render(w http.ResponseWriter, name string, data any) error
+}
+
+type tmplRenderer struct {
+ t *template.Template
+}
+
+func New(t *template.Template) Renderer {
+ return &tmplRenderer{t: t}
+}
+
+func (r *tmplRenderer) Render(w http.ResponseWriter, name string, data any) error {
+ var buf bytes.Buffer
+ if err := r.t.ExecuteTemplate(&buf, name, data); err != nil {
+ slog.Error("template render failed", "name", name, "error", err)
+ return err
+ }
+ w.Header().Set("Content-Type", "text/html; charset=utf-8")
+ n, err := w.Write(buf.Bytes())
+ if err != nil {
+ return err
+ }
+ slog.Info("template rendered", "name", name, "bytes", n)
+ return nil
+}
diff --git a/forged/internal/incoming/web/types/types.go b/forged/internal/incoming/web/types/types.go
new file mode 100644
index 0000000..4b9a65a
--- /dev/null
+++ b/forged/internal/incoming/web/types/types.go
@@ -0,0 +1,37 @@
+package types
+
+import (
+ "context"
+ "net/http"
+
+ "go.lindenii.runxiyu.org/forge/forged/internal/global"
+)
+
+type BaseData struct {
+ UserID string
+ Username string
+ URLSegments []string
+ DirMode bool
+ GroupPath []string
+ SeparatorIndex int
+ RefType string
+ RefName string
+ Global *global.Global
+}
+
+type ctxKey struct{}
+
+func WithBaseData(ctx context.Context, b *BaseData) context.Context {
+ return context.WithValue(ctx, ctxKey{}, b)
+}
+
+func Base(r *http.Request) *BaseData {
+ if v, ok := r.Context().Value(ctxKey{}).(*BaseData); ok && v != nil {
+ return v
+ }
+ return &BaseData{}
+}
+
+type Vars map[string]string
+
+type HandlerFunc func(http.ResponseWriter, *http.Request, Vars)
diff --git a/forged/internal/ipc/git2c/build.go b/forged/internal/ipc/git2c/build.go
new file mode 100644
index 0000000..3d1b7a0
--- /dev/null
+++ b/forged/internal/ipc/git2c/build.go
@@ -0,0 +1,119 @@
+// SPDX-License-Identifier: AGPL-3.0-only
+// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
+
+package git2c
+
+import (
+ "encoding/hex"
+ "fmt"
+ "path"
+ "sort"
+ "strings"
+)
+
+func (c *Client) BuildTreeRecursive(repoPath, baseTreeHex string, updates map[string]string) (string, error) {
+ treeCache := make(map[string][]TreeEntryRaw)
+ var walk func(prefix, hexid string) error
+ walk = func(prefix, hexid string) error {
+ ents, err := c.TreeListByOID(repoPath, hexid)
+ if err != nil {
+ return err
+ }
+ treeCache[prefix] = ents
+ for _, e := range ents {
+ // Directory modes may appear as octal 0o40000 or the literal 40000; accept both.
+ if e.Mode == 0o40000 || e.Mode == 40000 {
+ sub := path.Join(prefix, e.Name)
+ if err := walk(sub, e.OID); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+ }
+ if err := walk("", baseTreeHex); err != nil {
+ return "", err
+ }
+
+ for p, blob := range updates {
+ parts := strings.Split(p, "/")
+ dir := strings.Join(parts[:len(parts)-1], "/")
+ name := parts[len(parts)-1]
+ entries := treeCache[dir]
+ found := false
+ for i := range entries {
+ if entries[i].Name == name {
+ if blob == "" {
+ entries = append(entries[:i], entries[i+1:]...)
+ } else {
+ entries[i].Mode = 0o100644
+ entries[i].OID = blob
+ }
+ found = true
+ break
+ }
+ }
+ if !found && blob != "" {
+ entries = append(entries, TreeEntryRaw{Mode: 0o100644, Name: name, OID: blob})
+ }
+ treeCache[dir] = entries
+ }
+
+ built := make(map[string]string)
+ var build func(prefix string) (string, error)
+ build = func(prefix string) (string, error) {
+ entries := treeCache[prefix]
+ for i := range entries {
+ if entries[i].Mode == 0o40000 || entries[i].Mode == 40000 {
+ sub := path.Join(prefix, entries[i].Name)
+ var ok bool
+ var oid string
+ if oid, ok = built[sub]; !ok {
+ var err error
+ oid, err = build(sub)
+ if err != nil {
+ return "", err
+ }
+ }
+ entries[i].Mode = 0o40000
+ entries[i].OID = oid
+ }
+ }
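+ // Order entries approximately the way git orders tree entries, where
+ // directories compare as if their name had a trailing slash; plain
+ // lexicographic order applies when neither name prefixes the other.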
+ sort.Slice(entries, func(i, j int) bool {
+ ni, nj := entries[i].Name, entries[j].Name
+ if ni == nj {
+ return entries[i].Mode != 0o40000 && entries[j].Mode == 0o40000
+ }
+ if strings.HasPrefix(nj, ni) && len(ni) < len(nj) {
+ return entries[i].Mode != 0o40000
+ }
+ if strings.HasPrefix(ni, nj) && len(nj) < len(ni) {
+ return entries[j].Mode == 0o40000
+ }
+ return ni < nj
+ })
+ wr := make([]TreeEntryRaw, 0, len(entries))
+ for _, e := range entries {
+ if e.OID == "" {
+ continue
+ }
+ if e.Mode == 40000 {
+ e.Mode = 0o40000
+ }
+ if _, err := hex.DecodeString(e.OID); err != nil {
+ return "", fmt.Errorf("invalid OID hex for %s/%s: %w", prefix, e.Name, err)
+ }
+ wr = append(wr, TreeEntryRaw{Mode: e.Mode, Name: e.Name, OID: e.OID})
+ }
+ id, err := c.WriteTree(repoPath, wr)
+ if err != nil {
+ return "", err
+ }
+ built[prefix] = id
+ return id, nil
+ }
+ root, err := build("")
+ if err != nil {
+ return "", err
+ }
+ return root, nil
+}
diff --git a/forged/internal/git2c/client.go b/forged/internal/ipc/git2c/client.go
index ed9390c..8b11035 100644
--- a/forged/internal/git2c/client.go
+++ b/forged/internal/ipc/git2c/client.go
@@ -1,14 +1,14 @@
// SPDX-License-Identifier: AGPL-3.0-only
// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-// Package git2c provides routines to interact with the git2d backend daemon.
package git2c
import (
+ "context"
"fmt"
"net"
- "go.lindenii.runxiyu.org/forge/forged/internal/bare"
+ "go.lindenii.runxiyu.org/forge/forged/internal/common/bare"
)
// Client represents a connection to the git2d backend daemon.
@@ -20,8 +20,9 @@ type Client struct {
}
// NewClient establishes a connection to a git2d socket and returns a new Client.
-func NewClient(socketPath string) (*Client, error) {
- conn, err := net.Dial("unix", socketPath)
+func NewClient(ctx context.Context, socketPath string) (*Client, error) {
+ dialer := &net.Dialer{} //exhaustruct:ignore
+ conn, err := dialer.DialContext(ctx, "unix", socketPath)
if err != nil {
return nil, fmt.Errorf("git2d connection failed: %w", err)
}
@@ -38,9 +39,12 @@ func NewClient(socketPath string) (*Client, error) {
}
// Close terminates the underlying socket connection.
-func (c *Client) Close() error {
+func (c *Client) Close() (err error) {
if c.conn != nil {
- return c.conn.Close()
+ err = c.conn.Close()
+ if err != nil {
+ return fmt.Errorf("close underlying socket: %w", err)
+ }
}
return nil
}
diff --git a/forged/internal/git2c/cmd_index.go b/forged/internal/ipc/git2c/cmd_index.go
index 8862b2c..e9fc435 100644
--- a/forged/internal/git2c/cmd_index.go
+++ b/forged/internal/ipc/git2c/cmd_index.go
@@ -13,10 +13,12 @@ import (
// CmdIndex requests a repository index from git2d and returns the list of commits
// and the contents of a README file if available.
func (c *Client) CmdIndex(repoPath string) ([]Commit, *FilenameContents, error) {
- if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ err := c.writer.WriteData([]byte(repoPath))
+ if err != nil {
return nil, nil, fmt.Errorf("sending repo path failed: %w", err)
}
- if err := c.writer.WriteUint(1); err != nil {
+ err = c.writer.WriteUint(1)
+ if err != nil {
return nil, nil, fmt.Errorf("sending command failed: %w", err)
}
diff --git a/forged/internal/ipc/git2c/cmd_init_repo.go b/forged/internal/ipc/git2c/cmd_init_repo.go
new file mode 100644
index 0000000..ae1e92a
--- /dev/null
+++ b/forged/internal/ipc/git2c/cmd_init_repo.go
@@ -0,0 +1,26 @@
+// SPDX-License-Identifier: AGPL-3.0-only
+// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
+
+package git2c
+
+import "fmt"
+
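+// InitRepo asks git2d (command 15) to create a repository at repoPath with
+// its hooks path set to hooksPath, presumably via core.hooksPath (compare the
+// ErrInitRepo* errors in perror.go).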
+func (c *Client) InitRepo(repoPath, hooksPath string) error {
+ if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ return fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err := c.writer.WriteUint(15); err != nil {
+ return fmt.Errorf("sending command failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(hooksPath)); err != nil {
+ return fmt.Errorf("sending hooks path failed: %w", err)
+ }
+ status, err := c.reader.ReadUint()
+ if err != nil {
+ return fmt.Errorf("reading status failed: %w", err)
+ }
+ if status != 0 {
+ return Perror(status)
+ }
+ return nil
+}
diff --git a/forged/internal/git2c/cmd_treeraw.go b/forged/internal/ipc/git2c/cmd_treeraw.go
index 492cb84..89b702c 100644
--- a/forged/internal/git2c/cmd_treeraw.go
+++ b/forged/internal/ipc/git2c/cmd_treeraw.go
@@ -12,13 +12,16 @@ import (
// CmdTreeRaw queries git2d for a tree or blob object at the given path within the repository.
// It returns either a directory listing or the contents of a file.
func (c *Client) CmdTreeRaw(repoPath, pathSpec string) ([]TreeEntry, string, error) {
- if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ err := c.writer.WriteData([]byte(repoPath))
+ if err != nil {
return nil, "", fmt.Errorf("sending repo path failed: %w", err)
}
- if err := c.writer.WriteUint(2); err != nil {
+ err = c.writer.WriteUint(2)
+ if err != nil {
return nil, "", fmt.Errorf("sending command failed: %w", err)
}
- if err := c.writer.WriteData([]byte(pathSpec)); err != nil {
+ err = c.writer.WriteData([]byte(pathSpec))
+ if err != nil {
return nil, "", fmt.Errorf("sending path failed: %w", err)
}
diff --git a/forged/internal/ipc/git2c/doc.go b/forged/internal/ipc/git2c/doc.go
new file mode 100644
index 0000000..e14dae0
--- /dev/null
+++ b/forged/internal/ipc/git2c/doc.go
@@ -0,0 +1,2 @@
+// Package git2c provides routines to interact with the git2d backend daemon.
+package git2c
diff --git a/forged/internal/ipc/git2c/extra.go b/forged/internal/ipc/git2c/extra.go
new file mode 100644
index 0000000..4d3a07e
--- /dev/null
+++ b/forged/internal/ipc/git2c/extra.go
@@ -0,0 +1,286 @@
+// SPDX-License-Identifier: AGPL-3.0-only
+// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
+
+package git2c
+
+import (
+ "encoding/hex"
+ "fmt"
+ "time"
+)
+
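+// ResolveRef resolves refName of the given refType (command 3) and returns
+// the resulting object ID as a hex string.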
+func (c *Client) ResolveRef(repoPath, refType, refName string) (string, error) {
+ if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ return "", fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err := c.writer.WriteUint(3); err != nil {
+ return "", fmt.Errorf("sending command failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(refType)); err != nil {
+ return "", fmt.Errorf("sending ref type failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(refName)); err != nil {
+ return "", fmt.Errorf("sending ref name failed: %w", err)
+ }
+
+ status, err := c.reader.ReadUint()
+ if err != nil {
+ return "", fmt.Errorf("reading status failed: %w", err)
+ }
+ if status != 0 {
+ return "", Perror(status)
+ }
+ id, err := c.reader.ReadData()
+ if err != nil {
+ return "", fmt.Errorf("reading oid failed: %w", err)
+ }
+ return hex.EncodeToString(id), nil
+}
+
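+// ListBranches returns the names of the repository's branches (command 4).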
+func (c *Client) ListBranches(repoPath string) ([]string, error) {
+ if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ return nil, fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err := c.writer.WriteUint(4); err != nil {
+ return nil, fmt.Errorf("sending command failed: %w", err)
+ }
+ status, err := c.reader.ReadUint()
+ if err != nil {
+ return nil, fmt.Errorf("reading status failed: %w", err)
+ }
+ if status != 0 {
+ return nil, Perror(status)
+ }
+ count, err := c.reader.ReadUint()
+ if err != nil {
+ return nil, fmt.Errorf("reading count failed: %w", err)
+ }
+ branches := make([]string, 0, count)
+ for range count {
+ name, err := c.reader.ReadData()
+ if err != nil {
+ return nil, fmt.Errorf("reading branch name failed: %w", err)
+ }
+ branches = append(branches, string(name))
+ }
+ return branches, nil
+}
+
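+// FormatPatch returns the patch text that git2d produces for the given
+// commit (command 5).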
+func (c *Client) FormatPatch(repoPath, commitHex string) (string, error) {
+ if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ return "", fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err := c.writer.WriteUint(5); err != nil {
+ return "", fmt.Errorf("sending command failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(commitHex)); err != nil {
+ return "", fmt.Errorf("sending commit failed: %w", err)
+ }
+ status, err := c.reader.ReadUint()
+ if err != nil {
+ return "", fmt.Errorf("reading status failed: %w", err)
+ }
+ if status != 0 {
+ return "", Perror(status)
+ }
+ buf, err := c.reader.ReadData()
+ if err != nil {
+ return "", fmt.Errorf("reading patch failed: %w", err)
+ }
+ return string(buf), nil
+}
+
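+// CommitPatch returns the parent OID, diffstat, and patch text that git2d
+// reports for the given commit (command 6).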
+func (c *Client) CommitPatch(repoPath, commitHex string) (parentHex string, stats string, patch string, err error) {
+ if err = c.writer.WriteData([]byte(repoPath)); err != nil {
+ return "", "", "", fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err = c.writer.WriteUint(6); err != nil {
+ return "", "", "", fmt.Errorf("sending command failed: %w", err)
+ }
+ if err = c.writer.WriteData([]byte(commitHex)); err != nil {
+ return "", "", "", fmt.Errorf("sending commit failed: %w", err)
+ }
+ status, err2 := c.reader.ReadUint()
+ if err2 != nil {
+ return "", "", "", fmt.Errorf("reading status failed: %w", err2)
+ }
+ if status != 0 {
+ return "", "", "", Perror(status)
+ }
+ id, err2 := c.reader.ReadData()
+ if err2 != nil {
+ return "", "", "", fmt.Errorf("reading parent oid failed: %w", err2)
+ }
+ statsBytes, err2 := c.reader.ReadData()
+ if err2 != nil {
+ return "", "", "", fmt.Errorf("reading stats failed: %w", err2)
+ }
+ patchBytes, err2 := c.reader.ReadData()
+ if err2 != nil {
+ return "", "", "", fmt.Errorf("reading patch failed: %w", err2)
+ }
+ return hex.EncodeToString(id), string(statsBytes), string(patchBytes), nil
+}
+
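+// MergeBase returns the merge base of the commits identified by hexA and
+// hexB (command 7).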
+func (c *Client) MergeBase(repoPath, hexA, hexB string) (string, error) {
+ if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ return "", fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err := c.writer.WriteUint(7); err != nil {
+ return "", fmt.Errorf("sending command failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(hexA)); err != nil {
+ return "", fmt.Errorf("sending oid A failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(hexB)); err != nil {
+ return "", fmt.Errorf("sending oid B failed: %w", err)
+ }
+ status, err := c.reader.ReadUint()
+ if err != nil {
+ return "", fmt.Errorf("reading status failed: %w", err)
+ }
+ if status != 0 {
+ return "", Perror(status)
+ }
+ base, err := c.reader.ReadData()
+ if err != nil {
+ return "", fmt.Errorf("reading base oid failed: %w", err)
+ }
+ return hex.EncodeToString(base), nil
+}
+
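+// Log returns commits reachable from refSpec (command 8), with n sent as the
+// limit; the loop simply reads commit records until the stream yields no
+// more data.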
+func (c *Client) Log(repoPath, refSpec string, n uint) ([]Commit, error) {
+ if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ return nil, fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err := c.writer.WriteUint(8); err != nil {
+ return nil, fmt.Errorf("sending command failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(refSpec)); err != nil {
+ return nil, fmt.Errorf("sending refspec failed: %w", err)
+ }
+ if err := c.writer.WriteUint(uint64(n)); err != nil {
+ return nil, fmt.Errorf("sending limit failed: %w", err)
+ }
+ status, err := c.reader.ReadUint()
+ if err != nil {
+ return nil, fmt.Errorf("reading status failed: %w", err)
+ }
+ if status != 0 {
+ return nil, Perror(status)
+ }
+ var out []Commit
+ for {
+ id, err := c.reader.ReadData()
+ if err != nil {
+ break
+ }
+ title, _ := c.reader.ReadData()
+ authorName, _ := c.reader.ReadData()
+ authorEmail, _ := c.reader.ReadData()
+ date, _ := c.reader.ReadData()
+ out = append(out, Commit{
+ Hash: hex.EncodeToString(id),
+ Author: string(authorName),
+ Email: string(authorEmail),
+ Date: string(date),
+ Message: string(title),
+ })
+ }
+ return out, nil
+}
+
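+// CommitTreeOID returns the OID of the tree referenced by the given commit
+// (command 12).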
+func (c *Client) CommitTreeOID(repoPath, commitHex string) (string, error) {
+ if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ return "", fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err := c.writer.WriteUint(12); err != nil {
+ return "", fmt.Errorf("sending command failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(commitHex)); err != nil {
+ return "", fmt.Errorf("sending oid failed: %w", err)
+ }
+ status, err := c.reader.ReadUint()
+ if err != nil {
+ return "", fmt.Errorf("reading status failed: %w", err)
+ }
+ if status != 0 {
+ return "", Perror(status)
+ }
+ id, err := c.reader.ReadData()
+ if err != nil {
+ return "", fmt.Errorf("reading tree oid failed: %w", err)
+ }
+ return hex.EncodeToString(id), nil
+}
+
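+// CommitCreate creates a commit object (command 13) from treeHex, the given
+// parents, the author identity, the timestamp (sent as Unix seconds plus a
+// timezone offset in minutes), and the message, returning the new commit's
+// hex OID.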
+func (c *Client) CommitCreate(repoPath, treeHex string, parents []string, authorName, authorEmail string, when time.Time, message string) (string, error) {
+ if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ return "", fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err := c.writer.WriteUint(13); err != nil {
+ return "", fmt.Errorf("sending command failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(treeHex)); err != nil {
+ return "", fmt.Errorf("sending tree oid failed: %w", err)
+ }
+ if err := c.writer.WriteUint(uint64(len(parents))); err != nil {
+ return "", fmt.Errorf("sending parents count failed: %w", err)
+ }
+ for _, p := range parents {
+ if err := c.writer.WriteData([]byte(p)); err != nil {
+ return "", fmt.Errorf("sending parent oid failed: %w", err)
+ }
+ }
+ if err := c.writer.WriteData([]byte(authorName)); err != nil {
+ return "", fmt.Errorf("sending author name failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(authorEmail)); err != nil {
+ return "", fmt.Errorf("sending author email failed: %w", err)
+ }
+ if err := c.writer.WriteInt(when.Unix()); err != nil {
+ return "", fmt.Errorf("sending when failed: %w", err)
+ }
+ _, offset := when.Zone()
+ if err := c.writer.WriteInt(int64(offset / 60)); err != nil {
+ return "", fmt.Errorf("sending tz offset failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(message)); err != nil {
+ return "", fmt.Errorf("sending message failed: %w", err)
+ }
+ status, err := c.reader.ReadUint()
+ if err != nil {
+ return "", fmt.Errorf("reading status failed: %w", err)
+ }
+ if status != 0 {
+ return "", Perror(status)
+ }
+ id, err := c.reader.ReadData()
+ if err != nil {
+ return "", fmt.Errorf("reading commit oid failed: %w", err)
+ }
+ return hex.EncodeToString(id), nil
+}
+
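+// UpdateRef points refName at the commit identified by commitHex (command 14).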
+func (c *Client) UpdateRef(repoPath, refName, commitHex string) error {
+ if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ return fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err := c.writer.WriteUint(14); err != nil {
+ return fmt.Errorf("sending command failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(refName)); err != nil {
+ return fmt.Errorf("sending ref name failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(commitHex)); err != nil {
+ return fmt.Errorf("sending commit oid failed: %w", err)
+ }
+ status, err := c.reader.ReadUint()
+ if err != nil {
+ return fmt.Errorf("reading status failed: %w", err)
+ }
+ if status != 0 {
+ return Perror(status)
+ }
+ return nil
+}
diff --git a/forged/internal/git2c/git_types.go b/forged/internal/ipc/git2c/git_types.go
index bf13f05..bf13f05 100644
--- a/forged/internal/git2c/git_types.go
+++ b/forged/internal/ipc/git2c/git_types.go
diff --git a/forged/internal/ipc/git2c/perror.go b/forged/internal/ipc/git2c/perror.go
new file mode 100644
index 0000000..4be2a07
--- /dev/null
+++ b/forged/internal/ipc/git2c/perror.go
@@ -0,0 +1,87 @@
+// SPDX-License-Identifier: AGPL-3.0-only
+// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
+
+// TODO: Make the C part report detailed error messages too
+
+package git2c
+
+import "errors"
+
+var (
+ ErrUnknown = errors.New("git2c: unknown error")
+ ErrPath = errors.New("git2c: get tree entry by path failed")
+ ErrRevparse = errors.New("git2c: revparse failed")
+ ErrReadme = errors.New("git2c: no readme")
+ ErrBlobExpected = errors.New("git2c: blob expected")
+ ErrEntryToObject = errors.New("git2c: tree entry to object conversion failed")
+ ErrBlobRawContent = errors.New("git2c: get blob raw content failed")
+ ErrRevwalk = errors.New("git2c: revwalk failed")
+ ErrRevwalkPushHead = errors.New("git2c: revwalk push head failed")
+ ErrBareProto = errors.New("git2c: bare protocol error")
+ ErrRefResolve = errors.New("git2c: ref resolve failed")
+ ErrBranches = errors.New("git2c: list branches failed")
+ ErrCommitLookup = errors.New("git2c: commit lookup failed")
+ ErrDiff = errors.New("git2c: diff failed")
+ ErrMergeBaseNone = errors.New("git2c: no merge base found")
+ ErrMergeBase = errors.New("git2c: merge base failed")
+ ErrCommitCreate = errors.New("git2c: commit create failed")
+ ErrUpdateRef = errors.New("git2c: update ref failed")
+ ErrCommitTree = errors.New("git2c: commit tree lookup failed")
+ ErrInitRepoCreate = errors.New("git2c: init repo: create failed")
+ ErrInitRepoConfig = errors.New("git2c: init repo: open config failed")
+ ErrInitRepoSetHooksPath = errors.New("git2c: init repo: set core.hooksPath failed")
+ ErrInitRepoSetAdvertisePushOptions = errors.New("git2c: init repo: set receive.advertisePushOptions failed")
+ ErrInitRepoMkdir = errors.New("git2c: init repo: create directory failed")
+)
+
+func Perror(errno uint64) error {
+ switch errno {
+ case 0:
+ return nil
+ case 3:
+ return ErrPath
+ case 4:
+ return ErrRevparse
+ case 5:
+ return ErrReadme
+ case 6:
+ return ErrBlobExpected
+ case 7:
+ return ErrEntryToObject
+ case 8:
+ return ErrBlobRawContent
+ case 9:
+ return ErrRevwalk
+ case 10:
+ return ErrRevwalkPushHead
+ case 11:
+ return ErrBareProto
+ case 12:
+ return ErrRefResolve
+ case 13:
+ return ErrBranches
+ case 14:
+ return ErrCommitLookup
+ case 15:
+ return ErrDiff
+ case 16:
+ return ErrMergeBaseNone
+ case 17:
+ return ErrMergeBase
+ case 18:
+ return ErrUpdateRef
+ case 19:
+ return ErrCommitCreate
+ case 20:
+ return ErrInitRepoCreate
+ case 21:
+ return ErrInitRepoConfig
+ case 22:
+ return ErrInitRepoSetHooksPath
+ case 23:
+ return ErrInitRepoSetAdvertisePushOptions
+ case 24:
+ return ErrInitRepoMkdir
+ }
+ return ErrUnknown
+}
diff --git a/forged/internal/ipc/git2c/tree.go b/forged/internal/ipc/git2c/tree.go
new file mode 100644
index 0000000..f598e14
--- /dev/null
+++ b/forged/internal/ipc/git2c/tree.go
@@ -0,0 +1,118 @@
+// SPDX-License-Identifier: AGPL-3.0-only
+// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
+
+package git2c
+
+import (
+ "encoding/hex"
+ "fmt"
+)
+
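+// TreeEntryRaw is a tree entry as exchanged with git2d: a file mode, a name,
+// and the entry's object ID in hex.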
+type TreeEntryRaw struct {
+ Mode uint64
+ Name string
+ OID string // hex
+}
+
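+// TreeListByOID lists the immediate entries of the tree identified by
+// treeHex (command 9).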
+func (c *Client) TreeListByOID(repoPath, treeHex string) ([]TreeEntryRaw, error) {
+ if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ return nil, fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err := c.writer.WriteUint(9); err != nil {
+ return nil, fmt.Errorf("sending command failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(treeHex)); err != nil {
+ return nil, fmt.Errorf("sending tree oid failed: %w", err)
+ }
+ status, err := c.reader.ReadUint()
+ if err != nil {
+ return nil, fmt.Errorf("reading status failed: %w", err)
+ }
+ if status != 0 {
+ return nil, Perror(status)
+ }
+ count, err := c.reader.ReadUint()
+ if err != nil {
+ return nil, fmt.Errorf("reading count failed: %w", err)
+ }
+ entries := make([]TreeEntryRaw, 0, count)
+ for range count {
+ mode, err := c.reader.ReadUint()
+ if err != nil {
+ return nil, fmt.Errorf("reading mode failed: %w", err)
+ }
+ name, err := c.reader.ReadData()
+ if err != nil {
+ return nil, fmt.Errorf("reading name failed: %w", err)
+ }
+ id, err := c.reader.ReadData()
+ if err != nil {
+ return nil, fmt.Errorf("reading oid failed: %w", err)
+ }
+ entries = append(entries, TreeEntryRaw{Mode: mode, Name: string(name), OID: hex.EncodeToString(id)})
+ }
+ return entries, nil
+}
+
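+// WriteTree writes a new tree object containing entries (command 10) and
+// returns its hex OID; entry OIDs are decoded and sent to git2d in raw
+// binary form.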
+func (c *Client) WriteTree(repoPath string, entries []TreeEntryRaw) (string, error) {
+ if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ return "", fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err := c.writer.WriteUint(10); err != nil {
+ return "", fmt.Errorf("sending command failed: %w", err)
+ }
+ if err := c.writer.WriteUint(uint64(len(entries))); err != nil {
+ return "", fmt.Errorf("sending count failed: %w", err)
+ }
+ for _, e := range entries {
+ if err := c.writer.WriteUint(e.Mode); err != nil {
+ return "", fmt.Errorf("sending mode failed: %w", err)
+ }
+ if err := c.writer.WriteData([]byte(e.Name)); err != nil {
+ return "", fmt.Errorf("sending name failed: %w", err)
+ }
+ raw, err := hex.DecodeString(e.OID)
+ if err != nil {
+ return "", fmt.Errorf("decode oid hex: %w", err)
+ }
+ if err := c.writer.WriteDataFixed(raw); err != nil {
+ return "", fmt.Errorf("sending oid failed: %w", err)
+ }
+ }
+ status, err := c.reader.ReadUint()
+ if err != nil {
+ return "", fmt.Errorf("reading status failed: %w", err)
+ }
+ if status != 0 {
+ return "", Perror(status)
+ }
+ id, err := c.reader.ReadData()
+ if err != nil {
+ return "", fmt.Errorf("reading oid failed: %w", err)
+ }
+ return hex.EncodeToString(id), nil
+}
+
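+// WriteBlob stores content as a blob object (command 11) and returns its hex OID.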
+func (c *Client) WriteBlob(repoPath string, content []byte) (string, error) {
+ if err := c.writer.WriteData([]byte(repoPath)); err != nil {
+ return "", fmt.Errorf("sending repo path failed: %w", err)
+ }
+ if err := c.writer.WriteUint(11); err != nil {
+ return "", fmt.Errorf("sending command failed: %w", err)
+ }
+ if err := c.writer.WriteData(content); err != nil {
+ return "", fmt.Errorf("sending blob content failed: %w", err)
+ }
+ status, err := c.reader.ReadUint()
+ if err != nil {
+ return "", fmt.Errorf("reading status failed: %w", err)
+ }
+ if status != 0 {
+ return "", Perror(status)
+ }
+ id, err := c.reader.ReadData()
+ if err != nil {
+ return "", fmt.Errorf("reading oid failed: %w", err)
+ }
+ return hex.EncodeToString(id), nil
+}
diff --git a/forged/internal/irc/bot.go b/forged/internal/irc/bot.go
deleted file mode 100644
index 1c6d32f..0000000
--- a/forged/internal/irc/bot.go
+++ /dev/null
@@ -1,176 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-// Package irc provides basic IRC bot functionality.
-package irc
-
-import (
- "crypto/tls"
- "log/slog"
- "net"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-// Config contains IRC connection and identity settings for the bot.
-// This should usually be a part of the primary config struct.
-type Config struct {
- Net string `scfg:"net"`
- Addr string `scfg:"addr"`
- TLS bool `scfg:"tls"`
- SendQ uint `scfg:"sendq"`
- Nick string `scfg:"nick"`
- User string `scfg:"user"`
- Gecos string `scfg:"gecos"`
-}
-
-// Bot represents an IRC bot client that handles events and allows for sending messages.
-type Bot struct {
- config *Config
- ircSendBuffered chan string
- ircSendDirectChan chan misc.ErrorBack[string]
-}
-
-// NewBot creates a new Bot instance using the provided configuration.
-func NewBot(c *Config) (b *Bot) {
- b = &Bot{
- config: c,
- }
- return
-}
-
-// Connect establishes a new IRC session and starts handling incoming and outgoing messages.
-// This method blocks until an error occurs or the connection is closed.
-func (b *Bot) Connect() error {
- var err error
- var underlyingConn net.Conn
- if b.config.TLS {
- underlyingConn, err = tls.Dial(b.config.Net, b.config.Addr, nil)
- } else {
- underlyingConn, err = net.Dial(b.config.Net, b.config.Addr)
- }
- if err != nil {
- return err
- }
- defer underlyingConn.Close()
-
- conn := NewConn(underlyingConn)
-
- logAndWriteLn := func(s string) (n int, err error) {
- slog.Debug("irc tx", "line", s)
- return conn.WriteString(s + "\r\n")
- }
-
- _, err = logAndWriteLn("NICK " + b.config.Nick)
- if err != nil {
- return err
- }
- _, err = logAndWriteLn("USER " + b.config.User + " 0 * :" + b.config.Gecos)
- if err != nil {
- return err
- }
-
- readLoopError := make(chan error)
- writeLoopAbort := make(chan struct{})
- go func() {
- for {
- select {
- case <-writeLoopAbort:
- return
- default:
- }
-
- msg, line, err := conn.ReadMessage()
- if err != nil {
- readLoopError <- err
- return
- }
-
- slog.Debug("irc rx", "line", line)
-
- switch msg.Command {
- case "001":
- _, err = logAndWriteLn("JOIN #chat")
- if err != nil {
- readLoopError <- err
- return
- }
- case "PING":
- _, err = logAndWriteLn("PONG :" + msg.Args[0])
- if err != nil {
- readLoopError <- err
- return
- }
- case "JOIN":
- c, ok := msg.Source.(Client)
- if !ok {
- slog.Error("unable to convert source of JOIN to client")
- }
- if c.Nick != b.config.Nick {
- continue
- }
- default:
- }
- }
- }()
-
- for {
- select {
- case err = <-readLoopError:
- return err
- case line := <-b.ircSendBuffered:
- _, err = logAndWriteLn(line)
- if err != nil {
- select {
- case b.ircSendBuffered <- line:
- default:
- slog.Error("unable to requeue message", "line", line)
- }
- writeLoopAbort <- struct{}{}
- return err
- }
- case lineErrorBack := <-b.ircSendDirectChan:
- _, err = logAndWriteLn(lineErrorBack.Content)
- lineErrorBack.ErrorChan <- err
- if err != nil {
- writeLoopAbort <- struct{}{}
- return err
- }
- }
- }
-}
-
-// SendDirect sends an IRC message directly to the connection and bypasses
-// the buffering system.
-func (b *Bot) SendDirect(line string) error {
- ech := make(chan error, 1)
-
- b.ircSendDirectChan <- misc.ErrorBack[string]{
- Content: line,
- ErrorChan: ech,
- }
-
- return <-ech
-}
-
-// Send queues a message to be sent asynchronously via the buffered send queue.
-// If the queue is full, the message is dropped and an error is logged.
-func (b *Bot) Send(line string) {
- select {
- case b.ircSendBuffered <- line:
- default:
- slog.Error("irc sendq full", "line", line)
- }
-}
-
-// ConnectLoop continuously attempts to maintain an IRC session.
-// If the connection drops, it automatically retries with no delay.
-func (b *Bot) ConnectLoop() {
- b.ircSendBuffered = make(chan string, b.config.SendQ)
- b.ircSendDirectChan = make(chan misc.ErrorBack[string])
-
- for {
- err := b.Connect()
- slog.Error("irc session error", "error", err)
- }
-}
diff --git a/forged/internal/irc/conn.go b/forged/internal/irc/conn.go
deleted file mode 100644
index b975b72..0000000
--- a/forged/internal/irc/conn.go
+++ /dev/null
@@ -1,49 +0,0 @@
-package irc
-
-import (
- "bufio"
- "net"
- "slices"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-type Conn struct {
- netConn net.Conn
- bufReader *bufio.Reader
-}
-
-func NewConn(netConn net.Conn) Conn {
- return Conn{
- netConn: netConn,
- bufReader: bufio.NewReader(netConn),
- }
-}
-
-func (c *Conn) ReadMessage() (msg Message, line string, err error) {
- raw, err := c.bufReader.ReadSlice('\n')
- if err != nil {
- return
- }
-
- if raw[len(raw)-1] == '\n' {
- raw = raw[:len(raw)-1]
- }
- if raw[len(raw)-1] == '\r' {
- raw = raw[:len(raw)-1]
- }
-
- lineBytes := slices.Clone(raw)
- line = misc.BytesToString(lineBytes)
- msg, err = Parse(lineBytes)
-
- return
-}
-
-func (c *Conn) Write(p []byte) (n int, err error) {
- return c.netConn.Write(p)
-}
-
-func (c *Conn) WriteString(s string) (n int, err error) {
- return c.netConn.Write(misc.StringToBytes(s))
-}
diff --git a/forged/internal/irc/errors.go b/forged/internal/irc/errors.go
deleted file mode 100644
index 3506c70..0000000
--- a/forged/internal/irc/errors.go
+++ /dev/null
@@ -1,8 +0,0 @@
-package irc
-
-import "errors"
-
-var (
- ErrInvalidIRCv3Tag = errors.New("invalid ircv3 tag")
- ErrMalformedMsg = errors.New("malformed irc message")
-)
diff --git a/forged/internal/irc/message.go b/forged/internal/irc/message.go
deleted file mode 100644
index 84b6867..0000000
--- a/forged/internal/irc/message.go
+++ /dev/null
@@ -1,126 +0,0 @@
-// SPDX-License-Identifier: MIT
-// SPDX-FileCopyrightText: Copyright (c) 2018-2024 luk3yx <https://luk3yx.github.io>
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package irc
-
-import (
- "bytes"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-type Message struct {
- Command string
- Source Source
- Tags map[string]string
- Args []string
-}
-
-// All strings returned are borrowed from the input byte slice.
-func Parse(raw []byte) (msg Message, err error) {
- sp := bytes.Split(raw, []byte{' '}) // TODO: Use bytes.Cut instead here
-
- if bytes.HasPrefix(sp[0], []byte{'@'}) { // TODO: Check size manually
- if len(sp[0]) < 2 {
- err = ErrMalformedMsg
- return
- }
- sp[0] = sp[0][1:]
-
- msg.Tags, err = tagsToMap(sp[0])
- if err != nil {
- return
- }
-
- if len(sp) < 2 {
- err = ErrMalformedMsg
- return
- }
- sp = sp[1:]
- } else {
- msg.Tags = nil // TODO: Is a nil map the correct thing to use here?
- }
-
- if bytes.HasPrefix(sp[0], []byte{':'}) { // TODO: Check size manually
- if len(sp[0]) < 2 {
- err = ErrMalformedMsg
- return
- }
- sp[0] = sp[0][1:]
-
- msg.Source = parseSource(sp[0])
-
- if len(sp) < 2 {
- err = ErrMalformedMsg
- return
- }
- sp = sp[1:]
- }
-
- msg.Command = misc.BytesToString(sp[0])
- if len(sp) < 2 {
- return
- }
- sp = sp[1:]
-
- for i := 0; i < len(sp); i++ {
- if len(sp[i]) == 0 {
- continue
- }
- if sp[i][0] == ':' {
- if len(sp[i]) < 2 {
- sp[i] = []byte{}
- } else {
- sp[i] = sp[i][1:]
- }
- msg.Args = append(msg.Args, misc.BytesToString(bytes.Join(sp[i:], []byte{' '})))
- // TODO: Avoid Join by not using sp in the first place
- break
- }
- msg.Args = append(msg.Args, misc.BytesToString(sp[i]))
- }
-
- return
-}
-
-var ircv3TagEscapes = map[byte]byte{ //nolint:gochecknoglobals
- ':': ';',
- 's': ' ',
- 'r': '\r',
- 'n': '\n',
-}
-
-func tagsToMap(raw []byte) (tags map[string]string, err error) {
- tags = make(map[string]string)
- for rawTag := range bytes.SplitSeq(raw, []byte{';'}) {
- key, value, found := bytes.Cut(rawTag, []byte{'='})
- if !found {
- err = ErrInvalidIRCv3Tag
- return
- }
- if len(value) == 0 {
- tags[misc.BytesToString(key)] = ""
- } else {
- if !bytes.Contains(value, []byte{'\\'}) {
- tags[misc.BytesToString(key)] = misc.BytesToString(value)
- } else {
- valueUnescaped := bytes.NewBuffer(make([]byte, 0, len(value)))
- for i := 0; i < len(value); i++ {
- if value[i] == '\\' {
- i++
- byteUnescaped, ok := ircv3TagEscapes[value[i]]
- if !ok {
- byteUnescaped = value[i]
- }
- valueUnescaped.WriteByte(byteUnescaped)
- } else {
- valueUnescaped.WriteByte(value[i])
- }
- }
- tags[misc.BytesToString(key)] = misc.BytesToString(valueUnescaped.Bytes())
- }
- }
- }
- return
-}
diff --git a/forged/internal/irc/source.go b/forged/internal/irc/source.go
deleted file mode 100644
index d955f45..0000000
--- a/forged/internal/irc/source.go
+++ /dev/null
@@ -1,50 +0,0 @@
-// SPDX-License-Identifier: MIT
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package irc
-
-import (
- "bytes"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-type Source interface {
- AsSourceString() string
-}
-
-func parseSource(s []byte) Source {
- nick, userhost, found := bytes.Cut(s, []byte{'!'})
- if !found {
- return Server{name: misc.BytesToString(s)}
- }
-
- user, host, found := bytes.Cut(userhost, []byte{'@'})
- if !found {
- return Server{name: misc.BytesToString(s)}
- }
-
- return Client{
- Nick: misc.BytesToString(nick),
- User: misc.BytesToString(user),
- Host: misc.BytesToString(host),
- }
-}
-
-type Server struct {
- name string
-}
-
-func (s Server) AsSourceString() string {
- return s.name
-}
-
-type Client struct {
- Nick string
- User string
- Host string
-}
-
-func (c Client) AsSourceString() string {
- return c.Nick + "!" + c.User + "@" + c.Host
-}
diff --git a/forged/internal/misc/deploy.go b/forged/internal/misc/deploy.go
deleted file mode 100644
index 3ee5f92..0000000
--- a/forged/internal/misc/deploy.go
+++ /dev/null
@@ -1,22 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package misc
-
-import (
- "io"
- "io/fs"
- "os"
-)
-
-// DeployBinary copies the contents of a binary file to the target destination path.
-// The destination file is created with executable permissions.
-func DeployBinary(src fs.File, dst string) (err error) {
- var dstFile *os.File
- if dstFile, err = os.OpenFile(dst, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o755); err != nil {
- return err
- }
- defer dstFile.Close()
- _, err = io.Copy(dstFile, src)
- return err
-}
diff --git a/forged/internal/misc/panic.go b/forged/internal/misc/panic.go
deleted file mode 100644
index 34c49c5..0000000
--- a/forged/internal/misc/panic.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package misc
-
-// FirstOrPanic returns the value or panics if the error is non-nil.
-func FirstOrPanic[T any](v T, err error) T {
- if err != nil {
- panic(err)
- }
- return v
-}
-
-// NoneOrPanic panics if the provided error is non-nil.
-func NoneOrPanic(err error) {
- if err != nil {
- panic(err)
- }
-}
diff --git a/forged/internal/oldgit/fmtpatch.go b/forged/internal/oldgit/fmtpatch.go
deleted file mode 100644
index 79be5d8..0000000
--- a/forged/internal/oldgit/fmtpatch.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package oldgit
-
-import (
- "bytes"
- "fmt"
- "strings"
- "time"
-
- "github.com/go-git/go-git/v5/plumbing/object"
-)
-
-// FmtCommitPatch formats a commit object as if it was returned by
-// git-format-patch.
-func FmtCommitPatch(commit *object.Commit) (final string, err error) {
- var patch *object.Patch
- var buf bytes.Buffer
- var author object.Signature
- var date string
- var commitTitle, commitDetails string
-
- if _, patch, err = CommitToPatch(commit); err != nil {
- return "", err
- }
-
- author = commit.Author
- date = author.When.Format(time.RFC1123Z)
-
- commitTitle, commitDetails, _ = strings.Cut(commit.Message, "\n")
-
- // This date is hardcoded in Git.
- fmt.Fprintf(&buf, "From %s Mon Sep 17 00:00:00 2001\n", commit.Hash)
- fmt.Fprintf(&buf, "From: %s <%s>\n", author.Name, author.Email)
- fmt.Fprintf(&buf, "Date: %s\n", date)
- fmt.Fprintf(&buf, "Subject: [PATCH] %s\n\n", commitTitle)
-
- if commitDetails != "" {
- commitDetails1, commitDetails2, _ := strings.Cut(commitDetails, "\n")
- if strings.TrimSpace(commitDetails1) == "" {
- commitDetails = commitDetails2
- }
- buf.WriteString(commitDetails)
- buf.WriteString("\n")
- }
- buf.WriteString("---\n")
- fmt.Fprint(&buf, patch.Stats().String())
- fmt.Fprintln(&buf)
-
- buf.WriteString(patch.String())
-
- fmt.Fprintf(&buf, "\n-- \n2.48.1\n")
-
- return buf.String(), nil
-}
diff --git a/forged/internal/oldgit/oldgit.go b/forged/internal/oldgit/oldgit.go
deleted file mode 100644
index 4c99d6a..0000000
--- a/forged/internal/oldgit/oldgit.go
+++ /dev/null
@@ -1,5 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-// Package oldgit provides deprecated functions that depend on go-git.
-package oldgit
diff --git a/forged/internal/oldgit/patch.go b/forged/internal/oldgit/patch.go
deleted file mode 100644
index fc8ef98..0000000
--- a/forged/internal/oldgit/patch.go
+++ /dev/null
@@ -1,43 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package oldgit
-
-import (
- "errors"
-
- "github.com/go-git/go-git/v5/plumbing"
- "github.com/go-git/go-git/v5/plumbing/object"
-)
-
-// CommitToPatch creates an [object.Patch] from the first parent of a given
-// [object.Commit].
-//
-// TODO: This function should be deprecated as it only diffs with the first
-// parent and does not correctly handle merge commits.
-func CommitToPatch(commit *object.Commit) (parentCommitHash plumbing.Hash, patch *object.Patch, err error) {
- var parentCommit *object.Commit
- var commitTree *object.Tree
-
- parentCommit, err = commit.Parent(0)
- switch {
- case errors.Is(err, object.ErrParentNotFound):
- if commitTree, err = commit.Tree(); err != nil {
- return
- }
- if patch, err = NullTree.Patch(commitTree); err != nil {
- return
- }
- case err != nil:
- return
- default:
- parentCommitHash = parentCommit.Hash
- if patch, err = parentCommit.Patch(commit); err != nil {
- return
- }
- }
- return
-}
-
-// NullTree is a tree object that is empty and has no hash.
-var NullTree object.Tree //nolint:gochecknoglobals
diff --git a/forged/internal/render/chroma.go b/forged/internal/render/chroma.go
deleted file mode 100644
index 64bfde0..0000000
--- a/forged/internal/render/chroma.go
+++ /dev/null
@@ -1,41 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package render
-
-import (
- "bytes"
- "html/template"
-
- chromaHTML "github.com/alecthomas/chroma/v2/formatters/html"
- chromaLexers "github.com/alecthomas/chroma/v2/lexers"
- chromaStyles "github.com/alecthomas/chroma/v2/styles"
-)
-
-// Highlight returns HTML with syntax highlighting for the given file content,
-// using Chroma. The lexer is selected based on the filename.
-// If tokenization or formatting fails, a fallback <pre> block is returned with the error.
-func Highlight(filename, content string) template.HTML {
- lexer := chromaLexers.Match(filename)
- if lexer == nil {
- lexer = chromaLexers.Fallback
- }
-
- iterator, err := lexer.Tokenise(nil, content)
- if err != nil {
- return template.HTML("<pre>Error tokenizing file: " + err.Error() + "</pre>") //#nosec G203`
- }
-
- var buf bytes.Buffer
- style := chromaStyles.Get("autumn")
- formatter := chromaHTML.New(
- chromaHTML.WithClasses(true),
- chromaHTML.TabWidth(8),
- )
-
- if err := formatter.Format(&buf, style, iterator); err != nil {
- return template.HTML("<pre>Error formatting file: " + err.Error() + "</pre>") //#nosec G203
- }
-
- return template.HTML(buf.Bytes()) //#nosec G203
-}
diff --git a/forged/internal/render/escape.go b/forged/internal/render/escape.go
deleted file mode 100644
index 031e333..0000000
--- a/forged/internal/render/escape.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package render
-
-import (
- "html"
- "html/template"
-)
-
-// EscapeHTML just escapes a string and wraps it in [template.HTML].
-func EscapeHTML(s string) template.HTML {
- return template.HTML(html.EscapeString(s)) //#nosec G203
-}
diff --git a/forged/internal/render/readme.go b/forged/internal/render/readme.go
deleted file mode 100644
index fa1be7e..0000000
--- a/forged/internal/render/readme.go
+++ /dev/null
@@ -1,34 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package render
-
-import (
- "bytes"
- "html"
- "html/template"
- "strings"
-
- "github.com/microcosm-cc/bluemonday"
- "github.com/yuin/goldmark"
- "github.com/yuin/goldmark/extension"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-var markdownConverter = goldmark.New(goldmark.WithExtensions(extension.GFM)) //nolint:gochecknoglobals
-
-// renderReadme renders and sanitizes README content from a byte slice and filename.
-func Readme(data []byte, filename string) (string, template.HTML) {
- switch strings.ToLower(filename) {
- case "readme":
- return "README", template.HTML("<pre>" + html.EscapeString(misc.BytesToString(data)) + "</pre>") //#nosec G203
- case "readme.md":
- var buf bytes.Buffer
- if err := markdownConverter.Convert(data, &buf); err != nil {
- return "Error fetching README", EscapeHTML("Unable to render README: " + err.Error())
- }
- return "README.md", template.HTML(bluemonday.UGCPolicy().SanitizeBytes(buf.Bytes())) //#nosec G203
- default:
- return filename, template.HTML("<pre>" + html.EscapeString(misc.BytesToString(data)) + "</pre>") //#nosec G203
- }
-}
diff --git a/forged/internal/server/server.go b/forged/internal/server/server.go
new file mode 100644
index 0000000..39a6823
--- /dev/null
+++ b/forged/internal/server/server.go
@@ -0,0 +1,87 @@
+package server
+
+import (
+ "context"
+ "fmt"
+
+ "go.lindenii.runxiyu.org/forge/forged/internal/config"
+ "go.lindenii.runxiyu.org/forge/forged/internal/database"
+ "go.lindenii.runxiyu.org/forge/forged/internal/database/queries"
+ "go.lindenii.runxiyu.org/forge/forged/internal/global"
+ "go.lindenii.runxiyu.org/forge/forged/internal/incoming/hooks"
+ "go.lindenii.runxiyu.org/forge/forged/internal/incoming/lmtp"
+ "go.lindenii.runxiyu.org/forge/forged/internal/incoming/ssh"
+ "go.lindenii.runxiyu.org/forge/forged/internal/incoming/web"
+ "golang.org/x/sync/errgroup"
+)
+
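+// Server wires together the configuration, the database, and the individual
+// listeners (hooks, LMTP, web, SSH) that make up a forge instance.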
+type Server struct {
+ config config.Config
+
+ database database.Database
+ hookServer *hooks.Server
+ lmtpServer *lmtp.Server
+ webServer *web.Server
+ sshServer *ssh.Server
+
+ global global.Global
+}
+
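+// New loads the configuration at configPath and constructs the sub-servers;
+// the database connection itself is opened later, in Run.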
+func New(configPath string) (server *Server, err error) {
+ server = &Server{} //exhaustruct:ignore
+
+ server.config, err = config.Open(configPath)
+ if err != nil {
+ return server, fmt.Errorf("open config: %w", err)
+ }
+
+ queries := queries.New(&server.database)
+
+ server.global.ForgeVersion = "unknown" // TODO
+ server.global.ForgeTitle = server.config.General.Title
+ server.global.Config = &server.config
+ server.global.Queries = queries
+
+ server.hookServer = hooks.New(&server.global)
+ server.lmtpServer = lmtp.New(&server.global)
+ server.webServer = web.New(&server.global)
+ server.sshServer, err = ssh.New(&server.global)
+ if err != nil {
+ return server, fmt.Errorf("create SSH server: %w", err)
+ }
+
+ return server, nil
+}
+
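+// Run opens the database and then runs all sub-servers in one errgroup,
+// returning the first error; git2d is expected to be started separately.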
+func (server *Server) Run(ctx context.Context) (err error) {
+ // TODO: Not running git2d because it should be run separately.
+ // This needs to be documented somewhere, hence a TODO here for now.
+
+ g, gctx := errgroup.WithContext(ctx)
+
+ server.database, err = database.Open(gctx, server.config.DB.Conn)
+ if err != nil {
+ return fmt.Errorf("open database: %w", err)
+ }
+ defer server.database.Close()
+
+ // TODO: neater way to do this for transactions in querypool?
+ server.global.DB = &server.database
+
+ g.Go(func() error { return server.hookServer.Run(gctx) })
+ g.Go(func() error { return server.lmtpServer.Run(gctx) })
+ g.Go(func() error { return server.webServer.Run(gctx) })
+ g.Go(func() error { return server.sshServer.Run(gctx) })
+
+ err = g.Wait()
+ if err != nil {
+ return fmt.Errorf("server error: %w", err)
+ }
+
+ err = ctx.Err()
+ if err != nil {
+ return fmt.Errorf("context exceeded: %w", err)
+ }
+
+ return nil
+}
diff --git a/forged/internal/unsorted/acl.go b/forged/internal/unsorted/acl.go
deleted file mode 100644
index c2e887d..0000000
--- a/forged/internal/unsorted/acl.go
+++ /dev/null
@@ -1,59 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "context"
-
- "github.com/jackc/pgx/v5/pgtype"
-)
-
-// getRepoInfo returns the filesystem path and direct access permission for a
-// given repo and a provided ssh public key.
-//
-// TODO: Revamp.
-func (s *Server) getRepoInfo(ctx context.Context, groupPath []string, repoName, sshPubkey string) (repoID int, fsPath string, access bool, contribReq, userType string, userID int, err error) {
- err = s.database.QueryRow(ctx, `
-WITH RECURSIVE group_path_cte AS (
- -- Start: match the first name in the path where parent_group IS NULL
- SELECT
- id,
- parent_group,
- name,
- 1 AS depth
- FROM groups
- WHERE name = ($1::text[])[1]
- AND parent_group IS NULL
-
- UNION ALL
-
- -- Recurse: join next segment of the path
- SELECT
- g.id,
- g.parent_group,
- g.name,
- group_path_cte.depth + 1
- FROM groups g
- JOIN group_path_cte ON g.parent_group = group_path_cte.id
- WHERE g.name = ($1::text[])[group_path_cte.depth + 1]
- AND group_path_cte.depth + 1 <= cardinality($1::text[])
-)
-SELECT
- r.id,
- r.filesystem_path,
- CASE WHEN ugr.user_id IS NOT NULL THEN TRUE ELSE FALSE END AS has_role_in_group,
- r.contrib_requirements,
- COALESCE(u.type, ''),
- COALESCE(u.id, 0)
-FROM group_path_cte g
-JOIN repos r ON r.group_id = g.id
-LEFT JOIN ssh_public_keys s ON s.key_string = $3
-LEFT JOIN users u ON u.id = s.user_id
-LEFT JOIN user_group_roles ugr ON ugr.group_id = g.id AND ugr.user_id = u.id
-WHERE g.depth = cardinality($1::text[])
- AND r.name = $2
-`, pgtype.FlatArray[string](groupPath), repoName, sshPubkey,
- ).Scan(&repoID, &fsPath, &access, &contribReq, &userType, &userID)
- return
-}
diff --git a/forged/internal/unsorted/config.go b/forged/internal/unsorted/config.go
deleted file mode 100644
index 9f07480..0000000
--- a/forged/internal/unsorted/config.go
+++ /dev/null
@@ -1,94 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "bufio"
- "errors"
- "log/slog"
- "os"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/database"
- "go.lindenii.runxiyu.org/forge/forged/internal/irc"
- "go.lindenii.runxiyu.org/forge/forged/internal/scfg"
-)
-
-type Config struct {
- HTTP struct {
- Net string `scfg:"net"`
- Addr string `scfg:"addr"`
- CookieExpiry int `scfg:"cookie_expiry"`
- Root string `scfg:"root"`
- ReadTimeout uint32 `scfg:"read_timeout"`
- WriteTimeout uint32 `scfg:"write_timeout"`
- IdleTimeout uint32 `scfg:"idle_timeout"`
- ReverseProxy bool `scfg:"reverse_proxy"`
- } `scfg:"http"`
- Hooks struct {
- Socket string `scfg:"socket"`
- Execs string `scfg:"execs"`
- } `scfg:"hooks"`
- LMTP struct {
- Socket string `scfg:"socket"`
- Domain string `scfg:"domain"`
- MaxSize int64 `scfg:"max_size"`
- WriteTimeout uint32 `scfg:"write_timeout"`
- ReadTimeout uint32 `scfg:"read_timeout"`
- } `scfg:"lmtp"`
- Git struct {
- RepoDir string `scfg:"repo_dir"`
- Socket string `scfg:"socket"`
- DaemonPath string `scfg:"daemon_path"`
- } `scfg:"git"`
- SSH struct {
- Net string `scfg:"net"`
- Addr string `scfg:"addr"`
- Key string `scfg:"key"`
- Root string `scfg:"root"`
- } `scfg:"ssh"`
- IRC irc.Config `scfg:"irc"`
- General struct {
- Title string `scfg:"title"`
- } `scfg:"general"`
- DB struct {
- Type string `scfg:"type"`
- Conn string `scfg:"conn"`
- } `scfg:"db"`
- Pprof struct {
- Net string `scfg:"net"`
- Addr string `scfg:"addr"`
- } `scfg:"pprof"`
-}
-
-// LoadConfig loads a configuration file from the specified path and unmarshals
-// it to the global [config] struct. This may race with concurrent reads from
-// [config]; additional synchronization is necessary if the configuration is to
-// be made reloadable.
-func (s *Server) loadConfig(path string) (err error) {
- var configFile *os.File
- if configFile, err = os.Open(path); err != nil {
- return err
- }
- defer configFile.Close()
-
- decoder := scfg.NewDecoder(bufio.NewReader(configFile))
- if err = decoder.Decode(&s.config); err != nil {
- return err
- }
- for _, u := range decoder.UnknownDirectives() {
- slog.Warn("unknown configuration directive", "directive", u)
- }
-
- if s.config.DB.Type != "postgres" {
- return errors.New("unsupported database type")
- }
-
- if s.database, err = database.Open(s.config.DB.Conn); err != nil {
- return err
- }
-
- s.globalData["forge_title"] = s.config.General.Title
-
- return nil
-}
diff --git a/forged/internal/unsorted/database.go b/forged/internal/unsorted/database.go
deleted file mode 100644
index 222b0c4..0000000
--- a/forged/internal/unsorted/database.go
+++ /dev/null
@@ -1,43 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "context"
-
- "github.com/jackc/pgx/v5"
-)
-
-// TODO: All database handling logic in all request handlers must be revamped.
-// We must ensure that each request has all logic in one transaction (subject
-// to exceptions if appropriate) so they get a consistent view of the database
-// at a single point. A failure to do so may cause things as serious as
-// privilege escalation.
-
-// queryNameDesc is a helper function that executes a query and returns a
-// list of nameDesc results. The query must return two string arguments, i.e. a
-// name and a description.
-func (s *Server) queryNameDesc(ctx context.Context, query string, args ...any) (result []nameDesc, err error) {
- var rows pgx.Rows
-
- if rows, err = s.database.Query(ctx, query, args...); err != nil {
- return nil, err
- }
- defer rows.Close()
-
- for rows.Next() {
- var name, description string
- if err = rows.Scan(&name, &description); err != nil {
- return nil, err
- }
- result = append(result, nameDesc{name, description})
- }
- return result, rows.Err()
-}
-
-// nameDesc holds a name and a description.
-type nameDesc struct {
- Name string
- Description string
-}
diff --git a/forged/internal/unsorted/fedauth.go b/forged/internal/unsorted/fedauth.go
deleted file mode 100644
index f54649b..0000000
--- a/forged/internal/unsorted/fedauth.go
+++ /dev/null
@@ -1,97 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "bufio"
- "context"
- "errors"
- "io"
- "net/http"
- "net/url"
- "strings"
-
- "github.com/jackc/pgx/v5"
-)
-
-// fedauth checks whether a user's SSH public key matches the remote username
-// they claim to have on the service. If so, the association is recorded.
-func (s *Server) fedauth(ctx context.Context, userID int, service, remoteUsername, pubkey string) (bool, error) {
- var err error
-
- matched := false
- usernameEscaped := url.PathEscape(remoteUsername)
-
- var req *http.Request
- switch service {
- // TODO: Services should be configurable by the instance administrator
- // and should not be hardcoded in the source code.
- case "sr.ht":
- req, err = http.NewRequestWithContext(ctx, http.MethodGet, "https://meta.sr.ht/~"+usernameEscaped+".keys", nil)
- case "github":
- req, err = http.NewRequestWithContext(ctx, http.MethodGet, "https://github.com/"+usernameEscaped+".keys", nil)
- case "codeberg":
- req, err = http.NewRequestWithContext(ctx, http.MethodGet, "https://codeberg.org/"+usernameEscaped+".keys", nil)
- case "tangled":
- req, err = http.NewRequestWithContext(ctx, http.MethodGet, "https://tangled.sh/keys/"+usernameEscaped, nil)
- // TODO: Don't rely on one webview
- default:
- return false, errors.New("unknown federated service")
- }
- if err != nil {
- return false, err
- }
-
- resp, err := http.DefaultClient.Do(req)
- if err != nil {
- return false, err
- }
- defer func() {
- _ = resp.Body.Close()
- }()
- buf := bufio.NewReader(resp.Body)
-
- for {
- line, err := buf.ReadString('\n')
- if errors.Is(err, io.EOF) {
- break
- } else if err != nil {
- return false, err
- }
-
- lineSplit := strings.Split(line, " ")
- if len(lineSplit) < 2 {
- continue
- }
- line = strings.Join(lineSplit[:2], " ")
-
- if line == pubkey {
- matched = true
- break
- }
- }
-
- if !matched {
- return false, nil
- }
-
- var txn pgx.Tx
- if txn, err = s.database.Begin(ctx); err != nil {
- return false, err
- }
- defer func() {
- _ = txn.Rollback(ctx)
- }()
- if _, err = txn.Exec(ctx, `UPDATE users SET type = 'federated' WHERE id = $1 AND type = 'pubkey_only'`, userID); err != nil {
- return false, err
- }
- if _, err = txn.Exec(ctx, `INSERT INTO federated_identities (user_id, service, remote_username) VALUES ($1, $2, $3)`, userID, service, remoteUsername); err != nil {
- return false, err
- }
- if err = txn.Commit(ctx); err != nil {
- return false, err
- }
-
- return true, nil
-}
diff --git a/forged/internal/unsorted/git_hooks_handle_linux.go b/forged/internal/unsorted/git_hooks_handle_linux.go
deleted file mode 100644
index f904550..0000000
--- a/forged/internal/unsorted/git_hooks_handle_linux.go
+++ /dev/null
@@ -1,377 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-//
-//go:build linux
-
-package unsorted
-
-import (
- "bytes"
- "context"
- "encoding/binary"
- "errors"
- "fmt"
- "io"
- "net"
- "os"
- "path/filepath"
- "strconv"
- "strings"
- "syscall"
-
- "github.com/go-git/go-git/v5/plumbing"
- "github.com/go-git/go-git/v5/plumbing/object"
- "github.com/jackc/pgx/v5"
- "go.lindenii.runxiyu.org/forge/forged/internal/ansiec"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-var (
- errGetFD = errors.New("unable to get file descriptor")
- errGetUcred = errors.New("failed getsockopt")
-)
-
-// hooksHandler handles a connection from hookc via the
-// unix socket.
-func (s *Server) hooksHandler(conn net.Conn) {
- var ctx context.Context
- var cancel context.CancelFunc
- var ucred *syscall.Ucred
- var err error
- var cookie []byte
- var packPass packPass
- var sshStderr io.Writer
- var hookRet byte
-
- defer conn.Close()
- ctx, cancel = context.WithCancel(context.Background())
- defer cancel()
-
- // There aren't reasonable cases where someone would run this as
- // another user.
- if ucred, err = getUcred(conn); err != nil {
- if _, err = conn.Write([]byte{1}); err != nil {
- return
- }
- writeRedError(conn, "\nUnable to get peer credentials: %v", err)
- return
- }
- uint32uid := uint32(os.Getuid()) //#nosec G115
- if ucred.Uid != uint32uid {
- if _, err = conn.Write([]byte{1}); err != nil {
- return
- }
- writeRedError(conn, "\nUID mismatch")
- return
- }
-
- cookie = make([]byte, 64)
- if _, err = conn.Read(cookie); err != nil {
- if _, err = conn.Write([]byte{1}); err != nil {
- return
- }
- writeRedError(conn, "\nFailed to read cookie: %v", err)
- return
- }
-
- {
- var ok bool
- packPass, ok = s.packPasses.Load(misc.BytesToString(cookie))
- if !ok {
- if _, err = conn.Write([]byte{1}); err != nil {
- return
- }
- writeRedError(conn, "\nInvalid handler cookie")
- return
- }
- }
-
- sshStderr = packPass.session.Stderr()
-
- _, _ = sshStderr.Write([]byte{'\n'})
-
- hookRet = func() byte {
- var argc64 uint64
- if err = binary.Read(conn, binary.NativeEndian, &argc64); err != nil {
- writeRedError(sshStderr, "Failed to read argc: %v", err)
- return 1
- }
- var args []string
- for range argc64 {
- var arg bytes.Buffer
- for {
- nextByte := make([]byte, 1)
- n, err := conn.Read(nextByte)
- if err != nil || n != 1 {
- writeRedError(sshStderr, "Failed to read arg: %v", err)
- return 1
- }
- if nextByte[0] == 0 {
- break
- }
- arg.WriteByte(nextByte[0])
- }
- args = append(args, arg.String())
- }
-
- gitEnv := make(map[string]string)
- for {
- var envLine bytes.Buffer
- for {
- nextByte := make([]byte, 1)
- n, err := conn.Read(nextByte)
- if err != nil || n != 1 {
- writeRedError(sshStderr, "Failed to read environment variable: %v", err)
- return 1
- }
- if nextByte[0] == 0 {
- break
- }
- envLine.WriteByte(nextByte[0])
- }
- if envLine.Len() == 0 {
- break
- }
- kv := envLine.String()
- parts := strings.SplitN(kv, "=", 2)
- if len(parts) < 2 {
- writeRedError(sshStderr, "Invalid environment variable line: %v", kv)
- return 1
- }
- gitEnv[parts[0]] = parts[1]
- }
-
- var stdin bytes.Buffer
- if _, err = io.Copy(&stdin, conn); err != nil {
- writeRedError(conn, "Failed to read to the stdin buffer: %v", err)
- }
-
- switch filepath.Base(args[0]) {
- case "pre-receive":
- if packPass.directAccess {
- return 0
- }
- allOK := true
- for {
- var line, oldOID, rest, newIOID, refName string
- var found bool
- var oldHash, newHash plumbing.Hash
- var oldCommit, newCommit *object.Commit
- var pushOptCount int
-
- pushOptCount, err = strconv.Atoi(gitEnv["GIT_PUSH_OPTION_COUNT"])
- if err != nil {
- writeRedError(sshStderr, "Failed to parse GIT_PUSH_OPTION_COUNT: %v", err)
- return 1
- }
-
- // TODO: Allow existing users (even if they are already federated or registered) to add a federated user ID... though perhaps this should be in the normal SSH interface instead of the git push interface?
- // Also it'd be nice to be able to combine users or whatever
- if packPass.contribReq == "federated" && packPass.userType != "federated" && packPass.userType != "registered" {
- if pushOptCount == 0 {
- writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
- return 1
- }
- for pushOptIndex := range pushOptCount {
- pushOpt, ok := gitEnv[fmt.Sprintf("GIT_PUSH_OPTION_%d", pushOptIndex)]
- if !ok {
- writeRedError(sshStderr, "Failed to get push option %d", pushOptIndex)
- return 1
- }
- if strings.HasPrefix(pushOpt, "fedid=") {
- fedUserID := strings.TrimPrefix(pushOpt, "fedid=")
- service, username, found := strings.Cut(fedUserID, ":")
- if !found {
- writeRedError(sshStderr, "Invalid federated user identifier %#v does not contain a colon", fedUserID)
- return 1
- }
-
- ok, err := s.fedauth(ctx, packPass.userID, service, username, packPass.pubkey)
- if err != nil {
- writeRedError(sshStderr, "Failed to verify federated user identifier %#v: %v", fedUserID, err)
- return 1
- }
- if !ok {
- writeRedError(sshStderr, "Failed to verify federated user identifier %#v: you don't seem to be on the list", fedUserID)
- return 1
- }
-
- break
- }
- if pushOptIndex == pushOptCount-1 {
- writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
- return 1
- }
- }
- }
-
- line, err = stdin.ReadString('\n')
- if errors.Is(err, io.EOF) {
- break
- } else if err != nil {
- writeRedError(sshStderr, "Failed to read pre-receive line: %v", err)
- return 1
- }
- line = line[:len(line)-1]
-
- oldOID, rest, found = strings.Cut(line, " ")
- if !found {
- writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
- return 1
- }
-
- newIOID, refName, found = strings.Cut(rest, " ")
- if !found {
- writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
- return 1
- }
-
- if strings.HasPrefix(refName, "refs/heads/contrib/") {
- if allZero(oldOID) { // New branch
- fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
- var newMRLocalID int
-
- if packPass.userID != 0 {
- err = s.database.QueryRow(ctx,
- "INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open') RETURNING repo_local_id",
- packPass.repoID, packPass.userID, strings.TrimPrefix(refName, "refs/heads/"),
- ).Scan(&newMRLocalID)
- } else {
- err = s.database.QueryRow(ctx,
- "INSERT INTO merge_requests (repo_id, source_ref, status) VALUES ($1, $2, 'open') RETURNING repo_local_id",
- packPass.repoID, strings.TrimPrefix(refName, "refs/heads/"),
- ).Scan(&newMRLocalID)
- }
- if err != nil {
- writeRedError(sshStderr, "Error creating merge request: %v", err)
- return 1
- }
- mergeRequestWebURL := fmt.Sprintf("%s/contrib/%d/", s.genHTTPRemoteURL(packPass.groupPath, packPass.repoName), newMRLocalID)
- fmt.Fprintln(sshStderr, ansiec.Blue+"Created merge request at", mergeRequestWebURL+ansiec.Reset)
-
- s.ircBot.Send("PRIVMSG #chat :New merge request at " + mergeRequestWebURL)
- } else { // Existing contrib branch
- var existingMRUser int
- var isAncestor bool
-
- err = s.database.QueryRow(ctx,
- "SELECT COALESCE(creator, 0) FROM merge_requests WHERE source_ref = $1 AND repo_id = $2",
- strings.TrimPrefix(refName, "refs/heads/"), packPass.repoID,
- ).Scan(&existingMRUser)
- if err != nil {
- if errors.Is(err, pgx.ErrNoRows) {
- writeRedError(sshStderr, "No existing merge request for existing contrib branch: %v", err)
- } else {
- writeRedError(sshStderr, "Error querying for existing merge request: %v", err)
- }
- return 1
- }
- if existingMRUser == 0 {
- allOK = false
- fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs to unowned MR)")
- continue
- }
-
- if existingMRUser != packPass.userID {
- allOK = false
- fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs to another user's MR)")
- continue
- }
-
- oldHash = plumbing.NewHash(oldOID)
-
- if oldCommit, err = packPass.repo.CommitObject(oldHash); err != nil {
- writeRedError(sshStderr, "Daemon failed to get old commit: %v", err)
- return 1
- }
-
- // Potential BUG: I'm not sure if new_commit is guaranteed to be
- // detectable as it hasn't been merged into the main repo's
- // objects yet. But it seems to work, and I don't think there's
- // any reason for this to only work intermittently.
- newHash = plumbing.NewHash(newIOID)
- if newCommit, err = packPass.repo.CommitObject(newHash); err != nil {
- writeRedError(sshStderr, "Daemon failed to get new commit: %v", err)
- return 1
- }
-
- if isAncestor, err = oldCommit.IsAncestor(newCommit); err != nil {
- writeRedError(sshStderr, "Daemon failed to check if old commit is ancestor: %v", err)
- return 1
- }
-
- if !isAncestor {
- // TODO: Create MR snapshot ref instead
- allOK = false
- fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(force pushes are not supported yet)")
- continue
- }
-
- fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
- }
- } else { // Non-contrib branch
- allOK = false
- fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(you cannot push to branches outside of contrib/*)")
- }
- }
-
- fmt.Fprintln(sshStderr)
- if allOK {
- fmt.Fprintln(sshStderr, "Overall "+ansiec.Green+"ACK"+ansiec.Reset+" (all checks passed)")
- return 0
- }
- fmt.Fprintln(sshStderr, "Overall "+ansiec.Red+"NAK"+ansiec.Reset+" (one or more branches failed checks)")
- return 1
- default:
- fmt.Fprintln(sshStderr, ansiec.Red+"Invalid hook:", args[0]+ansiec.Reset)
- return 1
- }
- }()
-
- fmt.Fprintln(sshStderr)
-
- _, _ = conn.Write([]byte{hookRet})
-}
-
-// serveGitHooks handles connections on the specified network listener and
-// treats incoming connections as those from git hook handlers by spawning
-// sessions. The listener must be a SOCK_STREAM UNIX domain socket. The
-// function itself blocks.
-func (s *Server) serveGitHooks(listener net.Listener) error {
- for {
- conn, err := listener.Accept()
- if err != nil {
- return err
- }
- go s.hooksHandler(conn)
- }
-}
-
-// getUcred fetches connection credentials as a [syscall.Ucred] from a given
-// [net.Conn]. It panics when conn is not a [net.UnixConn].
-func getUcred(conn net.Conn) (ucred *syscall.Ucred, err error) {
- unixConn := conn.(*net.UnixConn)
- var unixConnFD *os.File
-
- if unixConnFD, err = unixConn.File(); err != nil {
- return nil, errGetFD
- }
- defer unixConnFD.Close()
-
- if ucred, err = syscall.GetsockoptUcred(int(unixConnFD.Fd()), syscall.SOL_SOCKET, syscall.SO_PEERCRED); err != nil {
- return nil, errGetUcred
- }
- return ucred, nil
-}
-
-// allZero returns true if all runes in a given string are '0'. The comparison
-// is not constant time and must not be used in contexts where time-based side
-// channel attacks are a concern.
-func allZero(s string) bool {
- for _, r := range s {
- if r != '0' {
- return false
- }
- }
- return true
-}
diff --git a/forged/internal/unsorted/git_hooks_handle_other.go b/forged/internal/unsorted/git_hooks_handle_other.go
deleted file mode 100644
index 70b2072..0000000
--- a/forged/internal/unsorted/git_hooks_handle_other.go
+++ /dev/null
@@ -1,336 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-//
-//go:build !linux
-
-package unsorted
-
-import (
- "bytes"
- "context"
- "encoding/binary"
- "errors"
- "fmt"
- "io"
- "net"
- "path/filepath"
- "strconv"
- "strings"
-
- "github.com/go-git/go-git/v5/plumbing"
- "github.com/go-git/go-git/v5/plumbing/object"
- "github.com/jackc/pgx/v5"
- "go.lindenii.runxiyu.org/forge/forged/internal/ansiec"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-// hooksHandler handles a connection from hookc via the
-// unix socket.
-func (s *Server) hooksHandler(conn net.Conn) {
- var ctx context.Context
- var cancel context.CancelFunc
- var err error
- var cookie []byte
- var packPass packPass
- var sshStderr io.Writer
- var hookRet byte
-
- defer conn.Close()
- ctx, cancel = context.WithCancel(context.Background())
- defer cancel()
-
- // TODO: ucred-like checks
-
- cookie = make([]byte, 64)
- if _, err = conn.Read(cookie); err != nil {
- if _, err = conn.Write([]byte{1}); err != nil {
- return
- }
- writeRedError(conn, "\nFailed to read cookie: %v", err)
- return
- }
-
- {
- var ok bool
- packPass, ok = s.packPasses.Load(misc.BytesToString(cookie))
- if !ok {
- if _, err = conn.Write([]byte{1}); err != nil {
- return
- }
- writeRedError(conn, "\nInvalid handler cookie")
- return
- }
- }
-
- sshStderr = packPass.session.Stderr()
-
- _, _ = sshStderr.Write([]byte{'\n'})
-
- hookRet = func() byte {
- var argc64 uint64
- if err = binary.Read(conn, binary.NativeEndian, &argc64); err != nil {
- writeRedError(sshStderr, "Failed to read argc: %v", err)
- return 1
- }
- var args []string
- for range argc64 {
- var arg bytes.Buffer
- for {
- nextByte := make([]byte, 1)
- n, err := conn.Read(nextByte)
- if err != nil || n != 1 {
- writeRedError(sshStderr, "Failed to read arg: %v", err)
- return 1
- }
- if nextByte[0] == 0 {
- break
- }
- arg.WriteByte(nextByte[0])
- }
- args = append(args, arg.String())
- }
-
- gitEnv := make(map[string]string)
- for {
- var envLine bytes.Buffer
- for {
- nextByte := make([]byte, 1)
- n, err := conn.Read(nextByte)
- if err != nil || n != 1 {
- writeRedError(sshStderr, "Failed to read environment variable: %v", err)
- return 1
- }
- if nextByte[0] == 0 {
- break
- }
- envLine.WriteByte(nextByte[0])
- }
- if envLine.Len() == 0 {
- break
- }
- kv := envLine.String()
- parts := strings.SplitN(kv, "=", 2)
- if len(parts) < 2 {
- writeRedError(sshStderr, "Invalid environment variable line: %v", kv)
- return 1
- }
- gitEnv[parts[0]] = parts[1]
- }
-
- var stdin bytes.Buffer
- if _, err = io.Copy(&stdin, conn); err != nil {
- writeRedError(conn, "Failed to read into the stdin buffer: %v", err)
- }
-
- switch filepath.Base(args[0]) {
- case "pre-receive":
- if packPass.directAccess {
- return 0
- }
- allOK := true
- for {
- var line, oldOID, rest, newIOID, refName string
- var found bool
- var oldHash, newHash plumbing.Hash
- var oldCommit, newCommit *object.Commit
- var pushOptCount int
-
- pushOptCount, err = strconv.Atoi(gitEnv["GIT_PUSH_OPTION_COUNT"])
- if err != nil {
- writeRedError(sshStderr, "Failed to parse GIT_PUSH_OPTION_COUNT: %v", err)
- return 1
- }
-
- // TODO: Allow existing users (even if they are already federated or registered) to add a federated user ID... though perhaps this should be in the normal SSH interface instead of the git push interface?
- // Also it'd be nice to be able to combine users or whatever
- if packPass.contribReq == "federated" && packPass.userType != "federated" && packPass.userType != "registered" {
- if pushOptCount == 0 {
- writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
- return 1
- }
- for pushOptIndex := range pushOptCount {
- pushOpt, ok := gitEnv[fmt.Sprintf("GIT_PUSH_OPTION_%d", pushOptIndex)]
- if !ok {
- writeRedError(sshStderr, "Failed to get push option %d", pushOptIndex)
- return 1
- }
- if strings.HasPrefix(pushOpt, "fedid=") {
- fedUserID := strings.TrimPrefix(pushOpt, "fedid=")
- service, username, found := strings.Cut(fedUserID, ":")
- if !found {
- writeRedError(sshStderr, "Invalid federated user identifier %#v does not contain a colon", fedUserID)
- return 1
- }
-
- ok, err := s.fedauth(ctx, packPass.userID, service, username, packPass.pubkey)
- if err != nil {
- writeRedError(sshStderr, "Failed to verify federated user identifier %#v: %v", fedUserID, err)
- return 1
- }
- if !ok {
- writeRedError(sshStderr, "Failed to verify federated user identifier %#v: you don't seem to be on the list", fedUserID)
- return 1
- }
-
- break
- }
- if pushOptIndex == pushOptCount-1 {
- writeRedError(sshStderr, "This repo requires contributors to be either federated or registered users. You must supply your federated user ID as a push option. For example, git push -o fedid=sr.ht:runxiyu")
- return 1
- }
- }
- }
-
- line, err = stdin.ReadString('\n')
- if errors.Is(err, io.EOF) {
- break
- } else if err != nil {
- writeRedError(sshStderr, "Failed to read pre-receive line: %v", err)
- return 1
- }
- line = line[:len(line)-1]
-
- oldOID, rest, found = strings.Cut(line, " ")
- if !found {
- writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
- return 1
- }
-
- newIOID, refName, found = strings.Cut(rest, " ")
- if !found {
- writeRedError(sshStderr, "Invalid pre-receive line: %v", line)
- return 1
- }
-
- if strings.HasPrefix(refName, "refs/heads/contrib/") {
- if allZero(oldOID) { // New branch
- fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
- var newMRLocalID int
-
- if packPass.userID != 0 {
- err = s.database.QueryRow(ctx,
- "INSERT INTO merge_requests (repo_id, creator, source_ref, status) VALUES ($1, $2, $3, 'open') RETURNING repo_local_id",
- packPass.repoID, packPass.userID, strings.TrimPrefix(refName, "refs/heads/"),
- ).Scan(&newMRLocalID)
- } else {
- err = s.database.QueryRow(ctx,
- "INSERT INTO merge_requests (repo_id, source_ref, status) VALUES ($1, $2, 'open') RETURNING repo_local_id",
- packPass.repoID, strings.TrimPrefix(refName, "refs/heads/"),
- ).Scan(&newMRLocalID)
- }
- if err != nil {
- writeRedError(sshStderr, "Error creating merge request: %v", err)
- return 1
- }
- mergeRequestWebURL := fmt.Sprintf("%s/contrib/%d/", s.genHTTPRemoteURL(packPass.groupPath, packPass.repoName), newMRLocalID)
- fmt.Fprintln(sshStderr, ansiec.Blue+"Created merge request at", mergeRequestWebURL+ansiec.Reset)
-
- s.ircBot.Send("PRIVMSG #chat :New merge request at " + mergeRequestWebURL)
- } else { // Existing contrib branch
- var existingMRUser int
- var isAncestor bool
-
- err = s.database.QueryRow(ctx,
- "SELECT COALESCE(creator, 0) FROM merge_requests WHERE source_ref = $1 AND repo_id = $2",
- strings.TrimPrefix(refName, "refs/heads/"), packPass.repoID,
- ).Scan(&existingMRUser)
- if err != nil {
- if errors.Is(err, pgx.ErrNoRows) {
- writeRedError(sshStderr, "No existing merge request for existing contrib branch: %v", err)
- } else {
- writeRedError(sshStderr, "Error querying for existing merge request: %v", err)
- }
- return 1
- }
- if existingMRUser == 0 {
- allOK = false
- fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs to unowned MR)")
- continue
- }
-
- if existingMRUser != packPass.userID {
- allOK = false
- fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(branch belongs to another user's MR)")
- continue
- }
-
- oldHash = plumbing.NewHash(oldOID)
-
- if oldCommit, err = packPass.repo.CommitObject(oldHash); err != nil {
- writeRedError(sshStderr, "Daemon failed to get old commit: %v", err)
- return 1
- }
-
- // Potential BUG: I'm not sure if new_commit is guaranteed to be
- // detectable as it hasn't been merged into the main repo's
- // objects yet. But it seems to work, and I don't think there's
- // any reason for this to only work intermittently.
- newHash = plumbing.NewHash(newIOID)
- if newCommit, err = packPass.repo.CommitObject(newHash); err != nil {
- writeRedError(sshStderr, "Daemon failed to get new commit: %v", err)
- return 1
- }
-
- if isAncestor, err = oldCommit.IsAncestor(newCommit); err != nil {
- writeRedError(sshStderr, "Daemon failed to check if old commit is ancestor: %v", err)
- return 1
- }
-
- if !isAncestor {
- // TODO: Create MR snapshot ref instead
- allOK = false
- fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(force pushes are not supported yet)")
- continue
- }
-
- fmt.Fprintln(sshStderr, ansiec.Blue+"POK"+ansiec.Reset, refName)
- }
- } else { // Non-contrib branch
- allOK = false
- fmt.Fprintln(sshStderr, ansiec.Red+"NAK"+ansiec.Reset, refName, "(you cannot push to branches outside of contrib/*)")
- }
- }
-
- fmt.Fprintln(sshStderr)
- if allOK {
- fmt.Fprintln(sshStderr, "Overall "+ansiec.Green+"ACK"+ansiec.Reset+" (all checks passed)")
- return 0
- }
- fmt.Fprintln(sshStderr, "Overall "+ansiec.Red+"NAK"+ansiec.Reset+" (one or more branches failed checks)")
- return 1
- default:
- fmt.Fprintln(sshStderr, ansiec.Red+"Invalid hook:", args[0]+ansiec.Reset)
- return 1
- }
- }()
-
- fmt.Fprintln(sshStderr)
-
- _, _ = conn.Write([]byte{hookRet})
-}
-
-// serveGitHooks handles connections on the specified network listener and
-// treats incoming connections as those from git hook handlers by spawning
-// sessions. The listener must be a SOCK_STREAM UNIX domain socket. The
-// function itself blocks.
-func (s *Server) serveGitHooks(listener net.Listener) error {
- for {
- conn, err := listener.Accept()
- if err != nil {
- return err
- }
- go s.hooksHandler(conn)
- }
-}
-
-// allZero returns true if all runes in a given string are '0'. The comparison
-// is not constant time and must not be used in contexts where time-based side
-// channel attacks are a concern.
-func allZero(s string) bool {
- for _, r := range s {
- if r != '0' {
- return false
- }
- }
- return true
-}
diff --git a/forged/internal/unsorted/git_init.go b/forged/internal/unsorted/git_init.go
deleted file mode 100644
index a9bba78..0000000
--- a/forged/internal/unsorted/git_init.go
+++ /dev/null
@@ -1,34 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "github.com/go-git/go-git/v5"
- gitConfig "github.com/go-git/go-git/v5/config"
- gitFmtConfig "github.com/go-git/go-git/v5/plumbing/format/config"
-)
-
-// gitInit initializes a bare git repository with the forge-deployed hooks
-// directory as the hooksPath.
-func (s *Server) gitInit(repoPath string) (err error) {
- var repo *git.Repository
- var gitConf *gitConfig.Config
-
- if repo, err = git.PlainInit(repoPath, true); err != nil {
- return err
- }
-
- if gitConf, err = repo.Config(); err != nil {
- return err
- }
-
- gitConf.Raw.SetOption("core", gitFmtConfig.NoSubsection, "hooksPath", s.config.Hooks.Execs)
- gitConf.Raw.SetOption("receive", gitFmtConfig.NoSubsection, "advertisePushOptions", "true")
-
- if err = repo.SetConfig(gitConf); err != nil {
- return err
- }
-
- return nil
-}
diff --git a/forged/internal/unsorted/git_misc.go b/forged/internal/unsorted/git_misc.go
deleted file mode 100644
index dd93726..0000000
--- a/forged/internal/unsorted/git_misc.go
+++ /dev/null
@@ -1,95 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "context"
- "errors"
- "io"
- "iter"
-
- "github.com/go-git/go-git/v5"
- "github.com/go-git/go-git/v5/plumbing/object"
- "github.com/jackc/pgx/v5/pgtype"
-)
-
-// openRepo opens a git repository by group and repo name.
-//
-// TODO: This should be deprecated in favor of doing it in the relevant
-// request/router context in the future, as it cannot cover the nuance of
-// fields needed.
-func (s *Server) openRepo(ctx context.Context, groupPath []string, repoName string) (repo *git.Repository, description string, repoID int, fsPath string, err error) {
- err = s.database.QueryRow(ctx, `
-WITH RECURSIVE group_path_cte AS (
- -- Start: match the first name in the path where parent_group IS NULL
- SELECT
- id,
- parent_group,
- name,
- 1 AS depth
- FROM groups
- WHERE name = ($1::text[])[1]
- AND parent_group IS NULL
-
- UNION ALL
-
- -- Recurse: join next segment of the path
- SELECT
- g.id,
- g.parent_group,
- g.name,
- group_path_cte.depth + 1
- FROM groups g
- JOIN group_path_cte ON g.parent_group = group_path_cte.id
- WHERE g.name = ($1::text[])[group_path_cte.depth + 1]
- AND group_path_cte.depth + 1 <= cardinality($1::text[])
-)
-SELECT
- r.filesystem_path,
- COALESCE(r.description, ''),
- r.id
-FROM group_path_cte g
-JOIN repos r ON r.group_id = g.id
-WHERE g.depth = cardinality($1::text[])
- AND r.name = $2
- `, pgtype.FlatArray[string](groupPath), repoName).Scan(&fsPath, &description, &repoID)
- if err != nil {
- return
- }
-
- repo, err = git.PlainOpen(fsPath)
- return
-}
-
-// commitIterSeqErr creates an [iter.Seq[*object.Commit]] from an
-// [object.CommitIter], and additionally returns a pointer to error.
-// The pointer to error is guaranteed to be populated with either nil or the
-// error returned by the commit iterator after the returned iterator is
-// finished.
-func commitIterSeqErr(ctx context.Context, commitIter object.CommitIter) (iter.Seq[*object.Commit], *error) {
- var err error
- return func(yield func(*object.Commit) bool) {
- for {
- commit, err2 := commitIter.Next()
- if err2 != nil {
- if errors.Is(err2, io.EOF) {
- return
- }
- err = err2
- return
- }
-
- select {
- case <-ctx.Done():
- err = ctx.Err()
- return
- default:
- }
-
- if !yield(commit) {
- return
- }
- }
- }, &err
-}
diff --git a/forged/internal/unsorted/git_plumbing.go b/forged/internal/unsorted/git_plumbing.go
deleted file mode 100644
index e7ebe8f..0000000
--- a/forged/internal/unsorted/git_plumbing.go
+++ /dev/null
@@ -1,188 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "bytes"
- "context"
- "encoding/hex"
- "errors"
- "os"
- "os/exec"
- "path"
- "sort"
- "strings"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-func writeTree(ctx context.Context, repoPath string, entries []treeEntry) (string, error) {
- var buf bytes.Buffer
-
- sort.Slice(entries, func(i, j int) bool {
- nameI, nameJ := entries[i].name, entries[j].name
-
- if nameI == nameJ { // meh
- return !(entries[i].mode == "40000") && (entries[j].mode == "40000")
- }
-
- if strings.HasPrefix(nameJ, nameI) && len(nameI) < len(nameJ) {
- return !(entries[i].mode == "40000")
- }
-
- if strings.HasPrefix(nameI, nameJ) && len(nameJ) < len(nameI) {
- return entries[j].mode == "40000"
- }
-
- return nameI < nameJ
- })
-
- for _, e := range entries {
- buf.WriteString(e.mode)
- buf.WriteByte(' ')
- buf.WriteString(e.name)
- buf.WriteByte(0)
- buf.Write(e.sha)
- }
-
- cmd := exec.CommandContext(ctx, "git", "hash-object", "-w", "-t", "tree", "--stdin")
- cmd.Env = append(os.Environ(), "GIT_DIR="+repoPath)
- cmd.Stdin = &buf
-
- var out bytes.Buffer
- cmd.Stdout = &out
- if err := cmd.Run(); err != nil {
- return "", err
- }
- return strings.TrimSpace(out.String()), nil
-}
-
-func buildTreeRecursive(ctx context.Context, repoPath, baseTree string, updates map[string][]byte) (string, error) {
- treeCache := make(map[string][]treeEntry)
-
- var walk func(string, string) error
- walk = func(prefix, sha string) error {
- cmd := exec.CommandContext(ctx, "git", "cat-file", "tree", sha)
- cmd.Env = append(os.Environ(), "GIT_DIR="+repoPath)
- var out bytes.Buffer
- cmd.Stdout = &out
- if err := cmd.Run(); err != nil {
- return err
- }
- data := out.Bytes()
- i := 0
- var entries []treeEntry
- for i < len(data) {
- modeEnd := bytes.IndexByte(data[i:], ' ')
- if modeEnd < 0 {
- return errors.New("invalid tree format")
- }
- mode := misc.BytesToString(data[i : i+modeEnd])
- i += modeEnd + 1
-
- nameEnd := bytes.IndexByte(data[i:], 0)
- if nameEnd < 0 {
- return errors.New("missing null after filename")
- }
- name := misc.BytesToString(data[i : i+nameEnd])
- i += nameEnd + 1
-
- if i+20 > len(data) {
- return errors.New("unexpected EOF in SHA")
- }
- shaBytes := data[i : i+20]
- i += 20
-
- entries = append(entries, treeEntry{
- mode: mode,
- name: name,
- sha: shaBytes,
- })
-
- if mode == "40000" {
- subPrefix := path.Join(prefix, name)
- if err := walk(subPrefix, hex.EncodeToString(shaBytes)); err != nil {
- return err
- }
- }
- }
- treeCache[prefix] = entries
- return nil
- }
-
- if err := walk("", baseTree); err != nil {
- return "", err
- }
-
- for filePath, blobSha := range updates {
- parts := strings.Split(filePath, "/")
- dir := strings.Join(parts[:len(parts)-1], "/")
- name := parts[len(parts)-1]
-
- entries := treeCache[dir]
- found := false
- for i, e := range entries {
- if e.name == name {
- if blobSha == nil {
- // Remove TODO
- entries = append(entries[:i], entries[i+1:]...)
- } else {
- entries[i].sha = blobSha
- }
- found = true
- break
- }
- }
- if !found && blobSha != nil {
- entries = append(entries, treeEntry{
- mode: "100644",
- name: name,
- sha: blobSha,
- })
- }
- treeCache[dir] = entries
- }
-
- built := make(map[string][]byte)
- var build func(string) ([]byte, error)
- build = func(prefix string) ([]byte, error) {
- entries := treeCache[prefix]
- for i, e := range entries {
- if e.mode == "40000" {
- subPrefix := path.Join(prefix, e.name)
- if sha, ok := built[subPrefix]; ok {
- entries[i].sha = sha
- continue
- }
- newShaStr, err := build(subPrefix)
- if err != nil {
- return nil, err
- }
- entries[i].sha = newShaStr
- }
- }
- shaStr, err := writeTree(ctx, repoPath, entries)
- if err != nil {
- return nil, err
- }
- shaBytes, err := hex.DecodeString(shaStr)
- if err != nil {
- return nil, err
- }
- built[prefix] = shaBytes
- return shaBytes, nil
- }
-
- rootShaBytes, err := build("")
- if err != nil {
- return "", err
- }
- return hex.EncodeToString(rootShaBytes), nil
-}
-
-type treeEntry struct {
- mode string // like "100644"
- name string // individual name
- sha []byte
-}
diff --git a/forged/internal/unsorted/git_ref.go b/forged/internal/unsorted/git_ref.go
deleted file mode 100644
index d9735ba..0000000
--- a/forged/internal/unsorted/git_ref.go
+++ /dev/null
@@ -1,37 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "github.com/go-git/go-git/v5"
- "github.com/go-git/go-git/v5/plumbing"
-)
-
-// getRefHash returns the hash of a reference given its
-// type and name as supplied in URL queries.
-func getRefHash(repo *git.Repository, refType, refName string) (refHash plumbing.Hash, err error) {
- var ref *plumbing.Reference
- switch refType {
- case "":
- if ref, err = repo.Head(); err != nil {
- return
- }
- refHash = ref.Hash()
- case "commit":
- refHash = plumbing.NewHash(refName)
- case "branch":
- if ref, err = repo.Reference(plumbing.NewBranchReferenceName(refName), true); err != nil {
- return
- }
- refHash = ref.Hash()
- case "tag":
- if ref, err = repo.Reference(plumbing.NewTagReferenceName(refName), true); err != nil {
- return
- }
- refHash = ref.Hash()
- default:
- panic("Invalid ref type " + refType)
- }
- return
-}
diff --git a/forged/internal/unsorted/http_auth.go b/forged/internal/unsorted/http_auth.go
deleted file mode 100644
index b0afa05..0000000
--- a/forged/internal/unsorted/http_auth.go
+++ /dev/null
@@ -1,26 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "net/http"
-)
-
-// getUserFromRequest returns the user ID and username associated with the
-// session cookie in a given [http.Request].
-func (s *Server) getUserFromRequest(request *http.Request) (id int, username string, err error) {
- var sessionCookie *http.Cookie
-
- if sessionCookie, err = request.Cookie("session"); err != nil {
- return
- }
-
- err = s.database.QueryRow(
- request.Context(),
- "SELECT user_id, COALESCE(username, '') FROM users u JOIN sessions s ON u.id = s.user_id WHERE s.session_id = $1;",
- sessionCookie.Value,
- ).Scan(&id, &username)
-
- return
-}
diff --git a/forged/internal/unsorted/http_handle_branches.go b/forged/internal/unsorted/http_handle_branches.go
deleted file mode 100644
index 704e1d8..0000000
--- a/forged/internal/unsorted/http_handle_branches.go
+++ /dev/null
@@ -1,46 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "net/http"
- "strings"
-
- "github.com/go-git/go-git/v5"
- "github.com/go-git/go-git/v5/plumbing"
- "github.com/go-git/go-git/v5/plumbing/storer"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-// httpHandleRepoBranches provides the branches page in repos.
-func (s *Server) httpHandleRepoBranches(writer http.ResponseWriter, _ *http.Request, params map[string]any) {
- var repo *git.Repository
- var repoName string
- var groupPath []string
- var err error
- var notes []string
- var branches []string
- var branchesIter storer.ReferenceIter
-
- repo, repoName, groupPath = params["repo"].(*git.Repository), params["repo_name"].(string), params["group_path"].([]string)
-
- if strings.Contains(repoName, "\n") || misc.SliceContainsNewlines(groupPath) {
- notes = append(notes, "Path contains newlines; HTTP Git access impossible")
- }
-
- branchesIter, err = repo.Branches()
- if err == nil {
- _ = branchesIter.ForEach(func(branch *plumbing.Reference) error {
- branches = append(branches, branch.Name().Short())
- return nil
- })
- }
- params["branches"] = branches
-
- params["http_clone_url"] = s.genHTTPRemoteURL(groupPath, repoName)
- params["ssh_clone_url"] = s.genSSHRemoteURL(groupPath, repoName)
- params["notes"] = notes
-
- s.renderTemplate(writer, "repo_branches", params)
-}
diff --git a/forged/internal/unsorted/http_handle_group_index.go b/forged/internal/unsorted/http_handle_group_index.go
deleted file mode 100644
index ce28a1c..0000000
--- a/forged/internal/unsorted/http_handle_group_index.go
+++ /dev/null
@@ -1,196 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "errors"
- "net/http"
- "path/filepath"
- "strconv"
-
- "github.com/jackc/pgx/v5"
- "github.com/jackc/pgx/v5/pgtype"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
- "go.lindenii.runxiyu.org/forge/forged/internal/web"
-)
-
-// httpHandleGroupIndex provides the index page for a group, which includes a
-// list of its subgroups and repos, as well as a form for group maintainers to
-// create repos.
-func (s *Server) httpHandleGroupIndex(writer http.ResponseWriter, request *http.Request, params map[string]any) {
- var groupPath []string
- var repos []nameDesc
- var subgroups []nameDesc
- var err error
- var groupID int
- var groupDesc string
-
- groupPath = params["group_path"].([]string)
-
- // The group itself
- err = s.database.QueryRow(request.Context(), `
- WITH RECURSIVE group_path_cte AS (
- SELECT
- id,
- parent_group,
- name,
- 1 AS depth
- FROM groups
- WHERE name = ($1::text[])[1]
- AND parent_group IS NULL
-
- UNION ALL
-
- SELECT
- g.id,
- g.parent_group,
- g.name,
- group_path_cte.depth + 1
- FROM groups g
- JOIN group_path_cte ON g.parent_group = group_path_cte.id
- WHERE g.name = ($1::text[])[group_path_cte.depth + 1]
- AND group_path_cte.depth + 1 <= cardinality($1::text[])
- )
- SELECT c.id, COALESCE(g.description, '')
- FROM group_path_cte c
- JOIN groups g ON g.id = c.id
- WHERE c.depth = cardinality($1::text[])
- `,
- pgtype.FlatArray[string](groupPath),
- ).Scan(&groupID, &groupDesc)
-
- if errors.Is(err, pgx.ErrNoRows) {
- web.ErrorPage404(s.templates, writer, params)
- return
- } else if err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting group: "+err.Error())
- return
- }
-
- // ACL
- var count int
- err = s.database.QueryRow(request.Context(), `
- SELECT COUNT(*)
- FROM user_group_roles
- WHERE user_id = $1
- AND group_id = $2
- `, params["user_id"].(int), groupID).Scan(&count)
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error checking access: "+err.Error())
- return
- }
- directAccess := (count > 0)
-
- if request.Method == http.MethodPost {
- if !directAccess {
- web.ErrorPage403(s.templates, writer, params, "You do not have direct access to this group")
- return
- }
-
- repoName := request.FormValue("repo_name")
- repoDesc := request.FormValue("repo_desc")
- contribReq := request.FormValue("repo_contrib")
- if repoName == "" {
- web.ErrorPage400(s.templates, writer, params, "Repo name is required")
- return
- }
-
- var newRepoID int
- err := s.database.QueryRow(
- request.Context(),
- `INSERT INTO repos (name, description, group_id, contrib_requirements)
- VALUES ($1, $2, $3, $4)
- RETURNING id`,
- repoName,
- repoDesc,
- groupID,
- contribReq,
- ).Scan(&newRepoID)
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error creating repo: "+err.Error())
- return
- }
-
- filePath := filepath.Join(s.config.Git.RepoDir, strconv.Itoa(newRepoID)+".git")
-
- _, err = s.database.Exec(
- request.Context(),
- `UPDATE repos
- SET filesystem_path = $1
- WHERE id = $2`,
- filePath,
- newRepoID,
- )
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error updating repo path: "+err.Error())
- return
- }
-
- if err = s.gitInit(filePath); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error initializing repo: "+err.Error())
- return
- }
-
- misc.RedirectUnconditionally(writer, request)
- return
- }
-
- // Repos
- var rows pgx.Rows
- rows, err = s.database.Query(request.Context(), `
- SELECT name, COALESCE(description, '')
- FROM repos
- WHERE group_id = $1
- `, groupID)
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting repos: "+err.Error())
- return
- }
- defer rows.Close()
-
- for rows.Next() {
- var name, description string
- if err = rows.Scan(&name, &description); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting repos: "+err.Error())
- return
- }
- repos = append(repos, nameDesc{name, description})
- }
- if err = rows.Err(); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting repos: "+err.Error())
- return
- }
-
- // Subgroups
- rows, err = s.database.Query(request.Context(), `
- SELECT name, COALESCE(description, '')
- FROM groups
- WHERE parent_group = $1
- `, groupID)
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting subgroups: "+err.Error())
- return
- }
- defer rows.Close()
-
- for rows.Next() {
- var name, description string
- if err = rows.Scan(&name, &description); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting subgroups: "+err.Error())
- return
- }
- subgroups = append(subgroups, nameDesc{name, description})
- }
- if err = rows.Err(); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting subgroups: "+err.Error())
- return
- }
-
- params["repos"] = repos
- params["subgroups"] = subgroups
- params["description"] = groupDesc
- params["direct_access"] = directAccess
-
- s.renderTemplate(writer, "group", params)
-}
diff --git a/forged/internal/unsorted/http_handle_index.go b/forged/internal/unsorted/http_handle_index.go
deleted file mode 100644
index a3141f4..0000000
--- a/forged/internal/unsorted/http_handle_index.go
+++ /dev/null
@@ -1,26 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "net/http"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/web"
-)
-
-// httpHandleIndex provides the main index page which includes a list of groups
-// and some global information such as SSH keys.
-func (s *Server) httpHandleIndex(writer http.ResponseWriter, request *http.Request, params map[string]any) {
- var err error
- var groups []nameDesc
-
- groups, err = s.queryNameDesc(request.Context(), "SELECT name, COALESCE(description, '') FROM groups WHERE parent_group IS NULL")
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error querying groups: "+err.Error())
- return
- }
- params["groups"] = groups
-
- s.renderTemplate(writer, "index", params)
-}
diff --git a/forged/internal/unsorted/http_handle_login.go b/forged/internal/unsorted/http_handle_login.go
deleted file mode 100644
index 8adbe17..0000000
--- a/forged/internal/unsorted/http_handle_login.go
+++ /dev/null
@@ -1,108 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "crypto/rand"
- "encoding/base64"
- "errors"
- "fmt"
- "net/http"
- "time"
-
- "github.com/jackc/pgx/v5"
- "go.lindenii.runxiyu.org/forge/forged/internal/argon2id"
- "go.lindenii.runxiyu.org/forge/forged/internal/web"
-)
-
-// httpHandleLogin provides the login page for local users.
-func (s *Server) httpHandleLogin(writer http.ResponseWriter, request *http.Request, params map[string]any) {
- var username, password string
- var userID int
- var passwordHash string
- var err error
- var passwordMatches bool
- var cookieValue string
- var now time.Time
- var expiry time.Time
- var cookie http.Cookie
-
- if request.Method != http.MethodPost {
- s.renderTemplate(writer, "login", params)
- return
- }
-
- username = request.PostFormValue("username")
- password = request.PostFormValue("password")
-
- err = s.database.QueryRow(request.Context(),
- "SELECT id, COALESCE(password, '') FROM users WHERE username = $1",
- username,
- ).Scan(&userID, &passwordHash)
- if err != nil {
- if errors.Is(err, pgx.ErrNoRows) {
- params["login_error"] = "Unknown username"
- s.renderTemplate(writer, "login", params)
- return
- }
- web.ErrorPage500(s.templates, writer, params, "Error querying user information: "+err.Error())
- return
- }
- if passwordHash == "" {
- params["login_error"] = "User has no password"
- s.renderTemplate(writer, "login", params)
- return
- }
-
- if passwordMatches, err = argon2id.ComparePasswordAndHash(password, passwordHash); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error comparing password and hash: "+err.Error())
- return
- }
-
- if !passwordMatches {
- params["login_error"] = "Invalid password"
- s.renderTemplate(writer, "login", params)
- return
- }
-
- if cookieValue, err = randomUrlsafeStr(16); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting random string: "+err.Error())
- return
- }
-
- now = time.Now()
- expiry = now.Add(time.Duration(s.config.HTTP.CookieExpiry) * time.Second)
-
- cookie = http.Cookie{
- Name: "session",
- Value: cookieValue,
- SameSite: http.SameSiteLaxMode,
- HttpOnly: true,
- Secure: false, // TODO
- Expires: expiry,
- Path: "/",
- } //exhaustruct:ignore
-
- http.SetCookie(writer, &cookie)
-
- _, err = s.database.Exec(request.Context(), "INSERT INTO sessions (user_id, session_id) VALUES ($1, $2)", userID, cookieValue)
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error inserting session: "+err.Error())
- return
- }
-
- http.Redirect(writer, request, "/", http.StatusSeeOther)
-}
-
-// randomUrlsafeStr generates a random string of the given entropic size
-// using the URL-safe base64 encoding. The actual size of the string returned
-// will be 4*sz.
-func randomUrlsafeStr(sz int) (string, error) {
- r := make([]byte, 3*sz)
- _, err := rand.Read(r)
- if err != nil {
- return "", fmt.Errorf("error generating random string: %w", err)
- }
- return base64.RawURLEncoding.EncodeToString(r), nil
-}
diff --git a/forged/internal/unsorted/http_handle_repo_commit.go b/forged/internal/unsorted/http_handle_repo_commit.go
deleted file mode 100644
index 2afdf3a..0000000
--- a/forged/internal/unsorted/http_handle_repo_commit.go
+++ /dev/null
@@ -1,146 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "fmt"
- "net/http"
- "strings"
-
- "github.com/go-git/go-git/v5"
- "github.com/go-git/go-git/v5/plumbing"
- "github.com/go-git/go-git/v5/plumbing/filemode"
- "github.com/go-git/go-git/v5/plumbing/format/diff"
- "github.com/go-git/go-git/v5/plumbing/object"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
- "go.lindenii.runxiyu.org/forge/forged/internal/oldgit"
- "go.lindenii.runxiyu.org/forge/forged/internal/web"
-)
-
-// usableFilePatch is a [diff.FilePatch] that is structured in a way more
-// friendly for use in HTML templates.
-type usableFilePatch struct {
- From diff.File
- To diff.File
- Chunks []usableChunk
-}
-
-// usableChunk is a [diff.Chunk] that is structured in a way more friendly for
-// use in HTML templates.
-type usableChunk struct {
- Operation diff.Operation
- Content string
-}
-
-func (s *Server) httpHandleRepoCommit(writer http.ResponseWriter, request *http.Request, params map[string]any) {
- var repo *git.Repository
- var commitIDStrSpec, commitIDStrSpecNoSuffix string
- var commitID plumbing.Hash
- var parentCommitHash plumbing.Hash
- var commitObj *object.Commit
- var commitIDStr string
- var err error
- var patch *object.Patch
-
- repo, commitIDStrSpec = params["repo"].(*git.Repository), params["commit_id"].(string)
-
- commitIDStrSpecNoSuffix = strings.TrimSuffix(commitIDStrSpec, ".patch")
- commitID = plumbing.NewHash(commitIDStrSpecNoSuffix)
- if commitObj, err = repo.CommitObject(commitID); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting commit object: "+err.Error())
- return
- }
- if commitIDStrSpecNoSuffix != commitIDStrSpec {
- var patchStr string
- if patchStr, err = oldgit.FmtCommitPatch(commitObj); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error formatting patch: "+err.Error())
- return
- }
- fmt.Fprintln(writer, patchStr)
- return
- }
- commitIDStr = commitObj.Hash.String()
-
- if commitIDStr != commitIDStrSpec {
- http.Redirect(writer, request, commitIDStr, http.StatusSeeOther)
- return
- }
-
- params["commit_object"] = commitObj
- params["commit_id"] = commitIDStr
-
- parentCommitHash, patch, err = oldgit.CommitToPatch(commitObj)
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting patch from commit: "+err.Error())
- return
- }
- params["parent_commit_hash"] = parentCommitHash.String()
- params["patch"] = patch
-
- params["file_patches"] = makeUsableFilePatches(patch)
-
- s.renderTemplate(writer, "repo_commit", params)
-}
-
-type fakeDiffFile struct {
- hash plumbing.Hash
- mode filemode.FileMode
- path string
-}
-
-func (f fakeDiffFile) Hash() plumbing.Hash {
- return f.hash
-}
-
-func (f fakeDiffFile) Mode() filemode.FileMode {
- return f.mode
-}
-
-func (f fakeDiffFile) Path() string {
- return f.path
-}
-
-var nullFakeDiffFile = fakeDiffFile{ //nolint:gochecknoglobals
- hash: plumbing.NewHash("0000000000000000000000000000000000000000"),
- mode: misc.FirstOrPanic(filemode.New("100644")),
- path: "",
-}
-
-func makeUsableFilePatches(patch diff.Patch) (usableFilePatches []usableFilePatch) {
- // TODO: Remove unnecessary context
- // TODO: Prepend "+"/"-"/" " instead of solely distinguishing based on color
-
- for _, filePatch := range patch.FilePatches() {
- var fromFile, toFile diff.File
- var ufp usableFilePatch
- chunks := []usableChunk{}
-
- fromFile, toFile = filePatch.Files()
- if fromFile == nil {
- fromFile = nullFakeDiffFile
- }
- if toFile == nil {
- toFile = nullFakeDiffFile
- }
- for _, chunk := range filePatch.Chunks() {
- var content string
-
- content = chunk.Content()
- if len(content) > 0 && content[0] == '\n' {
- content = "\n" + content
- } // Horrible hack to fix how browsers handle newlines that immediately follow <pre>
- chunks = append(chunks, usableChunk{
- Operation: chunk.Type(),
- Content: content,
- })
- }
- ufp = usableFilePatch{
- Chunks: chunks,
- From: fromFile,
- To: toFile,
- }
- usableFilePatches = append(usableFilePatches, ufp)
- }
- return
-}
diff --git a/forged/internal/unsorted/http_handle_repo_contrib_index.go b/forged/internal/unsorted/http_handle_repo_contrib_index.go
deleted file mode 100644
index 5c68c08..0000000
--- a/forged/internal/unsorted/http_handle_repo_contrib_index.go
+++ /dev/null
@@ -1,52 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "net/http"
-
- "github.com/jackc/pgx/v5"
- "go.lindenii.runxiyu.org/forge/forged/internal/web"
-)
-
-// idTitleStatus describes properties of a merge request that need to be
-// present in MR listings.
-type idTitleStatus struct {
- ID int
- Title string
- Status string
-}
-
-// httpHandleRepoContribIndex provides an index to merge requests of a repo.
-func (s *Server) httpHandleRepoContribIndex(writer http.ResponseWriter, request *http.Request, params map[string]any) {
- var rows pgx.Rows
- var result []idTitleStatus
- var err error
-
- if rows, err = s.database.Query(request.Context(),
- "SELECT repo_local_id, COALESCE(title, 'Untitled'), status FROM merge_requests WHERE repo_id = $1",
- params["repo_id"],
- ); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error querying merge requests: "+err.Error())
- return
- }
- defer rows.Close()
-
- for rows.Next() {
- var mrID int
- var mrTitle, mrStatus string
- if err = rows.Scan(&mrID, &mrTitle, &mrStatus); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error scanning merge request: "+err.Error())
- return
- }
- result = append(result, idTitleStatus{mrID, mrTitle, mrStatus})
- }
- if err = rows.Err(); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error ranging over merge requests: "+err.Error())
- return
- }
- params["merge_requests"] = result
-
- s.renderTemplate(writer, "repo_contrib_index", params)
-}
diff --git a/forged/internal/unsorted/http_handle_repo_contrib_one.go b/forged/internal/unsorted/http_handle_repo_contrib_one.go
deleted file mode 100644
index 1d733b0..0000000
--- a/forged/internal/unsorted/http_handle_repo_contrib_one.go
+++ /dev/null
@@ -1,98 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "net/http"
- "strconv"
-
- "github.com/go-git/go-git/v5"
- "github.com/go-git/go-git/v5/plumbing"
- "github.com/go-git/go-git/v5/plumbing/object"
- "go.lindenii.runxiyu.org/forge/forged/internal/web"
-)
-
-// httpHandleRepoContribOne provides an interface to each merge request of a
-// repo.
-func (s *Server) httpHandleRepoContribOne(writer http.ResponseWriter, request *http.Request, params map[string]any) {
- var mrIDStr string
- var mrIDInt int
- var err error
- var title, status, srcRefStr, dstBranchStr string
- var repo *git.Repository
- var srcRefHash plumbing.Hash
- var dstBranchHash plumbing.Hash
- var srcCommit, dstCommit, mergeBaseCommit *object.Commit
- var mergeBases []*object.Commit
-
- mrIDStr = params["mr_id"].(string)
- mrIDInt64, err := strconv.ParseInt(mrIDStr, 10, strconv.IntSize)
- if err != nil {
- web.ErrorPage400(s.templates, writer, params, "Merge request ID not an integer")
- return
- }
- mrIDInt = int(mrIDInt64)
-
- if err = s.database.QueryRow(request.Context(),
- "SELECT COALESCE(title, ''), status, source_ref, COALESCE(destination_branch, '') FROM merge_requests WHERE repo_id = $1 AND repo_local_id = $2",
- params["repo_id"], mrIDInt,
- ).Scan(&title, &status, &srcRefStr, &dstBranchStr); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error querying merge request: "+err.Error())
- return
- }
-
- repo = params["repo"].(*git.Repository)
-
- if srcRefHash, err = getRefHash(repo, "branch", srcRefStr); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting source ref hash: "+err.Error())
- return
- }
- if srcCommit, err = repo.CommitObject(srcRefHash); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting source commit: "+err.Error())
- return
- }
- params["source_commit"] = srcCommit
-
- if dstBranchStr == "" {
- dstBranchStr = "HEAD"
- dstBranchHash, err = getRefHash(repo, "", "")
- } else {
- dstBranchHash, err = getRefHash(repo, "branch", dstBranchStr)
- }
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting destination branch hash: "+err.Error())
- return
- }
-
- if dstCommit, err = repo.CommitObject(dstBranchHash); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting destination commit: "+err.Error())
- return
- }
- params["destination_commit"] = dstCommit
-
- if mergeBases, err = srcCommit.MergeBase(dstCommit); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting merge base: "+err.Error())
- return
- }
-
- if len(mergeBases) < 1 {
- web.ErrorPage500(s.templates, writer, params, "No merge base found for this merge request; these two branches do not share any common history")
- // TODO
- return
- }
-
- mergeBaseCommit = mergeBases[0]
- params["merge_base"] = mergeBaseCommit
-
- patch, err := mergeBaseCommit.Patch(srcCommit)
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting patch: "+err.Error())
- return
- }
- params["file_patches"] = makeUsableFilePatches(patch)
-
- params["mr_title"], params["mr_status"], params["mr_source_ref"], params["mr_destination_branch"] = title, status, srcRefStr, dstBranchStr
-
- s.renderTemplate(writer, "repo_contrib_one", params)
-}
diff --git a/forged/internal/unsorted/http_handle_repo_index.go b/forged/internal/unsorted/http_handle_repo_index.go
deleted file mode 100644
index dd46dfe..0000000
--- a/forged/internal/unsorted/http_handle_repo_index.go
+++ /dev/null
@@ -1,41 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "net/http"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/git2c"
- "go.lindenii.runxiyu.org/forge/forged/internal/render"
- "go.lindenii.runxiyu.org/forge/forged/internal/web"
-)
-
-// httpHandleRepoIndex provides the front page of a repo using git2d.
-func (s *Server) httpHandleRepoIndex(w http.ResponseWriter, req *http.Request, params map[string]any) {
- repoName := params["repo_name"].(string)
- groupPath := params["group_path"].([]string)
-
- _, repoPath, _, _, _, _, _ := s.getRepoInfo(req.Context(), groupPath, repoName, "") // TODO: Don't use getRepoInfo
-
- client, err := git2c.NewClient(s.config.Git.Socket)
- if err != nil {
- web.ErrorPage500(s.templates, w, params, err.Error())
- return
- }
- defer client.Close()
-
- commits, readme, err := client.CmdIndex(repoPath)
- if err != nil {
- web.ErrorPage500(s.templates, w, params, err.Error())
- return
- }
-
- params["commits"] = commits
- params["readme_filename"] = readme.Filename
- _, params["readme"] = render.Readme(readme.Content, readme.Filename)
-
- s.renderTemplate(w, "repo_index", params)
-
- // TODO: Caching
-}
diff --git a/forged/internal/unsorted/http_handle_repo_info.go b/forged/internal/unsorted/http_handle_repo_info.go
deleted file mode 100644
index e23b1d2..0000000
--- a/forged/internal/unsorted/http_handle_repo_info.go
+++ /dev/null
@@ -1,107 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "fmt"
- "io"
- "net/http"
- "os/exec"
-
- "github.com/jackc/pgx/v5/pgtype"
-)
-
-// httpHandleRepoInfo provides advertised refs of a repo for use in Git's Smart
-// HTTP protocol.
-//
-// TODO: Reject access from web browsers.
-func (s *Server) httpHandleRepoInfo(writer http.ResponseWriter, request *http.Request, params map[string]any) (err error) {
- groupPath := params["group_path"].([]string)
- repoName := params["repo_name"].(string)
- var repoPath string
-
- if err := s.database.QueryRow(request.Context(), `
- WITH RECURSIVE group_path_cte AS (
- -- Start: match the first name in the path where parent_group IS NULL
- SELECT
- id,
- parent_group,
- name,
- 1 AS depth
- FROM groups
- WHERE name = ($1::text[])[1]
- AND parent_group IS NULL
-
- UNION ALL
-
- -- Recurse: join next segment of the path
- SELECT
- g.id,
- g.parent_group,
- g.name,
- group_path_cte.depth + 1
- FROM groups g
- JOIN group_path_cte ON g.parent_group = group_path_cte.id
- WHERE g.name = ($1::text[])[group_path_cte.depth + 1]
- AND group_path_cte.depth + 1 <= cardinality($1::text[])
- )
- SELECT r.filesystem_path
- FROM group_path_cte c
- JOIN repos r ON r.group_id = c.id
- WHERE c.depth = cardinality($1::text[])
- AND r.name = $2
- `,
- pgtype.FlatArray[string](groupPath),
- repoName,
- ).Scan(&repoPath); err != nil {
- return err
- }
-
- writer.Header().Set("Content-Type", "application/x-git-upload-pack-advertisement")
- writer.WriteHeader(http.StatusOK)
-
- cmd := exec.Command("git", "upload-pack", "--stateless-rpc", "--advertise-refs", repoPath)
- stdout, err := cmd.StdoutPipe()
- if err != nil {
- return err
- }
- defer func() {
- _ = stdout.Close()
- }()
- cmd.Stderr = cmd.Stdout
-
- if err = cmd.Start(); err != nil {
- return err
- }
-
- if err = packLine(writer, "# service=git-upload-pack\n"); err != nil {
- return err
- }
-
- if err = packFlush(writer); err != nil {
- return
- }
-
- if _, err = io.Copy(writer, stdout); err != nil {
- return err
- }
-
- if err = cmd.Wait(); err != nil {
- return err
- }
-
- return nil
-}
-
-// Taken from https://github.com/icyphox/legit, MIT license.
-func packLine(w io.Writer, s string) error {
- _, err := fmt.Fprintf(w, "%04x%s", len(s)+4, s)
- return err
-}
-
-// Taken from https://github.com/icyphox/legit, MIT license.
-func packFlush(w io.Writer) error {
- _, err := fmt.Fprint(w, "0000")
- return err
-}
diff --git a/forged/internal/unsorted/http_handle_repo_log.go b/forged/internal/unsorted/http_handle_repo_log.go
deleted file mode 100644
index 5d90871..0000000
--- a/forged/internal/unsorted/http_handle_repo_log.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "net/http"
-
- "github.com/go-git/go-git/v5"
- "github.com/go-git/go-git/v5/plumbing"
- "go.lindenii.runxiyu.org/forge/forged/internal/web"
-)
-
-// httpHandleRepoLog provides a page with a complete Git log.
-//
-// TODO: This currently provides all commits in the branch. It should be
-// paginated and cached instead.
-func (s *Server) httpHandleRepoLog(writer http.ResponseWriter, req *http.Request, params map[string]any) {
- var repo *git.Repository
- var refHash plumbing.Hash
- var err error
-
- repo = params["repo"].(*git.Repository)
-
- if refHash, err = getRefHash(repo, params["ref_type"].(string), params["ref_name"].(string)); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting ref hash: "+err.Error())
- return
- }
-
- logOptions := git.LogOptions{From: refHash} //exhaustruct:ignore
- commitIter, err := repo.Log(&logOptions)
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error getting recent commits: "+err.Error())
- return
- }
- params["commits"], params["commits_err"] = commitIterSeqErr(req.Context(), commitIter)
-
- s.renderTemplate(writer, "repo_log", params)
-}
diff --git a/forged/internal/unsorted/http_handle_repo_raw.go b/forged/internal/unsorted/http_handle_repo_raw.go
deleted file mode 100644
index 1127284..0000000
--- a/forged/internal/unsorted/http_handle_repo_raw.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "fmt"
- "html/template"
- "net/http"
- "strings"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/git2c"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
- "go.lindenii.runxiyu.org/forge/forged/internal/web"
-)
-
-// httpHandleRepoRaw serves raw files, or directory listings that point to raw
-// files.
-func (s *Server) httpHandleRepoRaw(writer http.ResponseWriter, request *http.Request, params map[string]any) {
- repoName := params["repo_name"].(string)
- groupPath := params["group_path"].([]string)
- rawPathSpec := params["rest"].(string)
- pathSpec := strings.TrimSuffix(rawPathSpec, "/")
- params["path_spec"] = pathSpec
-
- _, repoPath, _, _, _, _, _ := s.getRepoInfo(request.Context(), groupPath, repoName, "")
-
- client, err := git2c.NewClient(s.config.Git.Socket)
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, err.Error())
- return
- }
- defer client.Close()
-
- files, content, err := client.CmdTreeRaw(repoPath, pathSpec)
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, err.Error())
- return
- }
-
- switch {
- case files != nil:
- params["files"] = files
- params["readme_filename"] = "README.md"
- params["readme"] = template.HTML("<p>README rendering here is WIP again</p>") // TODO
- s.renderTemplate(writer, "repo_raw_dir", params)
- case content != "":
- if misc.RedirectNoDir(writer, request) {
- return
- }
- writer.Header().Set("Content-Type", "application/octet-stream")
- fmt.Fprint(writer, content)
- default:
- web.ErrorPage500(s.templates, writer, params, "Unknown error fetching repo raw data")
- }
-}
diff --git a/forged/internal/unsorted/http_handle_repo_tree.go b/forged/internal/unsorted/http_handle_repo_tree.go
deleted file mode 100644
index 4799ccb..0000000
--- a/forged/internal/unsorted/http_handle_repo_tree.go
+++ /dev/null
@@ -1,55 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "html/template"
- "net/http"
- "strings"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/git2c"
- "go.lindenii.runxiyu.org/forge/forged/internal/render"
- "go.lindenii.runxiyu.org/forge/forged/internal/web"
-)
-
-// httpHandleRepoTree provides a friendly, syntax-highlighted view of
-// individual files, and provides directory views that link to these files.
-//
-// TODO: Do not highlight files that are too large.
-func (s *Server) httpHandleRepoTree(writer http.ResponseWriter, request *http.Request, params map[string]any) {
- repoName := params["repo_name"].(string)
- groupPath := params["group_path"].([]string)
- rawPathSpec := params["rest"].(string)
- pathSpec := strings.TrimSuffix(rawPathSpec, "/")
- params["path_spec"] = pathSpec
-
- _, repoPath, _, _, _, _, _ := s.getRepoInfo(request.Context(), groupPath, repoName, "")
-
- client, err := git2c.NewClient(s.config.Git.Socket)
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, err.Error())
- return
- }
- defer client.Close()
-
- files, content, err := client.CmdTreeRaw(repoPath, pathSpec)
- if err != nil {
- web.ErrorPage500(s.templates, writer, params, err.Error())
- return
- }
-
- switch {
- case files != nil:
- params["files"] = files
- params["readme_filename"] = "README.md"
- params["readme"] = template.HTML("<p>README rendering here is WIP again</p>") // TODO
- s.renderTemplate(writer, "repo_tree_dir", params)
- case content != "":
- rendered := render.Highlight(pathSpec, content)
- params["file_contents"] = rendered
- s.renderTemplate(writer, "repo_tree_file", params)
- default:
- web.ErrorPage500(s.templates, writer, params, "Unknown object type, something is seriously wrong")
- }
-}
diff --git a/forged/internal/unsorted/http_handle_repo_upload_pack.go b/forged/internal/unsorted/http_handle_repo_upload_pack.go
deleted file mode 100644
index 914c9cc..0000000
--- a/forged/internal/unsorted/http_handle_repo_upload_pack.go
+++ /dev/null
@@ -1,120 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "bytes"
- "compress/gzip"
- "compress/zlib"
- "fmt"
- "io"
- "log"
- "net/http"
- "os"
- "os/exec"
- "strings"
-
- "github.com/jackc/pgx/v5/pgtype"
-)
-
-// httpHandleUploadPack handles incoming Git fetch/pull/clones over the Smart
-// HTTP protocol.
-func (s *Server) httpHandleUploadPack(writer http.ResponseWriter, request *http.Request, params map[string]any) (err error) {
- if ct := request.Header.Get("Content-Type"); !strings.HasPrefix(ct, "application/x-git-upload-pack-request") {
- http.Error(writer, "bad content-type", http.StatusUnsupportedMediaType)
- return nil
- }
-
- decoded, err := decodeBody(request)
- if err != nil {
- http.Error(writer, "cannot decode request body", http.StatusBadRequest)
- return err
- }
- defer decoded.Close()
-
- var groupPath []string
- var repoName string
- var repoPath string
- var cmd *exec.Cmd
-
- groupPath, repoName = params["group_path"].([]string), params["repo_name"].(string)
-
- if err := s.database.QueryRow(request.Context(), `
- WITH RECURSIVE group_path_cte AS (
- -- Start: match the first name in the path where parent_group IS NULL
- SELECT
- id,
- parent_group,
- name,
- 1 AS depth
- FROM groups
- WHERE name = ($1::text[])[1]
- AND parent_group IS NULL
-
- UNION ALL
-
- -- Recurse: join the next segment of the path
- SELECT
- g.id,
- g.parent_group,
- g.name,
- group_path_cte.depth + 1
- FROM groups g
- JOIN group_path_cte ON g.parent_group = group_path_cte.id
- WHERE g.name = ($1::text[])[group_path_cte.depth + 1]
- AND group_path_cte.depth + 1 <= cardinality($1::text[])
- )
- SELECT r.filesystem_path
- FROM group_path_cte c
- JOIN repos r ON r.group_id = c.id
- WHERE c.depth = cardinality($1::text[])
- AND r.name = $2
- `,
- pgtype.FlatArray[string](groupPath),
- repoName,
- ).Scan(&repoPath); err != nil {
- return err
- }
-
- writer.Header().Set("Content-Type", "application/x-git-upload-pack-result")
- // writer.Header().Set("Connection", "Keep-Alive")
- // writer.Header().Set("Transfer-Encoding", "chunked")
-
- cmd = exec.CommandContext(request.Context(), "git", "upload-pack", "--stateless-rpc", repoPath)
- cmd.Env = append(os.Environ(), "LINDENII_FORGE_HOOKS_SOCKET_PATH="+s.config.Hooks.Socket)
-
- var stderrBuf bytes.Buffer
- cmd.Stderr = &stderrBuf
-
- cmd.Stdout = writer
- cmd.Stdin = decoded
-
- if gp := request.Header.Get("Git-Protocol"); gp != "" {
- cmd.Env = append(cmd.Env, "GIT_PROTOCOL="+gp)
- }
-
- if err = cmd.Run(); err != nil {
- log.Println(stderrBuf.String())
- return err
- }
-
- return nil
-}
-
-func decodeBody(r *http.Request) (io.ReadCloser, error) {
- switch ce := strings.ToLower(strings.TrimSpace(r.Header.Get("Content-Encoding"))); ce {
- case "", "identity":
- return r.Body, nil
- case "gzip":
- zr, err := gzip.NewReader(r.Body)
- if err != nil { return nil, err }
- return zr, nil
- case "deflate":
- zr, err := zlib.NewReader(r.Body)
- if err != nil { return nil, err }
- return zr, nil
- default:
- return nil, fmt.Errorf("unsupported Content-Encoding: %q", ce)
- }
-}
diff --git a/forged/internal/unsorted/http_handle_users.go b/forged/internal/unsorted/http_handle_users.go
deleted file mode 100644
index b41ee44..0000000
--- a/forged/internal/unsorted/http_handle_users.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "net/http"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/web"
-)
-
-// httpHandleUsers is a useless stub.
-func (s *Server) httpHandleUsers(writer http.ResponseWriter, _ *http.Request, params map[string]any) {
- web.ErrorPage501(s.templates, writer, params)
-}
diff --git a/forged/internal/unsorted/http_server.go b/forged/internal/unsorted/http_server.go
deleted file mode 100644
index f6a1794..0000000
--- a/forged/internal/unsorted/http_server.go
+++ /dev/null
@@ -1,276 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "errors"
- "log/slog"
- "net/http"
- "net/url"
- "strconv"
- "strings"
-
- "github.com/jackc/pgx/v5"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
- "go.lindenii.runxiyu.org/forge/forged/internal/web"
-)
-
-// ServeHTTP handles all incoming HTTP requests and routes them to the correct
-// location.
-//
-// TODO: This function is way too large.
-func (s *Server) ServeHTTP(writer http.ResponseWriter, request *http.Request) {
- var remoteAddr string
- if s.config.HTTP.ReverseProxy {
- remoteAddrs, ok := request.Header["X-Forwarded-For"]
- if ok && len(remoteAddrs) == 1 {
- remoteAddr = remoteAddrs[0]
- } else {
- remoteAddr = request.RemoteAddr
- }
- } else {
- remoteAddr = request.RemoteAddr
- }
- slog.Info("incoming http", "addr", remoteAddr, "method", request.Method, "uri", request.RequestURI)
-
- var segments []string
- var err error
- var sepIndex int
- params := make(map[string]any)
-
- if segments, _, err = misc.ParseReqURI(request.RequestURI); err != nil {
- web.ErrorPage400(s.templates, writer, params, "Error parsing request URI: "+err.Error())
- return
- }
- dirMode := false
- if segments[len(segments)-1] == "" {
- dirMode = true
- segments = segments[:len(segments)-1]
- }
-
- params["url_segments"] = segments
- params["dir_mode"] = dirMode
- params["global"] = s.globalData
- var userID int // 0 for none
- userID, params["username"], err = s.getUserFromRequest(request)
- params["user_id"] = userID
- if err != nil && !errors.Is(err, http.ErrNoCookie) && !errors.Is(err, pgx.ErrNoRows) {
- web.ErrorPage500(s.templates, writer, params, "Error getting user info from request: "+err.Error())
- return
- }
-
- if userID == 0 {
- params["user_id_string"] = ""
- } else {
- params["user_id_string"] = strconv.Itoa(userID)
- }
-
- for _, v := range segments {
- if strings.Contains(v, ":") {
- web.ErrorPage400Colon(s.templates, writer, params)
- return
- }
- }
-
- if len(segments) == 0 {
- s.httpHandleIndex(writer, request, params)
- return
- }
-
- if segments[0] == "-" {
- if len(segments) < 2 {
- web.ErrorPage404(s.templates, writer, params)
- return
- } else if len(segments) == 2 && misc.RedirectDir(writer, request) {
- return
- }
-
- switch segments[1] {
- case "static":
- s.staticHandler.ServeHTTP(writer, request)
- return
- case "source":
- s.sourceHandler.ServeHTTP(writer, request)
- return
- }
- }
-
- if segments[0] == "-" {
- switch segments[1] {
- case "login":
- s.httpHandleLogin(writer, request, params)
- return
- case "users":
- s.httpHandleUsers(writer, request, params)
- return
- default:
- web.ErrorPage404(s.templates, writer, params)
- return
- }
- }
-
- sepIndex = -1
- for i, part := range segments {
- if part == "-" {
- sepIndex = i
- break
- }
- }
-
- params["separator_index"] = sepIndex
-
- var groupPath []string
- var moduleType string
- var moduleName string
-
- if sepIndex > 0 {
- groupPath = segments[:sepIndex]
- } else {
- groupPath = segments
- }
- params["group_path"] = groupPath
-
- switch {
- case sepIndex == -1:
- if misc.RedirectDir(writer, request) {
- return
- }
- s.httpHandleGroupIndex(writer, request, params)
- case len(segments) == sepIndex+1:
- web.ErrorPage404(s.templates, writer, params)
- return
- case len(segments) == sepIndex+2:
- web.ErrorPage404(s.templates, writer, params)
- return
- default:
- moduleType = segments[sepIndex+1]
- moduleName = segments[sepIndex+2]
- switch moduleType {
- case "repos":
- params["repo_name"] = moduleName
-
- if len(segments) > sepIndex+3 {
- switch segments[sepIndex+3] {
- case "info":
- if err = s.httpHandleRepoInfo(writer, request, params); err != nil {
- web.ErrorPage500(s.templates, writer, params, err.Error())
- }
- return
- case "git-upload-pack":
- if err = s.httpHandleUploadPack(writer, request, params); err != nil {
- web.ErrorPage500(s.templates, writer, params, err.Error())
- }
- return
- }
- }
-
- if params["ref_type"], params["ref_name"], err = misc.GetParamRefTypeName(request); err != nil {
- if errors.Is(err, misc.ErrNoRefSpec) {
- params["ref_type"] = ""
- } else {
- web.ErrorPage400(s.templates, writer, params, "Error querying ref type: "+err.Error())
- return
- }
- }
-
- if params["repo"], params["repo_description"], params["repo_id"], _, err = s.openRepo(request.Context(), groupPath, moduleName); err != nil {
- web.ErrorPage500(s.templates, writer, params, "Error opening repo: "+err.Error())
- return
- }
-
- repoURLRoot := "/"
- for _, part := range segments[:sepIndex+3] {
- repoURLRoot = repoURLRoot + url.PathEscape(part) + "/"
- }
- params["repo_url_root"] = repoURLRoot
- params["repo_patch_mailing_list"] = repoURLRoot[1:len(repoURLRoot)-1] + "@" + s.config.LMTP.Domain
- params["http_clone_url"] = s.genHTTPRemoteURL(groupPath, moduleName)
- params["ssh_clone_url"] = s.genSSHRemoteURL(groupPath, moduleName)
-
- if len(segments) == sepIndex+3 {
- if misc.RedirectDir(writer, request) {
- return
- }
- s.httpHandleRepoIndex(writer, request, params)
- return
- }
-
- repoFeature := segments[sepIndex+3]
- switch repoFeature {
- case "tree":
- if misc.AnyContain(segments[sepIndex+4:], "/") {
- web.ErrorPage400(s.templates, writer, params, "Repo tree paths may not contain slashes in any segments")
- return
- }
- if dirMode {
- params["rest"] = strings.Join(segments[sepIndex+4:], "/") + "/"
- } else {
- params["rest"] = strings.Join(segments[sepIndex+4:], "/")
- }
- if len(segments) < sepIndex+5 && misc.RedirectDir(writer, request) {
- return
- }
- s.httpHandleRepoTree(writer, request, params)
- case "branches":
- if misc.RedirectDir(writer, request) {
- return
- }
- s.httpHandleRepoBranches(writer, request, params)
- return
- case "raw":
- if misc.AnyContain(segments[sepIndex+4:], "/") {
- web.ErrorPage400(s.templates, writer, params, "Repo tree paths may not contain slashes in any segments")
- return
- }
- if dirMode {
- params["rest"] = strings.Join(segments[sepIndex+4:], "/") + "/"
- } else {
- params["rest"] = strings.Join(segments[sepIndex+4:], "/")
- }
- if len(segments) < sepIndex+5 && misc.RedirectDir(writer, request) {
- return
- }
- s.httpHandleRepoRaw(writer, request, params)
- case "log":
- if len(segments) > sepIndex+4 {
- web.ErrorPage400(s.templates, writer, params, "Too many parameters")
- return
- }
- if misc.RedirectDir(writer, request) {
- return
- }
- s.httpHandleRepoLog(writer, request, params)
- case "commit":
- if len(segments) != sepIndex+5 {
- web.ErrorPage400(s.templates, writer, params, "Incorrect number of parameters")
- return
- }
- if misc.RedirectNoDir(writer, request) {
- return
- }
- params["commit_id"] = segments[sepIndex+4]
- s.httpHandleRepoCommit(writer, request, params)
- case "contrib":
- if misc.RedirectDir(writer, request) {
- return
- }
- switch len(segments) {
- case sepIndex + 4:
- s.httpHandleRepoContribIndex(writer, request, params)
- case sepIndex + 5:
- params["mr_id"] = segments[sepIndex+4]
- s.httpHandleRepoContribOne(writer, request, params)
- default:
- web.ErrorPage400(s.templates, writer, params, "Too many parameters")
- }
- default:
- web.ErrorPage404(s.templates, writer, params)
- return
- }
- default:
- web.ErrorPage404(s.templates, writer, params)
- return
- }
- }
-}
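
ServeHTTP's routing hinges on the first "-" segment, which separates the group path from the module type, the module name, and any repo feature after them. A hypothetical URL walked through the same segmentation logic (the path itself is made up):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical request path; the group path is everything before "-".
	uri := "/lindenii/forge/-/repos/server/tree/main.go"
	segments := strings.Split(strings.Trim(uri, "/"), "/")

	sepIndex := -1
	for i, part := range segments {
		if part == "-" {
			sepIndex = i
			break
		}
	}

	fmt.Println(segments[:sepIndex])                         // [lindenii forge] -> group_path
	fmt.Println(segments[sepIndex+1], segments[sepIndex+2])  // repos server     -> module type and name
	fmt.Println(segments[sepIndex+3])                        // tree             -> repo feature
	fmt.Println(strings.Join(segments[sepIndex+4:], "/"))    // main.go          -> rest
}
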
diff --git a/forged/internal/unsorted/http_template.go b/forged/internal/unsorted/http_template.go
deleted file mode 100644
index db44e4c..0000000
--- a/forged/internal/unsorted/http_template.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "log/slog"
- "net/http"
-)
-
-// renderTemplate abstracts out the annoyances of reporting template rendering
-// errors.
-func (s *Server) renderTemplate(w http.ResponseWriter, templateName string, params map[string]any) {
- if err := s.templates.ExecuteTemplate(w, templateName, params); err != nil {
- http.Error(w, "error rendering template: "+err.Error(), http.StatusInternalServerError)
- slog.Error("error rendering template", "error", err.Error())
- }
-}
diff --git a/forged/internal/unsorted/lmtp_handle_patch.go b/forged/internal/unsorted/lmtp_handle_patch.go
deleted file mode 100644
index b258bfc..0000000
--- a/forged/internal/unsorted/lmtp_handle_patch.go
+++ /dev/null
@@ -1,133 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "bytes"
- "crypto/rand"
- "encoding/hex"
- "fmt"
- "io"
- "os"
- "os/exec"
- "strings"
- "time"
-
- "github.com/bluekeyes/go-gitdiff/gitdiff"
- "github.com/go-git/go-git/v5"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-func (s *Server) lmtpHandlePatch(session *lmtpSession, groupPath []string, repoName string, mbox io.Reader) (err error) {
- var diffFiles []*gitdiff.File
- var preamble string
- if diffFiles, preamble, err = gitdiff.Parse(mbox); err != nil {
- return fmt.Errorf("failed to parse patch: %w", err)
- }
-
- var header *gitdiff.PatchHeader
- if header, err = gitdiff.ParsePatchHeader(preamble); err != nil {
- return fmt.Errorf("failed to parse patch headers: %w", err)
- }
-
- var repo *git.Repository
- var fsPath string
- repo, _, _, fsPath, err = s.openRepo(session.ctx, groupPath, repoName)
- if err != nil {
- return fmt.Errorf("failed to open repo: %w", err)
- }
-
- headRef, err := repo.Head()
- if err != nil {
- return fmt.Errorf("failed to get repo head hash: %w", err)
- }
- headCommit, err := repo.CommitObject(headRef.Hash())
- if err != nil {
- return fmt.Errorf("failed to get repo head commit: %w", err)
- }
- headTree, err := headCommit.Tree()
- if err != nil {
- return fmt.Errorf("failed to get repo head tree: %w", err)
- }
-
- headTreeHash := headTree.Hash.String()
-
- blobUpdates := make(map[string][]byte)
- for _, diffFile := range diffFiles {
- sourceFile, err := headTree.File(diffFile.OldName)
- if err != nil {
- return fmt.Errorf("failed to get file at old name %#v: %w", diffFile.OldName, err)
- }
- sourceString, err := sourceFile.Contents()
- if err != nil {
- return fmt.Errorf("failed to get contents: %w", err)
- }
-
- sourceBuf := bytes.NewReader(misc.StringToBytes(sourceString))
- var patchedBuf bytes.Buffer
- if err := gitdiff.Apply(&patchedBuf, sourceBuf, diffFile); err != nil {
- return fmt.Errorf("failed to apply patch: %w", err)
- }
-
- var hashBuf bytes.Buffer
-
- // It's really difficult to do this via go-git so we're just
- // going to use upstream git for now.
- // TODO
- cmd := exec.CommandContext(session.ctx, "git", "hash-object", "-w", "-t", "blob", "--stdin")
- cmd.Env = append(os.Environ(), "GIT_DIR="+fsPath)
- cmd.Stdout = &hashBuf
- cmd.Stdin = &patchedBuf
- if err := cmd.Run(); err != nil {
- return fmt.Errorf("failed to run git hash-object: %w", err)
- }
-
- newHashStr := strings.TrimSpace(hashBuf.String())
- newHash, err := hex.DecodeString(newHashStr)
- if err != nil {
- return fmt.Errorf("failed to decode hex string from git: %w", err)
- }
-
- blobUpdates[diffFile.NewName] = newHash
- if diffFile.NewName != diffFile.OldName {
- blobUpdates[diffFile.OldName] = nil // Mark for deletion.
- }
- }
-
- newTreeSha, err := buildTreeRecursive(session.ctx, fsPath, headTreeHash, blobUpdates)
- if err != nil {
- return fmt.Errorf("failed to recursively build a tree: %w", err)
- }
-
- commitMsg := header.Title
- if header.Body != "" {
- commitMsg += "\n\n" + header.Body
- }
-
- env := append(os.Environ(),
- "GIT_DIR="+fsPath,
- "GIT_AUTHOR_NAME="+header.Author.Name,
- "GIT_AUTHOR_EMAIL="+header.Author.Email,
- "GIT_AUTHOR_DATE="+header.AuthorDate.Format(time.RFC3339),
- )
- commitCmd := exec.CommandContext(session.ctx, "git", "commit-tree", newTreeSha, "-p", headCommit.Hash.String(), "-m", commitMsg)
- commitCmd.Env = env
-
- var commitOut bytes.Buffer
- commitCmd.Stdout = &commitOut
- if err := commitCmd.Run(); err != nil {
- return fmt.Errorf("failed to commit tree: %w", err)
- }
- newCommitSha := strings.TrimSpace(commitOut.String())
-
- newBranchName := rand.Text()
-
- refCmd := exec.CommandContext(session.ctx, "git", "update-ref", "refs/heads/contrib/"+newBranchName, newCommitSha) //#nosec G204
- refCmd.Env = append(os.Environ(), "GIT_DIR="+fsPath)
- if err := refCmd.Run(); err != nil {
- return fmt.Errorf("failed to update ref: %w", err)
- }
-
- return nil
-}
diff --git a/forged/internal/unsorted/lmtp_server.go b/forged/internal/unsorted/lmtp_server.go
deleted file mode 100644
index 1e94894..0000000
--- a/forged/internal/unsorted/lmtp_server.go
+++ /dev/null
@@ -1,204 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-// SPDX-FileCopyrightText: Copyright (c) 2024 Robin Jarry <robin@jarry.cc>
-
-package unsorted
-
-import (
- "bytes"
- "context"
- "errors"
- "fmt"
- "io"
- "log/slog"
- "net"
- "strings"
- "time"
-
- "github.com/emersion/go-message"
- "github.com/emersion/go-smtp"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-type lmtpHandler struct{}
-
-type lmtpSession struct {
- from string
- to []string
- ctx context.Context
- cancel context.CancelFunc
- s Server
-}
-
-func (session *lmtpSession) Reset() {
- session.from = ""
- session.to = nil
-}
-
-func (session *lmtpSession) Logout() error {
- session.cancel()
- return nil
-}
-
-func (session *lmtpSession) AuthPlain(_, _ string) error {
- return nil
-}
-
-func (session *lmtpSession) Mail(from string, _ *smtp.MailOptions) error {
- session.from = from
- return nil
-}
-
-func (session *lmtpSession) Rcpt(to string, _ *smtp.RcptOptions) error {
- session.to = append(session.to, to)
- return nil
-}
-
-func (*lmtpHandler) NewSession(_ *smtp.Conn) (smtp.Session, error) {
- ctx, cancel := context.WithCancel(context.Background())
- session := &lmtpSession{
- ctx: ctx,
- cancel: cancel,
- }
- return session, nil
-}
-
-func (s *Server) serveLMTP(listener net.Listener) error {
- smtpServer := smtp.NewServer(&lmtpHandler{})
- smtpServer.LMTP = true
- smtpServer.Domain = s.config.LMTP.Domain
- smtpServer.Addr = s.config.LMTP.Socket
- smtpServer.WriteTimeout = time.Duration(s.config.LMTP.WriteTimeout) * time.Second
- smtpServer.ReadTimeout = time.Duration(s.config.LMTP.ReadTimeout) * time.Second
- smtpServer.EnableSMTPUTF8 = true
- return smtpServer.Serve(listener)
-}
-
-func (session *lmtpSession) Data(r io.Reader) error {
- var (
- email *message.Entity
- from string
- to []string
- err error
- buf bytes.Buffer
- data []byte
- n int64
- )
-
- n, err = io.CopyN(&buf, r, session.s.config.LMTP.MaxSize)
- switch {
- case n == session.s.config.LMTP.MaxSize:
- err = errors.New("Message too big.")
- // drain whatever is left in the pipe
- _, _ = io.Copy(io.Discard, r)
- goto end
- case errors.Is(err, io.EOF):
- // message was smaller than max size
- break
- case err != nil:
- goto end
- }
-
- data = buf.Bytes()
-
- email, err = message.Read(bytes.NewReader(data))
- if err != nil && !message.IsUnknownCharset(err) {
- goto end
- }
-
- switch strings.ToLower(email.Header.Get("Auto-Submitted")) {
- case "auto-generated", "auto-replied":
- // Disregard automatic emails like OOO replies
- slog.Info("ignoring automatic message",
- "from", session.from,
- "to", strings.Join(session.to, ","),
- "message-id", email.Header.Get("Message-Id"),
- "subject", email.Header.Get("Subject"),
- )
- goto end
- }
-
- slog.Info("message received",
- "from", session.from,
- "to", strings.Join(session.to, ","),
- "message-id", email.Header.Get("Message-Id"),
- "subject", email.Header.Get("Subject"),
- )
-
- // Make local copies of the values beforehand to ensure the references will
- // still be valid when the task is run.
- from = session.from
- to = session.to
-
- _ = from
-
- for _, to := range to {
- if !strings.HasSuffix(to, "@"+session.s.config.LMTP.Domain) {
- continue
- }
- localPart := to[:len(to)-len("@"+session.s.config.LMTP.Domain)]
- var segments []string
- segments, err = misc.PathToSegments(localPart)
- if err != nil {
- // TODO: Should the entire email fail or should we just
- // notify them out of band?
- err = fmt.Errorf("cannot parse path: %w", err)
- goto end
- }
- sepIndex := -1
- for i, part := range segments {
- if part == "-" {
- sepIndex = i
- break
- }
- }
- if segments[len(segments)-1] == "" {
- segments = segments[:len(segments)-1] // We don't care about dir or not.
- }
- if sepIndex == -1 || len(segments) <= sepIndex+2 {
- err = errors.New("illegal path")
- goto end
- }
-
- mbox := bytes.Buffer{}
- if _, err = fmt.Fprint(&mbox, "From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001\r\n"); err != nil {
- slog.Error("error handling patch... malloc???", "error", err)
- goto end
- }
- data = bytes.ReplaceAll(data, []byte("\r\n"), []byte("\n"))
- if _, err = mbox.Write(data); err != nil {
- slog.Error("error handling patch... malloc???", "error", err)
- goto end
- }
- // TODO: Is mbox's From escaping necessary here?
-
- groupPath := segments[:sepIndex]
- moduleType := segments[sepIndex+1]
- moduleName := segments[sepIndex+2]
- switch moduleType {
- case "repos":
- err = session.s.lmtpHandlePatch(session, groupPath, moduleName, &mbox)
- if err != nil {
- slog.Error("error handling patch", "error", err)
- goto end
- }
- default:
- err = errors.New("Emailing any endpoint other than repositories, is not supported yet.") // TODO
- goto end
- }
- }
-
-end:
- session.to = nil
- session.from = ""
- switch err {
- case nil:
- return nil
- default:
- return &smtp.SMTPError{
- Code: 550,
- Message: "Permanent failure: " + err.Error(),
- EnhancedCode: [3]int{5, 7, 1},
- }
- }
-}
diff --git a/forged/internal/unsorted/remote_url.go b/forged/internal/unsorted/remote_url.go
deleted file mode 100644
index f4d4c58..0000000
--- a/forged/internal/unsorted/remote_url.go
+++ /dev/null
@@ -1,25 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "net/url"
- "strings"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-// We don't use path.Join because it collapses multiple slashes into one.
-
-// genSSHRemoteURL generates SSH remote URLs from a given group path and repo
-// name.
-func (s *Server) genSSHRemoteURL(groupPath []string, repoName string) string {
- return strings.TrimSuffix(s.config.SSH.Root, "/") + "/" + misc.SegmentsToURL(groupPath) + "/-/repos/" + url.PathEscape(repoName)
-}
-
-// genHTTPRemoteURL generates HTTP remote URLs from a given group path and repo
-// name.
-func (s *Server) genHTTPRemoteURL(groupPath []string, repoName string) string {
- return strings.TrimSuffix(s.config.HTTP.Root, "/") + "/" + misc.SegmentsToURL(groupPath) + "/-/repos/" + url.PathEscape(repoName)
-}
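
The note above about path.Join is the key detail: path.Join runs path.Clean on its result, which collapses repeated slashes (including the "//" in the URL scheme) and drops empty segments, so these helpers concatenate by hand instead. A small illustration with a hypothetical root URL:

package main

import (
	"fmt"
	"net/url"
	"path"
	"strings"
)

func main() {
	// path.Join cleans the result, mangling the scheme's double slash.
	fmt.Println(path.Join("https://forge.example", "grp", "-", "repos", "x"))
	// https:/forge.example/grp/-/repos/x

	// Manual concatenation with PathEscape keeps the URL intact.
	root := strings.TrimSuffix("https://forge.example/", "/")
	fmt.Println(root + "/" + url.PathEscape("grp") + "/-/repos/" + url.PathEscape("x"))
	// https://forge.example/grp/-/repos/x
}
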
diff --git a/forged/internal/unsorted/resources.go b/forged/internal/unsorted/resources.go
deleted file mode 100644
index 692b454..0000000
--- a/forged/internal/unsorted/resources.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "html/template"
- "io/fs"
-
- "github.com/tdewolff/minify/v2"
- "github.com/tdewolff/minify/v2/html"
- "go.lindenii.runxiyu.org/forge/forged/internal/embed"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-// loadTemplates minifies and loads HTML templates.
-func (s *Server) loadTemplates() (err error) {
- minifier := minify.New()
- minifierOptions := html.Minifier{
- TemplateDelims: [2]string{"{{", "}}"},
- KeepDefaultAttrVals: true,
- } //exhaustruct:ignore
- minifier.Add("text/html", &minifierOptions)
-
- s.templates = template.New("templates").Funcs(template.FuncMap{
- "first_line": misc.FirstLine,
- "path_escape": misc.PathEscape,
- "query_escape": misc.QueryEscape,
- "dereference_error": misc.DereferenceOrZero[error],
- "minus": misc.Minus,
- })
-
- err = fs.WalkDir(embed.Resources, "forged/templates", func(path string, d fs.DirEntry, err error) error {
- if err != nil {
- return err
- }
- if !d.IsDir() {
- content, err := fs.ReadFile(embed.Resources, path)
- if err != nil {
- return err
- }
-
- minified, err := minifier.Bytes("text/html", content)
- if err != nil {
- return err
- }
-
- _, err = s.templates.Parse(misc.BytesToString(minified))
- if err != nil {
- return err
- }
- }
- return nil
- })
- return err
-}
diff --git a/forged/internal/unsorted/server.go b/forged/internal/unsorted/server.go
deleted file mode 100644
index 84379b0..0000000
--- a/forged/internal/unsorted/server.go
+++ /dev/null
@@ -1,236 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "errors"
- "html/template"
- "io/fs"
- "log"
- "log/slog"
- "net"
- "net/http"
- _ "net/http/pprof"
- "os"
- "os/exec"
- "path/filepath"
- "syscall"
- "time"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/cmap"
- "go.lindenii.runxiyu.org/forge/forged/internal/database"
- "go.lindenii.runxiyu.org/forge/forged/internal/embed"
- "go.lindenii.runxiyu.org/forge/forged/internal/irc"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
- goSSH "golang.org/x/crypto/ssh"
-)
-
-type Server struct {
- config Config
-
- database database.Database
-
- sourceHandler http.Handler
- staticHandler http.Handler
-
- // globalData is passed as "global" when rendering HTML templates.
- globalData map[string]any
-
- serverPubkeyString string
- serverPubkeyFP string
- serverPubkey goSSH.PublicKey
-
- // packPasses contains hook cookies mapped to their packPass.
- packPasses cmap.Map[string, packPass]
-
- templates *template.Template
-
- ircBot *irc.Bot
-
- ready bool
-}
-
-func NewServer(configPath string) (*Server, error) {
- s := &Server{
- globalData: make(map[string]any),
- } //exhaustruct:ignore
-
- s.sourceHandler = http.StripPrefix(
- "/-/source/",
- http.FileServer(http.FS(embed.Source)),
- )
- staticFS, err := fs.Sub(embed.Resources, "forged/static")
- if err != nil {
- return s, err
- }
- s.staticHandler = http.StripPrefix("/-/static/", http.FileServer(http.FS(staticFS)))
- s.globalData = map[string]any{
- "server_public_key_string": &s.serverPubkeyString,
- "server_public_key_fingerprint": &s.serverPubkeyFP,
- "forge_version": version,
- // Some other ones are populated after config parsing
- }
-
- if err := s.loadConfig(configPath); err != nil {
- return s, err
- }
-
- misc.NoneOrPanic(s.loadTemplates())
- misc.NoneOrPanic(misc.DeployBinary(misc.FirstOrPanic(embed.Resources.Open("git2d/git2d")), s.config.Git.DaemonPath))
- misc.NoneOrPanic(misc.DeployBinary(misc.FirstOrPanic(embed.Resources.Open("hookc/hookc")), filepath.Join(s.config.Hooks.Execs, "pre-receive")))
- misc.NoneOrPanic(os.Chmod(filepath.Join(s.config.Hooks.Execs, "pre-receive"), 0o755))
-
- s.ready = true
-
- return s, nil
-}
-
-func (s *Server) Run() error {
- if !s.ready {
- return errors.New("not ready")
- }
-
- // Launch Git2D
- go func() {
- cmd := exec.Command(s.config.Git.DaemonPath, s.config.Git.Socket) //#nosec G204
- cmd.Stderr = log.Writer()
- cmd.Stdout = log.Writer()
- if err := cmd.Run(); err != nil {
- panic(err)
- }
- }()
-
- // UNIX socket listener for hooks
- {
- hooksListener, err := net.Listen("unix", s.config.Hooks.Socket)
- if errors.Is(err, syscall.EADDRINUSE) {
- slog.Warn("removing existing socket", "path", s.config.Hooks.Socket)
- if err = syscall.Unlink(s.config.Hooks.Socket); err != nil {
- slog.Error("removing existing socket", "path", s.config.Hooks.Socket, "error", err)
- os.Exit(1)
- }
- if hooksListener, err = net.Listen("unix", s.config.Hooks.Socket); err != nil {
- slog.Error("listening hooks", "error", err)
- os.Exit(1)
- }
- } else if err != nil {
- slog.Error("listening hooks", "error", err)
- os.Exit(1)
- }
- slog.Info("listening hooks on unix", "path", s.config.Hooks.Socket)
- go func() {
- if err = s.serveGitHooks(hooksListener); err != nil {
- slog.Error("serving hooks", "error", err)
- os.Exit(1)
- }
- }()
- }
-
- // UNIX socket listener for LMTP
- {
- lmtpListener, err := net.Listen("unix", s.config.LMTP.Socket)
- if errors.Is(err, syscall.EADDRINUSE) {
- slog.Warn("removing existing socket", "path", s.config.LMTP.Socket)
- if err = syscall.Unlink(s.config.LMTP.Socket); err != nil {
- slog.Error("removing existing socket", "path", s.config.LMTP.Socket, "error", err)
- os.Exit(1)
- }
- if lmtpListener, err = net.Listen("unix", s.config.LMTP.Socket); err != nil {
- slog.Error("listening LMTP", "error", err)
- os.Exit(1)
- }
- } else if err != nil {
- slog.Error("listening LMTP", "error", err)
- os.Exit(1)
- }
- slog.Info("listening LMTP on unix", "path", s.config.LMTP.Socket)
- go func() {
- if err = s.serveLMTP(lmtpListener); err != nil {
- slog.Error("serving LMTP", "error", err)
- os.Exit(1)
- }
- }()
- }
-
- // SSH listener
- {
- sshListener, err := net.Listen(s.config.SSH.Net, s.config.SSH.Addr)
- if errors.Is(err, syscall.EADDRINUSE) && s.config.SSH.Net == "unix" {
- slog.Warn("removing existing socket", "path", s.config.SSH.Addr)
- if err = syscall.Unlink(s.config.SSH.Addr); err != nil {
- slog.Error("removing existing socket", "path", s.config.SSH.Addr, "error", err)
- os.Exit(1)
- }
- if sshListener, err = net.Listen(s.config.SSH.Net, s.config.SSH.Addr); err != nil {
- slog.Error("listening SSH", "error", err)
- os.Exit(1)
- }
- } else if err != nil {
- slog.Error("listening SSH", "error", err)
- os.Exit(1)
- }
- slog.Info("listening SSH on", "net", s.config.SSH.Net, "addr", s.config.SSH.Addr)
- go func() {
- if err = s.serveSSH(sshListener); err != nil {
- slog.Error("serving SSH", "error", err)
- os.Exit(1)
- }
- }()
- }
-
- // HTTP listener
- {
- httpListener, err := net.Listen(s.config.HTTP.Net, s.config.HTTP.Addr)
- if errors.Is(err, syscall.EADDRINUSE) && s.config.HTTP.Net == "unix" {
- slog.Warn("removing existing socket", "path", s.config.HTTP.Addr)
- if err = syscall.Unlink(s.config.HTTP.Addr); err != nil {
- slog.Error("removing existing socket", "path", s.config.HTTP.Addr, "error", err)
- os.Exit(1)
- }
- if httpListener, err = net.Listen(s.config.HTTP.Net, s.config.HTTP.Addr); err != nil {
- slog.Error("listening HTTP", "error", err)
- os.Exit(1)
- }
- } else if err != nil {
- slog.Error("listening HTTP", "error", err)
- os.Exit(1)
- }
- server := http.Server{
- Handler: s,
- ReadTimeout: time.Duration(s.config.HTTP.ReadTimeout) * time.Second,
- WriteTimeout: time.Duration(s.config.HTTP.ReadTimeout) * time.Second,
- IdleTimeout: time.Duration(s.config.HTTP.ReadTimeout) * time.Second,
- } //exhaustruct:ignore
- slog.Info("listening HTTP on", "net", s.config.HTTP.Net, "addr", s.config.HTTP.Addr)
- go func() {
- if err = server.Serve(httpListener); err != nil && !errors.Is(err, http.ErrServerClosed) {
- slog.Error("serving HTTP", "error", err)
- os.Exit(1)
- }
- }()
- }
-
- // Pprof listener
- {
- pprofListener, err := net.Listen(s.config.Pprof.Net, s.config.Pprof.Addr)
- if err != nil {
- slog.Error("listening pprof", "error", err)
- os.Exit(1)
- }
-
- slog.Info("listening pprof on", "net", s.config.Pprof.Net, "addr", s.config.Pprof.Addr)
- go func() {
- if err := http.Serve(pprofListener, nil); err != nil {
- slog.Error("serving pprof", "error", err)
- os.Exit(1)
- }
- }()
- }
-
- s.ircBot = irc.NewBot(&s.config.IRC)
- // IRC bot
- go s.ircBot.ConnectLoop()
-
- select {}
-}
diff --git a/forged/internal/unsorted/ssh_handle_receive_pack.go b/forged/internal/unsorted/ssh_handle_receive_pack.go
deleted file mode 100644
index a354273..0000000
--- a/forged/internal/unsorted/ssh_handle_receive_pack.go
+++ /dev/null
@@ -1,131 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "errors"
- "fmt"
- "os"
- "os/exec"
-
- gliderSSH "github.com/gliderlabs/ssh"
- "github.com/go-git/go-git/v5"
-)
-
-// packPass contains information known when handling incoming SSH connections
-// that then needs to be used in hook socket connection handlers. See hookc(1).
-type packPass struct {
- session gliderSSH.Session
- repo *git.Repository
- pubkey string
- directAccess bool
- repoPath string
- userID int
- userType string
- repoID int
- groupPath []string
- repoName string
- contribReq string
-}
-
-// sshHandleRecvPack handles attempts to push to repos.
-func (s *Server) sshHandleRecvPack(session gliderSSH.Session, pubkey, repoIdentifier string) (err error) {
- groupPath, repoName, repoID, repoPath, directAccess, contribReq, userType, userID, err := s.getRepoInfo2(session.Context(), repoIdentifier, pubkey)
- if err != nil {
- return err
- }
- repo, err := git.PlainOpen(repoPath)
- if err != nil {
- return err
- }
-
- repoConf, err := repo.Config()
- if err != nil {
- return err
- }
-
- repoConfCore := repoConf.Raw.Section("core")
- if repoConfCore == nil {
- return errors.New("repository has no core section in config")
- }
-
- hooksPath := repoConfCore.OptionAll("hooksPath")
- if len(hooksPath) != 1 || hooksPath[0] != s.config.Hooks.Execs {
- return errors.New("repository has hooksPath set to an unexpected value")
- }
-
- if !directAccess {
- switch contribReq {
- case "closed":
- if !directAccess {
- return errors.New("you need direct access to push to this repo")
- }
- case "registered_user":
- if userType != "registered" {
- return errors.New("you need to be a registered user to push to this repo")
- }
- case "ssh_pubkey":
- fallthrough
- case "federated":
- if pubkey == "" {
- return errors.New("you need to have an SSH public key to push to this repo")
- }
- if userType == "" {
- userID, err = s.addUserSSH(session.Context(), pubkey)
- if err != nil {
- return err
- }
- fmt.Fprintln(session.Stderr(), "you are now registered as user ID", userID)
- userType = "pubkey_only"
- }
-
- case "public":
- default:
- panic("unknown contrib_requirements value " + contribReq)
- }
- }
-
- cookie, err := randomUrlsafeStr(16)
- if err != nil {
- fmt.Fprintln(session.Stderr(), "Error while generating cookie:", err)
- }
-
- s.packPasses.Store(cookie, packPass{
- session: session,
- pubkey: pubkey,
- directAccess: directAccess,
- repoPath: repoPath,
- userID: userID,
- repoID: repoID,
- groupPath: groupPath,
- repoName: repoName,
- repo: repo,
- contribReq: contribReq,
- userType: userType,
- })
- defer s.packPasses.Delete(cookie)
- // The Delete won't execute until proc.Wait returns unless something
- // goes horribly wrong, such as a panic.
-
- proc := exec.CommandContext(session.Context(), "git-receive-pack", repoPath)
- proc.Env = append(os.Environ(),
- "LINDENII_FORGE_HOOKS_SOCKET_PATH="+s.config.Hooks.Socket,
- "LINDENII_FORGE_HOOKS_COOKIE="+cookie,
- )
- proc.Stdin = session
- proc.Stdout = session
- proc.Stderr = session.Stderr()
-
- if err = proc.Start(); err != nil {
- fmt.Fprintln(session.Stderr(), "Error while starting process:", err)
- return err
- }
-
- err = proc.Wait()
- if err != nil {
- fmt.Fprintln(session.Stderr(), "Error while waiting for process:", err)
- }
-
- return err
-}
diff --git a/forged/internal/unsorted/ssh_handle_upload_pack.go b/forged/internal/unsorted/ssh_handle_upload_pack.go
deleted file mode 100644
index 735a053..0000000
--- a/forged/internal/unsorted/ssh_handle_upload_pack.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "fmt"
- "os"
- "os/exec"
-
- glider_ssh "github.com/gliderlabs/ssh"
-)
-
-// sshHandleUploadPack handles clones/fetches. It just uses git-upload-pack
-// and has no ACL checks.
-func (s *Server) sshHandleUploadPack(session glider_ssh.Session, pubkey, repoIdentifier string) (err error) {
- var repoPath string
- if _, _, _, repoPath, _, _, _, _, err = s.getRepoInfo2(session.Context(), repoIdentifier, pubkey); err != nil {
- return err
- }
-
- proc := exec.CommandContext(session.Context(), "git-upload-pack", repoPath)
- proc.Env = append(os.Environ(), "LINDENII_FORGE_HOOKS_SOCKET_PATH="+s.config.Hooks.Socket)
- proc.Stdin = session
- proc.Stdout = session
- proc.Stderr = session.Stderr()
-
- if err = proc.Start(); err != nil {
- fmt.Fprintln(session.Stderr(), "Error while starting process:", err)
- return err
- }
-
- err = proc.Wait()
- if err != nil {
- fmt.Fprintln(session.Stderr(), "Error while waiting for process:", err)
- }
-
- return err
-}
diff --git a/forged/internal/unsorted/ssh_server.go b/forged/internal/unsorted/ssh_server.go
deleted file mode 100644
index 43cc0c4..0000000
--- a/forged/internal/unsorted/ssh_server.go
+++ /dev/null
@@ -1,96 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "fmt"
- "log/slog"
- "net"
- "os"
- "strings"
-
- gliderSSH "github.com/gliderlabs/ssh"
- "go.lindenii.runxiyu.org/forge/forged/internal/ansiec"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
- goSSH "golang.org/x/crypto/ssh"
-)
-
-// serveSSH serves SSH on a [net.Listener]. The listener should generally be a
-// TCP listener, although AF_UNIX SOCK_STREAM listeners may be appropriate in
-// rare cases.
-func (s *Server) serveSSH(listener net.Listener) error {
- var hostKeyBytes []byte
- var hostKey goSSH.Signer
- var err error
- var server *gliderSSH.Server
-
- if hostKeyBytes, err = os.ReadFile(s.config.SSH.Key); err != nil {
- return err
- }
-
- if hostKey, err = goSSH.ParsePrivateKey(hostKeyBytes); err != nil {
- return err
- }
-
- s.serverPubkey = hostKey.PublicKey()
- s.serverPubkeyString = misc.BytesToString(goSSH.MarshalAuthorizedKey(s.serverPubkey))
- s.serverPubkeyFP = goSSH.FingerprintSHA256(s.serverPubkey)
-
- server = &gliderSSH.Server{
- Handler: func(session gliderSSH.Session) {
- clientPubkey := session.PublicKey()
- var clientPubkeyStr string
- if clientPubkey != nil {
- clientPubkeyStr = strings.TrimSuffix(misc.BytesToString(goSSH.MarshalAuthorizedKey(clientPubkey)), "\n")
- }
-
- slog.Info("incoming ssh", "addr", session.RemoteAddr().String(), "key", clientPubkeyStr, "command", session.RawCommand())
- fmt.Fprintln(session.Stderr(), ansiec.Blue+"Lindenii Forge "+version+", source at "+strings.TrimSuffix(s.config.HTTP.Root, "/")+"/-/source/"+ansiec.Reset+"\r")
-
- cmd := session.Command()
-
- if len(cmd) < 2 {
- fmt.Fprintln(session.Stderr(), "Insufficient arguments\r")
- return
- }
-
- switch cmd[0] {
- case "git-upload-pack":
- if len(cmd) > 2 {
- fmt.Fprintln(session.Stderr(), "Too many arguments\r")
- return
- }
- err = s.sshHandleUploadPack(session, clientPubkeyStr, cmd[1])
- case "git-receive-pack":
- if len(cmd) > 2 {
- fmt.Fprintln(session.Stderr(), "Too many arguments\r")
- return
- }
- err = s.sshHandleRecvPack(session, clientPubkeyStr, cmd[1])
- default:
- fmt.Fprintln(session.Stderr(), "Unsupported command: "+cmd[0]+"\r")
- return
- }
- if err != nil {
- fmt.Fprintln(session.Stderr(), err.Error())
- return
- }
- },
- PublicKeyHandler: func(_ gliderSSH.Context, _ gliderSSH.PublicKey) bool { return true },
- KeyboardInteractiveHandler: func(_ gliderSSH.Context, _ goSSH.KeyboardInteractiveChallenge) bool { return true },
- // It is intentional that we do not check any credentials and accept all connections.
- // This allows all users to connect and clone repositories. However, the public key
- // is passed to handlers, so e.g. the push handler could check the key and reject the
- // push if it needs to.
- } //exhaustruct:ignore
-
- server.AddHostKey(hostKey)
-
- if err = server.Serve(listener); err != nil {
- slog.Error("error serving SSH", "error", err.Error())
- os.Exit(1)
- }
-
- return nil
-}
diff --git a/forged/internal/unsorted/ssh_utils.go b/forged/internal/unsorted/ssh_utils.go
deleted file mode 100644
index 6f50a87..0000000
--- a/forged/internal/unsorted/ssh_utils.go
+++ /dev/null
@@ -1,79 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "context"
- "errors"
- "fmt"
- "io"
- "net/url"
-
- "go.lindenii.runxiyu.org/forge/forged/internal/ansiec"
- "go.lindenii.runxiyu.org/forge/forged/internal/misc"
-)
-
-var errIllegalSSHRepoPath = errors.New("illegal SSH repo path")
-
-// getRepoInfo2 also fetches repo information... it should be deprecated and
-// implemented in individual handlers.
-func (s *Server) getRepoInfo2(ctx context.Context, sshPath, sshPubkey string) (groupPath []string, repoName string, repoID int, repoPath string, directAccess bool, contribReq, userType string, userID int, err error) {
- var segments []string
- var sepIndex int
- var moduleType, moduleName string
-
- segments, err = misc.PathToSegments(sshPath)
- if err != nil {
- return
- }
-
- for i, segment := range segments {
- var err error
- segments[i], err = url.PathUnescape(segment)
- if err != nil {
- return []string{}, "", 0, "", false, "", "", 0, err
- }
- }
-
- if segments[0] == "-" {
- return []string{}, "", 0, "", false, "", "", 0, errIllegalSSHRepoPath
- }
-
- sepIndex = -1
- for i, part := range segments {
- if part == "-" {
- sepIndex = i
- break
- }
- }
- if segments[len(segments)-1] == "" {
- segments = segments[:len(segments)-1]
- }
-
- switch {
- case sepIndex == -1:
- return []string{}, "", 0, "", false, "", "", 0, errIllegalSSHRepoPath
- case len(segments) <= sepIndex+2:
- return []string{}, "", 0, "", false, "", "", 0, errIllegalSSHRepoPath
- }
-
- groupPath = segments[:sepIndex]
- moduleType = segments[sepIndex+1]
- moduleName = segments[sepIndex+2]
- repoName = moduleName
- switch moduleType {
- case "repos":
- _1, _2, _3, _4, _5, _6, _7 := s.getRepoInfo(ctx, groupPath, moduleName, sshPubkey)
- return groupPath, repoName, _1, _2, _3, _4, _5, _6, _7
- default:
- return []string{}, "", 0, "", false, "", "", 0, errIllegalSSHRepoPath
- }
-}
-
-// writeRedError is a helper function that basically does a Fprintf but makes
-// the entire thing red, in terms of ANSI escape sequences. It's useful when
-// producing error messages on SSH connections.
-func writeRedError(w io.Writer, format string, args ...any) {
- fmt.Fprintln(w, ansiec.Red+fmt.Sprintf(format, args...)+ansiec.Reset)
-}
diff --git a/forged/internal/unsorted/unsorted.go b/forged/internal/unsorted/unsorted.go
deleted file mode 100644
index f26b0e4..0000000
--- a/forged/internal/unsorted/unsorted.go
+++ /dev/null
@@ -1,5 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-// Package unsorted is where unsorted Go files from the old structure are kept.
-package unsorted
diff --git a/forged/internal/unsorted/users.go b/forged/internal/unsorted/users.go
deleted file mode 100644
index 0f72eed..0000000
--- a/forged/internal/unsorted/users.go
+++ /dev/null
@@ -1,35 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-import (
- "context"
-
- "github.com/jackc/pgx/v5"
-)
-
-// addUserSSH adds a new user solely based on their SSH public key.
-//
-// TODO: Audit all users of this function.
-func (s *Server) addUserSSH(ctx context.Context, pubkey string) (userID int, err error) {
- var txn pgx.Tx
-
- if txn, err = s.database.Begin(ctx); err != nil {
- return
- }
- defer func() {
- _ = txn.Rollback(ctx)
- }()
-
- if err = txn.QueryRow(ctx, `INSERT INTO users (type) VALUES ('pubkey_only') RETURNING id`).Scan(&userID); err != nil {
- return
- }
-
- if _, err = txn.Exec(ctx, `INSERT INTO ssh_public_keys (key_string, user_id) VALUES ($1, $2)`, pubkey, userID); err != nil {
- return
- }
-
- err = txn.Commit(ctx)
- return
-}
diff --git a/forged/internal/unsorted/version.go b/forged/internal/unsorted/version.go
deleted file mode 100644
index 52c0f32..0000000
--- a/forged/internal/unsorted/version.go
+++ /dev/null
@@ -1,6 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package unsorted
-
-var version = "unknown"
diff --git a/forged/internal/web/error_pages.go b/forged/internal/web/error_pages.go
deleted file mode 100644
index 2ba9a1a..0000000
--- a/forged/internal/web/error_pages.go
+++ /dev/null
@@ -1,60 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-package web
-
-import (
- "html/template"
- "net/http"
-)
-
-// ErrorPage404 renders a 404 Not Found error page using the "404" template.
-func ErrorPage404(templates *template.Template, w http.ResponseWriter, params map[string]any) {
- w.WriteHeader(http.StatusNotFound)
- _ = templates.ExecuteTemplate(w, "404", params)
-}
-
-// ErrorPage400 renders a 400 Bad Request error page using the "400" template.
-// The error message is passed via the "complete_error_msg" template param.
-func ErrorPage400(templates *template.Template, w http.ResponseWriter, params map[string]any, msg string) {
- w.WriteHeader(http.StatusBadRequest)
- params["complete_error_msg"] = msg
- _ = templates.ExecuteTemplate(w, "400", params)
-}
-
-// ErrorPage400Colon renders a 400 Bad Request error page telling the user
-// that we migrated from : to -.
-func ErrorPage400Colon(templates *template.Template, w http.ResponseWriter, params map[string]any) {
- w.WriteHeader(http.StatusBadRequest)
- _ = templates.ExecuteTemplate(w, "400_colon", params)
-}
-
-// ErrorPage403 renders a 403 Forbidden error page using the "403" template.
-// The error message is passed via the "complete_error_msg" template param.
-func ErrorPage403(templates *template.Template, w http.ResponseWriter, params map[string]any, msg string) {
- w.WriteHeader(http.StatusForbidden)
- params["complete_error_msg"] = msg
- _ = templates.ExecuteTemplate(w, "403", params)
-}
-
-// ErrorPage451 renders a 451 Unavailable For Legal Reasons error page using the "451" template.
-// The error message is passed via the "complete_error_msg" template param.
-func ErrorPage451(templates *template.Template, w http.ResponseWriter, params map[string]any, msg string) {
- w.WriteHeader(http.StatusUnavailableForLegalReasons)
- params["complete_error_msg"] = msg
- _ = templates.ExecuteTemplate(w, "451", params)
-}
-
-// ErrorPage500 renders a 500 Internal Server Error page using the "500" template.
-// The error message is passed via the "complete_error_msg" template param.
-func ErrorPage500(templates *template.Template, w http.ResponseWriter, params map[string]any, msg string) {
- w.WriteHeader(http.StatusInternalServerError)
- params["complete_error_msg"] = msg
- _ = templates.ExecuteTemplate(w, "500", params)
-}
-
-// ErrorPage501 renders a 501 Not Implemented error page using the "501" template.
-func ErrorPage501(templates *template.Template, w http.ResponseWriter, params map[string]any) {
- w.WriteHeader(http.StatusNotImplemented)
- _ = templates.ExecuteTemplate(w, "501", params)
-}
diff --git a/forged/internal/web/web.go b/forged/internal/web/web.go
deleted file mode 100644
index f4d15f8..0000000
--- a/forged/internal/web/web.go
+++ /dev/null
@@ -1,5 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-// Package web provides web-facing components of the forge.
-package web
diff --git a/forged/main.go b/forged/main.go
index fde15d1..38e22ff 100644
--- a/forged/main.go
+++ b/forged/main.go
@@ -5,9 +5,10 @@
package main
import (
+ "context"
"flag"
- "go.lindenii.runxiyu.org/forge/forged/internal/unsorted"
+ "go.lindenii.runxiyu.org/forge/forged/internal/server"
)
func main() {
@@ -18,10 +19,10 @@ func main() {
)
flag.Parse()
- s, err := unsorted.NewServer(*configPath)
+ s, err := server.New(*configPath)
if err != nil {
panic(err)
}
- panic(s.Run())
+ panic(s.Run(context.Background()))
}
diff --git a/forged/sql/queries/groups.sql b/forged/sql/queries/groups.sql
new file mode 100644
index 0000000..f067aeb
--- /dev/null
+++ b/forged/sql/queries/groups.sql
@@ -0,0 +1,47 @@
+-- name: GetRootGroups :many
+SELECT name, COALESCE(description, '') FROM groups WHERE parent_group IS NULL;
+
+-- name: GetGroupByPath :one
+WITH RECURSIVE group_path_cte AS (
+ SELECT
+ id,
+ parent_group,
+ name,
+ 1 AS depth
+ FROM groups
+ WHERE name = ($1::text[])[1]
+ AND parent_group IS NULL
+
+ UNION ALL
+
+ SELECT
+ g.id,
+ g.parent_group,
+ g.name,
+ group_path_cte.depth + 1
+ FROM groups g
+ JOIN group_path_cte ON g.parent_group = group_path_cte.id
+ WHERE g.name = ($1::text[])[group_path_cte.depth + 1]
+ AND group_path_cte.depth + 1 <= cardinality($1::text[])
+)
+SELECT
+ g.id,
+ g.name,
+ g.parent_group,
+ COALESCE(g.description, '') AS description,
+ EXISTS (
+ SELECT 1
+ FROM user_group_roles ugr
+ WHERE ugr.user_id = $2
+ AND ugr.group_id = g.id
+ ) AS has_role
+FROM group_path_cte c
+JOIN groups g ON g.id = c.id
+WHERE c.depth = cardinality($1::text[]);
+
+
+-- name: GetReposInGroup :many
+SELECT name, COALESCE(description, '') FROM repos WHERE group_id = $1;
+
+-- name: GetSubgroups :many
+SELECT name, COALESCE(description, '') FROM groups WHERE parent_group = $1;
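
GetGroupByPath resolves a nested group path in one round trip: the recursive CTE matches one array element per level of depth, and the final WHERE keeps only the row whose depth equals the array's length, so a partial match returns nothing. A hand-rolled pgx sketch of the same idea (the connection string, group names, and the trimmed column list are placeholders, not the exact query above):

package main

import (
	"context"
	"fmt"
	"os"

	"github.com/jackc/pgx/v5"
	"github.com/jackc/pgx/v5/pgtype"
)

// Trimmed-down variant of GetGroupByPath: resolve {"lindenii","forge"} to the
// id and description of the group "forge" nested under the root group
// "lindenii". A path that only partially matches yields pgx.ErrNoRows.
const query = `
WITH RECURSIVE group_path_cte AS (
    SELECT id, parent_group, name, 1 AS depth
    FROM groups
    WHERE name = ($1::text[])[1] AND parent_group IS NULL
  UNION ALL
    SELECT g.id, g.parent_group, g.name, group_path_cte.depth + 1
    FROM groups g
    JOIN group_path_cte ON g.parent_group = group_path_cte.id
    WHERE g.name = ($1::text[])[group_path_cte.depth + 1]
      AND group_path_cte.depth + 1 <= cardinality($1::text[])
)
SELECT g.id, COALESCE(g.description, '')
FROM group_path_cte c
JOIN groups g ON g.id = c.id
WHERE c.depth = cardinality($1::text[])`

func main() {
	ctx := context.Background()
	conn, err := pgx.Connect(ctx, os.Getenv("DATABASE_URL")) // placeholder DSN
	if err != nil {
		panic(err)
	}
	defer conn.Close(ctx)

	var id int64
	var description string
	path := pgtype.FlatArray[string]{"lindenii", "forge"}
	if err := conn.QueryRow(ctx, query, path).Scan(&id, &description); err != nil {
		panic(err)
	}
	fmt.Println(id, description)
}
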
diff --git a/forged/sql/queries/login.sql b/forged/sql/queries/login.sql
new file mode 100644
index 0000000..ffc4026
--- /dev/null
+++ b/forged/sql/queries/login.sql
@@ -0,0 +1,8 @@
+-- name: GetUserCreds :one
+SELECT id, COALESCE(password_hash, '') FROM users WHERE username = $1;
+
+-- name: InsertSession :exec
+INSERT INTO sessions (user_id, token_hash, expires_at) VALUES ($1, $2, $3);
+
+-- name: GetUserFromSession :one
+SELECT user_id, COALESCE(username, '') FROM users u JOIN sessions s ON u.id = s.user_id WHERE s.token_hash = $1;
diff --git a/forged/sql/queries/repos.sql b/forged/sql/queries/repos.sql
new file mode 100644
index 0000000..cacc5b8
--- /dev/null
+++ b/forged/sql/queries/repos.sql
@@ -0,0 +1,9 @@
+-- name: InsertRepo :one
+INSERT INTO repos (group_id, name, description, contrib_requirements)
+VALUES ($1, $2, $3, $4)
+RETURNING id;
+
+-- name: GetRepoByGroupAndName :one
+SELECT id, name, COALESCE(description, '') AS description
+FROM repos
+WHERE group_id = $1 AND name = $2;
diff --git a/forged/sql/schema.sql b/forged/sql/schema.sql
new file mode 100644
index 0000000..72327a9
--- /dev/null
+++ b/forged/sql/schema.sql
@@ -0,0 +1,221 @@
+-- SPDX-License-Identifier: AGPL-3.0-only
+-- SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
+
+-- Currently, slugs accept arbitrary unicode text. We should
+-- look into normalization options later.
+-- May consider using citext and limiting it to safe characters.
+
+CREATE TABLE groups (
+ id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+ name TEXT NOT NULL,
+ parent_group BIGINT REFERENCES groups(id) ON DELETE RESTRICT,
+ description TEXT,
+ UNIQUE NULLS NOT DISTINCT (parent_group, name)
+);
+CREATE INDEX ggroups_parent_idx ON groups(parent_group);
+
+DO $$ BEGIN
+ CREATE TYPE contrib_requirement AS ENUM ('closed','registered_user','federated','ssh_pubkey','open');
+ -- closed means only those with direct access; each layer adds that level of user
+EXCEPTION WHEN duplicate_object THEN END $$;
+CREATE TABLE repos (
+ id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+ group_id BIGINT NOT NULL REFERENCES groups(id) ON DELETE RESTRICT, -- I mean, should be CASCADE but deleting Git repos on disk also needs to be considered
+ name TEXT NOT NULL,
+ description TEXT,
+ contrib_requirements contrib_requirement NOT NULL,
+ UNIQUE(group_id, name)
+ -- The filesystem path can be derived from the repo ID.
+ -- The config has repo_dir, then we can do repo_dir/<id>.git
+);
+CREATE INDEX grepos_group_idx ON repos(group_id);
+
+CREATE TABLE mailing_lists (
+ id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+ group_id BIGINT NOT NULL REFERENCES groups(id) ON DELETE RESTRICT,
+ name TEXT NOT NULL,
+ description TEXT,
+ UNIQUE(group_id, name)
+);
+CREATE INDEX gmailing_lists_group_idx ON mailing_lists(group_id);
+
+CREATE TABLE mailing_list_emails (
+ id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+ list_id BIGINT NOT NULL REFERENCES mailing_lists(id) ON DELETE CASCADE,
+ title TEXT NOT NULL,
+ sender TEXT NOT NULL,
+ date TIMESTAMPTZ NOT NULL, -- everything must be in UTC
+ message_id TEXT, -- no uniqueness guarantee as it's arbitrarily set by senders
+ content BYTEA NOT NULL
+);
+
+DO $$ BEGIN
+ CREATE TYPE user_type AS ENUM ('pubkey_only','federated','registered','admin');
+EXCEPTION WHEN duplicate_object THEN END $$;
+CREATE TABLE users (
+ id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+ CONSTRAINT id_positive CHECK (id > 0),
+ username TEXT UNIQUE, -- NULL when, for example, pubkey_only
+ type user_type NOT NULL,
+ password_hash TEXT,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
+);
+
+CREATE TABLE ssh_public_keys (
+ id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+ user_id BIGINT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+ key_string TEXT NOT NULL,
+ CONSTRAINT unique_key_string EXCLUDE USING HASH (key_string WITH =) -- because apparently some haxxor like using rsa16384 keys which are too long for a simple UNIQUE constraint :D
+);
+CREATE INDEX gssh_keys_user_idx ON ssh_public_keys(user_id);
+
+CREATE TABLE sessions (
+ session_id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+ user_id BIGINT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+ token_hash BYTEA UNIQUE NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
+ expires_at TIMESTAMPTZ NOT NULL
+);
+CREATE INDEX gsessions_user_idx ON sessions(user_id);
+
+DO $$ BEGIN
+ CREATE TYPE group_role AS ENUM ('owner'); -- just owner for now, might need to rethink ACL altogether later; might consider using a join table if we need it to be dynamic, but enum suffices for now
+EXCEPTION WHEN duplicate_object THEN END $$;
+CREATE TABLE user_group_roles (
+ group_id BIGINT NOT NULL REFERENCES groups(id) ON DELETE CASCADE,
+ user_id BIGINT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+ role group_role NOT NULL,
+ PRIMARY KEY(user_id, group_id)
+);
+CREATE INDEX gugr_group_idx ON user_group_roles(group_id);
+
+CREATE TABLE federated_identities (
+ user_id BIGINT NOT NULL REFERENCES users(id) ON DELETE RESTRICT,
+ service TEXT NOT NULL, -- might need to constrain
+ remote_username TEXT NOT NULL,
+ PRIMARY KEY(user_id, service),
+ UNIQUE(service, remote_username)
+);
+
+CREATE TABLE ticket_trackers (
+ id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+ group_id BIGINT NOT NULL REFERENCES groups(id) ON DELETE RESTRICT,
+ name TEXT NOT NULL,
+ description TEXT,
+ UNIQUE(group_id, name)
+);
+
+CREATE TABLE tickets (
+ id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+ tracker_id BIGINT NOT NULL REFERENCES ticket_trackers(id) ON DELETE CASCADE,
+ tracker_local_id BIGINT NOT NULL,
+ title TEXT NOT NULL,
+ description TEXT,
+ UNIQUE(tracker_id, tracker_local_id)
+);
+
+CREATE FUNCTION create_tracker_ticket_sequence()
+RETURNS TRIGGER AS $$
+DECLARE
+ seq_name TEXT := format('tracker_ticket_seq_%s', NEW.id);
+BEGIN
+ EXECUTE format('CREATE SEQUENCE %I', seq_name);
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+CREATE FUNCTION drop_tracker_ticket_sequence()
+RETURNS TRIGGER AS $$
+DECLARE
+ seq_name TEXT := format('tracker_ticket_seq_%s', OLD.id);
+BEGIN
+ EXECUTE format('DROP SEQUENCE IF EXISTS %I', seq_name);
+ RETURN OLD;
+END;
+$$ LANGUAGE plpgsql;
+CREATE TRIGGER after_insert_ticket_tracker
+AFTER INSERT ON ticket_trackers
+FOR EACH ROW
+EXECUTE FUNCTION create_tracker_ticket_sequence();
+CREATE TRIGGER before_delete_ticket_tracker
+BEFORE DELETE ON ticket_trackers
+FOR EACH ROW
+EXECUTE FUNCTION drop_tracker_ticket_sequence();
+CREATE FUNCTION assign_tracker_local_id()
+RETURNS TRIGGER AS $$
+DECLARE
+ seq_name TEXT := format('tracker_ticket_seq_%s', NEW.tracker_id);
+BEGIN
+ IF NEW.tracker_local_id IS NULL THEN
+ EXECUTE format('SELECT nextval(%L)', seq_name) INTO NEW.tracker_local_id;
+ END IF;
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+CREATE TRIGGER before_insert_ticket
+BEFORE INSERT ON tickets
+FOR EACH ROW
+EXECUTE FUNCTION assign_tracker_local_id();
+CREATE INDEX gtickets_tracker_idx ON tickets(tracker_id);
+
+DO $$ BEGIN
+ CREATE TYPE mr_status AS ENUM ('open','merged','closed');
+EXCEPTION WHEN duplicate_object THEN END $$;
+
+CREATE TABLE merge_requests (
+ id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
+ repo_id BIGINT NOT NULL REFERENCES repos(id) ON DELETE CASCADE,
+ repo_local_id BIGINT NOT NULL,
+ title TEXT NOT NULL,
+ creator BIGINT REFERENCES users(id) ON DELETE SET NULL,
+ source_repo BIGINT NOT NULL REFERENCES repos(id) ON DELETE RESTRICT,
+ source_ref TEXT NOT NULL,
+ destination_branch TEXT,
+ status mr_status NOT NULL,
+ UNIQUE (repo_id, repo_local_id)
+);
+CREATE UNIQUE INDEX gmr_open_src_dst_uniq
+ ON merge_requests (repo_id, source_repo, source_ref, coalesce(destination_branch, ''))
+ WHERE status = 'open';
+CREATE INDEX gmr_repo_idx ON merge_requests(repo_id);
+CREATE INDEX gmr_creator_idx ON merge_requests(creator);
+CREATE FUNCTION create_repo_mr_sequence()
+RETURNS TRIGGER AS $$
+DECLARE
+ seq_name TEXT := format('repo_mr_seq_%s', NEW.id);
+BEGIN
+ EXECUTE format('CREATE SEQUENCE %I', seq_name);
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+CREATE FUNCTION drop_repo_mr_sequence()
+RETURNS TRIGGER AS $$
+DECLARE
+ seq_name TEXT := format('repo_mr_seq_%s', OLD.id);
+BEGIN
+ EXECUTE format('DROP SEQUENCE IF EXISTS %I', seq_name);
+ RETURN OLD;
+END;
+$$ LANGUAGE plpgsql;
+CREATE TRIGGER after_insert_repo
+AFTER INSERT ON repos
+FOR EACH ROW
+EXECUTE FUNCTION create_repo_mr_sequence();
+CREATE TRIGGER before_delete_repo
+BEFORE DELETE ON repos
+FOR EACH ROW
+EXECUTE FUNCTION drop_repo_mr_sequence();
+CREATE FUNCTION assign_repo_local_id()
+RETURNS TRIGGER AS $$
+DECLARE
+ seq_name TEXT := format('repo_mr_seq_%s', NEW.repo_id);
+BEGIN
+ IF NEW.repo_local_id IS NULL THEN
+ EXECUTE format('SELECT nextval(%L)', seq_name) INTO NEW.repo_local_id;
+ END IF;
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+CREATE TRIGGER before_insert_merge_request
+BEFORE INSERT ON merge_requests
+FOR EACH ROW
+EXECUTE FUNCTION assign_repo_local_id();
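The per-tracker and per-repo sequences above exist so that callers can omit tracker_local_id (or repo_local_id) and let the BEFORE INSERT trigger assign the next number scoped to that tracker or repository. A minimal sketch of the intended usage with pgx, assuming a reachable DATABASE_URL and an existing tracker with id 1 (both assumptions, not taken from this patch):

    package main

    import (
    	"context"
    	"fmt"
    	"os"

    	"github.com/jackc/pgx/v5"
    )

    func main() {
    	ctx := context.Background()
    	conn, err := pgx.Connect(ctx, os.Getenv("DATABASE_URL")) // assumed connection string
    	if err != nil {
    		panic(err)
    	}
    	defer conn.Close(ctx)

    	// tracker_local_id is deliberately omitted: the before_insert_ticket trigger
    	// fills it from that tracker's own sequence, so numbering starts at 1 per
    	// tracker instead of being global across the forge.
    	var localID int64
    	err = conn.QueryRow(ctx,
    		`INSERT INTO tickets (tracker_id, title, description)
    		 VALUES ($1, $2, $3)
    		 RETURNING tracker_local_id`,
    		1, "Example ticket", "Illustrates per-tracker numbering",
    	).Scan(&localID)
    	if err != nil {
    		panic(err)
    	}
    	fmt.Println("ticket number within tracker:", localID)
    }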
diff --git a/forged/sqlc.yaml b/forged/sqlc.yaml
new file mode 100644
index 0000000..2b6e035
--- /dev/null
+++ b/forged/sqlc.yaml
@@ -0,0 +1,15 @@
+version: "2"
+sql:
+ - engine: "postgresql"
+ schema: "sql/schema.sql"
+ queries: "sql/queries"
+ gen:
+ go:
+ package: "queries"
+ out: "internal/database/queries"
+ sql_package: "pgx/v5"
+ emit_json_tags: true
+ emit_db_tags: true
+ emit_prepared_queries: true
+ emit_pointers_for_null_types: true
+ emit_enum_valid_method: true
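With this configuration, sqlc compiles the schema above together with the statements under sql/queries into a typed Go package at internal/database/queries. The query files themselves are not part of this patch, so the following is only a sketch of how the generated package is typically wired up with pgx; the import path is inferred from the out setting and the module name, and the actual query methods depend on files not shown here:

    package main

    import (
    	"context"
    	"os"

    	"github.com/jackc/pgx/v5/pgxpool"

    	// Inferred from out: "internal/database/queries" under the forged module tree.
    	"go.lindenii.runxiyu.org/forge/forged/internal/database/queries"
    )

    func main() {
    	ctx := context.Background()
    	pool, err := pgxpool.New(ctx, os.Getenv("DATABASE_URL")) // assumed connection string
    	if err != nil {
    		panic(err)
    	}
    	defer pool.Close()

    	// sqlc's pgx/v5 backend generates New(DBTX); *pgxpool.Pool satisfies DBTX,
    	// so every generated query method is available on q.
    	q := queries.New(pool)
    	_ = q // call whichever methods sqlc generates from sql/queries
    }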
diff --git a/forged/static/style.css b/forged/static/style.css
index 4923771..f70fe69 100644
--- a/forged/static/style.css
+++ b/forged/static/style.css
@@ -18,33 +18,33 @@ html {
background-color: var(--background-color);
color: var(--text-color);
font-size: 1rem;
- --background-color: hsl(0, 0%, 100%);
- --text-color: hsl(0, 0%, 0%);
- --link-color: hsl(320, 50%, 36%);
- --light-text-color: hsl(0, 0%, 45%);
- --darker-border-color: hsl(0, 0%, 72%);
- --lighter-border-color: hsl(0, 0%, 85%);
- --text-decoration-color: hsl(0, 0%, 72%);
- --darker-box-background-color: hsl(0, 0%, 92%);
- --lighter-box-background-color: hsl(0, 0%, 95%);
- --primary-color: hsl(320, 50%, 36%);
- --primary-color-contrast: hsl(320, 0%, 100%);
- --danger-color: #ff0000;
- --danger-color-contrast: #ffffff;
+ --background-color: oklch(1.000 0.000 0.0);
+ --text-color: oklch(0.000 0.000 0.0);
+ --link-color: oklch(0.457 0.143 343.4);
+ --light-text-color: oklch(0.555 0.000 0.0);
+ --darker-border-color: oklch(0.781 0.000 0.0);
+ --lighter-border-color: oklch(0.885 0.000 0.0);
+ --text-decoration-color: oklch(0.781 0.000 0.0);
+ --darker-box-background-color: oklch(0.939 0.000 0.0);
+ --lighter-box-background-color: oklch(0.962 0.000 0.0);
+ --primary-color: oklch(0.457 0.143 343.4);
+ --primary-color-contrast: oklch(1.000 0.000 0.0);
+ --danger-color: oklch(0.628 0.258 29.2);
+ --danger-color-contrast: oklch(1.000 0.000 0.0);
}
/* Dark mode overrides */
@media (prefers-color-scheme: dark) {
html {
- --background-color: hsl(0, 0%, 0%);
- --text-color: hsl(0, 0%, 100%);
- --link-color: hsl(320, 50%, 76%);
- --light-text-color: hsl(0, 0%, 78%);
- --darker-border-color: hsl(0, 0%, 35%);
- --lighter-border-color: hsl(0, 0%, 25%);
- --text-decoration-color: hsl(0, 0%, 50%);
- --darker-box-background-color: hsl(0, 0%, 20%);
- --lighter-box-background-color: hsl(0, 0%, 15%);
+ --background-color: oklch(0.000 0.000 0.0);
+ --text-color: oklch(1.000 0.000 0.0);
+ --link-color: oklch(0.786 0.089 339.4);
+ --light-text-color: oklch(0.829 0.000 0.0);
+ --darker-border-color: oklch(0.465 0.000 0.0);
+ --lighter-border-color: oklch(0.371 0.000 0.0);
+ --text-decoration-color: oklch(0.598 0.000 0.0);
+ --darker-box-background-color: oklch(0.321 0.000 0.0);
+ --lighter-box-background-color: oklch(0.270 0.000 0.0);
}
}
diff --git a/forged/templates/_footer.tmpl b/forged/templates/_footer.tmpl
index 22a3958..11e2365 100644
--- a/forged/templates/_footer.tmpl
+++ b/forged/templates/_footer.tmpl
@@ -4,7 +4,7 @@
*/}}
{{- define "footer" -}}
<a href="https://lindenii.runxiyu.org/forge/">Lindenii Forge</a>
-{{ .global.forge_version }}
+{{ .BaseData.Global.ForgeVersion }}
(<a href="https://forge.lindenii.runxiyu.org/forge/-/repos/server/">upstream</a>,
<a href="/-/source/LICENSE">license</a>,
<a href="https://webirc.runxiyu.org/kiwiirc/#lindenii">support</a>)
diff --git a/forged/templates/_group_view.tmpl b/forged/templates/_group_view.tmpl
index 92b6639..de5d45d 100644
--- a/forged/templates/_group_view.tmpl
+++ b/forged/templates/_group_view.tmpl
@@ -3,7 +3,7 @@
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}}
{{- define "group_view" -}}
-{{- if .subgroups -}}
+{{- if .Subgroups -}}
<table class="wide">
<thead>
<tr>
@@ -15,7 +15,7 @@
</tr>
</thead>
<tbody>
- {{- range .subgroups -}}
+ {{- range .Subgroups -}}
<tr>
<td>
<a href="{{- .Name | path_escape -}}/">{{- .Name -}}</a>
@@ -28,7 +28,7 @@
</tbody>
</table>
{{- end -}}
-{{- if .repos -}}
+{{- if .Repos -}}
<table class="wide">
<thead>
<tr>
@@ -40,7 +40,7 @@
</tr>
</thead>
<tbody>
- {{- range .repos -}}
+ {{- range .Repos -}}
<tr>
<td>
<a href="-/repos/{{- .Name | path_escape -}}/">{{- .Name -}}</a>
diff --git a/forged/templates/_header.tmpl b/forged/templates/_header.tmpl
index 340a2ac..39d3491 100644
--- a/forged/templates/_header.tmpl
+++ b/forged/templates/_header.tmpl
@@ -5,15 +5,15 @@
{{- define "header" -}}
<header id="main-header">
<div id="main-header-forge-title">
- <a href="/">{{- .global.forge_title -}}</a>
+ <a href="/">{{- .BaseData.Global.ForgeTitle -}}</a>
</div>
<nav id="breadcrumb-nav">
{{- $path := "" -}}
- {{- $url_segments := .url_segments -}}
- {{- $dir_mode := .dir_mode -}}
- {{- $ref_type := .ref_type -}}
- {{- $ref := .ref_name -}}
- {{- $separator_index := .separator_index -}}
+ {{- $url_segments := .BaseData.URLSegments -}}
+ {{- $dir_mode := .BaseData.DirMode -}}
+ {{- $ref_type := .BaseData.RefType -}}
+ {{- $ref := .BaseData.RefName -}}
+ {{- $separator_index := .BaseData.SeparatorIndex -}}
{{- if eq $separator_index -1 -}}
{{- $separator_index = len $url_segments -}}
{{- end -}}
@@ -25,8 +25,8 @@
{{- end -}}
</nav>
<div id="main-header-user">
- {{- if ne .user_id_string "" -}}
- <a href="/-/users/{{- .user_id_string -}}">{{- .username -}}</a>
+ {{- if ne .BaseData.UserID "" -}}
+ <a href="/-/users/{{- .BaseData.UserID -}}/">{{- .BaseData.Username -}}</a>
{{- else -}}
<a href="/-/login/">Login</a>
{{- end -}}
diff --git a/forged/templates/group.tmpl b/forged/templates/group.tmpl
index 3338f9b..1f9609e 100644
--- a/forged/templates/group.tmpl
+++ b/forged/templates/group.tmpl
@@ -3,23 +3,23 @@
SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
*/}}
{{- define "group" -}}
-{{- $group_path := .group_path -}}
+{{- $group_path := .BaseData.GroupPath -}}
<!DOCTYPE html>
<html lang="en">
<head>
{{- template "head_common" . -}}
- <title>{{- range $i, $s := .group_path -}}{{- $s -}}{{- if ne $i (len $group_path) -}}/{{- end -}}{{- end }} &ndash; {{ .global.forge_title -}}</title>
+ <title>{{- range $i, $s := $group_path -}}{{- $s -}}{{- if ne $i (len $group_path) -}}/{{- end -}}{{- end }} &ndash; {{ .BaseData.Global.ForgeTitle -}}</title>
</head>
<body class="group">
{{- template "header" . -}}
<main>
<div class="padding-wrapper">
- {{- if .description -}}
- <p>{{- .description -}}</p>
+ {{- if .Description -}}
+ <p>{{- .Description -}}</p>
{{- end -}}
{{- template "group_view" . -}}
</div>
- {{- if .direct_access -}}
+ {{- if .DirectAccess -}}
<div class="padding-wrapper">
<form method="POST" enctype="application/x-www-form-urlencoded">
<table>
@@ -47,7 +47,7 @@
<th scope="row">Contrib</th>
<td class="tdinput">
<select id="repo-contrib-input" name="repo_contrib">
- <option value="public">Public</option>
+ <option value="open">Public</option>
<option value="ssh_pubkey">SSH public key</option>
<option value="federated">Federated service</option>
<option value="registered_user">Registered user</option>
diff --git a/forged/templates/index.tmpl b/forged/templates/index.tmpl
index 66bd177..fa9b6a0 100644
--- a/forged/templates/index.tmpl
+++ b/forged/templates/index.tmpl
@@ -7,7 +7,7 @@
<html lang="en">
<head>
{{- template "head_common" . -}}
- <title>Index &ndash; {{ .global.forge_title -}}</title>
+ <title>Index &ndash; {{ .BaseData.Global.ForgeTitle -}}</title>
</head>
<body class="index">
{{- template "header" . -}}
@@ -24,7 +24,7 @@
</tr>
</thead>
<tbody>
- {{- range .groups -}}
+ {{- range .Groups -}}
<tr>
<td>
<a href="{{- .Name | path_escape -}}/">{{- .Name -}}</a>
@@ -47,11 +47,11 @@
<tbody>
<tr>
<th scope="row">SSH public key</th>
- <td><code class="breakable">{{- .global.server_public_key_string -}}</code></td>
+ <td><code class="breakable">{{- .BaseData.Global.SSHPubkey -}}</code></td>
</tr>
<tr>
<th scope="row">SSH fingerprint</th>
- <td><code class="breakable">{{- .global.server_public_key_fingerprint -}}</code></td>
+ <td><code class="breakable">{{- .BaseData.Global.SSHFingerprint -}}</code></td>
</tr>
</tbody>
</table>
diff --git a/forged/templates/login.tmpl b/forged/templates/login.tmpl
index 980b863..09cbb61 100644
--- a/forged/templates/login.tmpl
+++ b/forged/templates/login.tmpl
@@ -7,11 +7,11 @@
<html lang="en">
<head>
{{- template "head_common" . -}}
- <title>Login &ndash; {{ .global.forge_title -}}</title>
+ <title>Login &ndash; {{ .BaseData.Global.ForgeTitle -}}</title>
</head>
<body class="index">
<main>
- {{- .login_error -}}
+ {{- .LoginError -}}
<div class="padding-wrapper">
<form method="POST" enctype="application/x-www-form-urlencoded">
<table>
diff --git a/git2d/.gitignore b/git2d/.gitignore
deleted file mode 100644
index 635d84d..0000000
--- a/git2d/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/git2d
diff --git a/git2d/bare.c b/git2d/bare.c
index b580980..307f3d8 100644
--- a/git2d/bare.c
+++ b/git2d/bare.c
@@ -169,13 +169,7 @@ bare_error bare_get_u64(struct bare_reader *ctx, uint64_t *x)
if (err == BARE_ERROR_NONE) {
*x = (uint64_t) ((uint8_t *) x)[0]
- | (uint64_t) ((uint8_t *) x)[1] << 8
- | (uint64_t) ((uint8_t *) x)[2] << 16
- | (uint64_t) ((uint8_t *) x)[3] << 24
- | (uint64_t) ((uint8_t *) x)[4] << 32
- | (uint64_t) ((uint8_t *) x)[5] << 40
- | (uint64_t) ((uint8_t *) x)[6] << 48
- | (uint64_t) ((uint8_t *) x)[7] << 56;
+ | (uint64_t) ((uint8_t *) x)[1] << 8 | (uint64_t) ((uint8_t *) x)[2] << 16 | (uint64_t) ((uint8_t *) x)[3] << 24 | (uint64_t) ((uint8_t *) x)[4] << 32 | (uint64_t) ((uint8_t *) x)[5] << 40 | (uint64_t) ((uint8_t *) x)[6] << 48 | (uint64_t) ((uint8_t *) x)[7] << 56;
}
return err;
@@ -257,20 +251,17 @@ bare_error bare_get_bool(struct bare_reader *ctx, bool *x)
return bare_get_u8(ctx, (uint8_t *) x);
}
-bare_error
-bare_put_fixed_data(struct bare_writer *ctx, const uint8_t *src, uint64_t sz)
+bare_error bare_put_fixed_data(struct bare_writer *ctx, const uint8_t *src, uint64_t sz)
{
return ctx->write(ctx->buffer, (void *)src, sz);
}
-bare_error
-bare_get_fixed_data(struct bare_reader *ctx, uint8_t *dst, uint64_t sz)
+bare_error bare_get_fixed_data(struct bare_reader *ctx, uint8_t *dst, uint64_t sz)
{
return ctx->read(ctx->buffer, dst, sz);
}
-bare_error
-bare_put_data(struct bare_writer *ctx, const uint8_t *src, uint64_t sz)
+bare_error bare_put_data(struct bare_writer *ctx, const uint8_t *src, uint64_t sz)
{
bare_error err = BARE_ERROR_NONE;
diff --git a/git2d/bare.h b/git2d/bare.h
index e813464..e049dd0 100644
--- a/git2d/bare.h
+++ b/git2d/bare.h
@@ -17,8 +17,8 @@ typedef enum {
BARE_ERROR_INVALID_UTF8,
} bare_error;
-typedef bare_error (*bare_write_func)(void *buffer, const void *src, uint64_t sz);
-typedef bare_error (*bare_read_func)(void *buffer, void *dst, uint64_t sz);
+typedef bare_error(*bare_write_func) (void *buffer, const void *src, uint64_t sz);
+typedef bare_error(*bare_read_func) (void *buffer, void *dst, uint64_t sz);
struct bare_writer {
void *buffer;
@@ -30,27 +30,27 @@ struct bare_reader {
bare_read_func read;
};
-bare_error bare_put_uint(struct bare_writer *ctx, uint64_t x); /* varuint */
-bare_error bare_get_uint(struct bare_reader *ctx, uint64_t *x); /* varuint */
+bare_error bare_put_uint(struct bare_writer *ctx, uint64_t x); /* varuint */
+bare_error bare_get_uint(struct bare_reader *ctx, uint64_t * x); /* varuint */
bare_error bare_put_u8(struct bare_writer *ctx, uint8_t x);
-bare_error bare_get_u8(struct bare_reader *ctx, uint8_t *x);
+bare_error bare_get_u8(struct bare_reader *ctx, uint8_t * x);
bare_error bare_put_u16(struct bare_writer *ctx, uint16_t x);
-bare_error bare_get_u16(struct bare_reader *ctx, uint16_t *x);
+bare_error bare_get_u16(struct bare_reader *ctx, uint16_t * x);
bare_error bare_put_u32(struct bare_writer *ctx, uint32_t x);
-bare_error bare_get_u32(struct bare_reader *ctx, uint32_t *x);
+bare_error bare_get_u32(struct bare_reader *ctx, uint32_t * x);
bare_error bare_put_u64(struct bare_writer *ctx, uint64_t x);
-bare_error bare_get_u64(struct bare_reader *ctx, uint64_t *x);
+bare_error bare_get_u64(struct bare_reader *ctx, uint64_t * x);
-bare_error bare_put_int(struct bare_writer *ctx, int64_t x); /* varint */
-bare_error bare_get_int(struct bare_reader *ctx, int64_t *x); /* varint */
+bare_error bare_put_int(struct bare_writer *ctx, int64_t x); /* varint */
+bare_error bare_get_int(struct bare_reader *ctx, int64_t * x); /* varint */
bare_error bare_put_i8(struct bare_writer *ctx, int8_t x);
-bare_error bare_get_i8(struct bare_reader *ctx, int8_t *x);
+bare_error bare_get_i8(struct bare_reader *ctx, int8_t * x);
bare_error bare_put_i16(struct bare_writer *ctx, int16_t x);
-bare_error bare_get_i16(struct bare_reader *ctx, int16_t *x);
+bare_error bare_get_i16(struct bare_reader *ctx, int16_t * x);
bare_error bare_put_i32(struct bare_writer *ctx, int32_t x);
-bare_error bare_get_i32(struct bare_reader *ctx, int32_t *x);
+bare_error bare_get_i32(struct bare_reader *ctx, int32_t * x);
bare_error bare_put_i64(struct bare_writer *ctx, int64_t x);
-bare_error bare_get_i64(struct bare_reader *ctx, int64_t *x);
+bare_error bare_get_i64(struct bare_reader *ctx, int64_t * x);
bare_error bare_put_f32(struct bare_writer *ctx, float x);
bare_error bare_get_f32(struct bare_reader *ctx, float *x);
@@ -60,13 +60,13 @@ bare_error bare_get_f64(struct bare_reader *ctx, double *x);
bare_error bare_put_bool(struct bare_writer *ctx, bool x);
bare_error bare_get_bool(struct bare_reader *ctx, bool *x);
-bare_error bare_put_fixed_data(struct bare_writer *ctx, const uint8_t *src, uint64_t sz);
-bare_error bare_get_fixed_data(struct bare_reader *ctx, uint8_t *dst, uint64_t sz);
-bare_error bare_put_data(struct bare_writer *ctx, const uint8_t *src, uint64_t sz);
-bare_error bare_get_data(struct bare_reader *ctx, uint8_t *dst, uint64_t sz);
+bare_error bare_put_fixed_data(struct bare_writer *ctx, const uint8_t * src, uint64_t sz);
+bare_error bare_get_fixed_data(struct bare_reader *ctx, uint8_t * dst, uint64_t sz);
+bare_error bare_put_data(struct bare_writer *ctx, const uint8_t * src, uint64_t sz);
+bare_error bare_get_data(struct bare_reader *ctx, uint8_t * dst, uint64_t sz);
bare_error bare_put_str(struct bare_writer *ctx, const char *src, uint64_t sz);
bare_error bare_get_str(struct bare_reader *ctx, char *dst, uint64_t sz);
/* Note that the _str implementation here does not check for UTF-8 validity. */
-#endif /* BARE_H */
+#endif /* BARE_H */
diff --git a/git2d/cmd1.c b/git2d/cmd1.c
index a7d8b07..ec3d1ad 100644
--- a/git2d/cmd1.c
+++ b/git2d/cmd1.c
@@ -90,18 +90,15 @@ int cmd_index(git_repository *repo, struct bare_writer *writer)
/* Title */
size_t msg_len = msg ? strlen(msg) : 0;
- bare_put_data(writer, (const uint8_t *)(msg ? msg : ""),
- msg_len);
+ bare_put_data(writer, (const uint8_t *)(msg ? msg : ""), msg_len);
/* Author's name */
const char *author_name = author ? author->name : "";
- bare_put_data(writer, (const uint8_t *)author_name,
- strlen(author_name));
+ bare_put_data(writer, (const uint8_t *)author_name, strlen(author_name));
/* Author's email */
const char *author_email = author ? author->email : "";
- bare_put_data(writer, (const uint8_t *)author_email,
- strlen(author_email));
+ bare_put_data(writer, (const uint8_t *)author_email, strlen(author_email));
/* Author's date */
/* TODO: Pass the integer instead of a string */
@@ -109,12 +106,10 @@ int cmd_index(git_repository *repo, struct bare_writer *writer)
char timebuf[64];
struct tm *tm = localtime(&time);
if (tm)
- strftime(timebuf, sizeof(timebuf), "%Y-%m-%d %H:%M:%S",
- tm);
+ strftime(timebuf, sizeof(timebuf), "%Y-%m-%d %H:%M:%S", tm);
else
strcpy(timebuf, "unknown");
- bare_put_data(writer, (const uint8_t *)timebuf,
- strlen(timebuf));
+ bare_put_data(writer, (const uint8_t *)timebuf, strlen(timebuf));
git_commit_free(commit);
count++;
diff --git a/git2d/cmd2.c b/git2d/cmd2.c
index dd72ddb..33947c6 100644
--- a/git2d/cmd2.c
+++ b/git2d/cmd2.c
@@ -5,9 +5,7 @@
#include "x.h"
-int
-cmd_treeraw(git_repository *repo, struct bare_reader *reader,
- struct bare_writer *writer)
+int cmd_treeraw(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer)
{
/* Path */
char path[4096] = { 0 };
@@ -62,8 +60,7 @@ cmd_treeraw(git_repository *repo, struct bare_reader *reader,
bare_put_uint(writer, 1);
bare_put_uint(writer, count);
for (size_t i = 0; i < count; i++) {
- const git_tree_entry *subentry =
- git_tree_entry_byindex(subtree, i);
+ const git_tree_entry *subentry = git_tree_entry_byindex(subtree, i);
const char *name = git_tree_entry_name(subentry);
git_otype type = git_tree_entry_type(subentry);
uint32_t mode = git_tree_entry_filemode(subentry);
@@ -77,8 +74,7 @@ cmd_treeraw(git_repository *repo, struct bare_reader *reader,
entry_type = 2;
git_object *subobj = NULL;
- if (git_tree_entry_to_object
- (&subobj, repo, subentry) == 0) {
+ if (git_tree_entry_to_object(&subobj, repo, subentry) == 0) {
git_blob *b = (git_blob *) subobj;
size = git_blob_rawsize(b);
git_blob_free(b);
@@ -88,8 +84,7 @@ cmd_treeraw(git_repository *repo, struct bare_reader *reader,
bare_put_uint(writer, entry_type);
bare_put_uint(writer, mode);
bare_put_uint(writer, size);
- bare_put_data(writer, (const uint8_t *)name,
- strlen(name));
+ bare_put_data(writer, (const uint8_t *)name, strlen(name));
}
if (entry != NULL) {
git_tree_free(subtree);
diff --git a/git2d/cmd_commit.c b/git2d/cmd_commit.c
new file mode 100644
index 0000000..3031088
--- /dev/null
+++ b/git2d/cmd_commit.c
@@ -0,0 +1,188 @@
+/*-
+ * SPDX-License-Identifier: AGPL-3.0-only
+ * SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
+ */
+
+#include "x.h"
+
+int cmd_commit_tree_oid(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer)
+{
+ char hex[64] = { 0 };
+ if (bare_get_data(reader, (uint8_t *) hex, sizeof(hex) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ git_oid oid;
+ if (git_oid_fromstr(&oid, hex) != 0) {
+ bare_put_uint(writer, 14);
+ return -1;
+ }
+ git_commit *commit = NULL;
+ if (git_commit_lookup(&commit, repo, &oid) != 0) {
+ bare_put_uint(writer, 14);
+ return -1;
+ }
+ git_tree *tree = NULL;
+ if (git_commit_tree(&tree, commit) != 0) {
+ git_commit_free(commit);
+ bare_put_uint(writer, 14);
+ return -1;
+ }
+ const git_oid *toid = git_tree_id(tree);
+ bare_put_uint(writer, 0);
+ bare_put_data(writer, toid->id, GIT_OID_RAWSZ);
+ git_tree_free(tree);
+ git_commit_free(commit);
+ return 0;
+}
+
+int cmd_commit_create(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer)
+{
+ char treehex[64] = { 0 };
+ if (bare_get_data(reader, (uint8_t *) treehex, sizeof(treehex) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ git_oid tree_oid;
+ if (git_oid_fromstr(&tree_oid, treehex) != 0) {
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ uint64_t pcnt = 0;
+ if (bare_get_uint(reader, &pcnt) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ git_commit **parents = NULL;
+ if (pcnt > 0) {
+ parents = (git_commit **) calloc(pcnt, sizeof(git_commit *));
+ if (!parents) {
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ for (uint64_t i = 0; i < pcnt; i++) {
+ char phex[64] = { 0 };
+ if (bare_get_data(reader, (uint8_t *) phex, sizeof(phex) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ goto fail;
+ }
+ git_oid poid;
+ if (git_oid_fromstr(&poid, phex) != 0) {
+ bare_put_uint(writer, 15);
+ goto fail;
+ }
+ if (git_commit_lookup(&parents[i], repo, &poid) != 0) {
+ bare_put_uint(writer, 15);
+ goto fail;
+ }
+ }
+ }
+ char aname[512] = { 0 };
+ char aemail[512] = { 0 };
+ if (bare_get_data(reader, (uint8_t *) aname, sizeof(aname) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ goto fail;
+ }
+ if (bare_get_data(reader, (uint8_t *) aemail, sizeof(aemail) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ goto fail;
+ }
+ int64_t when = 0;
+ int64_t tzoff = 0;
+ if (bare_get_i64(reader, &when) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ goto fail;
+ }
+ if (bare_get_i64(reader, &tzoff) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ goto fail;
+ }
+ char *message = NULL;
+ {
+ uint64_t msz = 0;
+ if (bare_get_uint(reader, &msz) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ goto fail;
+ }
+ message = (char *)malloc(msz + 1);
+ if (!message) {
+ bare_put_uint(writer, 15);
+ goto fail;
+ }
+ if (bare_get_fixed_data(reader, (uint8_t *) message, msz) != BARE_ERROR_NONE) {
+ free(message);
+ bare_put_uint(writer, 11);
+ goto fail;
+ }
+ message[msz] = '\0';
+ }
+ git_signature *sig = NULL;
+ if (git_signature_new(&sig, aname, aemail, (git_time_t) when, (int)tzoff) != 0) {
+ free(message);
+ bare_put_uint(writer, 19);
+ goto fail;
+ }
+ git_tree *tree = NULL;
+ if (git_tree_lookup(&tree, repo, &tree_oid) != 0) {
+ git_signature_free(sig);
+ free(message);
+ bare_put_uint(writer, 19);
+ goto fail;
+ }
+ git_oid out;
+ int rc = git_commit_create(&out, repo, NULL, sig, sig, NULL, message, tree,
+ (int)pcnt, (const git_commit **)parents);
+ git_tree_free(tree);
+ git_signature_free(sig);
+ free(message);
+ if (rc != 0) {
+ bare_put_uint(writer, 19);
+ goto fail;
+ }
+ bare_put_uint(writer, 0);
+ bare_put_data(writer, out.id, GIT_OID_RAWSZ);
+ if (parents) {
+ for (uint64_t i = 0; i < pcnt; i++)
+ if (parents[i])
+ git_commit_free(parents[i]);
+ free(parents);
+ }
+ return 0;
+ fail:
+ if (parents) {
+ for (uint64_t i = 0; i < pcnt; i++)
+ if (parents[i])
+ git_commit_free(parents[i]);
+ free(parents);
+ }
+ return -1;
+}
+
+int cmd_update_ref(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer)
+{
+ char refname[4096] = { 0 };
+ char commithex[64] = { 0 };
+ if (bare_get_data(reader, (uint8_t *) refname, sizeof(refname) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ if (bare_get_data(reader, (uint8_t *) commithex, sizeof(commithex) - 1)
+ != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ git_oid oid;
+ if (git_oid_fromstr(&oid, commithex) != 0) {
+ bare_put_uint(writer, 18);
+ return -1;
+ }
+ git_reference *out = NULL;
+ int rc = git_reference_create(&out, repo, refname, &oid, 1, NULL);
+ if (rc != 0) {
+ bare_put_uint(writer, 18);
+ return -1;
+ }
+ git_reference_free(out);
+ bare_put_uint(writer, 0);
+ return 0;
+}
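Each of these handlers reads its arguments as BARE values in a fixed order and answers with a status var-uint (0 on success, a command-specific code otherwise) followed by any payload. For cmd_update_ref the request is the refname and the commit hash in hex, each as length-prefixed data, sent after the session preamble of repository path and command number. A hedged client-side sketch in Go; the socket path, repository path, and hash are placeholders, and only the framing is taken from the C code above:

    package main

    import (
    	"bufio"
    	"encoding/binary"
    	"fmt"
    	"net"
    )

    // BARE var-uints use the same 7-bits-per-octet little-endian encoding as
    // encoding/binary's Uvarint; a data value is a var-uint length followed by
    // the raw bytes, matching bare_put_data/bare_get_data in git2d/bare.c.
    func putUint(w *bufio.Writer, x uint64) {
    	var buf [binary.MaxVarintLen64]byte
    	w.Write(buf[:binary.PutUvarint(buf[:], x)])
    }

    func putData(w *bufio.Writer, b []byte) {
    	putUint(w, uint64(len(b)))
    	w.Write(b)
    }

    func main() {
    	conn, err := net.Dial("unix", "/run/forge/git2d.sock") // placeholder socket path
    	if err != nil {
    		panic(err)
    	}
    	defer conn.Close()

    	w := bufio.NewWriter(conn)
    	putData(w, []byte("/srv/forge/repos/1.git")) // repo path, read once per session
    	putUint(w, 14)                               // command number for update_ref
    	putData(w, []byte("refs/heads/master"))      // refname
    	putData(w, []byte("0123456789abcdef0123456789abcdef01234567")) // commit hex (placeholder)
    	if err := w.Flush(); err != nil {
    		panic(err)
    	}

    	status, err := binary.ReadUvarint(bufio.NewReader(conn))
    	if err != nil {
    		panic(err)
    	}
    	fmt.Println("status:", status) // 0 on success; 18 if the ref could not be created
    }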
diff --git a/git2d/cmd_diff.c b/git2d/cmd_diff.c
new file mode 100644
index 0000000..a7bf0b8
--- /dev/null
+++ b/git2d/cmd_diff.c
@@ -0,0 +1,366 @@
+/*-
+ * SPDX-License-Identifier: AGPL-3.0-only
+ * SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
+ */
+
+#include "x.h"
+
+static int diff_stats_to_string(git_diff *diff, git_buf *out)
+{
+ git_diff_stats *stats = NULL;
+ if (git_diff_get_stats(&stats, diff) != 0) {
+ return -1;
+ }
+ int rc = git_diff_stats_to_buf(out, stats, GIT_DIFF_STATS_FULL, 80);
+ git_diff_stats_free(stats);
+ return rc;
+}
+
+static void split_message(const char *message, char **title_out, char **body_out)
+{
+ *title_out = NULL;
+ *body_out = NULL;
+ if (!message)
+ return;
+ const char *nl = strchr(message, '\n');
+ if (!nl) {
+ *title_out = strdup(message);
+ *body_out = strdup("");
+ return;
+ }
+ size_t title_len = (size_t)(nl - message);
+ *title_out = (char *)malloc(title_len + 1);
+ if (*title_out) {
+ memcpy(*title_out, message, title_len);
+ (*title_out)[title_len] = '\0';
+ }
+ const char *rest = nl + 1;
+ if (*rest == '\n')
+ rest++;
+ *body_out = strdup(rest);
+}
+
+int cmd_format_patch(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer)
+{
+ char hex[64] = { 0 };
+ if (bare_get_data(reader, (uint8_t *) hex, sizeof(hex) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ git_oid oid;
+ if (git_oid_fromstr(&oid, hex) != 0) {
+ bare_put_uint(writer, 14);
+ return -1;
+ }
+
+ git_commit *commit = NULL;
+ if (git_commit_lookup(&commit, repo, &oid) != 0) {
+ bare_put_uint(writer, 14);
+ return -1;
+ }
+
+ git_tree *tree = NULL;
+ if (git_commit_tree(&tree, commit) != 0) {
+ git_commit_free(commit);
+ bare_put_uint(writer, 14);
+ return -1;
+ }
+
+ git_diff *diff = NULL;
+ if (git_commit_parentcount(commit) == 0) {
+ if (git_diff_tree_to_tree(&diff, repo, NULL, tree, NULL) != 0) {
+ git_tree_free(tree);
+ git_commit_free(commit);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ } else {
+ git_commit *parent = NULL;
+ git_tree *ptree = NULL;
+ if (git_commit_parent(&parent, commit, 0) != 0 || git_commit_tree(&ptree, parent) != 0) {
+ if (parent)
+ git_commit_free(parent);
+ git_tree_free(tree);
+ git_commit_free(commit);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ if (git_diff_tree_to_tree(&diff, repo, ptree, tree, NULL) != 0) {
+ git_tree_free(ptree);
+ git_commit_free(parent);
+ git_tree_free(tree);
+ git_commit_free(commit);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ git_tree_free(ptree);
+ git_commit_free(parent);
+ }
+
+ git_buf stats = { 0 };
+ if (diff_stats_to_string(diff, &stats) != 0) {
+ git_diff_free(diff);
+ git_tree_free(tree);
+ git_commit_free(commit);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+
+ git_buf patch = { 0 };
+ if (git_diff_to_buf(&patch, diff, GIT_DIFF_FORMAT_PATCH) != 0) {
+ git_buf_dispose(&stats);
+ git_diff_free(diff);
+ git_tree_free(tree);
+ git_commit_free(commit);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+
+ const git_signature *author = git_commit_author(commit);
+ char *title = NULL, *body = NULL;
+ split_message(git_commit_message(commit), &title, &body);
+
+ char header[2048];
+ char timebuf[64];
+ {
+ time_t t = git_commit_time(commit);
+ struct tm *tm = localtime(&t);
+ if (tm)
+ strftime(timebuf, sizeof(timebuf), "%a, %d %b %Y %H:%M:%S %z", tm);
+ else
+ strcpy(timebuf, "unknown");
+ }
+ snprintf(header, sizeof(header), "From %s Mon Sep 17 00:00:00 2001\nFrom: %s <%s>\nDate: %s\nSubject: [PATCH] %s\n\n", git_oid_tostr_s(&oid), author && author->name ? author->name : "", author && author->email ? author->email : "", timebuf, title ? title : "");
+
+ const char *trailer = "\n-- \n2.48.1\n";
+ size_t header_len = strlen(header);
+ size_t body_len = body ? strlen(body) : 0;
+ size_t trailer_len = strlen(trailer);
+ size_t total = header_len + body_len + (body_len ? 1 : 0) + 4 + stats.size + 1 + patch.size + trailer_len;
+
+ uint8_t *buf = (uint8_t *) malloc(total);
+ if (!buf) {
+ free(title);
+ free(body);
+ git_buf_dispose(&patch);
+ git_buf_dispose(&stats);
+ git_diff_free(diff);
+ git_tree_free(tree);
+ git_commit_free(commit);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ size_t off = 0;
+ memcpy(buf + off, header, header_len);
+ off += header_len;
+ if (body_len) {
+ memcpy(buf + off, body, body_len);
+ off += body_len;
+ buf[off++] = '\n';
+ }
+ memcpy(buf + off, "---\n", 4);
+ off += 4;
+ memcpy(buf + off, stats.ptr, stats.size);
+ off += stats.size;
+ buf[off++] = '\n';
+ memcpy(buf + off, patch.ptr, patch.size);
+ off += patch.size;
+ memcpy(buf + off, trailer, trailer_len);
+ off += trailer_len;
+
+ bare_put_uint(writer, 0);
+ bare_put_data(writer, buf, off);
+
+ free(buf);
+ free(title);
+ free(body);
+ git_buf_dispose(&patch);
+ git_buf_dispose(&stats);
+ git_diff_free(diff);
+ git_tree_free(tree);
+ git_commit_free(commit);
+ return 0;
+}
+
+int cmd_commit_diff(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer)
+{
+ char hex[64] = { 0 };
+ if (bare_get_data(reader, (uint8_t *) hex, sizeof(hex) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ git_oid oid;
+ if (git_oid_fromstr(&oid, hex) != 0) {
+ bare_put_uint(writer, 14);
+ return -1;
+ }
+
+ git_commit *commit = NULL;
+ if (git_commit_lookup(&commit, repo, &oid) != 0) {
+ bare_put_uint(writer, 14);
+ return -1;
+ }
+
+ git_tree *tree = NULL;
+ if (git_commit_tree(&tree, commit) != 0) {
+ git_commit_free(commit);
+ bare_put_uint(writer, 14);
+ return -1;
+ }
+
+ git_diff *diff = NULL;
+ git_oid parent_oid = { 0 };
+ if (git_commit_parentcount(commit) == 0) {
+ if (git_diff_tree_to_tree(&diff, repo, NULL, tree, NULL) != 0) {
+ git_tree_free(tree);
+ git_commit_free(commit);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ } else {
+ git_commit *parent = NULL;
+ git_tree *ptree = NULL;
+ if (git_commit_parent(&parent, commit, 0) != 0 || git_commit_tree(&ptree, parent) != 0) {
+ if (parent)
+ git_commit_free(parent);
+ git_tree_free(tree);
+ git_commit_free(commit);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ git_oid_cpy(&parent_oid, git_commit_id(parent));
+ if (git_diff_tree_to_tree(&diff, repo, ptree, tree, NULL) != 0) {
+ git_tree_free(ptree);
+ git_commit_free(parent);
+ git_tree_free(tree);
+ git_commit_free(commit);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ git_tree_free(ptree);
+ git_commit_free(parent);
+ }
+
+ git_buf stats = { 0 };
+ if (diff_stats_to_string(diff, &stats) != 0) {
+ git_diff_free(diff);
+ git_tree_free(tree);
+ git_commit_free(commit);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ git_buf patch = { 0 };
+ if (git_diff_to_buf(&patch, diff, GIT_DIFF_FORMAT_PATCH) != 0) {
+ git_buf_dispose(&stats);
+ git_diff_free(diff);
+ git_tree_free(tree);
+ git_commit_free(commit);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+
+ bare_put_uint(writer, 0);
+ bare_put_data(writer, parent_oid.id, GIT_OID_RAWSZ);
+ bare_put_data(writer, (const uint8_t *)stats.ptr, stats.size);
+ bare_put_data(writer, (const uint8_t *)patch.ptr, patch.size);
+
+ git_buf_dispose(&patch);
+ git_buf_dispose(&stats);
+ git_diff_free(diff);
+ git_tree_free(tree);
+ git_commit_free(commit);
+ return 0;
+}
+
+int cmd_merge_base(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer)
+{
+ char hex1[64] = { 0 };
+ char hex2[64] = { 0 };
+ if (bare_get_data(reader, (uint8_t *) hex1, sizeof(hex1) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ if (bare_get_data(reader, (uint8_t *) hex2, sizeof(hex2) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ git_oid a, b, out;
+ if (git_oid_fromstr(&a, hex1) != 0 || git_oid_fromstr(&b, hex2) != 0) {
+ bare_put_uint(writer, 17);
+ return -1;
+ }
+ int rc = git_merge_base(&out, repo, &a, &b);
+ if (rc == GIT_ENOTFOUND) {
+ bare_put_uint(writer, 16);
+ return -1;
+ }
+ if (rc != 0) {
+ bare_put_uint(writer, 17);
+ return -1;
+ }
+ bare_put_uint(writer, 0);
+ bare_put_data(writer, out.id, GIT_OID_RAWSZ);
+ return 0;
+}
+
+int cmd_log(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer)
+{
+ char spec[4096] = { 0 };
+ uint64_t limit = 0;
+ if (bare_get_data(reader, (uint8_t *) spec, sizeof(spec) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ if (bare_get_uint(reader, &limit) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+
+ git_object *obj = NULL;
+ if (spec[0] == '\0')
+ strcpy(spec, "HEAD");
+ if (git_revparse_single(&obj, repo, spec) != 0) {
+ bare_put_uint(writer, 4);
+ return -1;
+ }
+ git_commit *start = (git_commit *) obj;
+
+ git_revwalk *walk = NULL;
+ if (git_revwalk_new(&walk, repo) != 0) {
+ git_commit_free(start);
+ bare_put_uint(writer, 9);
+ return -1;
+ }
+ git_revwalk_sorting(walk, GIT_SORT_TIME);
+ git_revwalk_push(walk, git_commit_id(start));
+ git_commit_free(start);
+
+ bare_put_uint(writer, 0);
+ git_oid oid;
+ uint64_t count = 0;
+ while ((limit == 0 || count < limit)
+ && git_revwalk_next(&oid, walk) == 0) {
+ git_commit *c = NULL;
+ if (git_commit_lookup(&c, repo, &oid) != 0)
+ break;
+ const char *msg = git_commit_summary(c);
+ const git_signature *author = git_commit_author(c);
+ time_t t = git_commit_time(c);
+ char timebuf[64];
+ struct tm *tm = localtime(&t);
+ if (tm)
+ strftime(timebuf, sizeof(timebuf), "%Y-%m-%d %H:%M:%S", tm);
+ else
+ strcpy(timebuf, "unknown");
+
+ bare_put_data(writer, oid.id, GIT_OID_RAWSZ);
+ bare_put_data(writer, (const uint8_t *)(msg ? msg : ""), msg ? strlen(msg) : 0);
+ bare_put_data(writer, (const uint8_t *)(author && author->name ? author->name : ""), author && author->name ? strlen(author->name) : 0);
+ bare_put_data(writer, (const uint8_t *)(author && author->email ? author->email : ""), author && author->email ? strlen(author->email) : 0);
+ bare_put_data(writer, (const uint8_t *)timebuf, strlen(timebuf));
+ git_commit_free(c);
+ count++;
+ }
+ git_revwalk_free(walk);
+ return 0;
+}
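For reference, the reply of cmd_commit_diff above is a status var-uint followed by three length-prefixed data values. A Go-side view of that layout (the type is illustrative; the field order is exactly what the handler writes):

    // Reply layout of cmd_commit_diff, in the order the handler writes it.
    type commitDiffReply struct {
    	ParentOID []byte // raw 20-byte OID; left all-zero for a root commit
    	Stats     string // git_diff_stats_to_buf output (GIT_DIFF_STATS_FULL, width 80)
    	Patch     string // unified diff in GIT_DIFF_FORMAT_PATCH form
    }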
diff --git a/git2d/cmd_init.c b/git2d/cmd_init.c
new file mode 100644
index 0000000..962d229
--- /dev/null
+++ b/git2d/cmd_init.c
@@ -0,0 +1,65 @@
+/*-
+ * SPDX-License-Identifier: AGPL-3.0-only
+ * SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
+ */
+
+#include "x.h"
+
+int cmd_init_repo(const char *path, struct bare_reader *reader, struct bare_writer *writer)
+{
+ char hooks[4096] = { 0 };
+ if (bare_get_data(reader, (uint8_t *) hooks, sizeof(hooks) - 1) != BARE_ERROR_NONE) {
+ fprintf(stderr, "init_repo: protocol error reading hooks for path '%s'\n", path);
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+
+ fprintf(stderr, "init_repo: starting for path='%s' hooks='%s'\n", path, hooks);
+
+ if (mkdir(path, 0700) != 0 && errno != EEXIST) {
+ fprintf(stderr, "init_repo: mkdir failed for '%s': %s\n", path, strerror(errno));
+ bare_put_uint(writer, 24);
+ return -1;
+ }
+
+ git_repository *repo = NULL;
+ git_repository_init_options opts;
+ git_repository_init_options_init(&opts, GIT_REPOSITORY_INIT_OPTIONS_VERSION);
+ opts.flags = GIT_REPOSITORY_INIT_BARE;
+ if (git_repository_init_ext(&repo, path, &opts) != 0) {
+ const git_error *ge = git_error_last();
+ fprintf(stderr, "init_repo: git_repository_init_ext failed: %s (klass=%d)\n", ge && ge->message ? ge->message : "(no message)", ge ? ge->klass : 0);
+ bare_put_uint(writer, 20);
+ return -1;
+ }
+ git_config *cfg = NULL;
+ if (git_repository_config(&cfg, repo) != 0) {
+ git_repository_free(repo);
+ const git_error *ge = git_error_last();
+ fprintf(stderr, "init_repo: open config failed: %s (klass=%d)\n", ge && ge->message ? ge->message : "(no message)", ge ? ge->klass : 0);
+ bare_put_uint(writer, 21);
+ return -1;
+ }
+ if (git_config_set_string(cfg, "core.hooksPath", hooks) != 0) {
+ git_config_free(cfg);
+ git_repository_free(repo);
+ const git_error *ge = git_error_last();
+ fprintf(stderr, "init_repo: set hooksPath failed: %s (klass=%d) hooks='%s'\n", ge && ge->message ? ge->message : "(no message)", ge ? ge->klass : 0, hooks);
+ bare_put_uint(writer, 22);
+ return -1;
+ }
+ if (git_config_set_bool(cfg, "receive.advertisePushOptions", 1) != 0) {
+ git_config_free(cfg);
+ git_repository_free(repo);
+ const git_error *ge = git_error_last();
+ fprintf(stderr, "init_repo: set advertisePushOptions failed: %s (klass=%d)\n", ge && ge->message ? ge->message : "(no message)", ge ? ge->klass : 0);
+ bare_put_uint(writer, 23);
+ return -1;
+ }
+ git_config_free(cfg);
+
+ git_repository_free(repo);
+ fprintf(stderr, "init_repo: success for path='%s'\n", path);
+ bare_put_uint(writer, 0);
+ return 0;
+}
diff --git a/git2d/cmd_ref.c b/git2d/cmd_ref.c
new file mode 100644
index 0000000..f4bae4a
--- /dev/null
+++ b/git2d/cmd_ref.c
@@ -0,0 +1,113 @@
+/*-
+ * SPDX-License-Identifier: AGPL-3.0-only
+ * SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
+ */
+
+#include "x.h"
+
+static int write_oid(struct bare_writer *writer, const git_oid *oid)
+{
+ return bare_put_data(writer, oid->id, GIT_OID_RAWSZ) == BARE_ERROR_NONE ? 0 : -1;
+}
+
+int cmd_resolve_ref(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer)
+{
+ char type[32] = { 0 };
+ char name[4096] = { 0 };
+ if (bare_get_data(reader, (uint8_t *) type, sizeof(type) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ if (bare_get_data(reader, (uint8_t *) name, sizeof(name) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+
+ git_oid oid = { 0 };
+ int err = 0;
+
+ if (type[0] == '\0') {
+ git_object *obj = NULL;
+ err = git_revparse_single(&obj, repo, "HEAD^{commit}");
+ if (err != 0) {
+ bare_put_uint(writer, 12);
+ return -1;
+ }
+ git_commit *c = (git_commit *) obj;
+ git_oid_cpy(&oid, git_commit_id(c));
+ git_commit_free(c);
+ } else if (strcmp(type, "commit") == 0) {
+ err = git_oid_fromstr(&oid, name);
+ if (err != 0) {
+ bare_put_uint(writer, 12);
+ return -1;
+ }
+ } else if (strcmp(type, "branch") == 0) {
+ char fullref[4608];
+ snprintf(fullref, sizeof(fullref), "refs/heads/%s", name);
+ git_object *obj = NULL;
+ err = git_revparse_single(&obj, repo, fullref);
+ if (err != 0) {
+ bare_put_uint(writer, 12);
+ return -1;
+ }
+ git_commit *c = (git_commit *) obj;
+ git_oid_cpy(&oid, git_commit_id(c));
+ git_commit_free(c);
+ } else if (strcmp(type, "tag") == 0) {
+ char spec[4608];
+ snprintf(spec, sizeof(spec), "refs/tags/%s^{commit}", name);
+ git_object *obj = NULL;
+ err = git_revparse_single(&obj, repo, spec);
+ if (err != 0) {
+ bare_put_uint(writer, 12);
+ return -1;
+ }
+ git_commit *c = (git_commit *) obj;
+ git_oid_cpy(&oid, git_commit_id(c));
+ git_commit_free(c);
+ } else {
+ bare_put_uint(writer, 12);
+ return -1;
+ }
+
+ bare_put_uint(writer, 0);
+ return write_oid(writer, &oid);
+}
+
+int cmd_list_branches(git_repository *repo, struct bare_writer *writer)
+{
+ git_branch_iterator *it = NULL;
+ int err = git_branch_iterator_new(&it, repo, GIT_BRANCH_LOCAL);
+ if (err != 0) {
+ bare_put_uint(writer, 13);
+ return -1;
+ }
+ size_t count = 0;
+ git_reference *ref;
+ git_branch_t type;
+ while (git_branch_next(&ref, &type, it) == 0) {
+ count++;
+ git_reference_free(ref);
+ }
+ git_branch_iterator_free(it);
+
+ err = git_branch_iterator_new(&it, repo, GIT_BRANCH_LOCAL);
+ if (err != 0) {
+ bare_put_uint(writer, 13);
+ return -1;
+ }
+
+ bare_put_uint(writer, 0);
+ bare_put_uint(writer, count);
+ while (git_branch_next(&ref, &type, it) == 0) {
+ const char *name = NULL;
+ git_branch_name(&name, ref);
+ if (name == NULL)
+ name = "";
+ bare_put_data(writer, (const uint8_t *)name, strlen(name));
+ git_reference_free(ref);
+ }
+ git_branch_iterator_free(it);
+ return 0;
+}
diff --git a/git2d/cmd_tree.c b/git2d/cmd_tree.c
new file mode 100644
index 0000000..d18e817
--- /dev/null
+++ b/git2d/cmd_tree.c
@@ -0,0 +1,120 @@
+/*-
+ * SPDX-License-Identifier: AGPL-3.0-only
+ * SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
+ */
+
+#include "x.h"
+
+int cmd_tree_list_by_oid(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer)
+{
+ char hex[64] = { 0 };
+ if (bare_get_data(reader, (uint8_t *) hex, sizeof(hex) - 1) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ git_oid oid;
+ if (git_oid_fromstr(&oid, hex) != 0) {
+ bare_put_uint(writer, 4);
+ return -1;
+ }
+ git_tree *tree = NULL;
+ if (git_tree_lookup(&tree, repo, &oid) != 0) {
+ bare_put_uint(writer, 4);
+ return -1;
+ }
+ size_t count = git_tree_entrycount(tree);
+ bare_put_uint(writer, 0);
+ bare_put_uint(writer, count);
+ for (size_t i = 0; i < count; i++) {
+ const git_tree_entry *e = git_tree_entry_byindex(tree, i);
+ const char *name = git_tree_entry_name(e);
+ uint32_t mode = git_tree_entry_filemode(e);
+ const git_oid *id = git_tree_entry_id(e);
+ bare_put_uint(writer, mode);
+ bare_put_data(writer, (const uint8_t *)name, strlen(name));
+ bare_put_data(writer, id->id, GIT_OID_RAWSZ);
+ }
+ git_tree_free(tree);
+ return 0;
+}
+
+int cmd_write_tree(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer)
+{
+ uint64_t count = 0;
+ if (bare_get_uint(reader, &count) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ git_treebuilder *bld = NULL;
+ if (git_treebuilder_new(&bld, repo, NULL) != 0) {
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ for (uint64_t i = 0; i < count; i++) {
+ uint64_t mode = 0;
+ if (bare_get_uint(reader, &mode) != BARE_ERROR_NONE) {
+ git_treebuilder_free(bld);
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ char name[4096] = { 0 };
+ if (bare_get_data(reader, (uint8_t *) name, sizeof(name) - 1) != BARE_ERROR_NONE) {
+ git_treebuilder_free(bld);
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ uint8_t idraw[GIT_OID_RAWSZ] = { 0 };
+ if (bare_get_fixed_data(reader, idraw, GIT_OID_RAWSZ) != BARE_ERROR_NONE) {
+ git_treebuilder_free(bld);
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ git_oid id;
+ memcpy(id.id, idraw, GIT_OID_RAWSZ);
+ git_filemode_t fm = (git_filemode_t) mode;
+ if (git_treebuilder_insert(NULL, bld, name, &id, fm) != 0) {
+ git_treebuilder_free(bld);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ }
+ git_oid out;
+ if (git_treebuilder_write(&out, bld) != 0) {
+ git_treebuilder_free(bld);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ git_treebuilder_free(bld);
+ bare_put_uint(writer, 0);
+ bare_put_data(writer, out.id, GIT_OID_RAWSZ);
+ return 0;
+}
+
+int cmd_blob_write(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer)
+{
+ uint64_t sz = 0;
+ if (bare_get_uint(reader, &sz) != BARE_ERROR_NONE) {
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ uint8_t *data = (uint8_t *) malloc(sz);
+ if (!data) {
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ if (bare_get_fixed_data(reader, data, sz) != BARE_ERROR_NONE) {
+ free(data);
+ bare_put_uint(writer, 11);
+ return -1;
+ }
+ git_oid oid;
+ if (git_blob_create_frombuffer(&oid, repo, data, sz) != 0) {
+ free(data);
+ bare_put_uint(writer, 15);
+ return -1;
+ }
+ free(data);
+ bare_put_uint(writer, 0);
+ bare_put_data(writer, oid.id, GIT_OID_RAWSZ);
+ return 0;
+}
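cmd_write_tree above expects a var-uint entry count followed by one record per entry: the filemode as a var-uint, the name as length-prefixed data, and the object ID as a fixed 20-byte raw value (read with bare_get_fixed_data, so no length prefix). An illustrative Go view of one record; the struct itself is not part of the patch:

    // One cmd_write_tree entry, in wire order.
    type writeTreeEntry struct {
    	Mode uint64   // git filemode, e.g. 0100644 for a regular blob
    	Name string   // length-prefixed data
    	OID  [20]byte // raw, fixed size; no length prefix
    }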
diff --git a/git2d/main.c b/git2d/main.c
index 9140c1d..8518960 100644
--- a/git2d/main.c
+++ b/git2d/main.c
@@ -34,9 +34,7 @@ int main(int argc, char **argv)
if (bind(sock, (struct sockaddr *)&addr, sizeof(struct sockaddr_un))) {
if (errno == EADDRINUSE) {
unlink(argv[1]);
- if (bind
- (sock, (struct sockaddr *)&addr,
- sizeof(struct sockaddr_un)))
+ if (bind(sock, (struct sockaddr *)&addr, sizeof(struct sockaddr_un)))
err(1, "bind");
} else {
err(1, "bind");
@@ -50,7 +48,8 @@ int main(int argc, char **argv)
if (pthread_attr_init(&pthread_attr) != 0)
err(1, "pthread_attr_init");
- if (pthread_attr_setdetachstate(&pthread_attr, PTHREAD_CREATE_DETACHED) != 0)
+ if (pthread_attr_setdetachstate(&pthread_attr, PTHREAD_CREATE_DETACHED)
+ != 0)
err(1, "pthread_attr_setdetachstate");
for (;;) {
@@ -69,7 +68,7 @@ int main(int argc, char **argv)
pthread_t thread;
- if (pthread_create (&thread, &pthread_attr, session, (void *)conn) != 0) {
+ if (pthread_create(&thread, &pthread_attr, session, (void *)conn) != 0) {
close(*conn);
free(conn);
warn("pthread_create");
diff --git a/git2d/session.c b/git2d/session.c
index 0a945ee..b5691d3 100644
--- a/git2d/session.c
+++ b/git2d/session.c
@@ -29,25 +29,30 @@ void *session(void *_conn)
goto close;
}
path[sizeof(path) - 1] = '\0';
-
- /* Open repo */
- git_repository *repo = NULL;
- err =
- git_repository_open_ext(&repo, path,
- GIT_REPOSITORY_OPEN_NO_SEARCH |
- GIT_REPOSITORY_OPEN_BARE |
- GIT_REPOSITORY_OPEN_NO_DOTGIT, NULL);
- if (err != 0) {
- bare_put_uint(&writer, 1);
- goto close;
- }
+ fprintf(stderr, "session: path='%s'\n", path);
/* Command */
uint64_t cmd = 0;
err = bare_get_uint(&reader, &cmd);
if (err != BARE_ERROR_NONE) {
bare_put_uint(&writer, 2);
- goto free_repo;
+ goto close;
+ }
+ fprintf(stderr, "session: cmd=%llu\n", (unsigned long long)cmd);
+
+ /* Repo init does not require opening an existing repo so let's just do it here */
+ if (cmd == 15) {
+ fprintf(stderr, "session: handling init for '%s'\n", path);
+ cmd_init_repo(path, &reader, &writer); /* the handler writes its own status in every branch */
+ goto close;
+ }
+
+ git_repository *repo = NULL;
+ err = git_repository_open_ext(&repo, path, GIT_REPOSITORY_OPEN_NO_SEARCH | GIT_REPOSITORY_OPEN_BARE | GIT_REPOSITORY_OPEN_NO_DOTGIT, NULL);
+ if (err != 0) {
+ bare_put_uint(&writer, 1);
+ goto close;
}
switch (cmd) {
case 1:
@@ -60,6 +65,66 @@ void *session(void *_conn)
if (err != 0)
goto free_repo;
break;
+ case 3:
+ err = cmd_resolve_ref(repo, &reader, &writer);
+ if (err != 0)
+ goto free_repo;
+ break;
+ case 4:
+ err = cmd_list_branches(repo, &writer);
+ if (err != 0)
+ goto free_repo;
+ break;
+ case 5:
+ err = cmd_format_patch(repo, &reader, &writer);
+ if (err != 0)
+ goto free_repo;
+ break;
+ case 6:
+ err = cmd_commit_diff(repo, &reader, &writer);
+ if (err != 0)
+ goto free_repo;
+ break;
+ case 7:
+ err = cmd_merge_base(repo, &reader, &writer);
+ if (err != 0)
+ goto free_repo;
+ break;
+ case 8:
+ err = cmd_log(repo, &reader, &writer);
+ if (err != 0)
+ goto free_repo;
+ break;
+ case 9:
+ err = cmd_tree_list_by_oid(repo, &reader, &writer);
+ if (err != 0)
+ goto free_repo;
+ break;
+ case 10:
+ err = cmd_write_tree(repo, &reader, &writer);
+ if (err != 0)
+ goto free_repo;
+ break;
+ case 11:
+ err = cmd_blob_write(repo, &reader, &writer);
+ if (err != 0)
+ goto free_repo;
+ break;
+ case 12:
+ err = cmd_commit_tree_oid(repo, &reader, &writer);
+ if (err != 0)
+ goto free_repo;
+ break;
+ case 13:
+ err = cmd_commit_create(repo, &reader, &writer);
+ if (err != 0)
+ goto free_repo;
+ break;
+ case 14:
+ err = cmd_update_ref(repo, &reader, &writer);
+ if (err != 0)
+ goto free_repo;
+ break;
case 0:
bare_put_uint(&writer, 3);
goto free_repo;
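The dispatch above fixes the command numbering for the new handlers. Collected in one place as an illustrative Go enum (forged's own constant names are not shown in this patch; 1 and 2 correspond to the pre-existing handlers in cmd1.c and cmd2.c):

    // Command numbers as dispatched in git2d/session.c.
    const (
    	cmdIndex         = 1
    	cmdTreeRaw       = 2
    	cmdResolveRef    = 3
    	cmdListBranches  = 4
    	cmdFormatPatch   = 5
    	cmdCommitDiff    = 6
    	cmdMergeBase     = 7
    	cmdLog           = 8
    	cmdTreeListByOID = 9
    	cmdWriteTree     = 10
    	cmdBlobWrite     = 11
    	cmdCommitTreeOID = 12
    	cmdCommitCreate  = 13
    	cmdUpdateRef     = 14
    	cmdInitRepo      = 15 // handled before the repository is opened
    )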
diff --git a/git2d/x.h b/git2d/x.h
index a6da50f..b5b299a 100644
--- a/git2d/x.h
+++ b/git2d/x.h
@@ -26,13 +26,29 @@ typedef struct {
int fd;
} conn_io_t;
-
bare_error conn_read(void *buffer, void *dst, uint64_t sz);
bare_error conn_write(void *buffer, const void *src, uint64_t sz);
-void * session(void *_conn);
+void *session(void *_conn);
+
+int cmd_index(git_repository * repo, struct bare_writer *writer);
+int cmd_treeraw(git_repository * repo, struct bare_reader *reader, struct bare_writer *writer);
+
+int cmd_resolve_ref(git_repository * repo, struct bare_reader *reader, struct bare_writer *writer);
+int cmd_list_branches(git_repository * repo, struct bare_writer *writer);
+int cmd_format_patch(git_repository * repo, struct bare_reader *reader, struct bare_writer *writer);
+int cmd_commit_diff(git_repository * repo, struct bare_reader *reader, struct bare_writer *writer);
+int cmd_merge_base(git_repository * repo, struct bare_reader *reader, struct bare_writer *writer);
+int cmd_log(git_repository * repo, struct bare_reader *reader, struct bare_writer *writer);
+
+int cmd_tree_list_by_oid(git_repository * repo, struct bare_reader *reader, struct bare_writer *writer);
+int cmd_write_tree(git_repository * repo, struct bare_reader *reader, struct bare_writer *writer);
+int cmd_blob_write(git_repository * repo, struct bare_reader *reader, struct bare_writer *writer);
+
+int cmd_commit_tree_oid(git_repository * repo, struct bare_reader *reader, struct bare_writer *writer);
+int cmd_commit_create(git_repository * repo, struct bare_reader *reader, struct bare_writer *writer);
+int cmd_update_ref(git_repository * repo, struct bare_reader *reader, struct bare_writer *writer);
-int cmd_index(git_repository *repo, struct bare_writer *writer);
-int cmd_treeraw(git_repository *repo, struct bare_reader *reader, struct bare_writer *writer);
+int cmd_init_repo(const char *path, struct bare_reader *reader, struct bare_writer *writer);
-#endif // X_H
+#endif // X_H
diff --git a/go.mod b/go.mod
index 9515c2f..7b10d50 100644
--- a/go.mod
+++ b/go.mod
@@ -3,48 +3,19 @@ module go.lindenii.runxiyu.org/forge
go 1.24.1
require (
- github.com/alecthomas/chroma/v2 v2.20.0
- github.com/bluekeyes/go-gitdiff v0.8.1
- github.com/emersion/go-message v0.18.2
- github.com/emersion/go-smtp v0.24.0
github.com/gliderlabs/ssh v0.3.8
- github.com/go-git/go-git/v5 v5.16.2
github.com/jackc/pgx/v5 v5.7.5
- github.com/microcosm-cc/bluemonday v1.0.27
- github.com/tdewolff/minify/v2 v2.23.11
github.com/yuin/goldmark v1.7.13
golang.org/x/crypto v0.41.0
+ golang.org/x/sync v0.16.0
)
require (
- dario.cat/mergo v1.0.2 // indirect
- github.com/Microsoft/go-winio v0.6.2 // indirect
- github.com/ProtonMail/go-crypto v1.3.0 // indirect
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect
- github.com/aymerick/douceur v0.2.0 // indirect
- github.com/cloudflare/circl v1.6.1 // indirect
- github.com/cyphar/filepath-securejoin v0.4.1 // indirect
- github.com/dlclark/regexp2 v1.11.5 // indirect
- github.com/emersion/go-sasl v0.0.0-20241020182733-b788ff22d5a6 // indirect
- github.com/emirpasic/gods v1.18.1 // indirect
- github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
- github.com/go-git/go-billy/v5 v5.6.2 // indirect
- github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
- github.com/gorilla/css v1.0.1 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
github.com/jackc/puddle/v2 v2.2.2 // indirect
- github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
- github.com/kevinburke/ssh_config v1.2.0 // indirect
- github.com/pjbgf/sha1cd v0.4.0 // indirect
- github.com/sergi/go-diff v1.4.0 // indirect
- github.com/skeema/knownhosts v1.3.1 // indirect
- github.com/tdewolff/parse/v2 v2.8.2-0.20250806174018-50048bb39781 // indirect
- github.com/xanzy/ssh-agent v0.3.3 // indirect
- golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect
- golang.org/x/net v0.43.0 // indirect
- golang.org/x/sync v0.16.0 // indirect
+ github.com/stretchr/testify v1.10.0 // indirect
golang.org/x/sys v0.35.0 // indirect
golang.org/x/text v0.28.0 // indirect
- gopkg.in/warnings.v0 v0.1.2 // indirect
)
diff --git a/go.sum b/go.sum
index 7263ca6..b1faca9 100644
--- a/go.sum
+++ b/go.sum
@@ -1,61 +1,10 @@
-dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8=
-dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA=
-github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
-github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
-github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
-github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw=
-github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE=
-github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
-github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
-github.com/alecthomas/chroma/v2 v2.20.0 h1:sfIHpxPyR07/Oylvmcai3X/exDlE8+FA820NTz+9sGw=
-github.com/alecthomas/chroma/v2 v2.20.0/go.mod h1:e7tViK0xh/Nf4BYHl00ycY6rV7b8iXBksI9E359yNmA=
-github.com/alecthomas/repr v0.5.1 h1:E3G4t2QbHTSNpPKBgMTln5KLkZHLOcU7r37J4pXBuIg=
-github.com/alecthomas/repr v0.5.1/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
-github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
-github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
-github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
-github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
-github.com/bluekeyes/go-gitdiff v0.8.1 h1:lL1GofKMywO17c0lgQmJYcKek5+s8X6tXVNOLxy4smI=
-github.com/bluekeyes/go-gitdiff v0.8.1/go.mod h1:WWAk1Mc6EgWarCrPFO+xeYlujPu98VuLW3Tu+B/85AE=
-github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0=
-github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
-github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
-github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ=
-github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
-github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o=
-github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE=
-github.com/emersion/go-message v0.18.2 h1:rl55SQdjd9oJcIoQNhubD2Acs1E6IzlZISRTK7x/Lpg=
-github.com/emersion/go-message v0.18.2/go.mod h1:XpJyL70LwRvq2a8rVbHXikPgKj8+aI0kGdHlg16ibYA=
-github.com/emersion/go-sasl v0.0.0-20241020182733-b788ff22d5a6 h1:oP4q0fw+fOSWn3DfFi4EXdT+B+gTtzx8GC9xsc26Znk=
-github.com/emersion/go-sasl v0.0.0-20241020182733-b788ff22d5a6/go.mod h1:iL2twTeMvZnrg54ZoPDNfJaJaqy0xIQFuBdrLsmspwQ=
-github.com/emersion/go-smtp v0.24.0 h1:g6AfoF140mvW0vLNPD/LuCBLEAdlxOjIXqbIkJIS6Wk=
-github.com/emersion/go-smtp v0.24.0/go.mod h1:ZtRRkbTyp2XTHCA+BmyTFTrj8xY4I+b4McvHxCU2gsQ=
-github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
-github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c=
github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU=
-github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
-github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
-github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM=
-github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU=
-github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4=
-github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII=
-github.com/go-git/go-git/v5 v5.16.2 h1:fT6ZIOjE5iEnkzKyxTHK1W4HGAsPhqEqiSAssSO77hM=
-github.com/go-git/go-git/v5 v5.16.2/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8=
-github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ=
-github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
-github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
-github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
-github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
-github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
-github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
-github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
@@ -64,111 +13,26 @@ github.com/jackc/pgx/v5 v5.7.5 h1:JHGfMnQY+IEtGM63d+NGMjoRpysB2JBwDr5fsngwmJs=
github.com/jackc/pgx/v5 v5.7.5/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
-github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
-github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
-github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
-github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
-github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
-github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
-github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
-github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
-github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
-github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
-github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
-github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
-github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k=
-github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY=
-github.com/pjbgf/sha1cd v0.4.0 h1:NXzbL1RvjTUi6kgYZCX3fPwwl27Q1LJndxtUDVfJGRY=
-github.com/pjbgf/sha1cd v0.4.0/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
-github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
-github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
-github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
-github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw=
-github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
-github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
-github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
-github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
-github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
-github.com/tdewolff/minify/v2 v2.23.11 h1:cZqTVCtuVvPC8/GbCvYgIcdAQGmoxEObZzKeKIUixTE=
-github.com/tdewolff/minify/v2 v2.23.11/go.mod h1:vmkbfGQ5hp/eYB+TswNWKma67S0a+32HBL+mFWxjZ2Q=
-github.com/tdewolff/parse/v2 v2.8.2-0.20250806174018-50048bb39781 h1:2qicgFovKg1XtX7Wf6GwexUdpb7q/jMIE2IgkYsVAvE=
-github.com/tdewolff/parse/v2 v2.8.2-0.20250806174018-50048bb39781/go.mod h1:Hwlni2tiVNKyzR1o6nUs4FOF07URA+JLBLd6dlIXYqo=
-github.com/tdewolff/test v1.0.11 h1:FdLbwQVHxqG16SlkGveC0JVyrJN62COWTRyUFzfbtBE=
-github.com/tdewolff/test v1.0.11/go.mod h1:XPuWBzvdUzhCuxWO1ojpXsyzsA5bFoS3tO/Q3kFuTG8=
-github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=
-github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
-github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yuin/goldmark v1.7.13 h1:GPddIs617DnBLFFVJFgpo1aBfe/4xcvMc3SB5t/D0pA=
github.com/yuin/goldmark v1.7.13/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
-golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
-golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
-golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw=
-golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM=
-golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
-golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
-golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
-golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
-golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
-golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
-golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
-golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
-golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
-golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
-golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
-golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
-golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
-golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
-golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
-gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
-gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
-gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
-gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/hookc/.gitignore b/hookc/.gitignore
deleted file mode 100644
index 7348daa..0000000
--- a/hookc/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/hookc
diff --git a/scripts/update_deps b/scripts/update_deps
deleted file mode 100755
index 8983a60..0000000
--- a/scripts/update_deps
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/sh
-#
-# SPDX-License-Identifier: AGPL-3.0-only
-# SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-set -eux
-
-go get -t -u -x -v ./...
-go mod tidy -x -v
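-# (Annotation: -t includes test dependencies, -u upgrades modules to newer
-# minor/patch versions, and -x/-v print the commands and packages as they run.)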
diff --git a/sql/schema.sql b/sql/schema.sql
deleted file mode 100644
index 92ae605..0000000
--- a/sql/schema.sql
+++ /dev/null
@@ -1,195 +0,0 @@
--- SPDX-License-Identifier: AGPL-3.0-only
--- SPDX-FileCopyrightText: Copyright (c) 2025 Runxi Yu <https://runxiyu.org>
-
-CREATE TABLE groups (
- id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
- name TEXT NOT NULL,
- parent_group INTEGER REFERENCES groups(id) ON DELETE CASCADE,
- description TEXT,
- UNIQUE NULLS NOT DISTINCT (parent_group, name)
-);
-
-CREATE TABLE repos (
- id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
-	group_id INTEGER NOT NULL REFERENCES groups(id) ON DELETE RESTRICT, -- arguably should be CASCADE, but deleting the corresponding Git repositories on disk would also need to be handled
- contrib_requirements TEXT NOT NULL CHECK (contrib_requirements IN ('closed', 'registered_user', 'federated', 'ssh_pubkey', 'public')),
- name TEXT NOT NULL,
- UNIQUE(group_id, name),
- description TEXT,
- filesystem_path TEXT
-);
-
-CREATE TABLE mailing_lists (
- id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
- group_id INTEGER NOT NULL REFERENCES groups(id) ON DELETE RESTRICT,
- name TEXT NOT NULL,
- UNIQUE(group_id, name),
- description TEXT
-);
-
-CREATE TABLE mailing_list_emails (
- id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
- list_id INTEGER NOT NULL REFERENCES mailing_lists(id) ON DELETE CASCADE,
- title TEXT NOT NULL,
- sender TEXT NOT NULL,
- date TIMESTAMP NOT NULL,
- content BYTEA NOT NULL
-);
-
-CREATE TABLE users (
- id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
- username TEXT UNIQUE,
- type TEXT NOT NULL CHECK (type IN ('pubkey_only', 'federated', 'registered', 'admin')),
- password TEXT
-);
-
-CREATE TABLE ssh_public_keys (
- id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
- key_string TEXT NOT NULL,
- user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
- CONSTRAINT unique_key_string EXCLUDE USING HASH (key_string WITH =)
-);
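--- (Annotation: the exclusion constraint above enforces uniqueness of key_string
--- through a hash index; presumably preferred over a plain UNIQUE constraint
--- because long SSH public keys can exceed btree index row-size limits.)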
-
-CREATE TABLE sessions (
- user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
- session_id TEXT PRIMARY KEY NOT NULL,
- UNIQUE(user_id, session_id)
-);
-
-CREATE TABLE user_group_roles (
- group_id INTEGER NOT NULL REFERENCES groups(id) ON DELETE CASCADE,
- user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
- PRIMARY KEY(user_id, group_id)
-);
-
-CREATE TABLE federated_identities (
- user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
- service TEXT NOT NULL,
- remote_username TEXT NOT NULL,
- PRIMARY KEY(user_id, service)
-);
-
--- Ticket tracking
-
-CREATE TABLE ticket_trackers (
- id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
- group_id INTEGER NOT NULL REFERENCES groups(id) ON DELETE RESTRICT,
- name TEXT NOT NULL,
- description TEXT,
- UNIQUE(group_id, name)
-);
-
-CREATE TABLE tickets (
- id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
- tracker_id INTEGER NOT NULL REFERENCES ticket_trackers(id) ON DELETE CASCADE,
- tracker_local_id INTEGER NOT NULL,
- title TEXT NOT NULL,
- description TEXT,
- UNIQUE(tracker_id, tracker_local_id)
-);
-
-CREATE OR REPLACE FUNCTION create_tracker_ticket_sequence()
-RETURNS TRIGGER AS $$
-DECLARE
- seq_name TEXT := 'tracker_ticket_seq_' || NEW.id;
-BEGIN
- EXECUTE format('CREATE SEQUENCE %I', seq_name);
- RETURN NEW;
-END;
-$$ LANGUAGE plpgsql;
-CREATE TRIGGER after_insert_ticket_tracker
-AFTER INSERT ON ticket_trackers
-FOR EACH ROW
-EXECUTE FUNCTION create_tracker_ticket_sequence();
-
-CREATE OR REPLACE FUNCTION drop_tracker_ticket_sequence()
-RETURNS TRIGGER AS $$
-DECLARE
- seq_name TEXT := 'tracker_ticket_seq_' || OLD.id;
-BEGIN
- EXECUTE format('DROP SEQUENCE IF EXISTS %I', seq_name);
- RETURN OLD;
-END;
-$$ LANGUAGE plpgsql;
-CREATE TRIGGER before_delete_ticket_tracker
-BEFORE DELETE ON ticket_trackers
-FOR EACH ROW
-EXECUTE FUNCTION drop_tracker_ticket_sequence();
-
-CREATE OR REPLACE FUNCTION assign_tracker_local_id()
-RETURNS TRIGGER AS $$
-DECLARE
- seq_name TEXT := 'tracker_ticket_seq_' || NEW.tracker_id;
-BEGIN
- IF NEW.tracker_local_id IS NULL THEN
- EXECUTE format('SELECT nextval(%L)', seq_name)
- INTO NEW.tracker_local_id;
- END IF;
- RETURN NEW;
-END;
-$$ LANGUAGE plpgsql;
-CREATE TRIGGER before_insert_ticket
-BEFORE INSERT ON tickets
-FOR EACH ROW
-EXECUTE FUNCTION assign_tracker_local_id();
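--- Illustrative example (assumes a tracker with id 1 already exists, which also
--- means tracker_ticket_seq_1 was created by the AFTER INSERT trigger above):
---   INSERT INTO tickets (tracker_id, title) VALUES (1, 'First ticket');
---   INSERT INTO tickets (tracker_id, title) VALUES (1, 'Second ticket');
--- The BEFORE INSERT trigger fills tracker_local_id from tracker_ticket_seq_1,
--- so these rows get 1 and 2 while tickets in other trackers number independently.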
-
--- Merge requests
-
-CREATE TABLE merge_requests (
- id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
- repo_id INTEGER NOT NULL REFERENCES repos(id) ON DELETE CASCADE,
- repo_local_id INTEGER NOT NULL,
- title TEXT,
- creator INTEGER REFERENCES users(id) ON DELETE SET NULL,
- source_ref TEXT NOT NULL,
- destination_branch TEXT,
- status TEXT NOT NULL CHECK (status IN ('open', 'merged', 'closed')),
- UNIQUE (repo_id, repo_local_id),
- UNIQUE (repo_id, source_ref, destination_branch)
-);
-
-CREATE OR REPLACE FUNCTION create_repo_mr_sequence()
-RETURNS TRIGGER AS $$
-DECLARE
- seq_name TEXT := 'repo_mr_seq_' || NEW.id;
-BEGIN
- EXECUTE format('CREATE SEQUENCE %I', seq_name);
- RETURN NEW;
-END;
-$$ LANGUAGE plpgsql;
-CREATE TRIGGER after_insert_repo
-AFTER INSERT ON repos
-FOR EACH ROW
-EXECUTE FUNCTION create_repo_mr_sequence();
-
-CREATE OR REPLACE FUNCTION drop_repo_mr_sequence()
-RETURNS TRIGGER AS $$
-DECLARE
- seq_name TEXT := 'repo_mr_seq_' || OLD.id;
-BEGIN
- EXECUTE format('DROP SEQUENCE IF EXISTS %I', seq_name);
- RETURN OLD;
-END;
-$$ LANGUAGE plpgsql;
-CREATE TRIGGER before_delete_repo
-BEFORE DELETE ON repos
-FOR EACH ROW
-EXECUTE FUNCTION drop_repo_mr_sequence();
-
-
-CREATE OR REPLACE FUNCTION assign_repo_local_id()
-RETURNS TRIGGER AS $$
-DECLARE
- seq_name TEXT := 'repo_mr_seq_' || NEW.repo_id;
-BEGIN
- IF NEW.repo_local_id IS NULL THEN
- EXECUTE format('SELECT nextval(%L)', seq_name)
- INTO NEW.repo_local_id;
- END IF;
- RETURN NEW;
-END;
-$$ LANGUAGE plpgsql;
-CREATE TRIGGER before_insert_merge_request
-BEFORE INSERT ON merge_requests
-FOR EACH ROW
-EXECUTE FUNCTION assign_repo_local_id();
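--- Illustrative example (assumes a repo with id 1 already exists, so repo_mr_seq_1
--- was created by the AFTER INSERT trigger on repos), mirroring the ticket scheme:
---   INSERT INTO merge_requests (repo_id, source_ref, status)
---   VALUES (1, 'refs/heads/feature', 'open');
--- The BEFORE INSERT trigger assigns repo_local_id from repo_mr_seq_1, giving each
--- repository its own merge-request numbering starting from 1.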