forked from tangled.org/core
Monorepo for Tangled — https://tangled.org

Compare changes


Changed files
+12315 -4870
.air
api
appview
commitverify
config
db
email
indexer
issues
knots
labels
mentions
middleware
models
notifications
notify
oauth
ogcard
pages
pagination
pipelines
pulls
repo
reporesolver
serververify
settings
spindles
state
strings
validator
cmd
genjwks
crypto
docs
guard
idresolver
jetstream
knotserver
lexicons
nix
orm
patchutil
rbac
scripts
sets
spindle
types
workflow
+8 -6
.air/appview.toml
···
-[build]
-cmd = "tailwindcss -i input.css -o ./appview/pages/static/tw.css && go build -o .bin/app ./cmd/appview/main.go"
-bin = ";set -o allexport && source .env && set +o allexport; .bin/app"
root = "."
+tmp_dir = "out"
-exclude_regex = [".*_templ.go"]
-include_ext = ["go", "templ", "html", "css"]
-exclude_dir = ["target", "atrium", "nix"]
+[build]
+cmd = "go build -o out/appview.out cmd/appview/main.go"
+bin = "out/appview.out"
+
+include_ext = ["go"]
+exclude_dir = ["avatar", "camo", "indexes", "nix", "tmp"]
+stop_on_error = true
+11
.air/knot.toml
···
+root = "."
+tmp_dir = "out"
+
+[build]
+cmd = 'go build -ldflags "-X tangled.org/core/knotserver.version=$(git describe --tags --long)" -o out/knot.out cmd/knot/main.go'
+bin = "out/knot.out"
+args_bin = ["server"]
+
+include_ext = ["go"]
+exclude_dir = ["avatar", "camo", "indexes", "nix", "tmp"]
+stop_on_error = true
-7
.air/knotserver.toml
···
-[build]
-cmd = 'go build -ldflags "-X tangled.org/core/knotserver.version=$(git describe --tags --long)" -o .bin/knot ./cmd/knot/'
-bin = ".bin/knot server"
-root = "."
-
-exclude_regex = [""]
-include_ext = ["go", "templ"]
+10
.air/spindle.toml
···
+root = "."
+tmp_dir = "out"
+
+[build]
+cmd = "go build -o out/spindle.out cmd/spindle/main.go"
+bin = "out/spindle.out"
+
+include_ext = ["go"]
+exclude_dir = ["avatar", "camo", "indexes", "nix", "tmp"]
+stop_on_error = true
+13
.editorconfig
···
+root = true
+
+[*.html]
+indent_size = 2
+
+[*.json]
+indent_size = 2
+
+[*.nix]
+indent_size = 2
+
+[*.yml]
+indent_size = 2
+1
.gitignore
···
.env
*.rdb
.envrc
+**/*.bleve
# Created if following hacking.md
genjwks.out
/nix/vm-data
+3 -1
api/tangled/actorprofile.go
···
Location *string `json:"location,omitempty" cborgen:"location,omitempty"`
// pinnedRepositories: Any ATURI, it is up to appviews to validate these fields.
PinnedRepositories []string `json:"pinnedRepositories,omitempty" cborgen:"pinnedRepositories,omitempty"`
-Stats []string `json:"stats,omitempty" cborgen:"stats,omitempty"`
+// pronouns: Preferred gender pronouns.
+Pronouns *string `json:"pronouns,omitempty" cborgen:"pronouns,omitempty"`
+Stats []string `json:"stats,omitempty" cborgen:"stats,omitempty"`
}
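Not part of the diff: a minimal sketch of how the new optional pronouns field could be set when writing a profile record, assuming the generated Go type for sh.tangled.actor.profile is the ActorProfile struct whose fields are shown above; the pinned-repository AT-URI is a placeholder.

package main

import (
	"fmt"

	"tangled.org/core/api/tangled"
)

func main() {
	pronouns := "they/them"

	// Optional fields remain pointers/omitempty slices, so records written
	// before this change keep round-tripping without the new field.
	profile := tangled.ActorProfile{
		Pronouns: &pronouns,
		PinnedRepositories: []string{
			"at://did:plc:example/sh.tangled.repo/3kexample", // placeholder AT-URI
		},
	}

	fmt.Println(*profile.Pronouns)
}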
+845 -10
api/tangled/cbor_gen.go
···
}
cw := cbg.NewCborWriter(w)
-fieldCount := 7
+fieldCount := 8
if t.Description == nil {
fieldCount--
···
}
if t.PinnedRepositories == nil {
+
fieldCount--
+
}
+
+
if t.Pronouns == nil {
fieldCount--
}
···
return err
}
if _, err := cw.WriteString(string(*t.Location)); err != nil {
+
return err
+
}
+
}
+
}
+
+
// t.Pronouns (string) (string)
+
if t.Pronouns != nil {
+
+
if len("pronouns") > 1000000 {
+
return xerrors.Errorf("Value in field \"pronouns\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("pronouns"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("pronouns")); err != nil {
+
return err
+
}
+
+
if t.Pronouns == nil {
+
if _, err := cw.Write(cbg.CborNull); err != nil {
+
return err
+
}
+
} else {
+
if len(*t.Pronouns) > 1000000 {
+
return xerrors.Errorf("Value in field t.Pronouns was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(*t.Pronouns))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(*t.Pronouns)); err != nil {
return err
}
}
···
}
t.Location = (*string)(&sval)
+
}
+
}
+
// t.Pronouns (string) (string)
+
case "pronouns":
+
+
{
+
b, err := cr.ReadByte()
+
if err != nil {
+
return err
+
}
+
if b != cbg.CborNull[0] {
+
if err := cr.UnreadByte(); err != nil {
+
return err
+
}
+
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.Pronouns = (*string)(&sval)
}
}
// t.Description (string) (string)
···
cw := cbg.NewCborWriter(w)
-fieldCount := 8
+fieldCount := 10
if t.Description == nil {
fieldCount--
···
if t.Spindle == nil {
+
fieldCount--
+
}
+
+
if t.Topics == nil {
+
fieldCount--
+
}
+
+
if t.Website == nil {
fieldCount--
···
+
// t.Topics ([]string) (slice)
+
if t.Topics != nil {
+
+
if len("topics") > 1000000 {
+
return xerrors.Errorf("Value in field \"topics\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("topics"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("topics")); err != nil {
+
return err
+
}
+
+
if len(t.Topics) > 8192 {
+
return xerrors.Errorf("Slice value in field t.Topics was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.Topics))); err != nil {
+
return err
+
}
+
for _, v := range t.Topics {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
+
// t.Spindle (string) (string)
if t.Spindle != nil {
···
+
// t.Website (string) (string)
+
if t.Website != nil {
+
+
if len("website") > 1000000 {
+
return xerrors.Errorf("Value in field \"website\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("website"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("website")); err != nil {
+
return err
+
}
+
+
if t.Website == nil {
+
if _, err := cw.Write(cbg.CborNull); err != nil {
+
return err
+
}
+
} else {
+
if len(*t.Website) > 1000000 {
+
return xerrors.Errorf("Value in field t.Website was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(*t.Website))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(*t.Website)); err != nil {
+
return err
+
}
+
}
+
}
+
// t.CreatedAt (string) (string)
if len("createdAt") > 1000000 {
return xerrors.Errorf("Value in field \"createdAt\" was too long")
···
t.Source = (*string)(&sval)
+
// t.Topics ([]string) (slice)
+
case "topics":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.Topics: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.Topics = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.Topics[i] = string(sval)
+
}
+
+
}
+
}
// t.Spindle (string) (string)
case "spindle":
···
t.Spindle = (*string)(&sval)
+
}
+
}
+
// t.Website (string) (string)
+
case "website":
+
+
{
+
b, err := cr.ReadByte()
+
if err != nil {
+
return err
+
}
+
if b != cbg.CborNull[0] {
+
if err := cr.UnreadByte(); err != nil {
+
return err
+
}
+
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.Website = (*string)(&sval)
// t.CreatedAt (string) (string)
···
cw := cbg.NewCborWriter(w)
-fieldCount := 5
+fieldCount := 7
if t.Body == nil {
+
fieldCount--
+
}
+
+
if t.Mentions == nil {
+
fieldCount--
+
}
+
+
if t.References == nil {
fieldCount--
···
return err
+
// t.Mentions ([]string) (slice)
+
if t.Mentions != nil {
+
+
if len("mentions") > 1000000 {
+
return xerrors.Errorf("Value in field \"mentions\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("mentions"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("mentions")); err != nil {
+
return err
+
}
+
+
if len(t.Mentions) > 8192 {
+
return xerrors.Errorf("Slice value in field t.Mentions was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.Mentions))); err != nil {
+
return err
+
}
+
for _, v := range t.Mentions {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
+
// t.CreatedAt (string) (string)
if len("createdAt") > 1000000 {
return xerrors.Errorf("Value in field \"createdAt\" was too long")
···
if _, err := cw.WriteString(string(t.CreatedAt)); err != nil {
return err
+
+
// t.References ([]string) (slice)
+
if t.References != nil {
+
+
if len("references") > 1000000 {
+
return xerrors.Errorf("Value in field \"references\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("references"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("references")); err != nil {
+
return err
+
}
+
+
if len(t.References) > 8192 {
+
return xerrors.Errorf("Slice value in field t.References was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.References))); err != nil {
+
return err
+
}
+
for _, v := range t.References {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
return nil
···
n := extra
-nameBuf := make([]byte, 9)
+nameBuf := make([]byte, 10)
for i := uint64(0); i < n; i++ {
nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000)
if err != nil {
···
t.Title = string(sval)
+
// t.Mentions ([]string) (slice)
+
case "mentions":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.Mentions: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.Mentions = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.Mentions[i] = string(sval)
+
}
+
+
}
+
}
// t.CreatedAt (string) (string)
case "createdAt":
···
t.CreatedAt = string(sval)
+
}
+
// t.References ([]string) (slice)
+
case "references":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.References: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.References = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.References[i] = string(sval)
+
}
+
+
}
default:
···
cw := cbg.NewCborWriter(w)
-fieldCount := 5
+fieldCount := 7
+
+
if t.Mentions == nil {
+
fieldCount--
+
}
+
+
if t.References == nil {
+
fieldCount--
+
}
if t.ReplyTo == nil {
fieldCount--
···
if _, err := cw.WriteString(string(*t.ReplyTo)); err != nil {
return err
+
}
+
}
+
+
// t.Mentions ([]string) (slice)
+
if t.Mentions != nil {
+
+
if len("mentions") > 1000000 {
+
return xerrors.Errorf("Value in field \"mentions\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("mentions"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("mentions")); err != nil {
+
return err
+
}
+
+
if len(t.Mentions) > 8192 {
+
return xerrors.Errorf("Slice value in field t.Mentions was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.Mentions))); err != nil {
+
return err
+
}
+
for _, v := range t.Mentions {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
···
if _, err := cw.WriteString(string(t.CreatedAt)); err != nil {
return err
+
+
// t.References ([]string) (slice)
+
if t.References != nil {
+
+
if len("references") > 1000000 {
+
return xerrors.Errorf("Value in field \"references\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("references"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("references")); err != nil {
+
return err
+
}
+
+
if len(t.References) > 8192 {
+
return xerrors.Errorf("Slice value in field t.References was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.References))); err != nil {
+
return err
+
}
+
for _, v := range t.References {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
return nil
···
n := extra
-nameBuf := make([]byte, 9)
+nameBuf := make([]byte, 10)
for i := uint64(0); i < n; i++ {
nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000)
if err != nil {
···
t.ReplyTo = (*string)(&sval)
+
// t.Mentions ([]string) (slice)
+
case "mentions":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.Mentions: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.Mentions = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.Mentions[i] = string(sval)
+
}
+
+
}
+
}
// t.CreatedAt (string) (string)
case "createdAt":
···
t.CreatedAt = string(sval)
+
// t.References ([]string) (slice)
+
case "references":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.References: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.References = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.References[i] = string(sval)
+
}
+
+
}
+
}
default:
// Field doesn't exist on this type, so ignore it
···
cw := cbg.NewCborWriter(w)
-fieldCount := 7
+fieldCount := 9
if t.Body == nil {
fieldCount--
+
if t.Mentions == nil {
+
fieldCount--
+
}
+
+
if t.References == nil {
+
fieldCount--
+
}
+
if t.Source == nil {
fieldCount--
···
return err
+
// t.Mentions ([]string) (slice)
+
if t.Mentions != nil {
+
+
if len("mentions") > 1000000 {
+
return xerrors.Errorf("Value in field \"mentions\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("mentions"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("mentions")); err != nil {
+
return err
+
}
+
+
if len(t.Mentions) > 8192 {
+
return xerrors.Errorf("Slice value in field t.Mentions was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.Mentions))); err != nil {
+
return err
+
}
+
for _, v := range t.Mentions {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
+
// t.CreatedAt (string) (string)
if len("createdAt") > 1000000 {
return xerrors.Errorf("Value in field \"createdAt\" was too long")
···
if _, err := cw.WriteString(string(t.CreatedAt)); err != nil {
return err
+
+
// t.References ([]string) (slice)
+
if t.References != nil {
+
+
if len("references") > 1000000 {
+
return xerrors.Errorf("Value in field \"references\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("references"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("references")); err != nil {
+
return err
+
}
+
+
if len(t.References) > 8192 {
+
return xerrors.Errorf("Slice value in field t.References was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.References))); err != nil {
+
return err
+
}
+
for _, v := range t.References {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
return nil
···
n := extra
-nameBuf := make([]byte, 9)
+nameBuf := make([]byte, 10)
for i := uint64(0); i < n; i++ {
nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000)
if err != nil {
···
+
// t.Mentions ([]string) (slice)
+
case "mentions":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.Mentions: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.Mentions = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.Mentions[i] = string(sval)
+
}
+
+
}
+
}
// t.CreatedAt (string) (string)
case "createdAt":
···
t.CreatedAt = string(sval)
+
// t.References ([]string) (slice)
+
case "references":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.References: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.References = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.References[i] = string(sval)
+
}
+
+
}
+
}
default:
// Field doesn't exist on this type, so ignore it
···
cw := cbg.NewCborWriter(w)
+fieldCount := 6
-if _, err := cw.Write([]byte{164}); err != nil {
+if t.Mentions == nil {
+fieldCount--
+}
+
+if t.References == nil {
+fieldCount--
+}
+
+if _, err := cw.Write(cbg.CborEncodeMajorType(cbg.MajMap, uint64(fieldCount))); err != nil {
return err
···
return err
+
// t.Mentions ([]string) (slice)
+
if t.Mentions != nil {
+
+
if len("mentions") > 1000000 {
+
return xerrors.Errorf("Value in field \"mentions\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("mentions"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("mentions")); err != nil {
+
return err
+
}
+
+
if len(t.Mentions) > 8192 {
+
return xerrors.Errorf("Slice value in field t.Mentions was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.Mentions))); err != nil {
+
return err
+
}
+
for _, v := range t.Mentions {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
+
// t.CreatedAt (string) (string)
if len("createdAt") > 1000000 {
return xerrors.Errorf("Value in field \"createdAt\" was too long")
···
if _, err := cw.WriteString(string(t.CreatedAt)); err != nil {
return err
+
+
// t.References ([]string) (slice)
+
if t.References != nil {
+
+
if len("references") > 1000000 {
+
return xerrors.Errorf("Value in field \"references\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("references"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("references")); err != nil {
+
return err
+
}
+
+
if len(t.References) > 8192 {
+
return xerrors.Errorf("Slice value in field t.References was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.References))); err != nil {
+
return err
+
}
+
for _, v := range t.References {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
return nil
···
n := extra
-nameBuf := make([]byte, 9)
+nameBuf := make([]byte, 10)
for i := uint64(0); i < n; i++ {
nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000)
if err != nil {
···
t.LexiconTypeID = string(sval)
+
// t.Mentions ([]string) (slice)
+
case "mentions":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.Mentions: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.Mentions = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.Mentions[i] = string(sval)
+
}
+
+
}
+
}
// t.CreatedAt (string) (string)
case "createdAt":
···
t.CreatedAt = string(sval)
+
}
+
// t.References ([]string) (slice)
+
case "references":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.References: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.References = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.References[i] = string(sval)
+
}
+
+
}
default:
+7 -5
api/tangled/issuecomment.go
···
} //
// RECORDTYPE: RepoIssueComment
type RepoIssueComment struct {
-LexiconTypeID string `json:"$type,const=sh.tangled.repo.issue.comment" cborgen:"$type,const=sh.tangled.repo.issue.comment"`
-Body string `json:"body" cborgen:"body"`
-CreatedAt string `json:"createdAt" cborgen:"createdAt"`
-Issue string `json:"issue" cborgen:"issue"`
-ReplyTo *string `json:"replyTo,omitempty" cborgen:"replyTo,omitempty"`
+LexiconTypeID string `json:"$type,const=sh.tangled.repo.issue.comment" cborgen:"$type,const=sh.tangled.repo.issue.comment"`
+Body string `json:"body" cborgen:"body"`
+CreatedAt string `json:"createdAt" cborgen:"createdAt"`
+Issue string `json:"issue" cborgen:"issue"`
+Mentions []string `json:"mentions,omitempty" cborgen:"mentions,omitempty"`
+References []string `json:"references,omitempty" cborgen:"references,omitempty"`
+ReplyTo *string `json:"replyTo,omitempty" cborgen:"replyTo,omitempty"`
}
+6 -4
api/tangled/pullcomment.go
···
} //
// RECORDTYPE: RepoPullComment
type RepoPullComment struct {
-LexiconTypeID string `json:"$type,const=sh.tangled.repo.pull.comment" cborgen:"$type,const=sh.tangled.repo.pull.comment"`
-Body string `json:"body" cborgen:"body"`
-CreatedAt string `json:"createdAt" cborgen:"createdAt"`
-Pull string `json:"pull" cborgen:"pull"`
+LexiconTypeID string `json:"$type,const=sh.tangled.repo.pull.comment" cborgen:"$type,const=sh.tangled.repo.pull.comment"`
+Body string `json:"body" cborgen:"body"`
+CreatedAt string `json:"createdAt" cborgen:"createdAt"`
+Mentions []string `json:"mentions,omitempty" cborgen:"mentions,omitempty"`
+Pull string `json:"pull" cborgen:"pull"`
+References []string `json:"references,omitempty" cborgen:"references,omitempty"`
}
+13 -1
api/tangled/repoblob.go
···
// RepoBlob_Output is the output of a sh.tangled.repo.blob call.
type RepoBlob_Output struct {
// content: File content (base64 encoded for binary files)
-Content string `json:"content" cborgen:"content"`
+Content *string `json:"content,omitempty" cborgen:"content,omitempty"`
// encoding: Content encoding
Encoding *string `json:"encoding,omitempty" cborgen:"encoding,omitempty"`
// isBinary: Whether the file is binary
···
Ref string `json:"ref" cborgen:"ref"`
// size: File size in bytes
Size *int64 `json:"size,omitempty" cborgen:"size,omitempty"`
+// submodule: Submodule information if path is a submodule
+Submodule *RepoBlob_Submodule `json:"submodule,omitempty" cborgen:"submodule,omitempty"`
}
// RepoBlob_Signature is a "signature" in the sh.tangled.repo.blob schema.
···
Name string `json:"name" cborgen:"name"`
// when: Author timestamp
When string `json:"when" cborgen:"when"`
+}
+
+// RepoBlob_Submodule is a "submodule" in the sh.tangled.repo.blob schema.
+type RepoBlob_Submodule struct {
+// branch: Branch to track in the submodule
+Branch *string `json:"branch,omitempty" cborgen:"branch,omitempty"`
+// name: Submodule name
+Name string `json:"name" cborgen:"name"`
+// url: Submodule repository URL
+Url string `json:"url" cborgen:"url"`
}
// RepoBlob calls the XRPC method "sh.tangled.repo.blob".
+7 -5
api/tangled/repoissue.go
···
} //
// RECORDTYPE: RepoIssue
type RepoIssue struct {
-LexiconTypeID string `json:"$type,const=sh.tangled.repo.issue" cborgen:"$type,const=sh.tangled.repo.issue"`
-Body *string `json:"body,omitempty" cborgen:"body,omitempty"`
-CreatedAt string `json:"createdAt" cborgen:"createdAt"`
-Repo string `json:"repo" cborgen:"repo"`
-Title string `json:"title" cborgen:"title"`
+LexiconTypeID string `json:"$type,const=sh.tangled.repo.issue" cborgen:"$type,const=sh.tangled.repo.issue"`
+Body *string `json:"body,omitempty" cborgen:"body,omitempty"`
+CreatedAt string `json:"createdAt" cborgen:"createdAt"`
+Mentions []string `json:"mentions,omitempty" cborgen:"mentions,omitempty"`
+References []string `json:"references,omitempty" cborgen:"references,omitempty"`
+Repo string `json:"repo" cborgen:"repo"`
+Title string `json:"title" cborgen:"title"`
}
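Not part of the diff: a hedged sketch of the new mentions/references fields on the RepoIssue record defined above. The lexicon text is not included in this compare, so the DID and AT-URI values below are placeholders, not prescribed formats.

package main

import (
	"fmt"
	"time"

	"tangled.org/core/api/tangled"
)

func main() {
	body := "Tracks the regression discussed in the referenced pull."

	issue := tangled.RepoIssue{
		Repo:      "at://did:plc:example/sh.tangled.repo/3krepo", // placeholder
		Title:     "Example issue with mentions and references",
		Body:      &body,
		CreatedAt: time.Now().UTC().Format(time.RFC3339),
		// Both fields are omitempty, so existing records without them
		// continue to decode unchanged.
		Mentions:   []string{"did:plc:example"},                                  // placeholder DID
		References: []string{"at://did:plc:example/sh.tangled.repo.pull/3kpull"}, // placeholder
	}

	fmt.Println(issue.Title, len(issue.References))
}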
+2
api/tangled/repopull.go
···
LexiconTypeID string `json:"$type,const=sh.tangled.repo.pull" cborgen:"$type,const=sh.tangled.repo.pull"`
Body *string `json:"body,omitempty" cborgen:"body,omitempty"`
CreatedAt string `json:"createdAt" cborgen:"createdAt"`
+Mentions []string `json:"mentions,omitempty" cborgen:"mentions,omitempty"`
Patch string `json:"patch" cborgen:"patch"`
+References []string `json:"references,omitempty" cborgen:"references,omitempty"`
Source *RepoPull_Source `json:"source,omitempty" cborgen:"source,omitempty"`
Target *RepoPull_Target `json:"target" cborgen:"target"`
Title string `json:"title" cborgen:"title"`
-4
api/tangled/repotree.go
···
// RepoTree_TreeEntry is a "treeEntry" in the sh.tangled.repo.tree schema.
type RepoTree_TreeEntry struct {
-// is_file: Whether this entry is a file
-Is_file bool `json:"is_file" cborgen:"is_file"`
-// is_subtree: Whether this entry is a directory/subtree
-Is_subtree bool `json:"is_subtree" cborgen:"is_subtree"`
Last_commit *RepoTree_LastCommit `json:"last_commit,omitempty" cborgen:"last_commit,omitempty"`
// mode: File mode
Mode string `json:"mode" cborgen:"mode"`
+4
api/tangled/tangledrepo.go
···
Source *string `json:"source,omitempty" cborgen:"source,omitempty"`
// spindle: CI runner to send jobs to and receive results from
Spindle *string `json:"spindle,omitempty" cborgen:"spindle,omitempty"`
+// topics: Topics related to the repo
+Topics []string `json:"topics,omitempty" cborgen:"topics,omitempty"`
+// website: Any URI related to the repo
+Website *string `json:"website,omitempty" cborgen:"website,omitempty"`
}
+6 -45
appview/commitverify/verify.go
···
import (
"log"
-"github.com/go-git/go-git/v5/plumbing/object"
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
"tangled.org/core/crypto"
···
return ""
}
-func GetVerifiedObjectCommits(e db.Execer, emailToDid map[string]string, commits []*object.Commit) (VerifiedCommits, error) {
-ndCommits := []types.NiceDiff{}
-for _, commit := range commits {
-ndCommits = append(ndCommits, ObjectCommitToNiceDiff(commit))
-}
-return GetVerifiedCommits(e, emailToDid, ndCommits)
-}
-
-func GetVerifiedCommits(e db.Execer, emailToDid map[string]string, ndCommits []types.NiceDiff) (VerifiedCommits, error) {
+func GetVerifiedCommits(e db.Execer, emailToDid map[string]string, ndCommits []types.Commit) (VerifiedCommits, error) {
vcs := VerifiedCommits{}
didPubkeyCache := make(map[string][]models.PublicKey)
for _, commit := range ndCommits {
-c := commit.Commit
-
-committerEmail := c.Committer.Email
+committerEmail := commit.Committer.Email
if did, exists := emailToDid[committerEmail]; exists {
// check if we've already fetched public keys for this did
pubKeys, ok := didPubkeyCache[did]
···
}
// try to verify with any associated pubkeys
+payload := commit.Payload()
+signature := commit.PGPSignature
for _, pk := range pubKeys {
-if _, ok := crypto.VerifyCommitSignature(pk.Key, commit); ok {
+if _, ok := crypto.VerifySignature([]byte(pk.Key), []byte(signature), []byte(payload)); ok {
fp, err := crypto.SSHFingerprint(pk.Key)
if err != nil {
log.Println("error computing ssh fingerprint:", err)
}
-vc := verifiedCommit{fingerprint: fp, hash: c.This}
+vc := verifiedCommit{fingerprint: fp, hash: commit.This}
vcs[vc] = struct{}{}
break
}
···
return vcs, nil
}
-
-// ObjectCommitToNiceDiff is a compatibility function to convert a
-// commit object into a NiceDiff structure.
-func ObjectCommitToNiceDiff(c *object.Commit) types.NiceDiff {
-var niceDiff types.NiceDiff
-
-// set commit information
-niceDiff.Commit.Message = c.Message
-niceDiff.Commit.Author = c.Author
-niceDiff.Commit.This = c.Hash.String()
-niceDiff.Commit.Committer = c.Committer
-niceDiff.Commit.Tree = c.TreeHash.String()
-niceDiff.Commit.PGPSignature = c.PGPSignature
-
-changeId, ok := c.ExtraHeaders["change-id"]
-if ok {
-niceDiff.Commit.ChangedId = string(changeId)
-}
-
-// set parent hash if available
-if len(c.ParentHashes) > 0 {
-niceDiff.Commit.Parent = c.ParentHashes[0].String()
-}
-
-// XXX: Stats and Diff fields are typically populated
-// after fetching the actual diff information, which isn't
-// directly available in the commit object itself.
-
-return niceDiff
-}
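Not part of the diff: a sketch of how a caller might drive the reworked verification path, using only the calls visible above (crypto.VerifySignature over the commit payload and crypto.SSHFingerprint). The example package name is hypothetical.

package example

import (
	"tangled.org/core/appview/models"
	"tangled.org/core/crypto"
	"tangled.org/core/types"
)

// verifyOne mirrors the loop in GetVerifiedCommits: it checks one commit's
// signature against a set of candidate public keys and reports the matching
// key's fingerprint. The non-boolean return of crypto.VerifySignature is not
// shown in this diff, so it is discarded here.
func verifyOne(commit types.Commit, pubKeys []models.PublicKey) (string, bool) {
	payload := commit.Payload()
	signature := commit.PGPSignature

	for _, pk := range pubKeys {
		if _, ok := crypto.VerifySignature([]byte(pk.Key), []byte(signature), []byte(payload)); ok {
			fp, err := crypto.SSHFingerprint(pk.Key)
			if err != nil {
				return "", false
			}
			return fp, true
		}
	}
	return "", false
}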
+15 -2
appview/config/config.go
···
CookieSecret string `env:"COOKIE_SECRET, default=00000000000000000000000000000000"`
DbPath string `env:"DB_PATH, default=appview.db"`
ListenAddr string `env:"LISTEN_ADDR, default=0.0.0.0:3000"`
-AppviewHost string `env:"APPVIEW_HOST, default=https://tangled.sh"`
+AppviewHost string `env:"APPVIEW_HOST, default=https://tangled.org"`
+AppviewName string `env:"APPVIEW_Name, default=Tangled"`
Dev bool `env:"DEV, default=false"`
DisallowedNicknamesFile string `env:"DISALLOWED_NICKNAMES_FILE"`
···
}
type OAuthConfig struct {
-Jwks string `env:"JWKS"`
+ClientSecret string `env:"CLIENT_SECRET"`
+ClientKid string `env:"CLIENT_KID"`
+}
+
+type PlcConfig struct {
+PLCURL string `env:"URL, default=https://plc.directory"`
}
type JetstreamConfig struct {
···
TurnstileSecretKey string `env:"TURNSTILE_SECRET_KEY"`
}
+type LabelConfig struct {
+DefaultLabelDefs []string `env:"DEFAULTS, default=at://did:plc:wshs7t2adsemcrrd4snkeqli/sh.tangled.label.definition/wontfix,at://did:plc:wshs7t2adsemcrrd4snkeqli/sh.tangled.label.definition/good-first-issue,at://did:plc:wshs7t2adsemcrrd4snkeqli/sh.tangled.label.definition/duplicate,at://did:plc:wshs7t2adsemcrrd4snkeqli/sh.tangled.label.definition/documentation,at://did:plc:wshs7t2adsemcrrd4snkeqli/sh.tangled.label.definition/assignee"` // delimiter=,
+GoodFirstIssue string `env:"GFI, default=at://did:plc:wshs7t2adsemcrrd4snkeqli/sh.tangled.label.definition/good-first-issue"`
+}
+
func (cfg RedisConfig) ToURL() string {
u := &url.URL{
Scheme: "redis",
···
Avatar AvatarConfig `env:",prefix=TANGLED_AVATAR_"`
OAuth OAuthConfig `env:",prefix=TANGLED_OAUTH_"`
Redis RedisConfig `env:",prefix=TANGLED_REDIS_"`
+Plc PlcConfig `env:",prefix=TANGLED_PLC_"`
Pds PdsConfig `env:",prefix=TANGLED_PDS_"`
Cloudflare Cloudflare `env:",prefix=TANGLED_CLOUDFLARE_"`
+Label LabelConfig `env:",prefix=TANGLED_LABEL_"`
}
func LoadConfig(ctx context.Context) (*Config, error) {
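Not part of the diff: a sketch of how the new Plc and Label sections could be resolved from the environment, assuming the appview loads Config through an env-tag processor (the `env:",prefix=TANGLED_PLC_"` tags suggest sethvargo/go-envconfig) and that LoadConfig above wires that up. The URLs are placeholders.

package main

import (
	"context"
	"fmt"
	"os"

	"tangled.org/core/appview/config"
)

func main() {
	// Prefixes compose with the nested struct tags: PlcConfig sits under
	// TANGLED_PLC_, so its URL field is read from TANGLED_PLC_URL.
	os.Setenv("TANGLED_PLC_URL", "https://plc.example.invalid") // placeholder
	os.Setenv("TANGLED_LABEL_GFI", "at://did:plc:example/sh.tangled.label.definition/good-first-issue")

	cfg, err := config.LoadConfig(context.Background())
	if err != nil {
		panic(err)
	}

	fmt.Println(cfg.Plc.PLCURL)
	fmt.Println(cfg.Label.GoodFirstIssue)
}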
+3 -2
appview/db/artifact.go
···
"github.com/go-git/go-git/v5/plumbing"
"github.com/ipfs/go-cid"
"tangled.org/core/appview/models"
+"tangled.org/core/orm"
)
func AddArtifact(e Execer, artifact models.Artifact) error {
···
return err
}
-func GetArtifact(e Execer, filters ...filter) ([]models.Artifact, error) {
+func GetArtifact(e Execer, filters ...orm.Filter) ([]models.Artifact, error) {
var artifacts []models.Artifact
var conditions []string
···
return artifacts, nil
}
-func DeleteArtifact(e Execer, filters ...filter) error {
+func DeleteArtifact(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
+4 -3
appview/db/collaborators.go
···
"time"
"tangled.org/core/appview/models"
+"tangled.org/core/orm"
)
func AddCollaborator(e Execer, c models.Collaborator) error {
···
return err
}
-func DeleteCollaborator(e Execer, filters ...filter) error {
+func DeleteCollaborator(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
···
return nil, nil
}
-return GetRepos(e, 0, FilterIn("at_uri", repoAts))
+return GetRepos(e, 0, orm.FilterIn("at_uri", repoAts))
}
-func GetCollaborators(e Execer, filters ...filter) ([]models.Collaborator, error) {
+func GetCollaborators(e Execer, filters ...orm.Filter) ([]models.Collaborator, error) {
var collaborators []models.Collaborator
var conditions []string
var args []any
+85 -130
appview/db/db.go
···
import (
"context"
"database/sql"
-"fmt"
"log/slog"
-"reflect"
"strings"
_ "github.com/mattn/go-sqlite3"
"tangled.org/core/log"
+"tangled.org/core/orm"
)
type DB struct {
···
email_notifications integer not null default 0
);
+create table if not exists reference_links (
+id integer primary key autoincrement,
+from_at text not null,
+to_at text not null,
+unique (from_at, to_at)
+);
+
create table if not exists migrations (
id integer primary key autoincrement,
name text unique
···
-- indexes for better performance
create index if not exists idx_notifications_recipient_created on notifications(recipient_did, created desc);
create index if not exists idx_notifications_recipient_read on notifications(recipient_did, read);
-create index if not exists idx_stars_created on stars(created);
-create index if not exists idx_stars_repo_at_created on stars(repo_at, created);
+create index if not exists idx_references_from_at on reference_links(from_at);
+create index if not exists idx_references_to_at on reference_links(to_at);
`)
if err != nil {
return nil, err
}
// run migrations
-runMigration(conn, logger, "add-description-to-repos", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "add-description-to-repos", func(tx *sql.Tx) error {
tx.Exec(`
alter table repos add column description text check (length(description) <= 200);
`)
return nil
})
-runMigration(conn, logger, "add-rkey-to-pubkeys", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "add-rkey-to-pubkeys", func(tx *sql.Tx) error {
// add unconstrained column
_, err := tx.Exec(`
alter table public_keys
···
return nil
})
-runMigration(conn, logger, "add-rkey-to-comments", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "add-rkey-to-comments", func(tx *sql.Tx) error {
_, err := tx.Exec(`
alter table comments drop column comment_at;
alter table comments add column rkey text;
···
return err
})
-runMigration(conn, logger, "add-deleted-and-edited-to-issue-comments", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "add-deleted-and-edited-to-issue-comments", func(tx *sql.Tx) error {
_, err := tx.Exec(`
alter table comments add column deleted text; -- timestamp
alter table comments add column edited text; -- timestamp
···
return err
})
-runMigration(conn, logger, "add-source-info-to-pulls-and-submissions", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "add-source-info-to-pulls-and-submissions", func(tx *sql.Tx) error {
_, err := tx.Exec(`
alter table pulls add column source_branch text;
alter table pulls add column source_repo_at text;
···
return err
})
-runMigration(conn, logger, "add-source-to-repos", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "add-source-to-repos", func(tx *sql.Tx) error {
_, err := tx.Exec(`
alter table repos add column source text;
`)
···
//
// [0]: https://sqlite.org/pragma.html#pragma_foreign_keys
conn.ExecContext(ctx, "pragma foreign_keys = off;")
-runMigration(conn, logger, "recreate-pulls-column-for-stacking-support", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "recreate-pulls-column-for-stacking-support", func(tx *sql.Tx) error {
_, err := tx.Exec(`
create table pulls_new (
-- identifiers
···
})
conn.ExecContext(ctx, "pragma foreign_keys = on;")
-runMigration(conn, logger, "add-spindle-to-repos", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "add-spindle-to-repos", func(tx *sql.Tx) error {
tx.Exec(`
alter table repos add column spindle text;
`)
···
// drop all knot secrets, add unique constraint to knots
//
// knots will henceforth use service auth for signed requests
-runMigration(conn, logger, "no-more-secrets", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "no-more-secrets", func(tx *sql.Tx) error {
_, err := tx.Exec(`
create table registrations_new (
id integer primary key autoincrement,
···
})
// recreate and add rkey + created columns with default constraint
-runMigration(conn, logger, "rework-collaborators-table", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "rework-collaborators-table", func(tx *sql.Tx) error {
// create new table
// - repo_at instead of repo integer
// - rkey field
···
return err
})
-runMigration(conn, logger, "add-rkey-to-issues", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "add-rkey-to-issues", func(tx *sql.Tx) error {
_, err := tx.Exec(`
alter table issues add column rkey text not null default '';
···
})
// repurpose the read-only column to "needs-upgrade"
-runMigration(conn, logger, "rename-registrations-read-only-to-needs-upgrade", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "rename-registrations-read-only-to-needs-upgrade", func(tx *sql.Tx) error {
_, err := tx.Exec(`
alter table registrations rename column read_only to needs_upgrade;
`)
···
})
// require all knots to upgrade after the release of total xrpc
-runMigration(conn, logger, "migrate-knots-to-total-xrpc", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "migrate-knots-to-total-xrpc", func(tx *sql.Tx) error {
_, err := tx.Exec(`
update registrations set needs_upgrade = 1;
`)
···
})
// require all knots to upgrade after the release of total xrpc
-runMigration(conn, logger, "migrate-spindles-to-xrpc-owner", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "migrate-spindles-to-xrpc-owner", func(tx *sql.Tx) error {
_, err := tx.Exec(`
alter table spindles add column needs_upgrade integer not null default 0;
`)
···
//
// disable foreign-keys for the next migration
conn.ExecContext(ctx, "pragma foreign_keys = off;")
-runMigration(conn, logger, "remove-issue-at-from-issues", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "remove-issue-at-from-issues", func(tx *sql.Tx) error {
_, err := tx.Exec(`
create table if not exists issues_new (
-- identifiers
···
// - new columns
// * column "reply_to" which can be any other comment
// * column "at-uri" which is a generated column
-runMigration(conn, logger, "rework-issue-comments", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "rework-issue-comments", func(tx *sql.Tx) error {
_, err := tx.Exec(`
create table if not exists issue_comments (
-- identifiers
···
//
// disable foreign-keys for the next migration
conn.ExecContext(ctx, "pragma foreign_keys = off;")
-runMigration(conn, logger, "add-at-uri-to-pulls", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "add-at-uri-to-pulls", func(tx *sql.Tx) error {
_, err := tx.Exec(`
create table if not exists pulls_new (
-- identifiers
···
//
// disable foreign-keys for the next migration
conn.ExecContext(ctx, "pragma foreign_keys = off;")
-runMigration(conn, logger, "remove-repo-at-pull-id-from-pull-submissions", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "remove-repo-at-pull-id-from-pull-submissions", func(tx *sql.Tx) error {
_, err := tx.Exec(`
create table if not exists pull_submissions_new (
-- identifiers
···
// knots may report the combined patch for a comparison, we can store that on the appview side
// (but not on the pds record), because calculating the combined patch requires a git index
-runMigration(conn, logger, "add-combined-column-submissions", func(tx *sql.Tx) error {
+orm.RunMigration(conn, logger, "add-combined-column-submissions", func(tx *sql.Tx) error {
_, err := tx.Exec(`
alter table pull_submissions add column combined text;
`)
return err
})
-return &DB{
-db,
-logger,
-}, nil
-}
+orm.RunMigration(conn, logger, "add-pronouns-profile", func(tx *sql.Tx) error {
+_, err := tx.Exec(`
+alter table profile add column pronouns text;
+`)
+return err
+})
-type migrationFn = func(*sql.Tx) error
-
-func runMigration(c *sql.Conn, logger *slog.Logger, name string, migrationFn migrationFn) error {
-logger = logger.With("migration", name)
-
-tx, err := c.BeginTx(context.Background(), nil)
-if err != nil {
+orm.RunMigration(conn, logger, "add-meta-column-repos", func(tx *sql.Tx) error {
+_, err := tx.Exec(`
+alter table repos add column website text;
+alter table repos add column topics text;
+`)
return err
-}
-defer tx.Rollback()
+})
-var exists bool
-err = tx.QueryRow("select exists (select 1 from migrations where name = ?)", name).Scan(&exists)
-if err != nil {
+orm.RunMigration(conn, logger, "add-usermentioned-preference", func(tx *sql.Tx) error {
+_, err := tx.Exec(`
+alter table notification_preferences add column user_mentioned integer not null default 1;
+`)
return err
-}
+})
+
+// remove the foreign key constraints from stars.
+orm.RunMigration(conn, logger, "generalize-stars-subject", func(tx *sql.Tx) error {
+_, err := tx.Exec(`
+create table stars_new (
+id integer primary key autoincrement,
+did text not null,
+rkey text not null,
+
+subject_at text not null,
-if !exists {
-// run migration
-err = migrationFn(tx)
-if err != nil {
-logger.Error("failed to run migration", "err", err)
-return err
-}
+created text not null default (strftime('%Y-%m-%dT%H:%M:%SZ', 'now')),
+unique(did, rkey),
+unique(did, subject_at)
+);
-// mark migration as complete
-_, err = tx.Exec("insert into migrations (name) values (?)", name)
-if err != nil {
-logger.Error("failed to mark migration as complete", "err", err)
-return err
-}
+insert into stars_new (
+id,
+did,
+rkey,
+subject_at,
+created
+)
+select
+id,
+starred_by_did,
+rkey,
+repo_at,
+created
+from stars;
-// commit the transaction
-if err := tx.Commit(); err != nil {
-return err
-}
+drop table stars;
+alter table stars_new rename to stars;
-logger.Info("migration applied successfully")
-} else {
-logger.Warn("skipped migration, already applied")
-}
+create index if not exists idx_stars_created on stars(created);
+create index if not exists idx_stars_subject_at_created on stars(subject_at, created);
+`)
+return err
+})
-return nil
+return &DB{
+db,
+logger,
+}, nil
func (d *DB) Close() error {
return d.DB.Close()
-
-type filter struct {
-key string
-arg any
-cmp string
-}
-
-func newFilter(key, cmp string, arg any) filter {
-return filter{
-key: key,
-arg: arg,
-cmp: cmp,
-}
-}
-
-func FilterEq(key string, arg any) filter { return newFilter(key, "=", arg) }
-func FilterNotEq(key string, arg any) filter { return newFilter(key, "<>", arg) }
-func FilterGte(key string, arg any) filter { return newFilter(key, ">=", arg) }
-func FilterLte(key string, arg any) filter { return newFilter(key, "<=", arg) }
-func FilterIs(key string, arg any) filter { return newFilter(key, "is", arg) }
-func FilterIsNot(key string, arg any) filter { return newFilter(key, "is not", arg) }
-func FilterIn(key string, arg any) filter { return newFilter(key, "in", arg) }
-func FilterLike(key string, arg any) filter { return newFilter(key, "like", arg) }
-func FilterNotLike(key string, arg any) filter { return newFilter(key, "not like", arg) }
-func FilterContains(key string, arg any) filter {
-return newFilter(key, "like", fmt.Sprintf("%%%v%%", arg))
-}
-
-func (f filter) Condition() string {
-rv := reflect.ValueOf(f.arg)
-kind := rv.Kind()
-
-// if we have `FilterIn(k, [1, 2, 3])`, compile it down to `k in (?, ?, ?)`
-if (kind == reflect.Slice && rv.Type().Elem().Kind() != reflect.Uint8) || kind == reflect.Array {
-if rv.Len() == 0 {
-// always false
-return "1 = 0"
-}
-
-placeholders := make([]string, rv.Len())
-for i := range placeholders {
-placeholders[i] = "?"
-}
-
-return fmt.Sprintf("%s %s (%s)", f.key, f.cmp, strings.Join(placeholders, ", "))
-}
-
-return fmt.Sprintf("%s %s ?", f.key, f.cmp)
-}
-
-func (f filter) Arg() []any {
-rv := reflect.ValueOf(f.arg)
-kind := rv.Kind()
-if (kind == reflect.Slice && rv.Type().Elem().Kind() != reflect.Uint8) || kind == reflect.Array {
-if rv.Len() == 0 {
-return nil
-}
-
-out := make([]any, rv.Len())
-for i := range rv.Len() {
-out[i] = rv.Index(i).Interface()
-}
-return out
-}
-
-return []any{f.arg}
-}
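Not part of the diff: the filter helpers removed above now live in tangled.org/core/orm, and the db functions take ...orm.Filter. A sketch of how a caller composes them, assuming the orm package keeps the Condition/Arg behaviour of the code deleted here (FilterIn expands a slice into one placeholder per element). The DID and rkeys are placeholders.

package main

import (
	"fmt"
	"strings"

	"tangled.org/core/orm"
)

// buildWhere assembles a where clause the same way the db package does:
// each filter contributes one SQL condition plus its bound arguments.
func buildWhere(filters ...orm.Filter) (string, []any) {
	var conditions []string
	var args []any
	for _, f := range filters {
		conditions = append(conditions, f.Condition())
		args = append(args, f.Arg()...)
	}
	if conditions == nil {
		return "", nil
	}
	return " where " + strings.Join(conditions, " and "), args
}

func main() {
	where, args := buildWhere(
		orm.FilterEq("did", "did:plc:example"), // placeholder DID
		orm.FilterIn("rkey", []string{"3kaaa", "3kbbb"}),
	)
	fmt.Println(where) // expected: where did = ? and rkey in (?, ?)
	fmt.Println(args)
}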
+4 -3
appview/db/follow.go
···
"time"
"tangled.org/core/appview/models"
+"tangled.org/core/orm"
)
func AddFollow(e Execer, follow *models.Follow) error {
···
return result, nil
}
-func GetFollows(e Execer, limit int, filters ...filter) ([]models.Follow, error) {
+func GetFollows(e Execer, limit int, filters ...orm.Filter) ([]models.Follow, error) {
var follows []models.Follow
var conditions []string
···
}
func GetFollowers(e Execer, did string) ([]models.Follow, error) {
-return GetFollows(e, 0, FilterEq("subject_did", did))
+return GetFollows(e, 0, orm.FilterEq("subject_did", did))
}
func GetFollowing(e Execer, did string) ([]models.Follow, error) {
-return GetFollows(e, 0, FilterEq("user_did", did))
+return GetFollows(e, 0, orm.FilterEq("user_did", did))
}
func getFollowStatuses(e Execer, userDid string, subjectDids []string) (map[string]models.FollowStatus, error) {
+168 -36
appview/db/issues.go
···
"time"
"github.com/bluesky-social/indigo/atproto/syntax"
+"tangled.org/core/api/tangled"
"tangled.org/core/appview/models"
"tangled.org/core/appview/pagination"
+"tangled.org/core/orm"
)
func PutIssue(tx *sql.Tx, issue *models.Issue) error {
···
issues, err := GetIssues(
tx,
-FilterEq("did", issue.Did),
-FilterEq("rkey", issue.Rkey),
+orm.FilterEq("did", issue.Did),
+orm.FilterEq("rkey", issue.Rkey),
)
switch {
case err != nil:
···
returning rowid, issue_id
`, issue.RepoAt, issue.Did, issue.Rkey, newIssueId, issue.Title, issue.Body)
-return row.Scan(&issue.Id, &issue.IssueId)
+err = row.Scan(&issue.Id, &issue.IssueId)
+if err != nil {
+return fmt.Errorf("scan row: %w", err)
+}
+
+if err := putReferences(tx, issue.AtUri(), issue.References); err != nil {
+return fmt.Errorf("put reference_links: %w", err)
+}
+return nil
}
func updateIssue(tx *sql.Tx, issue *models.Issue) error {
···
set title = ?, body = ?, edited = ?
where did = ? and rkey = ?
`, issue.Title, issue.Body, time.Now().Format(time.RFC3339), issue.Did, issue.Rkey)
-return err
+if err != nil {
+return err
+}
+
+if err := putReferences(tx, issue.AtUri(), issue.References); err != nil {
+return fmt.Errorf("put reference_links: %w", err)
+}
+return nil
}
-func GetIssuesPaginated(e Execer, page pagination.Page, filters ...filter) ([]models.Issue, error) {
+func GetIssuesPaginated(e Execer, page pagination.Page, filters ...orm.Filter) ([]models.Issue, error) {
issueMap := make(map[string]*models.Issue) // at-uri -> issue
var conditions []string
···
whereClause = " where " + strings.Join(conditions, " and ")
}
-pLower := FilterGte("row_num", page.Offset+1)
-pUpper := FilterLte("row_num", page.Offset+page.Limit)
+pLower := orm.FilterGte("row_num", page.Offset+1)
+pUpper := orm.FilterLte("row_num", page.Offset+page.Limit)
-args = append(args, pLower.Arg()...)
-args = append(args, pUpper.Arg()...)
-pagination := " where " + pLower.Condition() + " and " + pUpper.Condition()
+pageClause := ""
+if page.Limit > 0 {
+args = append(args, pLower.Arg()...)
+args = append(args, pUpper.Arg()...)
+pageClause = " where " + pLower.Condition() + " and " + pUpper.Condition()
+}
query := fmt.Sprintf(
`
···
%s
`,
whereClause,
-pagination,
+pageClause,
)
rows, err := e.Query(query, args...)
···
repoAts = append(repoAts, string(issue.RepoAt))
}
-repos, err := GetRepos(e, 0, FilterIn("at_uri", repoAts))
+repos, err := GetRepos(e, 0, orm.FilterIn("at_uri", repoAts))
if err != nil {
return nil, fmt.Errorf("failed to build repo mappings: %w", err)
}
···
// collect comments
issueAts := slices.Collect(maps.Keys(issueMap))
-comments, err := GetIssueComments(e, FilterIn("issue_at", issueAts))
+comments, err := GetIssueComments(e, orm.FilterIn("issue_at", issueAts))
if err != nil {
return nil, fmt.Errorf("failed to query comments: %w", err)
}
···
}
// collect allLabels for each issue
-allLabels, err := GetLabels(e, FilterIn("subject", issueAts))
+allLabels, err := GetLabels(e, orm.FilterIn("subject", issueAts))
if err != nil {
return nil, fmt.Errorf("failed to query labels: %w", err)
}
···
}
}
+// collect references for each issue
+allReferencs, err := GetReferencesAll(e, orm.FilterIn("from_at", issueAts))
+if err != nil {
+return nil, fmt.Errorf("failed to query reference_links: %w", err)
+}
+for issueAt, references := range allReferencs {
+if issue, ok := issueMap[issueAt.String()]; ok {
+issue.References = references
+}
+}
+
+var issues []models.Issue
issues = append(issues, *i)
···
return issues, nil
}
-func GetIssues(e Execer, filters ...filter) ([]models.Issue, error) {
-return GetIssuesPaginated(e, pagination.FirstPage(), filters...)
+func GetIssue(e Execer, repoAt syntax.ATURI, issueId int) (*models.Issue, error) {
+issues, err := GetIssuesPaginated(
+e,
+pagination.Page{},
+orm.FilterEq("repo_at", repoAt),
+orm.FilterEq("issue_id", issueId),
+)
+if err != nil {
+return nil, err
+}
+if len(issues) != 1 {
+return nil, sql.ErrNoRows
+}
+
+return &issues[0], nil
+}
+
+func GetIssues(e Execer, filters ...orm.Filter) ([]models.Issue, error) {
+return GetIssuesPaginated(e, pagination.Page{}, filters...)
}
-
func AddIssueComment(e Execer, c models.IssueComment) (int64, error) {
-
result, err := e.Exec(
+
// GetIssueIDs gets list of all existing issue's IDs
+
func GetIssueIDs(e Execer, opts models.IssueSearchOptions) ([]int64, error) {
+
var ids []int64
+
+
var filters []orm.Filter
+
openValue := 0
+
if opts.IsOpen {
+
openValue = 1
+
}
+
filters = append(filters, orm.FilterEq("open", openValue))
+
if opts.RepoAt != "" {
+
filters = append(filters, orm.FilterEq("repo_at", opts.RepoAt))
+
}
+
+
var conditions []string
+
var args []any
+
+
for _, filter := range filters {
+
conditions = append(conditions, filter.Condition())
+
args = append(args, filter.Arg()...)
+
}
+
+
whereClause := ""
+
if conditions != nil {
+
whereClause = " where " + strings.Join(conditions, " and ")
+
}
+
query := fmt.Sprintf(
+
`
+
select
+
id
+
from
+
issues
+
%s
+
limit ? offset ?`,
+
whereClause,
+
)
+
args = append(args, opts.Page.Limit, opts.Page.Offset)
+
rows, err := e.Query(query, args...)
+
if err != nil {
+
return nil, err
+
}
+
defer rows.Close()
+
+
for rows.Next() {
+
var id int64
+
err := rows.Scan(&id)
+
if err != nil {
+
return nil, err
+
}
+
+
ids = append(ids, id)
+
}
+
+
return ids, nil
+
}
+
+
func AddIssueComment(tx *sql.Tx, c models.IssueComment) (int64, error) {
+
result, err := tx.Exec(
`insert into issue_comments (
did,
rkey,
···
return 0, err
}
+if err := putReferences(tx, c.AtUri(), c.References); err != nil {
+return 0, fmt.Errorf("put reference_links: %w", err)
+}
+
return id, nil
}
-func DeleteIssueComments(e Execer, filters ...filter) error {
+func DeleteIssueComments(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
···
return err
}
-func GetIssueComments(e Execer, filters ...filter) ([]models.IssueComment, error) {
-var comments []models.IssueComment
+func GetIssueComments(e Execer, filters ...orm.Filter) ([]models.IssueComment, error) {
+commentMap := make(map[string]*models.IssueComment)
var conditions []string
var args []any
···
comment.ReplyTo = &replyTo.V
}
-comments = append(comments, comment)
+atUri := comment.AtUri().String()
+commentMap[atUri] = &comment
}
if err = rows.Err(); err != nil {
return nil, err
}
+// collect references for each comments
+commentAts := slices.Collect(maps.Keys(commentMap))
+allReferencs, err := GetReferencesAll(e, orm.FilterIn("from_at", commentAts))
+if err != nil {
+return nil, fmt.Errorf("failed to query reference_links: %w", err)
+}
+for commentAt, references := range allReferencs {
+if comment, ok := commentMap[commentAt.String()]; ok {
+comment.References = references
+}
+}
+
+var comments []models.IssueComment
+for _, c := range commentMap {
+comments = append(comments, *c)
+}
+
+sort.Slice(comments, func(i, j int) bool {
+return comments[i].Created.After(comments[j].Created)
+})
+
return comments, nil
}
-func DeleteIssues(e Execer, filters ...filter) error {
-var conditions []string
-var args []any
-for _, filter := range filters {
-conditions = append(conditions, filter.Condition())
-args = append(args, filter.Arg()...)
+func DeleteIssues(tx *sql.Tx, did, rkey string) error {
+_, err := tx.Exec(
+`delete from issues
+where did = ? and rkey = ?`,
+did,
+rkey,
+)
+if err != nil {
+return fmt.Errorf("delete issue: %w", err)
}
-whereClause := ""
-if conditions != nil {
-whereClause = " where " + strings.Join(conditions, " and ")
+uri := syntax.ATURI(fmt.Sprintf("at://%s/%s/%s", did, tangled.RepoIssueNSID, rkey))
+err = deleteReferences(tx, uri)
+if err != nil {
+return fmt.Errorf("delete reference_links: %w", err)
}
-query := fmt.Sprintf(`delete from issues %s`, whereClause)
-_, err := e.Exec(query, args...)
-return err
+return nil
}
-
func CloseIssues(e Execer, filters ...filter) error {
+
func CloseIssues(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
···
return err
}
-
func ReopenIssues(e Execer, filters ...filter) error {
+
func ReopenIssues(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
+8 -7
appview/db/label.go
···
"github.com/bluesky-social/indigo/atproto/syntax"
"tangled.org/core/appview/models"
+"tangled.org/core/orm"
)
// no updating type for now
···
return id, nil
}
-func DeleteLabelDefinition(e Execer, filters ...filter) error {
+func DeleteLabelDefinition(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
···
return err
}
-func GetLabelDefinitions(e Execer, filters ...filter) ([]models.LabelDefinition, error) {
+func GetLabelDefinitions(e Execer, filters ...orm.Filter) ([]models.LabelDefinition, error) {
var labelDefinitions []models.LabelDefinition
var conditions []string
var args []any
···
}
// helper to get exactly one label def
-func GetLabelDefinition(e Execer, filters ...filter) (*models.LabelDefinition, error) {
+func GetLabelDefinition(e Execer, filters ...orm.Filter) (*models.LabelDefinition, error) {
labels, err := GetLabelDefinitions(e, filters...)
if err != nil {
return nil, err
···
return id, nil
}
-func GetLabelOps(e Execer, filters ...filter) ([]models.LabelOp, error) {
+func GetLabelOps(e Execer, filters ...orm.Filter) ([]models.LabelOp, error) {
var labelOps []models.LabelOp
var conditions []string
var args []any
···
}
// get labels for a given list of subject URIs
-
func GetLabels(e Execer, filters ...filter) (map[syntax.ATURI]models.LabelState, error) {
+
func GetLabels(e Execer, filters ...orm.Filter) (map[syntax.ATURI]models.LabelState, error) {
ops, err := GetLabelOps(e, filters...)
if err != nil {
return nil, err
···
}
labelAts := slices.Collect(maps.Keys(labelAtSet))
-
actx, err := NewLabelApplicationCtx(e, FilterIn("at_uri", labelAts))
+
actx, err := NewLabelApplicationCtx(e, orm.FilterIn("at_uri", labelAts))
if err != nil {
return nil, err
}
···
return results, nil
}
-
func NewLabelApplicationCtx(e Execer, filters ...filter) (*models.LabelApplicationCtx, error) {
+
func NewLabelApplicationCtx(e Execer, filters ...orm.Filter) (*models.LabelApplicationCtx, error) {
labels, err := GetLabelDefinitions(e, filters...)
if err != nil {
return nil, err
+5 -4
appview/db/language.go
···
"github.com/bluesky-social/indigo/atproto/syntax"
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
)
-
func GetRepoLanguages(e Execer, filters ...filter) ([]models.RepoLanguage, error) {
+
func GetRepoLanguages(e Execer, filters ...orm.Filter) ([]models.RepoLanguage, error) {
var conditions []string
var args []any
for _, filter := range filters {
···
return nil
}
-
func DeleteRepoLanguages(e Execer, filters ...filter) error {
+
func DeleteRepoLanguages(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
···
func UpdateRepoLanguages(tx *sql.Tx, repoAt syntax.ATURI, ref string, langs []models.RepoLanguage) error {
err := DeleteRepoLanguages(
tx,
-
FilterEq("repo_at", repoAt),
-
FilterEq("ref", ref),
+
orm.FilterEq("repo_at", repoAt),
+
orm.FilterEq("ref", ref),
)
if err != nil {
return fmt.Errorf("failed to delete existing languages: %w", err)
+36 -22
appview/db/notifications.go
···
"github.com/bluesky-social/indigo/atproto/syntax"
"tangled.org/core/appview/models"
"tangled.org/core/appview/pagination"
+
"tangled.org/core/orm"
)
func CreateNotification(e Execer, notification *models.Notification) error {
···
}
// GetNotificationsPaginated retrieves notifications with filters and pagination
-
func GetNotificationsPaginated(e Execer, page pagination.Page, filters ...filter) ([]*models.Notification, error) {
+
func GetNotificationsPaginated(e Execer, page pagination.Page, filters ...orm.Filter) ([]*models.Notification, error) {
var conditions []string
var args []any
···
for _, condition := range conditions[1:] {
whereClause += " AND " + condition
}
+
}
+
pageClause := ""
+
if page.Limit > 0 {
+
pageClause = " limit ? offset ? "
+
args = append(args, page.Limit, page.Offset)
}
query := fmt.Sprintf(`
···
from notifications
%s
order by created desc
-
limit ? offset ?
-
`, whereClause)
-
-
args = append(args, page.Limit, page.Offset)
+
%s
+
`, whereClause, pageClause)
rows, err := e.QueryContext(context.Background(), query, args...)
if err != nil {
···
}
// GetNotificationsWithEntities retrieves notifications with their related entities
-
func GetNotificationsWithEntities(e Execer, page pagination.Page, filters ...filter) ([]*models.NotificationWithEntity, error) {
+
func GetNotificationsWithEntities(e Execer, page pagination.Page, filters ...orm.Filter) ([]*models.NotificationWithEntity, error) {
var conditions []string
var args []any
···
select
n.id, n.recipient_did, n.actor_did, n.type, n.entity_type, n.entity_id,
n.read, n.created, n.repo_id, n.issue_id, n.pull_id,
-
r.id as r_id, r.did as r_did, r.name as r_name, r.description as r_description,
+
r.id as r_id, r.did as r_did, r.name as r_name, r.description as r_description, r.website as r_website, r.topics as r_topics,
i.id as i_id, i.did as i_did, i.issue_id as i_issue_id, i.title as i_title, i.open as i_open,
p.id as p_id, p.owner_did as p_owner_did, p.pull_id as p_pull_id, p.title as p_title, p.state as p_state
from notifications n
···
var issue models.Issue
var pull models.Pull
var rId, iId, pId sql.NullInt64
-
var rDid, rName, rDescription sql.NullString
+
var rDid, rName, rDescription, rWebsite, rTopicStr sql.NullString
var iDid sql.NullString
var iIssueId sql.NullInt64
var iTitle sql.NullString
···
err := rows.Scan(
&n.ID, &n.RecipientDid, &n.ActorDid, &typeStr, &n.EntityType, &n.EntityId,
&n.Read, &createdStr, &n.RepoId, &n.IssueId, &n.PullId,
-
&rId, &rDid, &rName, &rDescription,
+
&rId, &rDid, &rName, &rDescription, &rWebsite, &rTopicStr,
&iId, &iDid, &iIssueId, &iTitle, &iOpen,
&pId, &pOwnerDid, &pPullId, &pTitle, &pState,
)
···
if rDescription.Valid {
repo.Description = rDescription.String
}
+
if rWebsite.Valid {
+
repo.Website = rWebsite.String
+
}
+
if rTopicStr.Valid {
+
repo.Topics = strings.Fields(rTopicStr.String)
+
}
nwe.Repo = &repo
}
···
}
// GetNotifications retrieves notifications with filters
-
func GetNotifications(e Execer, filters ...filter) ([]*models.Notification, error) {
+
func GetNotifications(e Execer, filters ...orm.Filter) ([]*models.Notification, error) {
return GetNotificationsPaginated(e, pagination.FirstPage(), filters...)
}
-
func CountNotifications(e Execer, filters ...filter) (int64, error) {
+
func CountNotifications(e Execer, filters ...orm.Filter) (int64, error) {
var conditions []string
var args []any
for _, filter := range filters {
···
}
func MarkNotificationRead(e Execer, notificationID int64, userDID string) error {
-
idFilter := FilterEq("id", notificationID)
-
recipientFilter := FilterEq("recipient_did", userDID)
+
idFilter := orm.FilterEq("id", notificationID)
+
recipientFilter := orm.FilterEq("recipient_did", userDID)
query := fmt.Sprintf(`
UPDATE notifications
···
}
func MarkAllNotificationsRead(e Execer, userDID string) error {
-
recipientFilter := FilterEq("recipient_did", userDID)
-
readFilter := FilterEq("read", 0)
+
recipientFilter := orm.FilterEq("recipient_did", userDID)
+
readFilter := orm.FilterEq("read", 0)
query := fmt.Sprintf(`
UPDATE notifications
···
}
func DeleteNotification(e Execer, notificationID int64, userDID string) error {
-
idFilter := FilterEq("id", notificationID)
-
recipientFilter := FilterEq("recipient_did", userDID)
+
idFilter := orm.FilterEq("id", notificationID)
+
recipientFilter := orm.FilterEq("recipient_did", userDID)
query := fmt.Sprintf(`
DELETE FROM notifications
···
}
func GetNotificationPreference(e Execer, userDid string) (*models.NotificationPreferences, error) {
-
prefs, err := GetNotificationPreferences(e, FilterEq("user_did", userDid))
+
prefs, err := GetNotificationPreferences(e, orm.FilterEq("user_did", userDid))
if err != nil {
return nil, err
}
···
return p, nil
}
-
func GetNotificationPreferences(e Execer, filters ...filter) (map[syntax.DID]*models.NotificationPreferences, error) {
+
func GetNotificationPreferences(e Execer, filters ...orm.Filter) (map[syntax.DID]*models.NotificationPreferences, error) {
prefsMap := make(map[syntax.DID]*models.NotificationPreferences)
var conditions []string
···
pull_created,
pull_commented,
followed,
+
user_mentioned,
pull_merged,
issue_closed,
email_notifications
···
&prefs.PullCreated,
&prefs.PullCommented,
&prefs.Followed,
+
&prefs.UserMentioned,
&prefs.PullMerged,
&prefs.IssueClosed,
&prefs.EmailNotifications,
···
query := `
INSERT OR REPLACE INTO notification_preferences
(user_did, repo_starred, issue_created, issue_commented, pull_created,
-
pull_commented, followed, pull_merged, issue_closed, email_notifications)
-
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+
pull_commented, followed, user_mentioned, pull_merged, issue_closed,
+
email_notifications)
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`
result, err := d.DB.ExecContext(ctx, query,
···
prefs.PullCreated,
prefs.PullCommented,
prefs.Followed,
+
prefs.UserMentioned,
prefs.PullMerged,
prefs.IssueClosed,
prefs.EmailNotifications,
···
func (d *DB) ClearOldNotifications(ctx context.Context, olderThan time.Duration) error {
cutoff := time.Now().Add(-olderThan)
-
createdFilter := FilterLte("created", cutoff)
+
createdFilter := orm.FilterLte("created", cutoff)
query := fmt.Sprintf(`
DELETE FROM notifications
+9 -6
appview/db/pipeline.go
···
"time"
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
)
-
func GetPipelines(e Execer, filters ...filter) ([]models.Pipeline, error) {
+
func GetPipelines(e Execer, filters ...orm.Filter) ([]models.Pipeline, error) {
var pipelines []models.Pipeline
var conditions []string
···
// this is a mega query, but the most useful one:
// get N pipelines, for each one get the latest status of its N workflows
-
func GetPipelineStatuses(e Execer, filters ...filter) ([]models.Pipeline, error) {
+
func GetPipelineStatuses(e Execer, limit int, filters ...orm.Filter) ([]models.Pipeline, error) {
var conditions []string
var args []any
for _, filter := range filters {
-
filter.key = "p." + filter.key // the table is aliased in the query to `p`
+
filter.Key = "p." + filter.Key // the table is aliased in the query to `p`
conditions = append(conditions, filter.Condition())
args = append(args, filter.Arg()...)
}
···
join
triggers t ON p.trigger_id = t.id
%s
-
`, whereClause)
+
order by p.created desc
+
limit %d
+
`, whereClause, limit)
rows, err := e.Query(query, args...)
if err != nil {
···
conditions = nil
args = nil
for _, p := range pipelines {
-
knotFilter := FilterEq("pipeline_knot", p.Knot)
-
rkeyFilter := FilterEq("pipeline_rkey", p.Rkey)
+
knotFilter := orm.FilterEq("pipeline_knot", p.Knot)
+
rkeyFilter := orm.FilterEq("pipeline_rkey", p.Rkey)
conditions = append(conditions, fmt.Sprintf("(%s and %s)", knotFilter.Condition(), rkeyFilter.Condition()))
args = append(args, p.Knot)
args = append(args, p.Rkey)
+32 -11
appview/db/profile.go
···
"github.com/bluesky-social/indigo/atproto/syntax"
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
)
const TimeframeMonths = 7
···
issues, err := GetIssues(
e,
-
FilterEq("did", forDid),
-
FilterGte("created", time.Now().AddDate(0, -TimeframeMonths, 0)),
+
orm.FilterEq("did", forDid),
+
orm.FilterGte("created", time.Now().AddDate(0, -TimeframeMonths, 0)),
)
if err != nil {
return nil, fmt.Errorf("error getting issues by owner did: %w", err)
···
*items = append(*items, &issue)
}
-
repos, err := GetRepos(e, 0, FilterEq("did", forDid))
+
repos, err := GetRepos(e, 0, orm.FilterEq("did", forDid))
if err != nil {
return nil, fmt.Errorf("error getting all repos by did: %w", err)
}
···
did,
description,
include_bluesky,
-
location
+
location,
+
pronouns
)
-
values (?, ?, ?, ?)`,
+
values (?, ?, ?, ?, ?)`,
profile.Did,
profile.Description,
includeBskyValue,
profile.Location,
+
profile.Pronouns,
)
if err != nil {
···
return tx.Commit()
}
-
func GetProfiles(e Execer, filters ...filter) (map[string]*models.Profile, error) {
+
func GetProfiles(e Execer, filters ...orm.Filter) (map[string]*models.Profile, error) {
var conditions []string
var args []any
for _, filter := range filters {
···
did,
description,
include_bluesky,
-
location
+
location,
+
pronouns
from
profile
%s`,
···
for rows.Next() {
var profile models.Profile
var includeBluesky int
+
var pronouns sql.Null[string]
-
err = rows.Scan(&profile.ID, &profile.Did, &profile.Description, &includeBluesky, &profile.Location)
+
err = rows.Scan(&profile.ID, &profile.Did, &profile.Description, &includeBluesky, &profile.Location, &pronouns)
if err != nil {
return nil, err
}
···
profile.IncludeBluesky = true
}
+
if pronouns.Valid {
+
profile.Pronouns = pronouns.V
+
}
+
profileMap[profile.Did] = &profile
}
if err = rows.Err(); err != nil {
···
func GetProfile(e Execer, did string) (*models.Profile, error) {
var profile models.Profile
+
var pronouns sql.Null[string]
+
profile.Did = did
includeBluesky := 0
+
err := e.QueryRow(
-
`select description, include_bluesky, location from profile where did = ?`,
+
`select description, include_bluesky, location, pronouns from profile where did = ?`,
did,
-
).Scan(&profile.Description, &includeBluesky, &profile.Location)
+
).Scan(&profile.Description, &includeBluesky, &profile.Location, &pronouns)
if err == sql.ErrNoRows {
profile := models.Profile{}
profile.Did = did
···
profile.IncludeBluesky = true
}
+
if pronouns.Valid {
+
profile.Pronouns = pronouns.V
+
}
+
rows, err := e.Query(`select link from profile_links where did = ?`, did)
if err != nil {
return nil, err
···
return fmt.Errorf("Entered location is too long.")
}
+
// ensure pronouns are not too long
+
if len(profile.Pronouns) > 40 {
+
return fmt.Errorf("Entered pronouns are too long.")
+
}
+
// ensure links are in order
err := validateLinks(profile)
if err != nil {
···
}
// ensure all pinned repos are either own repos or collaborating repos
-
repos, err := GetRepos(e, 0, FilterEq("did", profile.Did))
+
repos, err := GetRepos(e, 0, orm.FilterEq("did", profile.Did))
if err != nil {
log.Printf("getting repos for %s: %s", profile.Did, err)
}
+132 -26
appview/db/pulls.go
···
"github.com/bluesky-social/indigo/atproto/syntax"
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
)
func NewPull(tx *sql.Tx, pull *models.Pull) error {
···
_, err = tx.Exec(`
insert into pull_submissions (pull_at, round_number, patch, combined, source_rev)
values (?, ?, ?, ?, ?)
-
`, pull.PullAt(), 0, pull.Submissions[0].Patch, pull.Submissions[0].Combined, pull.Submissions[0].SourceRev)
-
return err
+
`, pull.AtUri(), 0, pull.Submissions[0].Patch, pull.Submissions[0].Combined, pull.Submissions[0].SourceRev)
+
if err != nil {
+
return err
+
}
+
+
if err := putReferences(tx, pull.AtUri(), pull.References); err != nil {
+
return fmt.Errorf("put reference_links: %w", err)
+
}
+
+
return nil
}
func GetPullAt(e Execer, repoAt syntax.ATURI, pullId int) (syntax.ATURI, error) {
···
if err != nil {
return "", err
}
-
return pull.PullAt(), err
+
return pull.AtUri(), err
}
func NextPullId(e Execer, repoAt syntax.ATURI) (int, error) {
···
return pullId - 1, err
}
-
func GetPullsWithLimit(e Execer, limit int, filters ...filter) ([]*models.Pull, error) {
+
func GetPullsWithLimit(e Execer, limit int, filters ...orm.Filter) ([]*models.Pull, error) {
pulls := make(map[syntax.ATURI]*models.Pull)
var conditions []string
···
pull.ParentChangeId = parentChangeId.String
}
-
pulls[pull.PullAt()] = &pull
+
pulls[pull.AtUri()] = &pull
}
var pullAts []syntax.ATURI
for _, p := range pulls {
-
pullAts = append(pullAts, p.PullAt())
+
pullAts = append(pullAts, p.AtUri())
}
-
submissionsMap, err := GetPullSubmissions(e, FilterIn("pull_at", pullAts))
+
submissionsMap, err := GetPullSubmissions(e, orm.FilterIn("pull_at", pullAts))
if err != nil {
return nil, fmt.Errorf("failed to get submissions: %w", err)
}
···
}
// collect allLabels for each issue
-
allLabels, err := GetLabels(e, FilterIn("subject", pullAts))
+
allLabels, err := GetLabels(e, orm.FilterIn("subject", pullAts))
if err != nil {
return nil, fmt.Errorf("failed to query labels: %w", err)
}
···
sourceAts = append(sourceAts, *p.PullSource.RepoAt)
}
}
-
sourceRepos, err := GetRepos(e, 0, FilterIn("at_uri", sourceAts))
+
sourceRepos, err := GetRepos(e, 0, orm.FilterIn("at_uri", sourceAts))
if err != nil && !errors.Is(err, sql.ErrNoRows) {
return nil, fmt.Errorf("failed to get source repos: %w", err)
}
···
}
}
+
allReferences, err := GetReferencesAll(e, orm.FilterIn("from_at", pullAts))
+
if err != nil {
+
return nil, fmt.Errorf("failed to query reference_links: %w", err)
+
}
+
for pullAt, references := range allReferences {
+
if pull, ok := pulls[pullAt]; ok {
+
pull.References = references
+
}
+
}
+
orderedByPullId := []*models.Pull{}
for _, p := range pulls {
orderedByPullId = append(orderedByPullId, p)
···
return orderedByPullId, nil
}
-
func GetPulls(e Execer, filters ...filter) ([]*models.Pull, error) {
+
func GetPulls(e Execer, filters ...orm.Filter) ([]*models.Pull, error) {
return GetPullsWithLimit(e, 0, filters...)
}
+
func GetPullIDs(e Execer, opts models.PullSearchOptions) ([]int64, error) {
+
var ids []int64
+
+
var filters []orm.Filter
+
filters = append(filters, orm.FilterEq("state", opts.State))
+
if opts.RepoAt != "" {
+
filters = append(filters, orm.FilterEq("repo_at", opts.RepoAt))
+
}
+
+
var conditions []string
+
var args []any
+
+
for _, filter := range filters {
+
conditions = append(conditions, filter.Condition())
+
args = append(args, filter.Arg()...)
+
}
+
+
whereClause := ""
+
if conditions != nil {
+
whereClause = " where " + strings.Join(conditions, " and ")
+
}
+
pageClause := ""
+
if opts.Page.Limit != 0 {
+
pageClause = fmt.Sprintf(
+
" limit %d offset %d ",
+
opts.Page.Limit,
+
opts.Page.Offset,
+
)
+
}
+
+
query := fmt.Sprintf(
+
`
+
select
+
id
+
from
+
pulls
+
%s
+
%s`,
+
whereClause,
+
pageClause,
+
)
+
rows, err := e.Query(query, args...)
+
if err != nil {
+
return nil, err
+
}
+
defer rows.Close()
+
+
for rows.Next() {
+
var id int64
+
err := rows.Scan(&id)
+
if err != nil {
+
return nil, err
+
}
+
+
ids = append(ids, id)
+
}
+
+
return ids, nil
+
}
+
func GetPull(e Execer, repoAt syntax.ATURI, pullId int) (*models.Pull, error) {
-
pulls, err := GetPullsWithLimit(e, 1, FilterEq("repo_at", repoAt), FilterEq("pull_id", pullId))
+
pulls, err := GetPullsWithLimit(e, 1, orm.FilterEq("repo_at", repoAt), orm.FilterEq("pull_id", pullId))
if err != nil {
return nil, err
}
-
if pulls == nil {
+
if len(pulls) == 0 {
return nil, sql.ErrNoRows
}
···
}
// mapping from pull -> pull submissions
-
func GetPullSubmissions(e Execer, filters ...filter) (map[syntax.ATURI][]*models.PullSubmission, error) {
+
func GetPullSubmissions(e Execer, filters ...orm.Filter) (map[syntax.ATURI][]*models.PullSubmission, error) {
var conditions []string
var args []any
for _, filter := range filters {
···
// Get comments for all submissions using GetPullComments
submissionIds := slices.Collect(maps.Keys(submissionMap))
-
comments, err := GetPullComments(e, FilterIn("submission_id", submissionIds))
+
comments, err := GetPullComments(e, orm.FilterIn("submission_id", submissionIds))
if err != nil {
-
return nil, err
+
return nil, fmt.Errorf("failed to get pull comments: %w", err)
}
for _, comment := range comments {
if submission, ok := submissionMap[comment.SubmissionId]; ok {
···
return m, nil
}
-
func GetPullComments(e Execer, filters ...filter) ([]models.PullComment, error) {
+
func GetPullComments(e Execer, filters ...orm.Filter) ([]models.PullComment, error) {
var conditions []string
var args []any
for _, filter := range filters {
···
}
defer rows.Close()
-
var comments []models.PullComment
+
commentMap := make(map[string]*models.PullComment)
for rows.Next() {
var comment models.PullComment
var createdAt string
···
comment.Created = t
}
-
comments = append(comments, comment)
+
atUri := comment.AtUri().String()
+
commentMap[atUri] = &comment
}
if err := rows.Err(); err != nil {
return nil, err
}
+
// collect references for each comment
+
commentAts := slices.Collect(maps.Keys(commentMap))
+
allReferences, err := GetReferencesAll(e, orm.FilterIn("from_at", commentAts))
+
if err != nil {
+
return nil, fmt.Errorf("failed to query reference_links: %w", err)
+
}
+
for commentAt, references := range allReferences {
+
if comment, ok := commentMap[commentAt.String()]; ok {
+
comment.References = references
+
}
+
}
+
+
var comments []models.PullComment
+
for _, c := range commentMap {
+
comments = append(comments, *c)
+
}
+
+
sort.Slice(comments, func(i, j int) bool {
+
return comments[i].Created.Before(comments[j].Created)
+
})
+
return comments, nil
}
···
return pulls, nil
}
-
func NewPullComment(e Execer, comment *models.PullComment) (int64, error) {
+
func NewPullComment(tx *sql.Tx, comment *models.PullComment) (int64, error) {
query := `insert into pull_comments (owner_did, repo_at, submission_id, comment_at, pull_id, body) values (?, ?, ?, ?, ?, ?)`
-
res, err := e.Exec(
+
res, err := tx.Exec(
query,
comment.OwnerDid,
comment.RepoAt,
···
i, err := res.LastInsertId()
if err != nil {
return 0, err
+
}
+
+
if err := putReferences(tx, comment.AtUri(), comment.References); err != nil {
+
return 0, fmt.Errorf("put reference_links: %w", err)
}
return i, nil
···
return err
}
-
func SetPullParentChangeId(e Execer, parentChangeId string, filters ...filter) error {
+
func SetPullParentChangeId(e Execer, parentChangeId string, filters ...orm.Filter) error {
var conditions []string
var args []any
···
// Only used when stacking to update contents in the event of a rebase (the interdiff should be empty).
// otherwise submissions are immutable
-
func UpdatePull(e Execer, newPatch, sourceRev string, filters ...filter) error {
+
func UpdatePull(e Execer, newPatch, sourceRev string, filters ...orm.Filter) error {
var conditions []string
var args []any
···
func GetStack(e Execer, stackId string) (models.Stack, error) {
unorderedPulls, err := GetPulls(
e,
-
FilterEq("stack_id", stackId),
-
FilterNotEq("state", models.PullDeleted),
+
orm.FilterEq("stack_id", stackId),
+
orm.FilterNotEq("state", models.PullDeleted),
)
if err != nil {
return nil, err
···
func GetAbandonedPulls(e Execer, stackId string) ([]*models.Pull, error) {
pulls, err := GetPulls(
e,
-
FilterEq("stack_id", stackId),
-
FilterEq("state", models.PullDeleted),
+
orm.FilterEq("stack_id", stackId),
+
orm.FilterEq("state", models.PullDeleted),
)
if err != nil {
return nil, err
+2 -1
appview/db/punchcard.go
···
"time"
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
)
// this adds to the existing count
···
return err
}
-
func MakePunchcard(e Execer, filters ...filter) (*models.Punchcard, error) {
+
func MakePunchcard(e Execer, filters ...orm.Filter) (*models.Punchcard, error) {
punchcard := &models.Punchcard{}
now := time.Now()
startOfYear := time.Date(now.Year(), 1, 1, 0, 0, 0, 0, time.UTC)
+463
appview/db/reference.go
···
+
package db
+
+
import (
+
"database/sql"
+
"fmt"
+
"strings"
+
+
"github.com/bluesky-social/indigo/atproto/syntax"
+
"tangled.org/core/api/tangled"
+
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
+
)
+
+
// ValidateReferenceLinks resolves refLinks to Issue/PR/IssueComment/PullComment ATURIs.
+
// Links that do not resolve to an existing record are ignored.
+
func ValidateReferenceLinks(e Execer, refLinks []models.ReferenceLink) ([]syntax.ATURI, error) {
+
var (
+
issueRefs []models.ReferenceLink
+
pullRefs []models.ReferenceLink
+
)
+
for _, ref := range refLinks {
+
switch ref.Kind {
+
case models.RefKindIssue:
+
issueRefs = append(issueRefs, ref)
+
case models.RefKindPull:
+
pullRefs = append(pullRefs, ref)
+
}
+
}
+
issueUris, err := findIssueReferences(e, issueRefs)
+
if err != nil {
+
return nil, fmt.Errorf("find issue references: %w", err)
+
}
+
pullUris, err := findPullReferences(e, pullRefs)
+
if err != nil {
+
return nil, fmt.Errorf("find pull references: %w", err)
+
}
+
+
return append(issueUris, pullUris...), nil
+
}
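A rough usage sketch for ValidateReferenceLinks: a caller resolves "owner/repo#42"-style links parsed out of a comment body into canonical at-uris before persisting them. The ReferenceLink field types, and the caller-side names (ownerDid, execer, comment), are assumptions; only the field and function names appear in this change.

// Hypothetical caller, from outside package db.
refs := []models.ReferenceLink{
	{Kind: models.RefKindIssue, Handle: ownerDid, Repo: "core", SubjectId: 42},
	{Kind: models.RefKindPull, Handle: ownerDid, Repo: "core", SubjectId: 7},
}
uris, err := db.ValidateReferenceLinks(execer, refs)
if err != nil {
	return fmt.Errorf("resolve references: %w", err)
}
comment.References = uris // persisted later via putReferences in the same transaction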
+
+
func findIssueReferences(e Execer, refLinks []models.ReferenceLink) ([]syntax.ATURI, error) {
+
if len(refLinks) == 0 {
+
return nil, nil
+
}
+
vals := make([]string, len(refLinks))
+
args := make([]any, 0, len(refLinks)*4)
+
for i, ref := range refLinks {
+
vals[i] = "(?, ?, ?, ?)"
+
args = append(args, ref.Handle, ref.Repo, ref.SubjectId, ref.CommentId)
+
}
+
query := fmt.Sprintf(
+
`with input(owner_did, name, issue_id, comment_id) as (
+
values %s
+
)
+
select
+
i.did, i.rkey,
+
c.did, c.rkey
+
from input inp
+
join repos r
+
on r.did = inp.owner_did
+
and r.name = inp.name
+
join issues i
+
on i.repo_at = r.at_uri
+
and i.issue_id = inp.issue_id
+
left join issue_comments c
+
on inp.comment_id is not null
+
and c.issue_at = i.at_uri
+
and c.id = inp.comment_id
+
`,
+
strings.Join(vals, ","),
+
)
+
rows, err := e.Query(query, args...)
+
if err != nil {
+
return nil, err
+
}
+
defer rows.Close()
+
+
var uris []syntax.ATURI
+
+
for rows.Next() {
+
// Scan rows
+
var issueOwner, issueRkey string
+
var commentOwner, commentRkey sql.NullString
+
var uri syntax.ATURI
+
if err := rows.Scan(&issueOwner, &issueRkey, &commentOwner, &commentRkey); err != nil {
+
return nil, err
+
}
+
if commentOwner.Valid && commentRkey.Valid {
+
uri = syntax.ATURI(fmt.Sprintf(
+
"at://%s/%s/%s",
+
commentOwner.String,
+
tangled.RepoIssueCommentNSID,
+
commentRkey.String,
+
))
+
} else {
+
uri = syntax.ATURI(fmt.Sprintf(
+
"at://%s/%s/%s",
+
issueOwner,
+
tangled.RepoIssueNSID,
+
issueRkey,
+
))
+
}
+
uris = append(uris, uri)
+
}
+
if err := rows.Err(); err != nil {
+
return nil, fmt.Errorf("iterate rows: %w", err)
+
}
+
+
return uris, nil
+
}
+
+
func findPullReferences(e Execer, refLinks []models.ReferenceLink) ([]syntax.ATURI, error) {
+
if len(refLinks) == 0 {
+
return nil, nil
+
}
+
vals := make([]string, len(refLinks))
+
args := make([]any, 0, len(refLinks)*4)
+
for i, ref := range refLinks {
+
vals[i] = "(?, ?, ?, ?)"
+
args = append(args, ref.Handle, ref.Repo, ref.SubjectId, ref.CommentId)
+
}
+
query := fmt.Sprintf(
+
`with input(owner_did, name, pull_id, comment_id) as (
+
values %s
+
)
+
select
+
p.owner_did, p.rkey,
+
c.comment_at
+
from input inp
+
join repos r
+
on r.did = inp.owner_did
+
and r.name = inp.name
+
join pulls p
+
on p.repo_at = r.at_uri
+
and p.pull_id = inp.pull_id
+
left join pull_comments c
+
on inp.comment_id is not null
+
and c.repo_at = r.at_uri and c.pull_id = p.pull_id
+
and c.id = inp.comment_id
+
`,
+
strings.Join(vals, ","),
+
)
+
rows, err := e.Query(query, args...)
+
if err != nil {
+
return nil, err
+
}
+
defer rows.Close()
+
+
var uris []syntax.ATURI
+
+
for rows.Next() {
+
// Scan rows
+
var pullOwner, pullRkey string
+
var commentUri sql.NullString
+
var uri syntax.ATURI
+
if err := rows.Scan(&pullOwner, &pullRkey, &commentUri); err != nil {
+
return nil, err
+
}
+
if commentUri.Valid {
+
// comment_at already stores the full comment at-uri
+
uri = syntax.ATURI(commentUri.String)
+
} else {
+
uri = syntax.ATURI(fmt.Sprintf(
+
"at://%s/%s/%s",
+
pullOwner,
+
tangled.RepoPullNSID,
+
pullRkey,
+
))
+
}
+
uris = append(uris, uri)
+
}
+
if err := rows.Err(); err != nil {
return nil, fmt.Errorf("iterate rows: %w", err)
}
return uris, nil
+
}
+
+
func putReferences(tx *sql.Tx, fromAt syntax.ATURI, references []syntax.ATURI) error {
+
err := deleteReferences(tx, fromAt)
+
if err != nil {
+
return fmt.Errorf("delete old reference_links: %w", err)
+
}
+
if len(references) == 0 {
+
return nil
+
}
+
+
values := make([]string, 0, len(references))
+
args := make([]any, 0, len(references)*2)
+
for _, ref := range references {
+
values = append(values, "(?, ?)")
+
args = append(args, fromAt, ref)
+
}
+
_, err = tx.Exec(
+
fmt.Sprintf(
+
`insert into reference_links (from_at, to_at)
+
values %s`,
+
strings.Join(values, ","),
+
),
+
args...,
+
)
+
if err != nil {
+
return fmt.Errorf("insert new reference_links: %w", err)
+
}
+
return nil
+
}
+
+
func deleteReferences(tx *sql.Tx, fromAt syntax.ATURI) error {
+
_, err := tx.Exec(`delete from reference_links where from_at = ?`, fromAt)
+
return err
+
}
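putReferences replaces a record's outgoing links wholesale (delete, then bulk insert) inside the caller's transaction, so an edit that adds or drops references leaves reference_links consistent. A minimal sketch of the intended call pattern, with the transaction plumbing assumed:

tx, err := d.Begin() // d is an assumed *sql.DB or wrapper exposing Begin
if err != nil {
	return err
}
defer tx.Rollback()

if err := putReferences(tx, comment.AtUri(), newRefs); err != nil {
	return fmt.Errorf("put reference_links: %w", err)
}
return tx.Commit()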
+
+
func GetReferencesAll(e Execer, filters ...orm.Filter) (map[syntax.ATURI][]syntax.ATURI, error) {
+
var (
+
conditions []string
+
args []any
+
)
+
for _, filter := range filters {
+
conditions = append(conditions, filter.Condition())
+
args = append(args, filter.Arg()...)
+
}
+
+
whereClause := ""
+
if conditions != nil {
+
whereClause = " where " + strings.Join(conditions, " and ")
+
}
+
+
rows, err := e.Query(
+
fmt.Sprintf(
+
`select from_at, to_at from reference_links %s`,
+
whereClause,
+
),
+
args...,
+
)
+
if err != nil {
+
return nil, fmt.Errorf("query reference_links: %w", err)
+
}
+
defer rows.Close()
+
+
result := make(map[syntax.ATURI][]syntax.ATURI)
+
+
for rows.Next() {
+
var from, to syntax.ATURI
+
if err := rows.Scan(&from, &to); err != nil {
+
return nil, fmt.Errorf("scan row: %w", err)
+
}
+
+
result[from] = append(result[from], to)
+
}
+
if err := rows.Err(); err != nil {
+
return nil, fmt.Errorf("iterate rows: %w", err)
+
}
+
+
return result, nil
+
}
+
+
func GetBacklinks(e Execer, target syntax.ATURI) ([]models.RichReferenceLink, error) {
+
rows, err := e.Query(
+
`select from_at from reference_links
+
where to_at = ?`,
+
target,
+
)
+
if err != nil {
+
return nil, fmt.Errorf("query backlinks: %w", err)
+
}
+
defer rows.Close()
+
+
var (
+
backlinks []models.RichReferenceLink
+
backlinksMap = make(map[string][]syntax.ATURI)
+
)
+
for rows.Next() {
+
var from syntax.ATURI
+
if err := rows.Scan(&from); err != nil {
+
return nil, fmt.Errorf("scan row: %w", err)
+
}
+
nsid := from.Collection().String()
+
backlinksMap[nsid] = append(backlinksMap[nsid], from)
+
}
+
if err := rows.Err(); err != nil {
+
return nil, fmt.Errorf("iterate rows: %w", err)
+
}
+
+
var ls []models.RichReferenceLink
+
ls, err = getIssueBacklinks(e, backlinksMap[tangled.RepoIssueNSID])
+
if err != nil {
+
return nil, fmt.Errorf("get issue backlinks: %w", err)
+
}
+
backlinks = append(backlinks, ls...)
+
ls, err = getIssueCommentBacklinks(e, backlinksMap[tangled.RepoIssueCommentNSID])
+
if err != nil {
+
return nil, fmt.Errorf("get issue_comment backlinks: %w", err)
+
}
+
backlinks = append(backlinks, ls...)
+
ls, err = getPullBacklinks(e, backlinksMap[tangled.RepoPullNSID])
+
if err != nil {
+
return nil, fmt.Errorf("get pull backlinks: %w", err)
+
}
+
backlinks = append(backlinks, ls...)
+
ls, err = getPullCommentBacklinks(e, backlinksMap[tangled.RepoPullCommentNSID])
+
if err != nil {
+
return nil, fmt.Errorf("get pull_comment backlinks: %w", err)
+
}
+
backlinks = append(backlinks, ls...)
+
+
return backlinks, nil
+
}
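GetBacklinks fans out by collection NSID so each backlink can be rendered with its issue or pull context. A sketch of how a handler might surface "referenced by" entries for an issue; issue.AtUri() and the exact field types are assumptions, mirroring the comment and pull helpers above:

backlinks, err := db.GetBacklinks(execer, issue.AtUri())
if err != nil {
	return fmt.Errorf("get backlinks: %w", err)
}
for _, l := range backlinks {
	// l.Kind distinguishes issue vs. pull references; l.CommentId is non-nil
	// when the reference came from a comment rather than the body.
	log.Printf("referenced by %s/%s#%d", l.Handle, l.Repo, l.SubjectId)
}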
+
+
func getIssueBacklinks(e Execer, aturis []syntax.ATURI) ([]models.RichReferenceLink, error) {
+
if len(aturis) == 0 {
+
return nil, nil
+
}
+
vals := make([]string, len(aturis))
+
args := make([]any, 0, len(aturis)*2)
+
for i, aturi := range aturis {
+
vals[i] = "(?, ?)"
+
did := aturi.Authority().String()
+
rkey := aturi.RecordKey().String()
+
args = append(args, did, rkey)
+
}
+
rows, err := e.Query(
+
fmt.Sprintf(
+
`select r.did, r.name, i.issue_id, i.title, i.open
+
from issues i
+
join repos r
+
on r.at_uri = i.repo_at
+
where (i.did, i.rkey) in (%s)`,
+
strings.Join(vals, ","),
+
),
+
args...,
+
)
+
if err != nil {
+
return nil, err
+
}
+
defer rows.Close()
+
var refLinks []models.RichReferenceLink
+
for rows.Next() {
+
var l models.RichReferenceLink
+
l.Kind = models.RefKindIssue
+
if err := rows.Scan(&l.Handle, &l.Repo, &l.SubjectId, &l.Title, &l.State); err != nil {
+
return nil, err
+
}
+
refLinks = append(refLinks, l)
+
}
+
if err := rows.Err(); err != nil {
+
return nil, fmt.Errorf("iterate rows: %w", err)
+
}
+
return refLinks, nil
+
}
+
+
func getIssueCommentBacklinks(e Execer, aturis []syntax.ATURI) ([]models.RichReferenceLink, error) {
+
if len(aturis) == 0 {
+
return nil, nil
+
}
+
filter := orm.FilterIn("c.at_uri", aturis)
+
rows, err := e.Query(
+
fmt.Sprintf(
+
`select r.did, r.name, i.issue_id, c.id, i.title, i.open
+
from issue_comments c
+
join issues i
+
on i.at_uri = c.issue_at
+
join repos r
+
on r.at_uri = i.repo_at
+
where %s`,
+
filter.Condition(),
+
),
+
filter.Arg()...,
+
)
+
if err != nil {
+
return nil, err
+
}
+
defer rows.Close()
+
var refLinks []models.RichReferenceLink
+
for rows.Next() {
+
var l models.RichReferenceLink
+
l.Kind = models.RefKindIssue
+
l.CommentId = new(int)
+
if err := rows.Scan(&l.Handle, &l.Repo, &l.SubjectId, l.CommentId, &l.Title, &l.State); err != nil {
+
return nil, err
+
}
+
refLinks = append(refLinks, l)
+
}
+
if err := rows.Err(); err != nil {
+
return nil, fmt.Errorf("iterate rows: %w", err)
+
}
+
return refLinks, nil
+
}
+
+
func getPullBacklinks(e Execer, aturis []syntax.ATURI) ([]models.RichReferenceLink, error) {
+
if len(aturis) == 0 {
+
return nil, nil
+
}
+
vals := make([]string, len(aturis))
+
args := make([]any, 0, len(aturis)*2)
+
for i, aturi := range aturis {
+
vals[i] = "(?, ?)"
+
did := aturi.Authority().String()
+
rkey := aturi.RecordKey().String()
+
args = append(args, did, rkey)
+
}
+
rows, err := e.Query(
+
fmt.Sprintf(
+
`select r.did, r.name, p.pull_id, p.title, p.state
+
from pulls p
+
join repos r
+
on r.at_uri = p.repo_at
+
where (p.owner_did, p.rkey) in (%s)`,
+
strings.Join(vals, ","),
+
),
+
args...,
+
)
+
if err != nil {
+
return nil, err
+
}
+
defer rows.Close()
+
var refLinks []models.RichReferenceLink
+
for rows.Next() {
+
var l models.RichReferenceLink
+
l.Kind = models.RefKindPull
+
if err := rows.Scan(&l.Handle, &l.Repo, &l.SubjectId, &l.Title, &l.State); err != nil {
+
return nil, err
+
}
+
refLinks = append(refLinks, l)
+
}
+
if err := rows.Err(); err != nil {
+
return nil, fmt.Errorf("iterate rows: %w", err)
+
}
+
return refLinks, nil
+
}
+
+
func getPullCommentBacklinks(e Execer, aturis []syntax.ATURI) ([]models.RichReferenceLink, error) {
+
if len(aturis) == 0 {
+
return nil, nil
+
}
+
filter := orm.FilterIn("c.comment_at", aturis)
+
rows, err := e.Query(
+
fmt.Sprintf(
+
`select r.did, r.name, p.pull_id, c.id, p.title, p.state
+
from repos r
+
join pulls p
+
on r.at_uri = p.repo_at
+
join pull_comments c
+
on r.at_uri = c.repo_at and p.pull_id = c.pull_id
+
where %s`,
+
filter.Condition(),
+
),
+
filter.Arg()...,
+
)
+
if err != nil {
+
return nil, err
+
}
+
defer rows.Close()
+
var refLinks []models.RichReferenceLink
+
for rows.Next() {
+
var l models.RichReferenceLink
+
l.Kind = models.RefKindPull
+
l.CommentId = new(int)
+
if err := rows.Scan(&l.Handle, &l.Repo, &l.SubjectId, l.CommentId, &l.Title, &l.State); err != nil {
+
return nil, err
+
}
+
refLinks = append(refLinks, l)
+
}
+
if err := rows.Err(); err != nil {
+
return nil, fmt.Errorf("iterate rows: %w", err)
+
}
+
return refLinks, nil
+
}
+4 -3
appview/db/registration.go
···
"time"
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
)
-
func GetRegistrations(e Execer, filters ...filter) ([]models.Registration, error) {
+
func GetRegistrations(e Execer, filters ...orm.Filter) ([]models.Registration, error) {
var registrations []models.Registration
var conditions []string
···
return registrations, nil
}
-
func MarkRegistered(e Execer, filters ...filter) error {
+
func MarkRegistered(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
···
return err
}
-
func DeleteKnot(e Execer, filters ...filter) error {
+
func DeleteKnot(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
+70 -48
appview/db/repos.go
···
"time"
"github.com/bluesky-social/indigo/atproto/syntax"
-
securejoin "github.com/cyphar/filepath-securejoin"
-
"tangled.org/core/api/tangled"
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
)
-
type Repo struct {
-
Id int64
-
Did string
-
Name string
-
Knot string
-
Rkey string
-
Created time.Time
-
Description string
-
Spindle string
-
-
// optionally, populate this when querying for reverse mappings
-
RepoStats *models.RepoStats
-
-
// optional
-
Source string
-
}
-
-
func (r Repo) RepoAt() syntax.ATURI {
-
return syntax.ATURI(fmt.Sprintf("at://%s/%s/%s", r.Did, tangled.RepoNSID, r.Rkey))
-
}
-
-
func (r Repo) DidSlashRepo() string {
-
p, _ := securejoin.SecureJoin(r.Did, r.Name)
-
return p
-
}
-
-
func GetRepos(e Execer, limit int, filters ...filter) ([]models.Repo, error) {
+
func GetRepos(e Execer, limit int, filters ...orm.Filter) ([]models.Repo, error) {
repoMap := make(map[syntax.ATURI]*models.Repo)
var conditions []string
···
rkey,
created,
description,
+
website,
+
topics,
source,
spindle
from
···
for rows.Next() {
var repo models.Repo
var createdAt string
-
var description, source, spindle sql.NullString
+
var description, website, topicStr, source, spindle sql.NullString
err := rows.Scan(
&repo.Id,
···
&repo.Rkey,
&createdAt,
&description,
+
&website,
+
&topicStr,
&source,
&spindle,
)
···
}
if description.Valid {
repo.Description = description.String
+
}
+
if website.Valid {
+
repo.Website = website.String
+
}
+
if topicStr.Valid {
+
repo.Topics = strings.Fields(topicStr.String)
}
if source.Valid {
repo.Source = source.String
···
starCountQuery := fmt.Sprintf(
`select
-
repo_at, count(1)
+
subject_at, count(1)
from stars
-
where repo_at in (%s)
-
group by repo_at`,
+
where subject_at in (%s)
+
group by subject_at`,
inClause,
)
rows, err = e.Query(starCountQuery, args...)
···
}
// helper to get exactly one repo
-
func GetRepo(e Execer, filters ...filter) (*models.Repo, error) {
+
func GetRepo(e Execer, filters ...orm.Filter) (*models.Repo, error) {
repos, err := GetRepos(e, 0, filters...)
if err != nil {
return nil, err
···
return &repos[0], nil
}
-
func CountRepos(e Execer, filters ...filter) (int64, error) {
+
func CountRepos(e Execer, filters ...orm.Filter) (int64, error) {
var conditions []string
var args []any
for _, filter := range filters {
···
func GetRepoByAtUri(e Execer, atUri string) (*models.Repo, error) {
var repo models.Repo
var nullableDescription sql.NullString
+
var nullableWebsite sql.NullString
+
var nullableTopicStr sql.NullString
-
row := e.QueryRow(`select id, did, name, knot, created, rkey, description from repos where at_uri = ?`, atUri)
+
row := e.QueryRow(`select id, did, name, knot, created, rkey, description, website, topics from repos where at_uri = ?`, atUri)
var createdAt string
-
if err := row.Scan(&repo.Id, &repo.Did, &repo.Name, &repo.Knot, &createdAt, &repo.Rkey, &nullableDescription); err != nil {
+
if err := row.Scan(&repo.Id, &repo.Did, &repo.Name, &repo.Knot, &createdAt, &repo.Rkey, &nullableDescription, &nullableWebsite, &nullableTopicStr); err != nil {
return nil, err
}
createdAtTime, _ := time.Parse(time.RFC3339, createdAt)
···
if nullableDescription.Valid {
repo.Description = nullableDescription.String
-
} else {
-
repo.Description = ""
+
}
+
if nullableWebsite.Valid {
+
repo.Website = nullableWebsite.String
+
}
+
if nullableTopicStr.Valid {
+
repo.Topics = strings.Fields(nullableTopicStr.String)
}
return &repo, nil
}
+
func PutRepo(tx *sql.Tx, repo models.Repo) error {
+
_, err := tx.Exec(
+
`update repos
+
set knot = ?, description = ?, website = ?, topics = ?
+
where did = ? and rkey = ?
+
`,
+
repo.Knot, repo.Description, repo.Website, repo.TopicStr(), repo.Did, repo.Rkey,
+
)
+
return err
+
}
+
func AddRepo(tx *sql.Tx, repo *models.Repo) error {
_, err := tx.Exec(
`insert into repos
-
(did, name, knot, rkey, at_uri, description, source)
-
values (?, ?, ?, ?, ?, ?, ?)`,
-
repo.Did, repo.Name, repo.Knot, repo.Rkey, repo.RepoAt().String(), repo.Description, repo.Source,
+
(did, name, knot, rkey, at_uri, description, website, topics, source)
+
values (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
+
repo.Did, repo.Name, repo.Knot, repo.Rkey, repo.RepoAt().String(), repo.Description, repo.Website, repo.TopicStr(), repo.Source,
)
if err != nil {
return fmt.Errorf("failed to insert repo: %w", err)
···
return nullableSource.String, nil
}
+
func GetRepoSourceRepo(e Execer, repoAt syntax.ATURI) (*models.Repo, error) {
+
source, err := GetRepoSource(e, repoAt)
+
if source == "" || errors.Is(err, sql.ErrNoRows) {
+
return nil, nil
+
}
+
if err != nil {
+
return nil, err
+
}
+
return GetRepoByAtUri(e, source)
+
}
+
func GetForksByDid(e Execer, did string) ([]models.Repo, error) {
var repos []models.Repo
rows, err := e.Query(
-
`select distinct r.id, r.did, r.name, r.knot, r.rkey, r.description, r.created, r.source
+
`select distinct r.id, r.did, r.name, r.knot, r.rkey, r.description, r.website, r.created, r.source
from repos r
left join collaborators c on r.at_uri = c.repo_at
where (r.did = ? or c.subject_did = ?)
···
var repo models.Repo
var createdAt string
var nullableDescription sql.NullString
+
var nullableWebsite sql.NullString
var nullableSource sql.NullString
-
err := rows.Scan(&repo.Id, &repo.Did, &repo.Name, &repo.Knot, &repo.Rkey, &nullableDescription, &createdAt, &nullableSource)
+
err := rows.Scan(&repo.Id, &repo.Did, &repo.Name, &repo.Knot, &repo.Rkey, &nullableDescription, &nullableWebsite, &createdAt, &nullableSource)
if err != nil {
return nil, err
}
···
var repo models.Repo
var createdAt string
var nullableDescription sql.NullString
+
var nullableWebsite sql.NullString
+
var nullableTopicStr sql.NullString
var nullableSource sql.NullString
row := e.QueryRow(
-
`select id, did, name, knot, rkey, description, created, source
+
`select id, did, name, knot, rkey, description, website, topics, created, source
from repos
where did = ? and name = ? and source is not null and source != ''`,
did, name,
)
-
err := row.Scan(&repo.Id, &repo.Did, &repo.Name, &repo.Knot, &repo.Rkey, &nullableDescription, &createdAt, &nullableSource)
+
err := row.Scan(&repo.Id, &repo.Did, &repo.Name, &repo.Knot, &repo.Rkey, &nullableDescription, &nullableWebsite, &nullableTopicStr, &createdAt, &nullableSource)
if err != nil {
return nil, err
}
if nullableDescription.Valid {
repo.Description = nullableDescription.String
+
}
+
+
if nullableWebsite.Valid {
+
repo.Website = nullableWebsite.String
+
}
+
+
if nullableTopicStr.Valid {
+
repo.Topics = strings.Fields(nullableTopicStr.String)
}
if nullableSource.Valid {
···
return err
}
-
func UnsubscribeLabel(e Execer, filters ...filter) error {
+
func UnsubscribeLabel(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
···
return err
}
-
func GetRepoLabels(e Execer, filters ...filter) ([]models.RepoLabel, error) {
+
func GetRepoLabels(e Execer, filters ...orm.Filter) ([]models.RepoLabel, error) {
var conditions []string
var args []any
for _, filter := range filters {
+6 -5
appview/db/spindle.go
···
"time"
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
)
-
func GetSpindles(e Execer, filters ...filter) ([]models.Spindle, error) {
+
func GetSpindles(e Execer, filters ...orm.Filter) ([]models.Spindle, error) {
var spindles []models.Spindle
var conditions []string
···
return err
}
-
func VerifySpindle(e Execer, filters ...filter) (int64, error) {
+
func VerifySpindle(e Execer, filters ...orm.Filter) (int64, error) {
var conditions []string
var args []any
for _, filter := range filters {
···
return res.RowsAffected()
}
-
func DeleteSpindle(e Execer, filters ...filter) error {
+
func DeleteSpindle(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
···
return err
}
-
func RemoveSpindleMember(e Execer, filters ...filter) error {
+
func RemoveSpindleMember(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
···
return err
}
-
func GetSpindleMembers(e Execer, filters ...filter) ([]models.SpindleMember, error) {
+
func GetSpindleMembers(e Execer, filters ...orm.Filter) ([]models.SpindleMember, error) {
var members []models.SpindleMember
var conditions []string
+43 -102
appview/db/star.go
···
"github.com/bluesky-social/indigo/atproto/syntax"
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
)
func AddStar(e Execer, star *models.Star) error {
-
query := `insert or ignore into stars (starred_by_did, repo_at, rkey) values (?, ?, ?)`
+
query := `insert or ignore into stars (did, subject_at, rkey) values (?, ?, ?)`
_, err := e.Exec(
query,
-
star.StarredByDid,
+
star.Did,
star.RepoAt.String(),
star.Rkey,
)
···
}
// Get a star record
-
func GetStar(e Execer, starredByDid string, repoAt syntax.ATURI) (*models.Star, error) {
+
func GetStar(e Execer, did string, subjectAt syntax.ATURI) (*models.Star, error) {
query := `
-
select starred_by_did, repo_at, created, rkey
+
select did, subject_at, created, rkey
from stars
-
where starred_by_did = ? and repo_at = ?`
-
row := e.QueryRow(query, starredByDid, repoAt)
+
where did = ? and subject_at = ?`
+
row := e.QueryRow(query, did, subjectAt)
var star models.Star
var created string
-
err := row.Scan(&star.StarredByDid, &star.RepoAt, &created, &star.Rkey)
+
err := row.Scan(&star.Did, &star.RepoAt, &created, &star.Rkey)
if err != nil {
return nil, err
}
···
}
// Remove a star
-
func DeleteStar(e Execer, starredByDid string, repoAt syntax.ATURI) error {
-
_, err := e.Exec(`delete from stars where starred_by_did = ? and repo_at = ?`, starredByDid, repoAt)
+
func DeleteStar(e Execer, did string, subjectAt syntax.ATURI) error {
+
_, err := e.Exec(`delete from stars where did = ? and subject_at = ?`, did, subjectAt)
return err
}
// Remove a star
-
func DeleteStarByRkey(e Execer, starredByDid string, rkey string) error {
-
_, err := e.Exec(`delete from stars where starred_by_did = ? and rkey = ?`, starredByDid, rkey)
+
func DeleteStarByRkey(e Execer, did string, rkey string) error {
+
_, err := e.Exec(`delete from stars where did = ? and rkey = ?`, did, rkey)
return err
}
-
func GetStarCount(e Execer, repoAt syntax.ATURI) (int, error) {
+
func GetStarCount(e Execer, subjectAt syntax.ATURI) (int, error) {
stars := 0
err := e.QueryRow(
-
`select count(starred_by_did) from stars where repo_at = ?`, repoAt).Scan(&stars)
+
`select count(did) from stars where subject_at = ?`, subjectAt).Scan(&stars)
if err != nil {
return 0, err
}
···
}
query := fmt.Sprintf(`
-
SELECT repo_at
+
SELECT subject_at
FROM stars
-
WHERE starred_by_did = ? AND repo_at IN (%s)
+
WHERE did = ? AND subject_at IN (%s)
`, strings.Join(placeholders, ","))
rows, err := e.Query(query, args...)
···
return result, nil
}
-
func GetStarStatus(e Execer, userDid string, repoAt syntax.ATURI) bool {
-
statuses, err := getStarStatuses(e, userDid, []syntax.ATURI{repoAt})
+
func GetStarStatus(e Execer, userDid string, subjectAt syntax.ATURI) bool {
+
statuses, err := getStarStatuses(e, userDid, []syntax.ATURI{subjectAt})
if err != nil {
return false
}
-
return statuses[repoAt.String()]
+
return statuses[subjectAt.String()]
}
// GetStarStatuses returns a map of repo URIs to star status for a given user
-
func GetStarStatuses(e Execer, userDid string, repoAts []syntax.ATURI) (map[string]bool, error) {
-
return getStarStatuses(e, userDid, repoAts)
+
func GetStarStatuses(e Execer, userDid string, subjectAts []syntax.ATURI) (map[string]bool, error) {
+
return getStarStatuses(e, userDid, subjectAts)
}
-
func GetStars(e Execer, limit int, filters ...filter) ([]models.Star, error) {
+
+
// GetRepoStars returns a list of stars, each holding its target repository.
+
// Stars whose subject at-uri does not match a known repo are ignored.
+
func GetRepoStars(e Execer, limit int, filters ...orm.Filter) ([]models.RepoStar, error) {
var conditions []string
var args []any
for _, filter := range filters {
···
}
repoQuery := fmt.Sprintf(
-
`select starred_by_did, repo_at, created, rkey
+
`select did, subject_at, created, rkey
from stars
%s
order by created desc
···
for rows.Next() {
var star models.Star
var created string
-
err := rows.Scan(&star.StarredByDid, &star.RepoAt, &created, &star.Rkey)
+
err := rows.Scan(&star.Did, &star.RepoAt, &created, &star.Rkey)
if err != nil {
return nil, err
}
···
return nil, nil
}
-
repos, err := GetRepos(e, 0, FilterIn("at_uri", args))
+
repos, err := GetRepos(e, 0, orm.FilterIn("at_uri", args))
if err != nil {
return nil, err
}
+
var repoStars []models.RepoStar
for _, r := range repos {
if stars, ok := starMap[string(r.RepoAt())]; ok {
-
for i := range stars {
-
stars[i].Repo = &r
+
for _, star := range stars {
+
repoStars = append(repoStars, models.RepoStar{
+
Star: star,
+
Repo: &r,
+
})
}
}
}
-
var stars []models.Star
-
for _, s := range starMap {
-
stars = append(stars, s...)
-
}
-
-
slices.SortFunc(stars, func(a, b models.Star) int {
+
slices.SortFunc(repoStars, func(a, b models.RepoStar) int {
if a.Created.After(b.Created) {
return -1
}
···
return 0
})
-
return stars, nil
+
return repoStars, nil
}
-
func CountStars(e Execer, filters ...filter) (int64, error) {
+
func CountStars(e Execer, filters ...orm.Filter) (int64, error) {
var conditions []string
var args []any
for _, filter := range filters {
···
return count, nil
}
-
func GetAllStars(e Execer, limit int) ([]models.Star, error) {
-
var stars []models.Star
-
-
rows, err := e.Query(`
-
select
-
s.starred_by_did,
-
s.repo_at,
-
s.rkey,
-
s.created,
-
r.did,
-
r.name,
-
r.knot,
-
r.rkey,
-
r.created
-
from stars s
-
join repos r on s.repo_at = r.at_uri
-
`)
-
-
if err != nil {
-
return nil, err
-
}
-
defer rows.Close()
-
-
for rows.Next() {
-
var star models.Star
-
var repo models.Repo
-
var starCreatedAt, repoCreatedAt string
-
-
if err := rows.Scan(
-
&star.StarredByDid,
-
&star.RepoAt,
-
&star.Rkey,
-
&starCreatedAt,
-
&repo.Did,
-
&repo.Name,
-
&repo.Knot,
-
&repo.Rkey,
-
&repoCreatedAt,
-
); err != nil {
-
return nil, err
-
}
-
-
star.Created, err = time.Parse(time.RFC3339, starCreatedAt)
-
if err != nil {
-
star.Created = time.Now()
-
}
-
repo.Created, err = time.Parse(time.RFC3339, repoCreatedAt)
-
if err != nil {
-
repo.Created = time.Now()
-
}
-
star.Repo = &repo
-
-
stars = append(stars, star)
-
}
-
-
if err := rows.Err(); err != nil {
-
return nil, err
-
}
-
-
return stars, nil
-
}
-
// GetTopStarredReposLastWeek returns the top 8 most starred repositories from the last week
func GetTopStarredReposLastWeek(e Execer) ([]models.Repo, error) {
// first, get the top repo URIs by star count from the last week
query := `
with recent_starred_repos as (
-
select distinct repo_at
+
select distinct subject_at
from stars
where created >= datetime('now', '-7 days')
),
repo_star_counts as (
select
-
s.repo_at,
+
s.subject_at,
count(*) as stars_gained_last_week
from stars s
-
join recent_starred_repos rsr on s.repo_at = rsr.repo_at
+
join recent_starred_repos rsr on s.subject_at = rsr.subject_at
where s.created >= datetime('now', '-7 days')
-
group by s.repo_at
+
group by s.subject_at
)
-
select rsc.repo_at
+
select rsc.subject_at
from repo_star_counts rsc
order by rsc.stars_gained_last_week desc
limit 8
···
}
// get full repo data
-
repos, err := GetRepos(e, 0, FilterIn("at_uri", repoUris))
+
repos, err := GetRepos(e, 0, orm.FilterIn("at_uri", repoUris))
if err != nil {
return nil, err
}
+4 -3
appview/db/strings.go
···
"time"
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
)
func AddString(e Execer, s models.String) error {
···
return err
}
-
func GetStrings(e Execer, limit int, filters ...filter) ([]models.String, error) {
+
func GetStrings(e Execer, limit int, filters ...orm.Filter) ([]models.String, error) {
var all []models.String
var conditions []string
···
return all, nil
}
-
func CountStrings(e Execer, filters ...filter) (int64, error) {
+
func CountStrings(e Execer, filters ...orm.Filter) (int64, error) {
var conditions []string
var args []any
for _, filter := range filters {
···
return count, nil
}
-
func DeleteString(e Execer, filters ...filter) error {
+
func DeleteString(e Execer, filters ...orm.Filter) error {
var conditions []string
var args []any
for _, filter := range filters {
+11 -20
appview/db/timeline.go
···
"github.com/bluesky-social/indigo/atproto/syntax"
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
)
// TODO: this gathers heterogeneous events from different sources and aggregates
···
}
func getTimelineRepos(e Execer, limit int, loggedInUserDid string, userIsFollowing []string) ([]models.TimelineEvent, error) {
-
filters := make([]filter, 0)
+
filters := make([]orm.Filter, 0)
if userIsFollowing != nil {
-
filters = append(filters, FilterIn("did", userIsFollowing))
+
filters = append(filters, orm.FilterIn("did", userIsFollowing))
}
repos, err := GetRepos(e, limit, filters...)
···
var origRepos []models.Repo
if args != nil {
-
origRepos, err = GetRepos(e, 0, FilterIn("at_uri", args))
+
origRepos, err = GetRepos(e, 0, orm.FilterIn("at_uri", args))
}
if err != nil {
return nil, err
···
}
func getTimelineStars(e Execer, limit int, loggedInUserDid string, userIsFollowing []string) ([]models.TimelineEvent, error) {
-
filters := make([]filter, 0)
+
filters := make([]orm.Filter, 0)
if userIsFollowing != nil {
-
filters = append(filters, FilterIn("starred_by_did", userIsFollowing))
+
filters = append(filters, orm.FilterIn("did", userIsFollowing))
}
-
stars, err := GetStars(e, limit, filters...)
+
stars, err := GetRepoStars(e, limit, filters...)
if err != nil {
return nil, err
}
-
// filter star records without a repo
-
n := 0
-
for _, s := range stars {
-
if s.Repo != nil {
-
stars[n] = s
-
n++
-
}
-
}
-
stars = stars[:n]
-
var repos []models.Repo
for _, s := range stars {
repos = append(repos, *s.Repo)
···
isStarred, starCount := getRepoStarInfo(s.Repo, starStatuses)
events = append(events, models.TimelineEvent{
-
Star: &s,
+
RepoStar: &s,
EventAt: s.Created,
IsStarred: isStarred,
StarCount: starCount,
···
}
func getTimelineFollows(e Execer, limit int, loggedInUserDid string, userIsFollowing []string) ([]models.TimelineEvent, error) {
-
filters := make([]filter, 0)
+
filters := make([]orm.Filter, 0)
if userIsFollowing != nil {
-
filters = append(filters, FilterIn("user_did", userIsFollowing))
+
filters = append(filters, orm.FilterIn("user_did", userIsFollowing))
}
follows, err := GetFollows(e, limit, filters...)
···
return nil, nil
}
-
profiles, err := GetProfiles(e, FilterIn("did", subjects))
+
profiles, err := GetProfiles(e, orm.FilterIn("did", subjects))
if err != nil {
return nil, err
}
+7 -12
appview/email/email.go
···
import (
"fmt"
"net"
-
"regexp"
+
"net/mail"
"strings"
"github.com/resend/resend-go/v2"
···
}
func IsValidEmail(email string) bool {
-
// Basic length check
-
if len(email) < 3 || len(email) > 254 {
+
// Reject whitespace (ParseAddress normalizes it away)
+
if strings.ContainsAny(email, " \t\n\r") {
return false
}
-
// Regular expression for email validation (RFC 5322 compliant)
-
pattern := `^[a-zA-Z0-9.!#$%&'*+/=?^_\x60{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$`
-
-
// Compile regex
-
regex := regexp.MustCompile(pattern)
-
-
// Check if email matches regex pattern
-
if !regex.MatchString(email) {
+
// Use stdlib RFC 5322 parser
+
addr, err := mail.ParseAddress(email)
+
if err != nil {
return false
}
// Split email into local and domain parts
-
parts := strings.Split(email, "@")
+
parts := strings.Split(addr.Address, "@")
domain := parts[1]
mx, err := net.LookupMX(domain)
+53
appview/email/email_test.go
···
+
package email
+
+
import (
+
"testing"
+
)
+
+
func TestIsValidEmail(t *testing.T) {
+
tests := []struct {
+
name string
+
email string
+
want bool
+
}{
+
// Valid emails using RFC 2606 reserved domains
+
{"standard email", "user@example.com", true},
+
{"single char local", "a@example.com", true},
+
{"dot in middle", "first.last@example.com", true},
+
{"multiple dots", "a.b.c@example.com", true},
+
{"plus tag", "user+tag@example.com", true},
+
{"numbers", "user123@example.com", true},
+
{"example.org", "user@example.org", true},
+
{"example.net", "user@example.net", true},
+
+
// Invalid format - rejected by mail.ParseAddress
+
{"empty string", "", false},
+
{"no at sign", "userexample.com", false},
+
{"no domain", "user@", false},
+
{"no local part", "@example.com", false},
+
{"double at", "user@@example.com", false},
+
{"just at sign", "@", false},
+
{"leading dot", ".user@example.com", false},
+
{"trailing dot", "user.@example.com", false},
+
{"consecutive dots", "user..name@example.com", false},
+
+
// Whitespace - rejected before parsing
+
{"space in local", "user @example.com", false},
+
{"space in domain", "user@ example.com", false},
+
{"tab", "user\t@example.com", false},
+
{"newline", "user\n@example.com", false},
+
+
// MX lookup - using RFC 2606 reserved TLDs (guaranteed no MX)
+
{"invalid TLD", "user@example.invalid", false},
+
{"test TLD", "user@mail.test", false},
+
}
+
+
for _, tt := range tests {
+
t.Run(tt.name, func(t *testing.T) {
+
got := IsValidEmail(tt.email)
+
if got != tt.want {
+
t.Errorf("IsValidEmail(%q) = %v, want %v", tt.email, got, tt.want)
+
}
+
})
+
}
+
}
+20
appview/indexer/base36/base36.go
···
+
// mostly copied from gitea/modules/indexer/internal/base32
+
+
package base36
+
+
import (
+
"fmt"
+
"strconv"
+
)
+
+
func Encode(i int64) string {
+
return strconv.FormatInt(i, 36)
+
}
+
+
func Decode(s string) (int64, error) {
+
i, err := strconv.ParseInt(s, 36, 64)
+
if err != nil {
+
return 0, fmt.Errorf("invalid base36 integer %q: %w", s, err)
+
}
+
return i, nil
+
}
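Index document IDs are the numeric row IDs encoded in base36, keeping them short and reversible. A self-contained round-trip example:

id := base36.Encode(12345)  // "9ix"
n, err := base36.Decode(id) // 12345, nil
if err != nil || n != 12345 {
	panic("round-trip failed")
}
_, err = base36.Decode("not base36!") // returns a wrapped strconv error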
+58
appview/indexer/bleve/batch.go
···
+
// Copyright 2021 The Gitea Authors. All rights reserved.
+
// SPDX-License-Identifier: MIT
+
+
package bleveutil
+
+
import (
+
"github.com/blevesearch/bleve/v2"
+
)
+
+
// FlushingBatch is a batch of operations that automatically flushes to the
+
// underlying index once it reaches a certain size.
+
type FlushingBatch struct {
+
maxBatchSize int
+
batch *bleve.Batch
+
index bleve.Index
+
}
+
+
// NewFlushingBatch creates a new flushing batch for the specified index. Once
+
// the number of operations in the batch reaches the specified limit, the batch
+
// automatically flushes its operations to the index.
+
func NewFlushingBatch(index bleve.Index, maxBatchSize int) *FlushingBatch {
+
return &FlushingBatch{
+
maxBatchSize: maxBatchSize,
+
batch: index.NewBatch(),
+
index: index,
+
}
+
}
+
+
// Index adds an index operation to the batch
+
func (b *FlushingBatch) Index(id string, data any) error {
+
if err := b.batch.Index(id, data); err != nil {
+
return err
+
}
+
return b.flushIfFull()
+
}
+
+
// Delete adds a delete operation to the batch
+
func (b *FlushingBatch) Delete(id string) error {
+
b.batch.Delete(id)
+
return b.flushIfFull()
+
}
+
+
func (b *FlushingBatch) flushIfFull() error {
+
if b.batch.Size() < b.maxBatchSize {
+
return nil
+
}
+
return b.Flush()
+
}
+
+
// Flush submits the batch to the index and starts a new one
+
func (b *FlushingBatch) Flush() error {
+
err := b.index.Batch(b.batch)
+
if err != nil {
+
return err
+
}
+
b.batch = b.index.NewBatch()
+
return nil
+
}
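A sketch of how a caller drives FlushingBatch, assuming an already-opened bleve.Index; the batch size of 20 mirrors the maxBatchSize used by the indexers below:

package example

import (
	"github.com/blevesearch/bleve/v2"

	bleveutil "tangled.org/core/appview/indexer/bleve"
)

// indexAll streams documents through a FlushingBatch; the underlying index
// is written every 20 operations and once more for the remainder.
func indexAll(idx bleve.Index, docs map[string]any) error {
	batch := bleveutil.NewFlushingBatch(idx, 20)
	for id, doc := range docs {
		if err := batch.Index(id, doc); err != nil {
			return err
		}
	}
	return batch.Flush()
}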
+26
appview/indexer/bleve/query.go
···
+
package bleveutil
+
+
import (
+
"github.com/blevesearch/bleve/v2"
+
"github.com/blevesearch/bleve/v2/search/query"
+
)
+
+
func MatchAndQuery(field, keyword, analyzer string, fuzziness int) query.Query {
+
q := bleve.NewMatchQuery(keyword)
+
q.FieldVal = field
+
q.Analyzer = analyzer
+
q.Fuzziness = fuzziness
+
return q
+
}
+
+
func BoolFieldQuery(field string, val bool) query.Query {
+
q := bleve.NewBoolFieldQuery(val)
+
q.FieldVal = field
+
return q
+
}
+
+
func KeywordFieldQuery(field, keyword string) query.Query {
+
q := bleve.NewTermQuery(keyword)
+
q.FieldVal = field
+
return q
+
}
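These helpers compose into the conjunction queries built by the issue and pull indexers below; a sketch with placeholder values (the "standard" analyzer here is bleve's built-in, not the custom analyzer registered later):

package example

import (
	"github.com/blevesearch/bleve/v2"

	bleveutil "tangled.org/core/appview/indexer/bleve"
)

// buildSearch ANDs a keyword match on title/body with exact repo and
// open-state filters, mirroring the issue indexer's Search below.
func buildSearch(keyword, repoAt string) *bleve.SearchRequest {
	q := bleve.NewConjunctionQuery(
		bleve.NewDisjunctionQuery(
			bleveutil.MatchAndQuery("title", keyword, "standard", 0),
			bleveutil.MatchAndQuery("body", keyword, "standard", 0),
		),
		bleveutil.KeywordFieldQuery("repo_at", repoAt),
		bleveutil.BoolFieldQuery("is_open", true),
	)
	return bleve.NewSearchRequestOptions(q, 30, 0, false)
}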
+36
appview/indexer/indexer.go
···
+
package indexer
+
+
import (
+
"context"
+
"log/slog"
+
+
"tangled.org/core/appview/db"
+
issues_indexer "tangled.org/core/appview/indexer/issues"
+
pulls_indexer "tangled.org/core/appview/indexer/pulls"
+
"tangled.org/core/appview/notify"
+
tlog "tangled.org/core/log"
+
)
+
+
type Indexer struct {
+
Issues *issues_indexer.Indexer
+
Pulls *pulls_indexer.Indexer
+
logger *slog.Logger
+
notify.BaseNotifier
+
}
+
+
func New(logger *slog.Logger) *Indexer {
+
return &Indexer{
+
issues_indexer.NewIndexer("indexes/issues.bleve"),
+
pulls_indexer.NewIndexer("indexes/pulls.bleve"),
+
logger,
+
notify.BaseNotifier{},
+
}
+
}
+
+
// Init initializes all indexers
+
func (ix *Indexer) Init(ctx context.Context, db *db.DB) error {
+
ctx = tlog.IntoContext(ctx, ix.logger)
+
ix.Issues.Init(ctx, db)
+
ix.Pulls.Init(ctx, db)
+
return nil
+
}
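Hypothetical startup wiring for the combined indexer; the appview presumably does the equivalent elsewhere in this changeset:

package example

import (
	"context"
	"log/slog"

	"tangled.org/core/appview/db"
	"tangled.org/core/appview/indexer"
)

// setupIndexer builds the issue/pull indexers and populates them on first run.
func setupIndexer(ctx context.Context, database *db.DB, logger *slog.Logger) (*indexer.Indexer, error) {
	ix := indexer.New(logger)
	if err := ix.Init(ctx, database); err != nil {
		return nil, err
	}
	// ix also satisfies notify.Notifier (see notifier.go below), so it can be
	// registered with the appview's notification fan-out.
	return ix, nil
}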
+257
appview/indexer/issues/indexer.go
···
+
// heavily inspired by gitea's model (basically copy-pasted)
+
package issues_indexer
+
+
import (
+
"context"
+
"errors"
+
"log"
+
"os"
+
+
"github.com/blevesearch/bleve/v2"
+
"github.com/blevesearch/bleve/v2/analysis/analyzer/custom"
+
"github.com/blevesearch/bleve/v2/analysis/token/camelcase"
+
"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
+
"github.com/blevesearch/bleve/v2/analysis/token/unicodenorm"
+
"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
+
"github.com/blevesearch/bleve/v2/index/upsidedown"
+
"github.com/blevesearch/bleve/v2/mapping"
+
"github.com/blevesearch/bleve/v2/search/query"
+
"tangled.org/core/appview/db"
+
"tangled.org/core/appview/indexer/base36"
+
"tangled.org/core/appview/indexer/bleve"
+
"tangled.org/core/appview/models"
+
"tangled.org/core/appview/pagination"
+
tlog "tangled.org/core/log"
+
)
+
+
const (
+
issueIndexerAnalyzer = "issueIndexer"
+
issueIndexerDocType = "issueIndexerDocType"
+
+
unicodeNormalizeName = "uicodeNormalize"
+
)
+
+
type Indexer struct {
+
indexer bleve.Index
+
path string
+
}
+
+
func NewIndexer(indexDir string) *Indexer {
+
return &Indexer{
+
path: indexDir,
+
}
+
}
+
+
// Init initializes the indexer
+
func (ix *Indexer) Init(ctx context.Context, e db.Execer) {
+
l := tlog.FromContext(ctx)
+
existed, err := ix.initialize(ctx)
+
if err != nil {
+
log.Fatalln("failed to initialize issue indexer", err)
+
}
+
if !existed {
+
l.Debug("Populating the issue indexer")
+
err := PopulateIndexer(ctx, ix, e)
+
if err != nil {
+
log.Fatalln("failed to populate issue indexer", err)
+
}
+
}
+
+
count, _ := ix.indexer.DocCount()
+
l.Info("Initialized the issue indexer", "docCount", count)
+
}
+
+
func generateIssueIndexMapping() (mapping.IndexMapping, error) {
+
mapping := bleve.NewIndexMapping()
+
docMapping := bleve.NewDocumentMapping()
+
+
textFieldMapping := bleve.NewTextFieldMapping()
+
textFieldMapping.Store = false
+
textFieldMapping.IncludeInAll = false
+
+
boolFieldMapping := bleve.NewBooleanFieldMapping()
+
boolFieldMapping.Store = false
+
boolFieldMapping.IncludeInAll = false
+
+
keywordFieldMapping := bleve.NewKeywordFieldMapping()
+
keywordFieldMapping.Store = false
+
keywordFieldMapping.IncludeInAll = false
+
+
// numericFieldMapping := bleve.NewNumericFieldMapping()
+
+
docMapping.AddFieldMappingsAt("title", textFieldMapping)
+
docMapping.AddFieldMappingsAt("body", textFieldMapping)
+
+
docMapping.AddFieldMappingsAt("repo_at", keywordFieldMapping)
+
docMapping.AddFieldMappingsAt("is_open", boolFieldMapping)
+
+
err := mapping.AddCustomTokenFilter(unicodeNormalizeName, map[string]any{
+
"type": unicodenorm.Name,
+
"form": unicodenorm.NFC,
+
})
+
if err != nil {
+
return nil, err
+
}
+
+
err = mapping.AddCustomAnalyzer(issueIndexerAnalyzer, map[string]any{
+
"type": custom.Name,
+
"char_filters": []string{},
+
"tokenizer": unicode.Name,
+
"token_filters": []string{unicodeNormalizeName, camelcase.Name, lowercase.Name},
+
})
+
if err != nil {
+
return nil, err
+
}
+
+
mapping.DefaultAnalyzer = issueIndexerAnalyzer
+
mapping.AddDocumentMapping(issueIndexerDocType, docMapping)
+
mapping.AddDocumentMapping("_all", bleve.NewDocumentDisabledMapping())
+
mapping.DefaultMapping = bleve.NewDocumentDisabledMapping()
+
+
return mapping, nil
+
}
+
+
func (ix *Indexer) initialize(ctx context.Context) (bool, error) {
+
if ix.indexer != nil {
+
return false, errors.New("indexer is already initialized")
+
}
+
+
indexer, err := openIndexer(ctx, ix.path)
+
if err != nil {
+
return false, err
+
}
+
if indexer != nil {
+
ix.indexer = indexer
+
return true, nil
+
}
+
+
mapping, err := generateIssueIndexMapping()
+
if err != nil {
+
return false, err
+
}
+
indexer, err = bleve.New(ix.path, mapping)
+
if err != nil {
+
return false, err
+
}
+
+
ix.indexer = indexer
+
+
return false, nil
+
}
+
+
func openIndexer(ctx context.Context, path string) (bleve.Index, error) {
+
l := tlog.FromContext(ctx)
+
indexer, err := bleve.Open(path)
+
if err != nil {
+
if errors.Is(err, upsidedown.IncompatibleVersion) {
+
l.Info("Indexer was built with a previous version of bleve, deleting and rebuilding")
+
return nil, os.RemoveAll(path)
+
}
+
return nil, nil
+
}
+
return indexer, nil
+
}
+
+
func PopulateIndexer(ctx context.Context, ix *Indexer, e db.Execer) error {
+
l := tlog.FromContext(ctx)
+
count := 0
+
err := pagination.IterateAll(
+
func(page pagination.Page) ([]models.Issue, error) {
+
return db.GetIssuesPaginated(e, page)
+
},
+
func(issues []models.Issue) error {
+
count += len(issues)
+
return ix.Index(ctx, issues...)
+
},
+
)
+
l.Info("issues indexed", "count", count)
+
return err
+
}
+
+
// issueData is the document stored and indexed for each issue
+
type issueData struct {
+
ID int64 `json:"id"`
+
RepoAt string `json:"repo_at"`
+
IssueID int `json:"issue_id"`
+
Title string `json:"title"`
+
Body string `json:"body"`
+
+
IsOpen bool `json:"is_open"`
+
Comments []IssueCommentData `json:"comments"`
+
}
+
+
func makeIssueData(issue *models.Issue) *issueData {
+
return &issueData{
+
ID: issue.Id,
+
RepoAt: issue.RepoAt.String(),
+
IssueID: issue.IssueId,
+
Title: issue.Title,
+
Body: issue.Body,
+
IsOpen: issue.Open,
+
}
+
}
+
+
// Type returns the document type, for bleve's mapping.Classifier interface.
+
func (i *issueData) Type() string {
+
return issueIndexerDocType
+
}
+
+
type IssueCommentData struct {
+
Body string `json:"body"`
+
}
+
+
type SearchResult struct {
+
Hits []int64
+
Total uint64
+
}
+
+
const maxBatchSize = 20
+
+
func (ix *Indexer) Index(ctx context.Context, issues ...models.Issue) error {
+
batch := bleveutil.NewFlushingBatch(ix.indexer, maxBatchSize)
+
for _, issue := range issues {
+
issueData := makeIssueData(&issue)
+
if err := batch.Index(base36.Encode(issue.Id), issueData); err != nil {
+
return err
+
}
+
}
+
return batch.Flush()
+
}
+
+
func (ix *Indexer) Delete(ctx context.Context, issueId int64) error {
+
return ix.indexer.Delete(base36.Encode(issueId))
+
}
+
+
// Search searches for issues
+
func (ix *Indexer) Search(ctx context.Context, opts models.IssueSearchOptions) (*SearchResult, error) {
+
var queries []query.Query
+
+
if opts.Keyword != "" {
+
queries = append(queries, bleve.NewDisjunctionQuery(
+
bleveutil.MatchAndQuery("title", opts.Keyword, issueIndexerAnalyzer, 0),
+
bleveutil.MatchAndQuery("body", opts.Keyword, issueIndexerAnalyzer, 0),
+
))
+
}
+
queries = append(queries, bleveutil.KeywordFieldQuery("repo_at", opts.RepoAt))
+
queries = append(queries, bleveutil.BoolFieldQuery("is_open", opts.IsOpen))
+
// TODO: append more queries
+
+
var indexerQuery query.Query = bleve.NewConjunctionQuery(queries...)
+
searchReq := bleve.NewSearchRequestOptions(indexerQuery, opts.Page.Limit, opts.Page.Offset, false)
+
res, err := ix.indexer.SearchInContext(ctx, searchReq)
+
if err != nil {
+
return nil, nil
+
}
+
ret := &SearchResult{
+
Total: res.Total,
+
Hits: make([]int64, len(res.Hits)),
+
}
+
for i, hit := range res.Hits {
+
id, err := base36.Decode(hit.ID)
+
if err != nil {
+
return nil, err
+
}
+
ret.Hits[i] = id
+
}
+
return ret, nil
+
}
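A sketch of the caller side of Search, assuming the IssueSearchOptions and pagination.Page fields used above (Keyword, RepoAt, IsOpen, Page.Limit/Offset) are the whole story:

package example

import (
	"context"

	issues_indexer "tangled.org/core/appview/indexer/issues"
	"tangled.org/core/appview/models"
	"tangled.org/core/appview/pagination"
)

// searchOpenIssues returns the row ids of matching open issues; callers then
// load the rows with orm.FilterIn("id", hits), as the issues handler does.
func searchOpenIssues(ctx context.Context, ix *issues_indexer.Indexer, repoAt, keyword string) ([]int64, error) {
	res, err := ix.Search(ctx, models.IssueSearchOptions{
		Keyword: keyword,
		RepoAt:  repoAt,
		IsOpen:  true,
		Page:    pagination.Page{Limit: 30, Offset: 0},
	})
	if err != nil {
		return nil, err
	}
	return res.Hits, nil
}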
+57
appview/indexer/notifier.go
···
+
package indexer
+
+
import (
+
"context"
+
+
"github.com/bluesky-social/indigo/atproto/syntax"
+
"tangled.org/core/appview/models"
+
"tangled.org/core/appview/notify"
+
"tangled.org/core/log"
+
)
+
+
var _ notify.Notifier = &Indexer{}
+
+
func (ix *Indexer) NewIssue(ctx context.Context, issue *models.Issue, mentions []syntax.DID) {
+
l := log.FromContext(ctx).With("notifier", "indexer", "issue", issue)
+
l.Debug("indexing new issue")
+
err := ix.Issues.Index(ctx, *issue)
+
if err != nil {
+
l.Error("failed to index an issue", "err", err)
+
}
+
}
+
+
func (ix *Indexer) NewIssueState(ctx context.Context, actor syntax.DID, issue *models.Issue) {
+
l := log.FromContext(ctx).With("notifier", "indexer", "issue", issue)
+
l.Debug("updating an issue")
+
err := ix.Issues.Index(ctx, *issue)
+
if err != nil {
+
l.Error("failed to index an issue", "err", err)
+
}
+
}
+
+
func (ix *Indexer) DeleteIssue(ctx context.Context, issue *models.Issue) {
+
l := log.FromContext(ctx).With("notifier", "indexer", "issue", issue)
+
l.Debug("deleting an issue")
+
err := ix.Issues.Delete(ctx, issue.Id)
+
if err != nil {
+
l.Error("failed to delete an issue", "err", err)
+
}
+
}
+
+
func (ix *Indexer) NewPull(ctx context.Context, pull *models.Pull) {
+
l := log.FromContext(ctx).With("notifier", "indexer", "pull", pull)
+
l.Debug("indexing new pr")
+
err := ix.Pulls.Index(ctx, pull)
+
if err != nil {
+
l.Error("failed to index a pr", "err", err)
+
}
+
}
+
+
func (ix *Indexer) NewPullState(ctx context.Context, actor syntax.DID, pull *models.Pull) {
+
l := log.FromContext(ctx).With("notifier", "indexer", "pull", pull)
+
l.Debug("updating a pr")
+
err := ix.Pulls.Index(ctx, pull)
+
if err != nil {
+
l.Error("failed to index a pr", "err", err)
+
}
+
}
+257
appview/indexer/pulls/indexer.go
···
+
// heavily inspired by gitea's model (basically copy-pasted)
+
package pulls_indexer
+
+
import (
+
"context"
+
"errors"
+
"log"
+
"os"
+
+
"github.com/blevesearch/bleve/v2"
+
"github.com/blevesearch/bleve/v2/analysis/analyzer/custom"
+
"github.com/blevesearch/bleve/v2/analysis/token/camelcase"
+
"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
+
"github.com/blevesearch/bleve/v2/analysis/token/unicodenorm"
+
"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
+
"github.com/blevesearch/bleve/v2/index/upsidedown"
+
"github.com/blevesearch/bleve/v2/mapping"
+
"github.com/blevesearch/bleve/v2/search/query"
+
"tangled.org/core/appview/db"
+
"tangled.org/core/appview/indexer/base36"
+
"tangled.org/core/appview/indexer/bleve"
+
"tangled.org/core/appview/models"
+
tlog "tangled.org/core/log"
+
)
+
+
const (
+
pullIndexerAnalyzer = "pullIndexer"
+
pullIndexerDocType = "pullIndexerDocType"
+
+
unicodeNormalizeName = "uicodeNormalize"
+
)
+
+
type Indexer struct {
+
indexer bleve.Index
+
path string
+
}
+
+
func NewIndexer(indexDir string) *Indexer {
+
return &Indexer{
+
path: indexDir,
+
}
+
}
+
+
// Init initializes the indexer
+
func (ix *Indexer) Init(ctx context.Context, e db.Execer) {
+
l := tlog.FromContext(ctx)
+
existed, err := ix.initialize(ctx)
+
if err != nil {
+
log.Fatalln("failed to initialize pull indexer", err)
+
}
+
if !existed {
+
l.Debug("Populating the pull indexer")
+
err := PopulateIndexer(ctx, ix, e)
+
if err != nil {
+
log.Fatalln("failed to populate pull indexer", err)
+
}
+
}
+
+
count, _ := ix.indexer.DocCount()
+
l.Info("Initialized the pull indexer", "docCount", count)
+
}
+
+
func generatePullIndexMapping() (mapping.IndexMapping, error) {
+
mapping := bleve.NewIndexMapping()
+
docMapping := bleve.NewDocumentMapping()
+
+
textFieldMapping := bleve.NewTextFieldMapping()
+
textFieldMapping.Store = false
+
textFieldMapping.IncludeInAll = false
+
+
keywordFieldMapping := bleve.NewKeywordFieldMapping()
+
keywordFieldMapping.Store = false
+
keywordFieldMapping.IncludeInAll = false
+
+
// numericFieldMapping := bleve.NewNumericFieldMapping()
+
+
docMapping.AddFieldMappingsAt("title", textFieldMapping)
+
docMapping.AddFieldMappingsAt("body", textFieldMapping)
+
+
docMapping.AddFieldMappingsAt("repo_at", keywordFieldMapping)
+
docMapping.AddFieldMappingsAt("state", keywordFieldMapping)
+
+
err := mapping.AddCustomTokenFilter(unicodeNormalizeName, map[string]any{
+
"type": unicodenorm.Name,
+
"form": unicodenorm.NFC,
+
})
+
if err != nil {
+
return nil, err
+
}
+
+
err = mapping.AddCustomAnalyzer(pullIndexerAnalyzer, map[string]any{
+
"type": custom.Name,
+
"char_filters": []string{},
+
"tokenizer": unicode.Name,
+
"token_filters": []string{unicodeNormalizeName, camelcase.Name, lowercase.Name},
+
})
+
if err != nil {
+
return nil, err
+
}
+
+
mapping.DefaultAnalyzer = pullIndexerAnalyzer
+
mapping.AddDocumentMapping(pullIndexerDocType, docMapping)
+
mapping.AddDocumentMapping("_all", bleve.NewDocumentDisabledMapping())
+
mapping.DefaultMapping = bleve.NewDocumentDisabledMapping()
+
+
return mapping, nil
+
}
+
+
func (ix *Indexer) initialize(ctx context.Context) (bool, error) {
+
if ix.indexer != nil {
+
return false, errors.New("indexer is already initialized")
+
}
+
+
indexer, err := openIndexer(ctx, ix.path)
+
if err != nil {
+
return false, err
+
}
+
if indexer != nil {
+
ix.indexer = indexer
+
return true, nil
+
}
+
+
mapping, err := generatePullIndexMapping()
+
if err != nil {
+
return false, err
+
}
+
indexer, err = bleve.New(ix.path, mapping)
+
if err != nil {
+
return false, err
+
}
+
+
ix.indexer = indexer
+
+
return false, nil
+
}
+
+
func openIndexer(ctx context.Context, path string) (bleve.Index, error) {
+
l := tlog.FromContext(ctx)
+
indexer, err := bleve.Open(path)
+
if err != nil {
+
if errors.Is(err, upsidedown.IncompatibleVersion) {
+
l.Info("Indexer was built with a previous version of bleve, deleting and rebuilding")
+
return nil, os.RemoveAll(path)
+
}
+
return nil, nil
+
}
+
return indexer, nil
+
}
+
+
func PopulateIndexer(ctx context.Context, ix *Indexer, e db.Execer) error {
+
l := tlog.FromContext(ctx)
+
+
pulls, err := db.GetPulls(e)
+
if err != nil {
+
return err
+
}
+
count := len(pulls)
+
err = ix.Index(ctx, pulls...)
+
if err != nil {
+
return err
+
}
+
l.Info("pulls indexed", "count", count)
+
return err
+
}
+
+
// pullData is the document stored and indexed for each pull request
+
type pullData struct {
+
ID int64 `json:"id"`
+
RepoAt string `json:"repo_at"`
+
PullID int `json:"pull_id"`
+
Title string `json:"title"`
+
Body string `json:"body"`
+
State string `json:"state"`
+
+
Comments []pullCommentData `json:"comments"`
+
}
+
+
func makePullData(pull *models.Pull) *pullData {
+
return &pullData{
+
ID: int64(pull.ID),
+
RepoAt: pull.RepoAt.String(),
+
PullID: pull.PullId,
+
Title: pull.Title,
+
Body: pull.Body,
+
State: pull.State.String(),
+
}
+
}
+
+
// Type returns the document type, for bleve's mapping.Classifier interface.
+
func (i *pullData) Type() string {
+
return pullIndexerDocType
+
}
+
+
type pullCommentData struct {
+
Body string `json:"body"`
+
}
+
+
type searchResult struct {
+
Hits []int64
+
Total uint64
+
}
+
+
const maxBatchSize = 20
+
+
func (ix *Indexer) Index(ctx context.Context, pulls ...*models.Pull) error {
+
batch := bleveutil.NewFlushingBatch(ix.indexer, maxBatchSize)
+
for _, pull := range pulls {
+
pullData := makePullData(pull)
+
if err := batch.Index(base36.Encode(pullData.ID), pullData); err != nil {
+
return err
+
}
+
}
+
return batch.Flush()
+
}
+
+
func (ix *Indexer) Delete(ctx context.Context, pullID int64) error {
+
return ix.indexer.Delete(base36.Encode(pullID))
+
}
+
+
// Search searches for pulls
+
func (ix *Indexer) Search(ctx context.Context, opts models.PullSearchOptions) (*searchResult, error) {
+
var queries []query.Query
+
+
// TODO(boltless): remove this after implementing pulls page pagination
+
limit := opts.Page.Limit
+
if limit == 0 {
+
limit = 500
+
}
+
+
if opts.Keyword != "" {
+
queries = append(queries, bleve.NewDisjunctionQuery(
+
bleveutil.MatchAndQuery("title", opts.Keyword, pullIndexerAnalyzer, 0),
+
bleveutil.MatchAndQuery("body", opts.Keyword, pullIndexerAnalyzer, 0),
+
))
+
}
+
queries = append(queries, bleveutil.KeywordFieldQuery("repo_at", opts.RepoAt))
+
queries = append(queries, bleveutil.KeywordFieldQuery("state", opts.State.String()))
+
+
var indexerQuery query.Query = bleve.NewConjunctionQuery(queries...)
+
searchReq := bleve.NewSearchRequestOptions(indexerQuery, limit, opts.Page.Offset, false)
+
res, err := ix.indexer.SearchInContext(ctx, searchReq)
+
if err != nil {
+
return nil, nil
+
}
+
ret := &searchResult{
+
Total: res.Total,
+
Hits: make([]int64, len(res.Hits)),
+
}
+
for i, hit := range res.Hits {
+
id, err := base36.Decode(hit.ID)
+
if err != nil {
+
return nil, err
+
}
+
ret.Hits[i] = id
+
}
+
return ret, nil
+
}
+56 -32
appview/ingester.go
···
"tangled.org/core/appview/serververify"
"tangled.org/core/appview/validator"
"tangled.org/core/idresolver"
+
"tangled.org/core/orm"
"tangled.org/core/rbac"
)
···
return err
}
err = db.AddStar(i.Db, &models.Star{
-
StarredByDid: did,
-
RepoAt: subjectUri,
-
Rkey: e.Commit.RKey,
+
Did: did,
+
RepoAt: subjectUri,
+
Rkey: e.Commit.RKey,
})
case jmodels.CommitOperationDelete:
err = db.DeleteStarByRkey(i.Db, did, e.Commit.RKey)
···
err = db.AddArtifact(i.Db, artifact)
case jmodels.CommitOperationDelete:
-
err = db.DeleteArtifact(i.Db, db.FilterEq("did", did), db.FilterEq("rkey", e.Commit.RKey))
+
err = db.DeleteArtifact(i.Db, orm.FilterEq("did", did), orm.FilterEq("rkey", e.Commit.RKey))
}
if err != nil {
···
includeBluesky := record.Bluesky
+
pronouns := ""
+
if record.Pronouns != nil {
+
pronouns = *record.Pronouns
+
}
+
location := ""
if record.Location != nil {
location = *record.Location
···
Links: links,
Stats: stats,
PinnedRepos: pinned,
+
Pronouns: pronouns,
}
ddb, ok := i.Db.Execer.(*db.DB)
···
err = db.UpsertProfile(tx, &profile)
case jmodels.CommitOperationDelete:
-
err = db.DeleteArtifact(i.Db, db.FilterEq("did", did), db.FilterEq("rkey", e.Commit.RKey))
+
err = db.DeleteArtifact(i.Db, orm.FilterEq("did", did), orm.FilterEq("rkey", e.Commit.RKey))
}
if err != nil {
···
// get record from db first
members, err := db.GetSpindleMembers(
ddb,
-
db.FilterEq("did", did),
-
db.FilterEq("rkey", rkey),
+
orm.FilterEq("did", did),
+
orm.FilterEq("rkey", rkey),
)
if err != nil || len(members) != 1 {
return fmt.Errorf("failed to get member: %w, len(members) = %d", err, len(members))
···
// remove record by rkey && update enforcer
if err = db.RemoveSpindleMember(
tx,
-
db.FilterEq("did", did),
-
db.FilterEq("rkey", rkey),
+
orm.FilterEq("did", did),
+
orm.FilterEq("rkey", rkey),
); err != nil {
return fmt.Errorf("failed to remove from db: %w", err)
}
···
// get record from db first
spindles, err := db.GetSpindles(
ddb,
-
db.FilterEq("owner", did),
-
db.FilterEq("instance", instance),
+
orm.FilterEq("owner", did),
+
orm.FilterEq("instance", instance),
)
if err != nil || len(spindles) != 1 {
return fmt.Errorf("failed to get spindles: %w, len(spindles) = %d", err, len(spindles))
···
// remove spindle members first
err = db.RemoveSpindleMember(
tx,
-
db.FilterEq("owner", did),
-
db.FilterEq("instance", instance),
+
orm.FilterEq("owner", did),
+
orm.FilterEq("instance", instance),
)
if err != nil {
return err
···
err = db.DeleteSpindle(
tx,
-
db.FilterEq("owner", did),
-
db.FilterEq("instance", instance),
+
orm.FilterEq("owner", did),
+
orm.FilterEq("instance", instance),
)
if err != nil {
return err
···
case jmodels.CommitOperationDelete:
if err := db.DeleteString(
ddb,
-
db.FilterEq("did", did),
-
db.FilterEq("rkey", rkey),
+
orm.FilterEq("did", did),
+
orm.FilterEq("rkey", rkey),
); err != nil {
l.Error("failed to delete", "err", err)
return fmt.Errorf("failed to delete string record: %w", err)
···
// get record from db first
registrations, err := db.GetRegistrations(
ddb,
-
db.FilterEq("domain", domain),
-
db.FilterEq("did", did),
+
orm.FilterEq("domain", domain),
+
orm.FilterEq("did", did),
)
if err != nil {
return fmt.Errorf("failed to get registration: %w", err)
···
err = db.DeleteKnot(
tx,
-
db.FilterEq("did", did),
-
db.FilterEq("domain", domain),
+
orm.FilterEq("did", did),
+
orm.FilterEq("domain", domain),
)
if err != nil {
return err
···
return nil
case jmodels.CommitOperationDelete:
+
tx, err := ddb.BeginTx(ctx, nil)
+
if err != nil {
+
l.Error("failed to begin transaction", "err", err)
+
return err
+
}
+
defer tx.Rollback()
+
if err := db.DeleteIssues(
-
ddb,
-
db.FilterEq("did", did),
-
db.FilterEq("rkey", rkey),
+
tx,
+
did,
+
rkey,
); err != nil {
l.Error("failed to delete", "err", err)
return fmt.Errorf("failed to delete issue record: %w", err)
+
}
+
if err := tx.Commit(); err != nil {
+
l.Error("failed to commit txn", "err", err)
+
return err
}
return nil
···
return fmt.Errorf("failed to validate comment: %w", err)
}
-
_, err = db.AddIssueComment(ddb, *comment)
+
tx, err := ddb.Begin()
+
if err != nil {
+
return fmt.Errorf("failed to start transaction: %w", err)
+
}
+
defer tx.Rollback()
+
+
_, err = db.AddIssueComment(tx, *comment)
if err != nil {
return fmt.Errorf("failed to create issue comment: %w", err)
}
-
return nil
+
return tx.Commit()
case jmodels.CommitOperationDelete:
if err := db.DeleteIssueComments(
ddb,
-
db.FilterEq("did", did),
-
db.FilterEq("rkey", rkey),
+
orm.FilterEq("did", did),
+
orm.FilterEq("rkey", rkey),
); err != nil {
return fmt.Errorf("failed to delete issue comment record: %w", err)
}
···
case jmodels.CommitOperationDelete:
if err := db.DeleteLabelDefinition(
ddb,
-
db.FilterEq("did", did),
-
db.FilterEq("rkey", rkey),
+
orm.FilterEq("did", did),
+
orm.FilterEq("rkey", rkey),
); err != nil {
return fmt.Errorf("failed to delete labeldef record: %w", err)
}
···
var repo *models.Repo
switch collection {
case tangled.RepoIssueNSID:
-
i, err := db.GetIssues(ddb, db.FilterEq("at_uri", subject))
+
i, err := db.GetIssues(ddb, orm.FilterEq("at_uri", subject))
if err != nil || len(i) != 1 {
return fmt.Errorf("failed to find subject: %w || subject count %d", err, len(i))
}
···
return fmt.Errorf("unsupport label subject: %s", collection)
}
-
actx, err := db.NewLabelApplicationCtx(ddb, db.FilterIn("at_uri", repo.Labels))
+
actx, err := db.NewLabelApplicationCtx(ddb, orm.FilterIn("at_uri", repo.Labels))
if err != nil {
return fmt.Errorf("failed to build label application ctx: %w", err)
+209 -130
appview/issues/issues.go
···
"fmt"
"log/slog"
"net/http"
-
"slices"
"time"
comatproto "github.com/bluesky-social/indigo/api/atproto"
···
"tangled.org/core/api/tangled"
"tangled.org/core/appview/config"
"tangled.org/core/appview/db"
+
issues_indexer "tangled.org/core/appview/indexer/issues"
+
"tangled.org/core/appview/mentions"
"tangled.org/core/appview/models"
"tangled.org/core/appview/notify"
"tangled.org/core/appview/oauth"
"tangled.org/core/appview/pages"
+
"tangled.org/core/appview/pages/repoinfo"
"tangled.org/core/appview/pagination"
"tangled.org/core/appview/reporesolver"
"tangled.org/core/appview/validator"
"tangled.org/core/idresolver"
+
"tangled.org/core/orm"
+
"tangled.org/core/rbac"
"tangled.org/core/tid"
)
type Issues struct {
-
oauth *oauth.OAuth
-
repoResolver *reporesolver.RepoResolver
-
pages *pages.Pages
-
idResolver *idresolver.Resolver
-
db *db.DB
-
config *config.Config
-
notifier notify.Notifier
-
logger *slog.Logger
-
validator *validator.Validator
+
oauth *oauth.OAuth
+
repoResolver *reporesolver.RepoResolver
+
enforcer *rbac.Enforcer
+
pages *pages.Pages
+
idResolver *idresolver.Resolver
+
mentionsResolver *mentions.Resolver
+
db *db.DB
+
config *config.Config
+
notifier notify.Notifier
+
logger *slog.Logger
+
validator *validator.Validator
+
indexer *issues_indexer.Indexer
}
func New(
oauth *oauth.OAuth,
repoResolver *reporesolver.RepoResolver,
+
enforcer *rbac.Enforcer,
pages *pages.Pages,
idResolver *idresolver.Resolver,
+
mentionsResolver *mentions.Resolver,
db *db.DB,
config *config.Config,
notifier notify.Notifier,
validator *validator.Validator,
+
indexer *issues_indexer.Indexer,
logger *slog.Logger,
) *Issues {
return &Issues{
-
oauth: oauth,
-
repoResolver: repoResolver,
-
pages: pages,
-
idResolver: idResolver,
-
db: db,
-
config: config,
-
notifier: notifier,
-
logger: logger,
-
validator: validator,
+
oauth: oauth,
+
repoResolver: repoResolver,
+
enforcer: enforcer,
+
pages: pages,
+
idResolver: idResolver,
+
mentionsResolver: mentionsResolver,
+
db: db,
+
config: config,
+
notifier: notifier,
+
logger: logger,
+
validator: validator,
+
indexer: indexer,
}
}
···
userReactions = db.GetReactionStatusMap(rp.db, user.Did, issue.AtUri())
}
+
backlinks, err := db.GetBacklinks(rp.db, issue.AtUri())
+
if err != nil {
+
l.Error("failed to fetch backlinks", "err", err)
+
rp.pages.Error503(w)
+
return
+
}
+
labelDefs, err := db.GetLabelDefinitions(
rp.db,
-
db.FilterIn("at_uri", f.Repo.Labels),
-
db.FilterContains("scope", tangled.RepoIssueNSID),
+
orm.FilterIn("at_uri", f.Labels),
+
orm.FilterContains("scope", tangled.RepoIssueNSID),
)
if err != nil {
l.Error("failed to fetch labels", "err", err)
···
rp.pages.RepoSingleIssue(w, pages.RepoSingleIssueParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
Issue: issue,
CommentList: issue.CommentList(),
+
Backlinks: backlinks,
OrderedReactionKinds: models.OrderedReactionKinds,
Reactions: reactionMap,
UserReacted: userReactions,
···
func (rp *Issues) EditIssue(w http.ResponseWriter, r *http.Request) {
l := rp.logger.With("handler", "EditIssue")
user := rp.oauth.GetUser(r)
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
issue, ok := r.Context().Value("issue").(*models.Issue)
if !ok {
···
case http.MethodGet:
rp.pages.EditIssueFragment(w, pages.EditIssueParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
Issue: issue,
})
case http.MethodPost:
···
newIssue := issue
newIssue.Title = r.FormValue("title")
newIssue.Body = r.FormValue("body")
+
newIssue.Mentions, newIssue.References = rp.mentionsResolver.Resolve(r.Context(), newIssue.Body)
if err := rp.validator.ValidateIssue(newIssue); err != nil {
l.Error("validation error", "err", err)
···
func (rp *Issues) DeleteIssue(w http.ResponseWriter, r *http.Request) {
l := rp.logger.With("handler", "DeleteIssue")
noticeId := "issue-actions-error"
-
-
user := rp.oauth.GetUser(r)
f, err := rp.repoResolver.Resolve(r)
if err != nil {
···
}
l = l.With("did", issue.Did, "rkey", issue.Rkey)
+
tx, err := rp.db.Begin()
+
if err != nil {
+
l.Error("failed to start transaction", "err", err)
+
rp.pages.Notice(w, "issue-comment", "Failed to create comment, try again later.")
+
return
+
}
+
defer tx.Rollback()
+
// delete from PDS
client, err := rp.oauth.AuthorizedClient(r)
if err != nil {
···
}
// delete from db
-
if err := db.DeleteIssues(rp.db, db.FilterEq("id", issue.Id)); err != nil {
+
if err := db.DeleteIssues(tx, issue.Did, issue.Rkey); err != nil {
l.Error("failed to delete issue", "err", err)
rp.pages.Notice(w, noticeId, "Failed to delete issue.")
return
}
+
tx.Commit()
+
+
rp.notifier.DeleteIssue(r.Context(), issue)
// return to all issues page
-
rp.pages.HxRedirect(w, "/"+f.RepoInfo(user).FullName()+"/issues")
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, f)
+
rp.pages.HxRedirect(w, "/"+ownerSlashRepo+"/issues")
}
func (rp *Issues) CloseIssue(w http.ResponseWriter, r *http.Request) {
···
return
}
-
collaborators, err := f.Collaborators(r.Context())
-
if err != nil {
-
l.Error("failed to fetch repo collaborators", "err", err)
-
}
-
isCollaborator := slices.ContainsFunc(collaborators, func(collab pages.Collaborator) bool {
-
return user.Did == collab.Did
-
})
+
roles := repoinfo.RolesInRepo{Roles: rp.enforcer.GetPermissionsInRepo(user.Did, f.Knot, f.DidSlashRepo())}
+
isRepoOwner := roles.IsOwner()
+
isCollaborator := roles.IsCollaborator()
isIssueOwner := user.Did == issue.Did
// TODO: make this more granular
-
if isIssueOwner || isCollaborator {
+
if isIssueOwner || isRepoOwner || isCollaborator {
err = db.CloseIssues(
rp.db,
-
db.FilterEq("id", issue.Id),
+
orm.FilterEq("id", issue.Id),
)
if err != nil {
l.Error("failed to close issue", "err", err)
rp.pages.Notice(w, "issue-action", "Failed to close issue. Try again later.")
return
}
+
// change the issue state (this will pass down to the notifiers)
+
issue.Open = false
// notify about the issue closure
-
rp.notifier.NewIssueClosed(r.Context(), issue)
+
rp.notifier.NewIssueState(r.Context(), syntax.DID(user.Did), issue)
-
rp.pages.HxLocation(w, fmt.Sprintf("/%s/issues/%d", f.OwnerSlashRepo(), issue.IssueId))
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, f)
+
rp.pages.HxLocation(w, fmt.Sprintf("/%s/issues/%d", ownerSlashRepo, issue.IssueId))
return
} else {
l.Error("user is not permitted to close issue")
···
return
}
-
collaborators, err := f.Collaborators(r.Context())
-
if err != nil {
-
l.Error("failed to fetch repo collaborators", "err", err)
-
}
-
isCollaborator := slices.ContainsFunc(collaborators, func(collab pages.Collaborator) bool {
-
return user.Did == collab.Did
-
})
+
roles := repoinfo.RolesInRepo{Roles: rp.enforcer.GetPermissionsInRepo(user.Did, f.Knot, f.DidSlashRepo())}
+
isRepoOwner := roles.IsOwner()
+
isCollaborator := roles.IsCollaborator()
isIssueOwner := user.Did == issue.Did
-
if isCollaborator || isIssueOwner {
+
if isCollaborator || isRepoOwner || isIssueOwner {
err := db.ReopenIssues(
rp.db,
-
db.FilterEq("id", issue.Id),
+
orm.FilterEq("id", issue.Id),
)
if err != nil {
l.Error("failed to reopen issue", "err", err)
rp.pages.Notice(w, "issue-action", "Failed to reopen issue. Try again later.")
return
}
-
rp.pages.HxLocation(w, fmt.Sprintf("/%s/issues/%d", f.OwnerSlashRepo(), issue.IssueId))
+
// change the issue state (this will pass down to the notifiers)
+
issue.Open = true
+
+
// notify about the issue reopen
+
rp.notifier.NewIssueState(r.Context(), syntax.DID(user.Did), issue)
+
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, f)
+
rp.pages.HxLocation(w, fmt.Sprintf("/%s/issues/%d", ownerSlashRepo, issue.IssueId))
return
} else {
l.Error("user is not the owner of the repo")
···
replyTo = &replyToUri
}
+
mentions, references := rp.mentionsResolver.Resolve(r.Context(), body)
+
comment := models.IssueComment{
-
Did: user.Did,
-
Rkey: tid.TID(),
-
IssueAt: issue.AtUri().String(),
-
ReplyTo: replyTo,
-
Body: body,
-
Created: time.Now(),
+
Did: user.Did,
+
Rkey: tid.TID(),
+
IssueAt: issue.AtUri().String(),
+
ReplyTo: replyTo,
+
Body: body,
+
Created: time.Now(),
+
Mentions: mentions,
+
References: references,
}
if err = rp.validator.ValidateIssueComment(&comment); err != nil {
l.Error("failed to validate comment", "err", err)
···
}
}()
-
commentId, err := db.AddIssueComment(rp.db, comment)
+
tx, err := rp.db.Begin()
+
if err != nil {
+
l.Error("failed to start transaction", "err", err)
+
rp.pages.Notice(w, "issue-comment", "Failed to create comment, try again later.")
+
return
+
}
+
defer tx.Rollback()
+
+
commentId, err := db.AddIssueComment(tx, comment)
if err != nil {
l.Error("failed to create comment", "err", err)
rp.pages.Notice(w, "issue-comment", "Failed to create comment.")
return
}
+
err = tx.Commit()
+
if err != nil {
+
l.Error("failed to commit transaction", "err", err)
+
rp.pages.Notice(w, "issue-comment", "Failed to create comment, try again later.")
+
return
+
}
// reset atUri to make rollback a no-op
atUri = ""
// notify about the new comment
comment.Id = commentId
-
rp.notifier.NewIssueComment(r.Context(), &comment)
+
+
rp.notifier.NewIssueComment(r.Context(), &comment, mentions)
-
rp.pages.HxLocation(w, fmt.Sprintf("/%s/issues/%d#comment-%d", f.OwnerSlashRepo(), issue.IssueId, commentId))
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, f)
+
rp.pages.HxLocation(w, fmt.Sprintf("/%s/issues/%d#comment-%d", ownerSlashRepo, issue.IssueId, commentId))
}
func (rp *Issues) IssueComment(w http.ResponseWriter, r *http.Request) {
l := rp.logger.With("handler", "IssueComment")
user := rp.oauth.GetUser(r)
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
issue, ok := r.Context().Value("issue").(*models.Issue)
if !ok {
···
commentId := chi.URLParam(r, "commentId")
comments, err := db.GetIssueComments(
rp.db,
-
db.FilterEq("id", commentId),
+
orm.FilterEq("id", commentId),
)
if err != nil {
l.Error("failed to fetch comment", "id", commentId)
···
rp.pages.IssueCommentBodyFragment(w, pages.IssueCommentBodyParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
Issue: issue,
Comment: &comment,
})
···
func (rp *Issues) EditIssueComment(w http.ResponseWriter, r *http.Request) {
l := rp.logger.With("handler", "EditIssueComment")
user := rp.oauth.GetUser(r)
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
issue, ok := r.Context().Value("issue").(*models.Issue)
if !ok {
···
commentId := chi.URLParam(r, "commentId")
comments, err := db.GetIssueComments(
rp.db,
-
db.FilterEq("id", commentId),
+
orm.FilterEq("id", commentId),
)
if err != nil {
l.Error("failed to fetch comment", "id", commentId)
···
case http.MethodGet:
rp.pages.EditIssueCommentFragment(w, pages.EditIssueCommentParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
Issue: issue,
Comment: &comment,
})
···
newComment := comment
newComment.Body = newBody
newComment.Edited = &now
+
newComment.Mentions, newComment.References = rp.mentionsResolver.Resolve(r.Context(), newBody)
+
record := newComment.AsRecord()
-
_, err = db.AddIssueComment(rp.db, newComment)
+
tx, err := rp.db.Begin()
+
if err != nil {
+
l.Error("failed to start transaction", "err", err)
+
rp.pages.Notice(w, "repo-notice", "Failed to update description, try again later.")
+
return
+
}
+
defer tx.Rollback()
+
+
_, err = db.AddIssueComment(tx, newComment)
if err != nil {
l.Error("failed to perferom update-description query", "err", err)
rp.pages.Notice(w, "repo-notice", "Failed to update description, try again later.")
return
}
+
tx.Commit()
// rkey is optional, it was introduced later
if newComment.Rkey != "" {
···
// return new comment body with htmx
rp.pages.IssueCommentBodyFragment(w, pages.IssueCommentBodyParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
Issue: issue,
Comment: &newComment,
})
···
func (rp *Issues) ReplyIssueCommentPlaceholder(w http.ResponseWriter, r *http.Request) {
l := rp.logger.With("handler", "ReplyIssueCommentPlaceholder")
user := rp.oauth.GetUser(r)
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
issue, ok := r.Context().Value("issue").(*models.Issue)
if !ok {
···
commentId := chi.URLParam(r, "commentId")
comments, err := db.GetIssueComments(
rp.db,
-
db.FilterEq("id", commentId),
+
orm.FilterEq("id", commentId),
)
if err != nil {
l.Error("failed to fetch comment", "id", commentId)
···
rp.pages.ReplyIssueCommentPlaceholderFragment(w, pages.ReplyIssueCommentPlaceholderParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
Issue: issue,
Comment: &comment,
})
···
func (rp *Issues) ReplyIssueComment(w http.ResponseWriter, r *http.Request) {
l := rp.logger.With("handler", "ReplyIssueComment")
user := rp.oauth.GetUser(r)
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
issue, ok := r.Context().Value("issue").(*models.Issue)
if !ok {
···
commentId := chi.URLParam(r, "commentId")
comments, err := db.GetIssueComments(
rp.db,
-
db.FilterEq("id", commentId),
+
orm.FilterEq("id", commentId),
)
if err != nil {
l.Error("failed to fetch comment", "id", commentId)
···
rp.pages.ReplyIssueCommentFragment(w, pages.ReplyIssueCommentParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
Issue: issue,
Comment: &comment,
})
···
func (rp *Issues) DeleteIssueComment(w http.ResponseWriter, r *http.Request) {
l := rp.logger.With("handler", "DeleteIssueComment")
user := rp.oauth.GetUser(r)
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
issue, ok := r.Context().Value("issue").(*models.Issue)
if !ok {
···
commentId := chi.URLParam(r, "commentId")
comments, err := db.GetIssueComments(
rp.db,
-
db.FilterEq("id", commentId),
+
orm.FilterEq("id", commentId),
)
if err != nil {
l.Error("failed to fetch comment", "id", commentId)
···
// optimistic deletion
deleted := time.Now()
-
err = db.DeleteIssueComments(rp.db, db.FilterEq("id", comment.Id))
+
err = db.DeleteIssueComments(rp.db, orm.FilterEq("id", comment.Id))
if err != nil {
l.Error("failed to delete comment", "err", err)
rp.pages.Notice(w, fmt.Sprintf("comment-%s-status", commentId), "failed to delete comment")
···
// htmx fragment of comment after deletion
rp.pages.IssueCommentBodyFragment(w, pages.IssueCommentBodyParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
Issue: issue,
Comment: &comment,
})
···
isOpen = true
}
-
page, ok := r.Context().Value("page").(pagination.Page)
-
if !ok {
-
l.Error("failed to get page")
-
page = pagination.FirstPage()
-
}
+
page := pagination.FromContext(r.Context())
user := rp.oauth.GetUser(r)
f, err := rp.repoResolver.Resolve(r)
···
return
}
-
openVal := 0
+
totalIssues := 0
if isOpen {
-
openVal = 1
+
totalIssues = f.RepoStats.IssueCount.Open
+
} else {
+
totalIssues = f.RepoStats.IssueCount.Closed
+
}
+
+
keyword := params.Get("q")
+
+
var issues []models.Issue
+
searchOpts := models.IssueSearchOptions{
+
Keyword: keyword,
+
RepoAt: f.RepoAt().String(),
+
IsOpen: isOpen,
+
Page: page,
}
-
issues, err := db.GetIssuesPaginated(
-
rp.db,
-
page,
-
db.FilterEq("repo_at", f.RepoAt()),
-
db.FilterEq("open", openVal),
-
)
-
if err != nil {
-
l.Error("failed to get issues", "err", err)
-
rp.pages.Notice(w, "issues", "Failed to load issues. Try again later.")
-
return
+
if keyword != "" {
+
res, err := rp.indexer.Search(r.Context(), searchOpts)
+
if err != nil {
+
l.Error("failed to search for issues", "err", err)
+
return
+
}
+
l.Debug("searched issues with indexer", "count", len(res.Hits))
+
totalIssues = int(res.Total)
+
+
issues, err = db.GetIssues(
+
rp.db,
+
orm.FilterIn("id", res.Hits),
+
)
+
if err != nil {
+
l.Error("failed to get issues", "err", err)
+
rp.pages.Notice(w, "issues", "Failed to load issues. Try again later.")
+
return
+
}
+
+
} else {
+
openInt := 0
+
if isOpen {
+
openInt = 1
+
}
+
issues, err = db.GetIssuesPaginated(
+
rp.db,
+
page,
+
orm.FilterEq("repo_at", f.RepoAt()),
+
orm.FilterEq("open", openInt),
+
)
+
if err != nil {
+
l.Error("failed to get issues", "err", err)
+
rp.pages.Notice(w, "issues", "Failed to load issues. Try again later.")
+
return
+
}
}
labelDefs, err := db.GetLabelDefinitions(
rp.db,
-
db.FilterIn("at_uri", f.Repo.Labels),
-
db.FilterContains("scope", tangled.RepoIssueNSID),
+
orm.FilterIn("at_uri", f.Labels),
+
orm.FilterContains("scope", tangled.RepoIssueNSID),
)
if err != nil {
l.Error("failed to fetch labels", "err", err)
···
rp.pages.RepoIssues(w, pages.RepoIssuesParams{
LoggedInUser: rp.oauth.GetUser(r),
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
Issues: issues,
+
IssueCount: totalIssues,
LabelDefs: defs,
FilteringByOpen: isOpen,
+
FilterQuery: keyword,
Page: page,
})
}
···
case http.MethodGet:
rp.pages.RepoNewIssue(w, pages.RepoNewIssueParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
})
case http.MethodPost:
+
body := r.FormValue("body")
+
mentions, references := rp.mentionsResolver.Resolve(r.Context(), body)
+
issue := &models.Issue{
-
RepoAt: f.RepoAt(),
-
Rkey: tid.TID(),
-
Title: r.FormValue("title"),
-
Body: r.FormValue("body"),
-
Did: user.Did,
-
Created: time.Now(),
-
Repo: &f.Repo,
+
RepoAt: f.RepoAt(),
+
Rkey: tid.TID(),
+
Title: r.FormValue("title"),
+
Body: body,
+
Open: true,
+
Did: user.Did,
+
Created: time.Now(),
+
Mentions: mentions,
+
References: references,
+
Repo: f,
}
if err := rp.validator.ValidateIssue(issue); err != nil {
···
// everything is successful, do not rollback the atproto record
atUri = ""
-
rp.notifier.NewIssue(r.Context(), issue)
-
rp.pages.HxLocation(w, fmt.Sprintf("/%s/issues/%d", f.OwnerSlashRepo(), issue.IssueId))
+
+
rp.notifier.NewIssue(r.Context(), issue, mentions)
+
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, f)
+
rp.pages.HxLocation(w, fmt.Sprintf("/%s/issues/%d", ownerSlashRepo, issue.IssueId))
return
}
}
+8 -8
appview/issues/opengraph.go
···
var statusBgColor color.RGBA
if issue.Open {
-
statusIcon = "static/icons/circle-dot.svg"
+
statusIcon = "circle-dot"
statusText = "open"
statusBgColor = color.RGBA{34, 139, 34, 255} // green
} else {
-
statusIcon = "static/icons/circle-dot.svg"
+
statusIcon = "ban"
statusText = "closed"
statusBgColor = color.RGBA{52, 58, 64, 255} // dark gray
}
···
badgeIconSize := 36
// Draw icon with status color (no background)
-
err = statusCommentsArea.DrawSVGIcon(statusIcon, statsX, statsY+iconBaselineOffset-badgeIconSize/2+5, badgeIconSize, statusBgColor)
+
err = statusCommentsArea.DrawLucideIcon(statusIcon, statsX, statsY+iconBaselineOffset-badgeIconSize/2+5, badgeIconSize, statusBgColor)
if err != nil {
log.Printf("failed to draw status icon: %v", err)
}
···
currentX := statsX + badgeIconSize + 12 + statusTextWidth + 50
// Draw comment count
-
err = statusCommentsArea.DrawSVGIcon("static/icons/message-square.svg", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor)
+
err = statusCommentsArea.DrawLucideIcon("message-square", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor)
if err != nil {
log.Printf("failed to draw comment icon: %v", err)
}
···
dollyX := dollyBounds.Min.X + (dollyBounds.Dx() / 2) - (dollySize / 2)
dollyY := statsY + iconBaselineOffset - dollySize/2 + 25
dollyColor := color.RGBA{180, 180, 180, 255} // light gray
-
err = dollyArea.DrawSVGIcon("templates/fragments/dolly/silhouette.svg", dollyX, dollyY, dollySize, dollyColor)
+
err = dollyArea.DrawDollySilhouette(dollyX, dollyY, dollySize, dollyColor)
if err != nil {
log.Printf("dolly silhouette not available (this is ok): %v", err)
}
···
// Get owner handle for avatar
var ownerHandle string
-
owner, err := rp.idResolver.ResolveIdent(r.Context(), f.Repo.Did)
+
owner, err := rp.idResolver.ResolveIdent(r.Context(), f.Did)
if err != nil {
-
ownerHandle = f.Repo.Did
+
ownerHandle = f.Did
} else {
ownerHandle = "@" + owner.Handle.String()
}
-
card, err := rp.drawIssueSummaryCard(issue, &f.Repo, commentCount, ownerHandle)
+
card, err := rp.drawIssueSummaryCard(issue, f, commentCount, ownerHandle)
if err != nil {
log.Println("failed to draw issue summary card", err)
http.Error(w, "failed to draw issue summary card", http.StatusInternalServerError)
+46 -19
appview/knots/knots.go
···
"log/slog"
"net/http"
"slices"
+
"strings"
"time"
"github.com/go-chi/chi/v5"
···
"tangled.org/core/appview/xrpcclient"
"tangled.org/core/eventconsumer"
"tangled.org/core/idresolver"
+
"tangled.org/core/orm"
"tangled.org/core/rbac"
"tangled.org/core/tid"
···
Knotstream *eventconsumer.Consumer
}
+
type tab = map[string]any
+
+
var (
+
knotsTabs []tab = []tab{
+
{"Name": "profile", "Icon": "user"},
+
{"Name": "keys", "Icon": "key"},
+
{"Name": "emails", "Icon": "mail"},
+
{"Name": "notifications", "Icon": "bell"},
+
{"Name": "knots", "Icon": "volleyball"},
+
{"Name": "spindles", "Icon": "spool"},
+
}
+
)
+
func (k *Knots) Router() http.Handler {
r := chi.NewRouter()
···
user := k.OAuth.GetUser(r)
registrations, err := db.GetRegistrations(
k.Db,
-
db.FilterEq("did", user.Did),
+
orm.FilterEq("did", user.Did),
)
if err != nil {
k.Logger.Error("failed to fetch knot registrations", "err", err)
···
k.Pages.Knots(w, pages.KnotsParams{
LoggedInUser: user,
Registrations: registrations,
+
Tabs: knotsTabs,
+
Tab: "knots",
})
}
···
registrations, err := db.GetRegistrations(
k.Db,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("domain", domain),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("domain", domain),
)
if err != nil {
l.Error("failed to get registrations", "err", err)
···
repos, err := db.GetRepos(
k.Db,
0,
-
db.FilterEq("knot", domain),
+
orm.FilterEq("knot", domain),
)
if err != nil {
l.Error("failed to get knot repos", "err", err)
···
Members: members,
Repos: repoMap,
IsOwner: true,
+
Tabs: knotsTabs,
+
Tab: "knots",
})
}
···
}
domain := r.FormValue("domain")
+
// Strip protocol, trailing slashes, and whitespace
+
// Rkey cannot contain slashes
+
domain = strings.TrimSpace(domain)
+
domain = strings.TrimPrefix(domain, "https://")
+
domain = strings.TrimPrefix(domain, "http://")
+
domain = strings.TrimSuffix(domain, "/")
if domain == "" {
k.Pages.Notice(w, noticeId, "Incomplete form.")
return
···
// get record from db first
registrations, err := db.GetRegistrations(
k.Db,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("domain", domain),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("domain", domain),
)
if err != nil {
l.Error("failed to get registration", "err", err)
···
err = db.DeleteKnot(
tx,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("domain", domain),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("domain", domain),
)
if err != nil {
l.Error("failed to delete registration", "err", err)
···
// get record from db first
registrations, err := db.GetRegistrations(
k.Db,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("domain", domain),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("domain", domain),
)
if err != nil {
l.Error("failed to get registration", "err", err)
···
// Get updated registration to show
registrations, err = db.GetRegistrations(
k.Db,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("domain", domain),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("domain", domain),
)
if err != nil {
l.Error("failed to get registration", "err", err)
···
registrations, err := db.GetRegistrations(
k.Db,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("domain", domain),
-
db.FilterIsNot("registered", "null"),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("domain", domain),
+
orm.FilterIsNot("registered", "null"),
)
if err != nil {
l.Error("failed to get registration", "err", err)
···
}
member := r.FormValue("member")
+
member = strings.TrimPrefix(member, "@")
if member == "" {
l.Error("empty member")
k.Pages.Notice(w, noticeId, "Failed to add member, empty form.")
···
}
// success
-
k.Pages.HxRedirect(w, fmt.Sprintf("/knots/%s", domain))
+
k.Pages.HxRedirect(w, fmt.Sprintf("/settings/knots/%s", domain))
}
func (k *Knots) removeMember(w http.ResponseWriter, r *http.Request) {
···
registrations, err := db.GetRegistrations(
k.Db,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("domain", domain),
-
db.FilterIsNot("registered", "null"),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("domain", domain),
+
orm.FilterIsNot("registered", "null"),
)
if err != nil {
l.Error("failed to get registration", "err", err)
···
}
member := r.FormValue("member")
+
member = strings.TrimPrefix(member, "@")
if member == "" {
l.Error("empty member")
k.Pages.Notice(w, noticeId, "Failed to remove member, empty form.")
+6 -5
appview/labels/labels.go
···
"tangled.org/core/appview/oauth"
"tangled.org/core/appview/pages"
"tangled.org/core/appview/validator"
+
"tangled.org/core/orm"
"tangled.org/core/rbac"
"tangled.org/core/tid"
···
}
}
-
func (l *Labels) Router(mw *middleware.Middleware) http.Handler {
+
func (l *Labels) Router() http.Handler {
r := chi.NewRouter()
r.Use(middleware.AuthMiddleware(l.oauth))
···
repoAt := r.Form.Get("repo")
subjectUri := r.Form.Get("subject")
-
repo, err := db.GetRepo(l.db, db.FilterEq("at_uri", repoAt))
+
repo, err := db.GetRepo(l.db, orm.FilterEq("at_uri", repoAt))
if err != nil {
fail("Failed to get repository.", err)
return
}
// find all the labels that this repo subscribes to
-
repoLabels, err := db.GetRepoLabels(l.db, db.FilterEq("repo_at", repoAt))
+
repoLabels, err := db.GetRepoLabels(l.db, orm.FilterEq("repo_at", repoAt))
if err != nil {
fail("Failed to get labels for this repository.", err)
return
···
labelAts = append(labelAts, rl.LabelAt.String())
}
-
actx, err := db.NewLabelApplicationCtx(l.db, db.FilterIn("at_uri", labelAts))
+
actx, err := db.NewLabelApplicationCtx(l.db, orm.FilterIn("at_uri", labelAts))
if err != nil {
fail("Invalid form data.", err)
return
}
// calculate the start state by applying already known labels
-
existingOps, err := db.GetLabelOps(l.db, db.FilterEq("subject", subjectUri))
+
existingOps, err := db.GetLabelOps(l.db, orm.FilterEq("subject", subjectUri))
if err != nil {
fail("Invalid form data.", err)
return
+67
appview/mentions/resolver.go
···
+
package mentions
+
+
import (
+
"context"
+
"log/slog"
+
+
"github.com/bluesky-social/indigo/atproto/syntax"
+
"tangled.org/core/appview/config"
+
"tangled.org/core/appview/db"
+
"tangled.org/core/appview/models"
+
"tangled.org/core/appview/pages/markup"
+
"tangled.org/core/idresolver"
+
)
+
+
type Resolver struct {
+
config *config.Config
+
idResolver *idresolver.Resolver
+
execer db.Execer
+
logger *slog.Logger
+
}
+
+
func New(
+
config *config.Config,
+
idResolver *idresolver.Resolver,
+
execer db.Execer,
+
logger *slog.Logger,
+
) *Resolver {
+
return &Resolver{
+
config,
+
idResolver,
+
execer,
+
logger,
+
}
+
}
+
+
func (r *Resolver) Resolve(ctx context.Context, source string) ([]syntax.DID, []syntax.ATURI) {
+
l := r.logger.With("method", "Resolve")
+
+
rawMentions, rawRefs := markup.FindReferences(r.config.Core.AppviewHost, source)
+
l.Debug("found possible references", "mentions", rawMentions, "refs", rawRefs)
+
+
idents := r.idResolver.ResolveIdents(ctx, rawMentions)
+
var mentions []syntax.DID
+
for _, ident := range idents {
+
if ident != nil && !ident.Handle.IsInvalidHandle() {
+
mentions = append(mentions, ident.DID)
+
}
+
}
+
l.Debug("found mentions", "mentions", mentions)
+
+
var resolvedRefs []models.ReferenceLink
+
for _, rawRef := range rawRefs {
+
ident, err := r.idResolver.ResolveIdent(ctx, rawRef.Handle)
+
if err != nil || ident == nil || ident.Handle.IsInvalidHandle() {
+
continue
+
}
+
rawRef.Handle = string(ident.DID)
+
resolvedRefs = append(resolvedRefs, rawRef)
+
}
+
aturiRefs, err := db.ValidateReferenceLinks(r.execer, resolvedRefs)
+
if err != nil {
+
l.Error("failed running query", "err", err)
+
}
+
l.Debug("found references", "refs", aturiRefs)
+
+
return mentions, aturiRefs
+
}
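A hedged sketch of the call path, mirroring how the issue handlers use the resolver; it assumes *db.DB satisfies db.Execer, as it does where rp.db is passed to the db helpers above:

package example

import (
	"context"
	"log/slog"

	"github.com/bluesky-social/indigo/atproto/syntax"
	"tangled.org/core/appview/config"
	"tangled.org/core/appview/db"
	"tangled.org/core/appview/mentions"
	"tangled.org/core/idresolver"
)

// resolveBody extracts @mentions and issue/PR references from user-entered text.
func resolveBody(ctx context.Context, cfg *config.Config, ids *idresolver.Resolver, ddb *db.DB, body string) ([]syntax.DID, []syntax.ATURI) {
	r := mentions.New(cfg, ids, ddb, slog.Default())
	return r.Resolve(ctx, body)
}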
+16 -20
appview/middleware/middleware.go
···
"tangled.org/core/appview/pagination"
"tangled.org/core/appview/reporesolver"
"tangled.org/core/idresolver"
+
"tangled.org/core/orm"
"tangled.org/core/rbac"
)
···
}
}
-
ctx := context.WithValue(r.Context(), "page", page)
+
ctx := pagination.IntoContext(r.Context(), page)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
···
ok, err := mw.enforcer.E.Enforce(actor.Did, f.Knot, f.DidSlashRepo(), requiredPerm)
if err != nil || !ok {
// we need a logged in user
-
log.Printf("%s does not have perms of a %s in repo %s", actor.Did, requiredPerm, f.OwnerSlashRepo())
+
log.Printf("%s does not have perms of a %s in repo %s", actor.Did, requiredPerm, f.DidSlashRepo())
http.Error(w, "Forbiden", http.StatusUnauthorized)
return
}
···
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
didOrHandle := chi.URLParam(req, "user")
+
didOrHandle = strings.TrimPrefix(didOrHandle, "@")
+
if slices.Contains(excluded, didOrHandle) {
next.ServeHTTP(w, req)
return
}
-
-
didOrHandle = strings.TrimPrefix(didOrHandle, "@")
id, err := mw.idResolver.ResolveIdent(req.Context(), didOrHandle)
if err != nil {
···
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
repoName := chi.URLParam(req, "repo")
+
repoName = strings.TrimSuffix(repoName, ".git")
+
id, ok := req.Context().Value("resolvedId").(identity.Identity)
if !ok {
log.Println("malformed middleware")
···
repo, err := db.GetRepo(
mw.db,
-
db.FilterEq("did", id.DID.String()),
-
db.FilterEq("name", repoName),
+
orm.FilterEq("did", id.DID.String()),
+
orm.FilterEq("name", repoName),
)
if err != nil {
log.Println("failed to resolve repo", "err", err)
···
prId := chi.URLParam(r, "pull")
prIdInt, err := strconv.Atoi(prId)
if err != nil {
-
http.Error(w, "bad pr id", http.StatusBadRequest)
log.Println("failed to parse pr id", err)
+
mw.pages.Error404(w)
return
}
pr, err := db.GetPull(mw.db, f.RepoAt(), prIdInt)
if err != nil {
log.Println("failed to get pull and comments", err)
+
mw.pages.Error404(w)
return
}
···
issueId, err := strconv.Atoi(issueIdStr)
if err != nil {
log.Println("failed to fully resolve issue ID", err)
-
mw.pages.ErrorKnot404(w)
+
mw.pages.Error404(w)
return
}
-
issues, err := db.GetIssues(
-
mw.db,
-
db.FilterEq("repo_at", f.RepoAt()),
-
db.FilterEq("issue_id", issueId),
-
)
+
issue, err := db.GetIssue(mw.db, f.RepoAt(), issueId)
if err != nil {
log.Println("failed to get issues", "err", err)
-
return
-
}
-
if len(issues) != 1 {
-
log.Println("got incorrect number of issues", "len(issuse)", len(issues))
+
mw.pages.Error404(w)
return
}
-
issue := issues[0]
-
ctx := context.WithValue(r.Context(), "issue", &issue)
+
ctx := context.WithValue(r.Context(), "issue", issue)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
···
return
}
-
fullName := f.OwnerHandle() + "/" + f.Name
+
fullName := reporesolver.GetBaseRepoPath(r, f)
if r.Header.Get("User-Agent") == "Go-http-client/1.1" {
if r.URL.Query().Get("go-get") == "1" {
+70 -34
appview/models/issue.go
···
)
type Issue struct {
-
Id int64
-
Did string
-
Rkey string
-
RepoAt syntax.ATURI
-
IssueId int
-
Created time.Time
-
Edited *time.Time
-
Deleted *time.Time
-
Title string
-
Body string
-
Open bool
+
Id int64
+
Did string
+
Rkey string
+
RepoAt syntax.ATURI
+
IssueId int
+
Created time.Time
+
Edited *time.Time
+
Deleted *time.Time
+
Title string
+
Body string
+
Open bool
+
Mentions []syntax.DID
+
References []syntax.ATURI
// optionally, populate this when querying for reverse mappings
// like comment counts, parent repo etc.
···
}
func (i *Issue) AsRecord() tangled.RepoIssue {
+
mentions := make([]string, len(i.Mentions))
+
for i, did := range i.Mentions {
+
mentions[i] = string(did)
+
}
+
references := make([]string, len(i.References))
+
for i, uri := range i.References {
+
references[i] = string(uri)
+
}
return tangled.RepoIssue{
-
Repo: i.RepoAt.String(),
-
Title: i.Title,
-
Body: &i.Body,
-
CreatedAt: i.Created.Format(time.RFC3339),
+
Repo: i.RepoAt.String(),
+
Title: i.Title,
+
Body: &i.Body,
+
Mentions: mentions,
+
References: references,
+
CreatedAt: i.Created.Format(time.RFC3339),
}
}
···
}
type IssueComment struct {
-
Id int64
-
Did string
-
Rkey string
-
IssueAt string
-
ReplyTo *string
-
Body string
-
Created time.Time
-
Edited *time.Time
-
Deleted *time.Time
+
Id int64
+
Did string
+
Rkey string
+
IssueAt string
+
ReplyTo *string
+
Body string
+
Created time.Time
+
Edited *time.Time
+
Deleted *time.Time
+
Mentions []syntax.DID
+
References []syntax.ATURI
}
func (i *IssueComment) AtUri() syntax.ATURI {
···
}
func (i *IssueComment) AsRecord() tangled.RepoIssueComment {
+
mentions := make([]string, len(i.Mentions))
+
for i, did := range i.Mentions {
+
mentions[i] = string(did)
+
}
+
references := make([]string, len(i.References))
+
for i, uri := range i.References {
+
references[i] = string(uri)
+
}
return tangled.RepoIssueComment{
-
Body: i.Body,
-
Issue: i.IssueAt,
-
CreatedAt: i.Created.Format(time.RFC3339),
-
ReplyTo: i.ReplyTo,
+
Body: i.Body,
+
Issue: i.IssueAt,
+
CreatedAt: i.Created.Format(time.RFC3339),
+
ReplyTo: i.ReplyTo,
+
Mentions: mentions,
+
References: references,
}
}
···
return nil, err
}
+
mentions := make([]syntax.DID, len(record.Mentions))
+
for i, did := range record.Mentions {
+
mentions[i] = syntax.DID(did)
+
}
+
references := make([]syntax.ATURI, len(record.References))
+
for i, uri := range record.References {
+
references[i] = syntax.ATURI(uri)
+
}
+
comment := IssueComment{
-
Did: ownerDid,
-
Rkey: rkey,
-
Body: record.Body,
-
IssueAt: record.Issue,
-
ReplyTo: record.ReplyTo,
-
Created: created,
+
Did: ownerDid,
+
Rkey: rkey,
+
Body: record.Body,
+
IssueAt: record.Issue,
+
ReplyTo: record.ReplyTo,
+
Created: created,
+
Mentions: mentions,
+
References: references,
}
return &comment, nil
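The new Mentions and References fields travel through AsRecord as plain strings. A minimal sketch of that conversion, assuming the syntax/time imports already used in this file; all field values are illustrative, not taken from real records:

```go
issue := models.Issue{
	RepoAt:     syntax.ATURI("at://did:plc:alice/sh.tangled.repo/cool-proj"), // illustrative
	Title:      "broken build",
	Body:       "see the referenced pull",
	Created:    time.Now(),
	Open:       true,
	Mentions:   []syntax.DID{"did:plc:bob"},
	References: []syntax.ATURI{"at://did:plc:alice/sh.tangled.repo.pull/3kabc"},
}
rec := issue.AsRecord() // rec.Mentions and rec.References are []string copies
_ = rec
```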
+25 -43
appview/models/label.go
···
"github.com/bluesky-social/indigo/atproto/syntax"
"github.com/bluesky-social/indigo/xrpc"
"tangled.org/core/api/tangled"
-
"tangled.org/core/consts"
"tangled.org/core/idresolver"
)
···
return result
}
-
var (
-
LabelWontfix = fmt.Sprintf("at://%s/%s/%s", consts.TangledDid, tangled.LabelDefinitionNSID, "wontfix")
-
LabelDuplicate = fmt.Sprintf("at://%s/%s/%s", consts.TangledDid, tangled.LabelDefinitionNSID, "duplicate")
-
LabelAssignee = fmt.Sprintf("at://%s/%s/%s", consts.TangledDid, tangled.LabelDefinitionNSID, "assignee")
-
LabelGoodFirstIssue = fmt.Sprintf("at://%s/%s/%s", consts.TangledDid, tangled.LabelDefinitionNSID, "good-first-issue")
-
LabelDocumentation = fmt.Sprintf("at://%s/%s/%s", consts.TangledDid, tangled.LabelDefinitionNSID, "documentation")
-
)
+
func FetchLabelDefs(r *idresolver.Resolver, aturis []string) ([]LabelDefinition, error) {
+
var labelDefs []LabelDefinition
+
ctx := context.Background()
-
func DefaultLabelDefs() []string {
-
return []string{
-
LabelWontfix,
-
LabelDuplicate,
-
LabelAssignee,
-
LabelGoodFirstIssue,
-
LabelDocumentation,
-
}
-
}
+
for _, dl := range aturis {
+
atUri, err := syntax.ParseATURI(dl)
+
if err != nil {
+
return nil, fmt.Errorf("failed to parse AT-URI %s: %v", dl, err)
+
}
+
if atUri.Collection() != tangled.LabelDefinitionNSID {
+
return nil, fmt.Errorf("expected AT-URI pointing %s collection: %s", tangled.LabelDefinitionNSID, atUri)
+
}
-
func FetchDefaultDefs(r *idresolver.Resolver) ([]LabelDefinition, error) {
-
resolved, err := r.ResolveIdent(context.Background(), consts.TangledDid)
-
if err != nil {
-
return nil, fmt.Errorf("failed to resolve tangled.sh DID %s: %v", consts.TangledDid, err)
-
}
-
pdsEndpoint := resolved.PDSEndpoint()
-
if pdsEndpoint == "" {
-
return nil, fmt.Errorf("no PDS endpoint found for tangled.sh DID %s", consts.TangledDid)
-
}
-
client := &xrpc.Client{
-
Host: pdsEndpoint,
-
}
+
owner, err := r.ResolveIdent(ctx, atUri.Authority().String())
+
if err != nil {
+
return nil, fmt.Errorf("failed to resolve default label owner DID %s: %v", atUri.Authority(), err)
+
}
-
var labelDefs []LabelDefinition
+
xrpcc := xrpc.Client{
+
Host: owner.PDSEndpoint(),
+
}
-
for _, dl := range DefaultLabelDefs() {
-
atUri := syntax.ATURI(dl)
-
parsedUri, err := syntax.ParseATURI(string(atUri))
-
if err != nil {
-
return nil, fmt.Errorf("failed to parse AT-URI %s: %v", atUri, err)
-
}
record, err := atproto.RepoGetRecord(
-
context.Background(),
-
client,
+
ctx,
+
&xrpcc,
"",
-
parsedUri.Collection().String(),
-
parsedUri.Authority().String(),
-
parsedUri.RecordKey().String(),
+
atUri.Collection().String(),
+
atUri.Authority().String(),
+
atUri.RecordKey().String(),
)
if err != nil {
return nil, fmt.Errorf("failed to get record for %s: %v", atUri, err)
···
}
labelDef, err := LabelDefinitionFromRecord(
-
parsedUri.Authority().String(),
-
parsedUri.RecordKey().String(),
+
atUri.Authority().String(),
+
atUri.RecordKey().String(),
labelRecord,
)
if err != nil {
+17
appview/models/notifications.go
···
NotificationTypeFollowed NotificationType = "followed"
NotificationTypePullMerged NotificationType = "pull_merged"
NotificationTypeIssueClosed NotificationType = "issue_closed"
+
NotificationTypeIssueReopen NotificationType = "issue_reopen"
NotificationTypePullClosed NotificationType = "pull_closed"
+
NotificationTypePullReopen NotificationType = "pull_reopen"
+
NotificationTypeUserMentioned NotificationType = "user_mentioned"
)
type Notification struct {
···
return "message-square"
case NotificationTypeIssueClosed:
return "ban"
+
case NotificationTypeIssueReopen:
+
return "circle-dot"
case NotificationTypePullCreated:
return "git-pull-request-create"
case NotificationTypePullCommented:
···
return "git-merge"
case NotificationTypePullClosed:
return "git-pull-request-closed"
+
case NotificationTypePullReopen:
+
return "git-pull-request-create"
case NotificationTypeFollowed:
return "user-plus"
+
case NotificationTypeUserMentioned:
+
return "at-sign"
default:
return ""
}
···
PullCreated bool
PullCommented bool
Followed bool
+
UserMentioned bool
PullMerged bool
IssueClosed bool
EmailNotifications bool
···
return prefs.IssueCommented
case NotificationTypeIssueClosed:
return prefs.IssueClosed
+
case NotificationTypeIssueReopen:
+
return prefs.IssueCreated // same pref for now
case NotificationTypePullCreated:
return prefs.PullCreated
case NotificationTypePullCommented:
···
return prefs.PullMerged
case NotificationTypePullClosed:
return prefs.PullMerged // same pref for now
+
case NotificationTypePullReopen:
+
return prefs.PullCreated // same pref for now
case NotificationTypeFollowed:
return prefs.Followed
+
case NotificationTypeUserMentioned:
+
return prefs.UserMentioned
default:
return false
}
···
PullCreated: true,
PullCommented: true,
Followed: true,
+
UserMentioned: true,
PullMerged: true,
IssueClosed: true,
EmailNotifications: false,
+4 -1
appview/models/profile.go
···
Links [5]string
Stats [2]VanityStat
PinnedRepos [6]syntax.ATURI
+
Pronouns string
}
func (p Profile) IsLinksEmpty() bool {
···
}
type ByMonth struct {
+
Commits int
RepoEvents []RepoEvent
IssueEvents IssueEvents
PullEvents PullEvents
···
func (b ByMonth) IsEmpty() bool {
return len(b.RepoEvents) == 0 &&
len(b.IssueEvents.Items) == 0 &&
-
len(b.PullEvents.Items) == 0
+
len(b.PullEvents.Items) == 0 &&
+
b.Commits == 0
}
type IssueEvents struct {
+42 -4
appview/models/pull.go
···
TargetBranch string
State PullState
Submissions []*PullSubmission
+
Mentions []syntax.DID
+
References []syntax.ATURI
// stacking
StackId string // nullable string
···
source.Repo = &s
}
}
+
mentions := make([]string, len(p.Mentions))
+
for i, did := range p.Mentions {
+
mentions[i] = string(did)
+
}
+
references := make([]string, len(p.References))
+
for i, uri := range p.References {
+
references[i] = string(uri)
+
}
record := tangled.RepoPull{
-
Title: p.Title,
-
Body: &p.Body,
-
CreatedAt: p.Created.Format(time.RFC3339),
+
Title: p.Title,
+
Body: &p.Body,
+
Mentions: mentions,
+
References: references,
+
CreatedAt: p.Created.Format(time.RFC3339),
Target: &tangled.RepoPull_Target{
Repo: p.RepoAt.String(),
Branch: p.TargetBranch,
···
Body string
// meta
+
Mentions []syntax.DID
+
References []syntax.ATURI
+
+
// meta
Created time.Time
}
+
func (p *PullComment) AtUri() syntax.ATURI {
+
return syntax.ATURI(p.CommentAt)
+
}
+
+
// func (p *PullComment) AsRecord() tangled.RepoPullComment {
+
// mentions := make([]string, len(p.Mentions))
+
// for i, did := range p.Mentions {
+
// mentions[i] = string(did)
+
// }
+
// references := make([]string, len(p.References))
+
// for i, uri := range p.References {
+
// references[i] = string(uri)
+
// }
+
// return tangled.RepoPullComment{
+
// Pull: p.PullAt,
+
// Body: p.Body,
+
// Mentions: mentions,
+
// References: references,
+
// CreatedAt: p.Created.Format(time.RFC3339),
+
// }
+
// }
+
func (p *Pull) LastRoundNumber() int {
return len(p.Submissions) - 1
}
···
return p.LatestSubmission().SourceRev
}
-
func (p *Pull) PullAt() syntax.ATURI {
+
func (p *Pull) AtUri() syntax.ATURI {
return syntax.ATURI(fmt.Sprintf("at://%s/%s/%s", p.OwnerDid, tangled.RepoPullNSID, p.Rkey))
}
+49
appview/models/reference.go
···
+
package models
+
+
import "fmt"
+
+
type RefKind int
+
+
const (
+
RefKindIssue RefKind = iota
+
RefKindPull
+
)
+
+
func (k RefKind) String() string {
+
if k == RefKindIssue {
+
return "issues"
+
} else {
+
return "pulls"
+
}
+
}
+
+
// /@alice.com/cool-proj/issues/123
+
// /@alice.com/cool-proj/issues/123#comment-321
+
type ReferenceLink struct {
+
Handle string
+
Repo string
+
Kind RefKind
+
SubjectId int
+
CommentId *int
+
}
+
+
func (l ReferenceLink) String() string {
+
comment := ""
+
if l.CommentId != nil {
+
comment = fmt.Sprintf("#comment-%d", *l.CommentId)
+
}
+
return fmt.Sprintf("/%s/%s/%s/%d%s",
+
l.Handle,
+
l.Repo,
+
l.Kind.String(),
+
l.SubjectId,
+
comment,
+
)
+
}
+
+
type RichReferenceLink struct {
+
ReferenceLink
+
Title string
+
// reusing PullState for both issue & PR
+
State PullState
+
}
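ReferenceLink only assembles path segments, so its output is easy to check by hand. A small sketch using the handle and repo from the comment above:

```go
commentId := 321
link := models.ReferenceLink{
	Handle:    "@alice.com",
	Repo:      "cool-proj",
	Kind:      models.RefKindIssue,
	SubjectId: 123,
	CommentId: &commentId,
}
// link.String() == "/@alice.com/cool-proj/issues/123#comment-321"
```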
+61 -1
appview/models/repo.go
···
import (
"fmt"
+
"strings"
"time"
"github.com/bluesky-social/indigo/atproto/syntax"
···
Rkey string
Created time.Time
Description string
+
Website string
+
Topics []string
Spindle string
Labels []string
···
}
func (r *Repo) AsRecord() tangled.Repo {
-
var source, spindle, description *string
+
var source, spindle, description, website *string
if r.Source != "" {
source = &r.Source
···
description = &r.Description
}
+
if r.Website != "" {
+
website = &r.Website
+
}
+
return tangled.Repo{
Knot: r.Knot,
Name: r.Name,
Description: description,
+
Website: website,
+
Topics: r.Topics,
CreatedAt: r.Created.Format(time.RFC3339),
Source: source,
Spindle: spindle,
···
func (r Repo) DidSlashRepo() string {
p, _ := securejoin.SecureJoin(r.Did, r.Name)
return p
+
}
+
+
func (r Repo) TopicStr() string {
+
return strings.Join(r.Topics, " ")
}
type RepoStats struct {
···
Repo *Repo
Issues []Issue
}
+
+
type BlobContentType int
+
+
const (
+
BlobContentTypeCode BlobContentType = iota
+
BlobContentTypeMarkup
+
BlobContentTypeImage
+
BlobContentTypeSvg
+
BlobContentTypeVideo
+
BlobContentTypeSubmodule
+
)
+
+
func (ty BlobContentType) IsCode() bool { return ty == BlobContentTypeCode }
+
func (ty BlobContentType) IsMarkup() bool { return ty == BlobContentTypeMarkup }
+
func (ty BlobContentType) IsImage() bool { return ty == BlobContentTypeImage }
+
func (ty BlobContentType) IsSvg() bool { return ty == BlobContentTypeSvg }
+
func (ty BlobContentType) IsVideo() bool { return ty == BlobContentTypeVideo }
+
func (ty BlobContentType) IsSubmodule() bool { return ty == BlobContentTypeSubmodule }
+
+
type BlobView struct {
+
HasTextView bool // can show as code/text
+
HasRenderedView bool // can show rendered (markup/image/video/submodule)
+
HasRawView bool // can download raw (everything except submodule)
+
+
// current display mode
+
ShowingRendered bool // currently in rendered mode
+
ShowingText bool // currently in text/code mode
+
+
// content type flags
+
ContentType BlobContentType
+
+
// Content data
+
Contents string
+
ContentSrc string // URL for media files
+
Lines int
+
SizeHint uint64
+
}
+
+
// if both views are available, then show a toggle between them
+
func (b BlobView) ShowToggle() bool {
+
return b.HasTextView && b.HasRenderedView
+
}
+
+
func (b BlobView) IsUnsupported() bool {
+
// no view available, only raw
+
return !(b.HasRenderedView || b.HasTextView)
+
}
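The BlobView flags drive the blob page: ShowToggle is true only when both a text and a rendered view exist, and IsUnsupported signals that only the raw download makes sense. A sketch with made-up values:

```go
// A markdown file: viewable both as code and as rendered markup.
readme := models.BlobView{
	HasTextView:     true,
	HasRenderedView: true,
	HasRawView:      true,
	ContentType:     models.BlobContentTypeMarkup,
	ShowingRendered: true,
}
// readme.ShowToggle() == true, readme.IsUnsupported() == false

// An unrecognized binary: no text or rendered view, raw download only.
binary := models.BlobView{HasRawView: true}
// binary.ShowToggle() == false, binary.IsUnsupported() == true
```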
+31
appview/models/search.go
···
+
package models
+
+
import "tangled.org/core/appview/pagination"
+
+
type IssueSearchOptions struct {
+
Keyword string
+
RepoAt string
+
IsOpen bool
+
+
Page pagination.Page
+
}
+
+
type PullSearchOptions struct {
+
Keyword string
+
RepoAt string
+
State PullState
+
+
Page pagination.Page
+
}
+
+
// func (so *SearchOptions) ToFilters() []filter {
+
// var filters []filter
+
// if so.IsOpen != nil {
+
// openValue := 0
+
// if *so.IsOpen {
+
// openValue = 1
+
// }
+
// filters = append(filters, FilterEq("open", openValue))
+
// }
+
// return filters
+
// }
+14 -5
appview/models/star.go
···
)
type Star struct {
-
StarredByDid string
-
RepoAt syntax.ATURI
-
Created time.Time
-
Rkey string
+
Did string
+
RepoAt syntax.ATURI
+
Created time.Time
+
Rkey string
+
}
-
// optionally, populate this when querying for reverse mappings
+
// RepoStar is used for reverse mapping to repos
+
type RepoStar struct {
+
Star
Repo *Repo
}
+
+
// StringStar is used for reverse mapping to strings
+
type StringStar struct {
+
Star
+
String *String
+
}
+1 -1
appview/models/string.go
···
Edited *time.Time
}
-
func (s *String) StringAt() syntax.ATURI {
+
func (s *String) AtUri() syntax.ATURI {
return syntax.ATURI(fmt.Sprintf("at://%s/%s/%s", s.Did, tangled.StringNSID, s.Rkey))
}
+1 -1
appview/models/timeline.go
···
type TimelineEvent struct {
*Repo
*Follow
-
*Star
+
*RepoStar
EventAt time.Time
+6 -9
appview/notifications/notifications.go
···
"tangled.org/core/appview/oauth"
"tangled.org/core/appview/pages"
"tangled.org/core/appview/pagination"
+
"tangled.org/core/orm"
)
type Notifications struct {
···
l := n.logger.With("handler", "notificationsPage")
user := n.oauth.GetUser(r)
-
page, ok := r.Context().Value("page").(pagination.Page)
-
if !ok {
-
l.Error("failed to get page")
-
page = pagination.FirstPage()
-
}
+
page := pagination.FromContext(r.Context())
total, err := db.CountNotifications(
n.db,
-
db.FilterEq("recipient_did", user.Did),
+
orm.FilterEq("recipient_did", user.Did),
)
if err != nil {
l.Error("failed to get total notifications", "err", err)
···
notifications, err := db.GetNotificationsWithEntities(
n.db,
page,
-
db.FilterEq("recipient_did", user.Did),
+
orm.FilterEq("recipient_did", user.Did),
)
if err != nil {
l.Error("failed to get notifications", "err", err)
···
count, err := db.CountNotifications(
n.db,
-
db.FilterEq("recipient_did", user.Did),
-
db.FilterEq("read", 0),
+
orm.FilterEq("recipient_did", user.Did),
+
orm.FilterEq("read", 0),
)
if err != nil {
http.Error(w, "Failed to get unread count", http.StatusInternalServerError)
+145 -121
appview/notify/db/db.go
···
import (
"context"
"log"
-
"maps"
"slices"
"github.com/bluesky-social/indigo/atproto/syntax"
+
"tangled.org/core/api/tangled"
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
"tangled.org/core/appview/notify"
"tangled.org/core/idresolver"
+
"tangled.org/core/orm"
+
"tangled.org/core/sets"
+
)
+
+
const (
+
maxMentions = 8
)
type databaseNotifier struct {
···
}
func (n *databaseNotifier) NewStar(ctx context.Context, star *models.Star) {
+
if star.RepoAt.Collection().String() != tangled.RepoNSID {
+
// skip string stars for now
+
return
+
}
var err error
-
repo, err := db.GetRepo(n.db, db.FilterEq("at_uri", string(star.RepoAt)))
+
repo, err := db.GetRepo(n.db, orm.FilterEq("at_uri", string(star.RepoAt)))
if err != nil {
log.Printf("NewStar: failed to get repos: %v", err)
return
}
-
actorDid := syntax.DID(star.StarredByDid)
-
recipients := []syntax.DID{syntax.DID(repo.Did)}
+
actorDid := syntax.DID(star.Did)
+
recipients := sets.Singleton(syntax.DID(repo.Did))
eventType := models.NotificationTypeRepoStarred
entityType := "repo"
entityId := star.RepoAt.String()
···
// no-op
}
-
func (n *databaseNotifier) NewIssue(ctx context.Context, issue *models.Issue) {
+
func (n *databaseNotifier) NewIssue(ctx context.Context, issue *models.Issue, mentions []syntax.DID) {
+
collaborators, err := db.GetCollaborators(n.db, orm.FilterEq("repo_at", issue.Repo.RepoAt()))
+
if err != nil {
+
log.Printf("failed to fetch collaborators: %v", err)
+
return
+
}
// build the recipients list
// - owner of the repo
// - collaborators in the repo
-
var recipients []syntax.DID
-
recipients = append(recipients, syntax.DID(issue.Repo.Did))
-
collaborators, err := db.GetCollaborators(n.db, db.FilterEq("repo_at", issue.Repo.RepoAt()))
-
if err != nil {
-
log.Printf("failed to fetch collaborators: %v", err)
-
return
+
// - remove users already mentioned
+
recipients := sets.Singleton(syntax.DID(issue.Repo.Did))
+
for _, c := range collaborators {
+
recipients.Insert(c.SubjectDid)
}
-
for _, c := range collaborators {
-
recipients = append(recipients, c.SubjectDid)
+
for _, m := range mentions {
+
recipients.Remove(m)
}
actorDid := syntax.DID(issue.Did)
-
eventType := models.NotificationTypeIssueCreated
entityType := "issue"
entityId := issue.AtUri().String()
repoId := &issue.Repo.Id
···
n.notifyEvent(
actorDid,
recipients,
-
eventType,
+
models.NotificationTypeIssueCreated,
+
entityType,
+
entityId,
+
repoId,
+
issueId,
+
pullId,
+
)
+
n.notifyEvent(
+
actorDid,
+
sets.Collect(slices.Values(mentions)),
+
models.NotificationTypeUserMentioned,
entityType,
entityId,
repoId,
···
)
}
-
func (n *databaseNotifier) NewIssueComment(ctx context.Context, comment *models.IssueComment) {
-
issues, err := db.GetIssues(n.db, db.FilterEq("at_uri", comment.IssueAt))
+
func (n *databaseNotifier) NewIssueComment(ctx context.Context, comment *models.IssueComment, mentions []syntax.DID) {
+
issues, err := db.GetIssues(n.db, orm.FilterEq("at_uri", comment.IssueAt))
if err != nil {
log.Printf("NewIssueComment: failed to get issues: %v", err)
return
···
}
issue := issues[0]
-
var recipients []syntax.DID
-
recipients = append(recipients, syntax.DID(issue.Repo.Did))
+
// build the recipients list:
+
// - the owner of the repo
+
// - | if the comment is a reply -> everybody on that thread
+
// | if the comment is a top level -> just the issue owner
+
// - remove mentioned users from the recipients list
+
recipients := sets.Singleton(syntax.DID(issue.Repo.Did))
if comment.IsReply() {
// if this comment is a reply, then notify everybody in that thread
parentAtUri := *comment.ReplyTo
-
allThreads := issue.CommentList()
// find the parent thread, and add all DIDs from here to the recipient list
-
for _, t := range allThreads {
+
for _, t := range issue.CommentList() {
if t.Self.AtUri().String() == parentAtUri {
-
recipients = append(recipients, t.Participants()...)
+
for _, p := range t.Participants() {
+
recipients.Insert(p)
+
}
}
}
} else {
// not a reply, notify just the issue author
-
recipients = append(recipients, syntax.DID(issue.Did))
+
recipients.Insert(syntax.DID(issue.Did))
+
}
+
+
for _, m := range mentions {
+
recipients.Remove(m)
}
actorDid := syntax.DID(comment.Did)
-
eventType := models.NotificationTypeIssueCommented
entityType := "issue"
entityId := issue.AtUri().String()
repoId := &issue.Repo.Id
···
n.notifyEvent(
actorDid,
recipients,
-
eventType,
+
models.NotificationTypeIssueCommented,
+
entityType,
+
entityId,
+
repoId,
+
issueId,
+
pullId,
+
)
+
n.notifyEvent(
+
actorDid,
+
sets.Collect(slices.Values(mentions)),
+
models.NotificationTypeUserMentioned,
entityType,
entityId,
repoId,
···
)
}
+
func (n *databaseNotifier) DeleteIssue(ctx context.Context, issue *models.Issue) {
+
// no-op for now
+
}
+
func (n *databaseNotifier) NewFollow(ctx context.Context, follow *models.Follow) {
actorDid := syntax.DID(follow.UserDid)
-
recipients := []syntax.DID{syntax.DID(follow.SubjectDid)}
+
recipients := sets.Singleton(syntax.DID(follow.SubjectDid))
eventType := models.NotificationTypeFollowed
entityType := "follow"
entityId := follow.UserDid
···
}
func (n *databaseNotifier) NewPull(ctx context.Context, pull *models.Pull) {
-
repo, err := db.GetRepo(n.db, db.FilterEq("at_uri", string(pull.RepoAt)))
+
repo, err := db.GetRepo(n.db, orm.FilterEq("at_uri", string(pull.RepoAt)))
if err != nil {
log.Printf("NewPull: failed to get repos: %v", err)
return
}
-
-
// build the recipients list
-
// - owner of the repo
-
// - collaborators in the repo
-
var recipients []syntax.DID
-
recipients = append(recipients, syntax.DID(repo.Did))
-
collaborators, err := db.GetCollaborators(n.db, db.FilterEq("repo_at", repo.RepoAt()))
+
collaborators, err := db.GetCollaborators(n.db, orm.FilterEq("repo_at", repo.RepoAt()))
if err != nil {
log.Printf("failed to fetch collaborators: %v", err)
return
}
+
+
// build the recipients list
+
// - owner of the repo
+
// - collaborators in the repo
+
recipients := sets.Singleton(syntax.DID(repo.Did))
for _, c := range collaborators {
-
recipients = append(recipients, c.SubjectDid)
+
recipients.Insert(c.SubjectDid)
}
actorDid := syntax.DID(pull.OwnerDid)
eventType := models.NotificationTypePullCreated
entityType := "pull"
-
entityId := pull.PullAt().String()
+
entityId := pull.AtUri().String()
repoId := &repo.Id
var issueId *int64
p := int64(pull.ID)
···
)
}
-
func (n *databaseNotifier) NewPullComment(ctx context.Context, comment *models.PullComment) {
+
func (n *databaseNotifier) NewPullComment(ctx context.Context, comment *models.PullComment, mentions []syntax.DID) {
pull, err := db.GetPull(n.db,
syntax.ATURI(comment.RepoAt),
comment.PullId,
···
return
}
-
repo, err := db.GetRepo(n.db, db.FilterEq("at_uri", comment.RepoAt))
+
repo, err := db.GetRepo(n.db, orm.FilterEq("at_uri", comment.RepoAt))
if err != nil {
log.Printf("NewPullComment: failed to get repos: %v", err)
return
···
// build up the recipients list:
// - repo owner
// - all pull participants
-
var recipients []syntax.DID
-
recipients = append(recipients, syntax.DID(repo.Did))
+
// - remove those already mentioned
+
recipients := sets.Singleton(syntax.DID(repo.Did))
for _, p := range pull.Participants() {
-
recipients = append(recipients, syntax.DID(p))
+
recipients.Insert(syntax.DID(p))
+
}
+
for _, m := range mentions {
+
recipients.Remove(m)
}
actorDid := syntax.DID(comment.OwnerDid)
eventType := models.NotificationTypePullCommented
entityType := "pull"
-
entityId := pull.PullAt().String()
+
entityId := pull.AtUri().String()
repoId := &repo.Id
var issueId *int64
p := int64(pull.ID)
···
issueId,
pullId,
)
+
n.notifyEvent(
+
actorDid,
+
sets.Collect(slices.Values(mentions)),
+
models.NotificationTypeUserMentioned,
+
entityType,
+
entityId,
+
repoId,
+
issueId,
+
pullId,
+
)
}
func (n *databaseNotifier) UpdateProfile(ctx context.Context, profile *models.Profile) {
···
// no-op
}
-
func (n *databaseNotifier) NewIssueClosed(ctx context.Context, issue *models.Issue) {
-
// build up the recipients list:
-
// - repo owner
-
// - repo collaborators
-
// - all issue participants
-
var recipients []syntax.DID
-
recipients = append(recipients, syntax.DID(issue.Repo.Did))
-
collaborators, err := db.GetCollaborators(n.db, db.FilterEq("repo_at", issue.Repo.RepoAt()))
+
func (n *databaseNotifier) NewIssueState(ctx context.Context, actor syntax.DID, issue *models.Issue) {
+
collaborators, err := db.GetCollaborators(n.db, orm.FilterEq("repo_at", issue.Repo.RepoAt()))
if err != nil {
log.Printf("failed to fetch collaborators: %v", err)
return
}
+
+
// build up the recipients list:
+
// - repo owner
+
// - repo collaborators
+
// - all issue participants
+
recipients := sets.Singleton(syntax.DID(issue.Repo.Did))
for _, c := range collaborators {
-
recipients = append(recipients, c.SubjectDid)
+
recipients.Insert(c.SubjectDid)
}
for _, p := range issue.Participants() {
-
recipients = append(recipients, syntax.DID(p))
+
recipients.Insert(syntax.DID(p))
}
-
actorDid := syntax.DID(issue.Repo.Did)
-
eventType := models.NotificationTypeIssueClosed
entityType := "pull"
entityId := issue.AtUri().String()
repoId := &issue.Repo.Id
issueId := &issue.Id
var pullId *int64
+
var eventType models.NotificationType
+
+
if issue.Open {
+
eventType = models.NotificationTypeIssueReopen
+
} else {
+
eventType = models.NotificationTypeIssueClosed
+
}
n.notifyEvent(
-
actorDid,
+
actor,
recipients,
eventType,
entityType,
···
)
}
-
func (n *databaseNotifier) NewPullMerged(ctx context.Context, pull *models.Pull) {
+
func (n *databaseNotifier) NewPullState(ctx context.Context, actor syntax.DID, pull *models.Pull) {
// Get repo details
-
repo, err := db.GetRepo(n.db, db.FilterEq("at_uri", string(pull.RepoAt)))
+
repo, err := db.GetRepo(n.db, orm.FilterEq("at_uri", string(pull.RepoAt)))
if err != nil {
-
log.Printf("NewPullMerged: failed to get repos: %v", err)
+
log.Printf("NewPullState: failed to get repos: %v", err)
return
}
-
// build up the recipients list:
-
// - repo owner
-
// - all pull participants
-
var recipients []syntax.DID
-
recipients = append(recipients, syntax.DID(repo.Did))
-
collaborators, err := db.GetCollaborators(n.db, db.FilterEq("repo_at", repo.RepoAt()))
+
collaborators, err := db.GetCollaborators(n.db, orm.FilterEq("repo_at", repo.RepoAt()))
if err != nil {
log.Printf("failed to fetch collaborators: %v", err)
return
}
-
for _, c := range collaborators {
-
recipients = append(recipients, c.SubjectDid)
-
}
-
for _, p := range pull.Participants() {
-
recipients = append(recipients, syntax.DID(p))
-
}
-
-
actorDid := syntax.DID(repo.Did)
-
eventType := models.NotificationTypePullMerged
-
entityType := "pull"
-
entityId := pull.PullAt().String()
-
repoId := &repo.Id
-
var issueId *int64
-
p := int64(pull.ID)
-
pullId := &p
-
-
n.notifyEvent(
-
actorDid,
-
recipients,
-
eventType,
-
entityType,
-
entityId,
-
repoId,
-
issueId,
-
pullId,
-
)
-
}
-
-
func (n *databaseNotifier) NewPullClosed(ctx context.Context, pull *models.Pull) {
-
// Get repo details
-
repo, err := db.GetRepo(n.db, db.FilterEq("at_uri", string(pull.RepoAt)))
-
if err != nil {
-
log.Printf("NewPullMerged: failed to get repos: %v", err)
-
return
-
}
// build up the recipients list:
// - repo owner
// - all pull participants
-
var recipients []syntax.DID
-
recipients = append(recipients, syntax.DID(repo.Did))
-
collaborators, err := db.GetCollaborators(n.db, db.FilterEq("repo_at", repo.RepoAt()))
-
if err != nil {
-
log.Printf("failed to fetch collaborators: %v", err)
-
return
-
}
+
recipients := sets.Singleton(syntax.DID(repo.Did))
for _, c := range collaborators {
-
recipients = append(recipients, c.SubjectDid)
+
recipients.Insert(c.SubjectDid)
}
for _, p := range pull.Participants() {
-
recipients = append(recipients, syntax.DID(p))
+
recipients.Insert(syntax.DID(p))
}
-
actorDid := syntax.DID(repo.Did)
-
eventType := models.NotificationTypePullClosed
entityType := "pull"
-
entityId := pull.PullAt().String()
+
entityId := pull.AtUri().String()
repoId := &repo.Id
var issueId *int64
+
var eventType models.NotificationType
+
switch pull.State {
+
case models.PullClosed:
+
eventType = models.NotificationTypePullClosed
+
case models.PullOpen:
+
eventType = models.NotificationTypePullReopen
+
case models.PullMerged:
+
eventType = models.NotificationTypePullMerged
+
default:
+
log.Println("NewPullState: unexpected new PR state:", pull.State)
+
return
+
}
p := int64(pull.ID)
pullId := &p
n.notifyEvent(
-
actorDid,
+
actor,
recipients,
eventType,
entityType,
···
func (n *databaseNotifier) notifyEvent(
actorDid syntax.DID,
-
recipients []syntax.DID,
+
recipients sets.Set[syntax.DID],
eventType models.NotificationType,
entityType string,
entityId string,
···
issueId *int64,
pullId *int64,
) {
-
recipientSet := make(map[syntax.DID]struct{})
-
for _, did := range recipients {
-
// everybody except actor themselves
-
if did != actorDid {
-
recipientSet[did] = struct{}{}
-
}
+
// if the user is attempting to mention >maxMentions users, this is probably spam, do not mention anybody
+
if eventType == models.NotificationTypeUserMentioned && recipients.Len() > maxMentions {
+
return
}
+
recipients.Remove(actorDid)
+
prefMap, err := db.GetNotificationPreferences(
n.db,
-
db.FilterIn("user_did", slices.Collect(maps.Keys(recipientSet))),
+
orm.FilterIn("user_did", slices.Collect(recipients.All())),
)
if err != nil {
// failed to get prefs for users
···
defer tx.Rollback()
// filter based on preferences
-
for recipientDid := range recipientSet {
+
for recipientDid := range recipients.All() {
prefs, ok := prefMap[recipientDid]
if !ok {
prefs = models.DefaultNotificationPreferences(recipientDid)
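Recipient lists are now built with the sets package instead of appended slices, which deduplicates DIDs for free. A minimal sketch of the pattern used throughout this file, assuming sets.Set exposes Singleton, Insert, Remove, and an All iterator as used above; the owner DID is illustrative:

```go
recipients := sets.Singleton(syntax.DID("did:plc:repo-owner"))
for _, c := range collaborators {
	recipients.Insert(c.SubjectDid)
}
for _, m := range mentions {
	recipients.Remove(m) // mentioned users get a dedicated "user_mentioned" event instead
}
recipients.Remove(actorDid) // never notify the actor about their own action
for did := range recipients.All() {
	_ = did // enqueue one notification per remaining recipient
}
```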
+57 -59
appview/notify/merged_notifier.go
···
import (
"context"
+
"log/slog"
+
"reflect"
+
"sync"
+
"github.com/bluesky-social/indigo/atproto/syntax"
"tangled.org/core/appview/models"
+
"tangled.org/core/log"
)
type mergedNotifier struct {
notifiers []Notifier
+
logger *slog.Logger
}
-
func NewMergedNotifier(notifiers ...Notifier) Notifier {
-
return &mergedNotifier{notifiers}
+
func NewMergedNotifier(notifiers []Notifier, logger *slog.Logger) Notifier {
+
return &mergedNotifier{notifiers, logger}
}
var _ Notifier = &mergedNotifier{}
-
func (m *mergedNotifier) NewRepo(ctx context.Context, repo *models.Repo) {
-
for _, notifier := range m.notifiers {
-
notifier.NewRepo(ctx, repo)
+
// fanout calls the same method on all notifiers concurrently
+
func (m *mergedNotifier) fanout(method string, ctx context.Context, args ...any) {
+
ctx = log.IntoContext(ctx, m.logger.With("method", method))
+
var wg sync.WaitGroup
+
for _, n := range m.notifiers {
+
wg.Add(1)
+
go func(notifier Notifier) {
+
defer wg.Done()
+
v := reflect.ValueOf(notifier).MethodByName(method)
+
in := make([]reflect.Value, len(args)+1)
+
in[0] = reflect.ValueOf(ctx)
+
for i, arg := range args {
+
in[i+1] = reflect.ValueOf(arg)
+
}
+
v.Call(in)
+
}(n)
}
+
wg.Wait()
+
}
+
+
func (m *mergedNotifier) NewRepo(ctx context.Context, repo *models.Repo) {
+
m.fanout("NewRepo", ctx, repo)
}
func (m *mergedNotifier) NewStar(ctx context.Context, star *models.Star) {
-
for _, notifier := range m.notifiers {
-
notifier.NewStar(ctx, star)
-
}
+
m.fanout("NewStar", ctx, star)
}
+
func (m *mergedNotifier) DeleteStar(ctx context.Context, star *models.Star) {
-
for _, notifier := range m.notifiers {
-
notifier.DeleteStar(ctx, star)
-
}
+
m.fanout("DeleteStar", ctx, star)
}
-
func (m *mergedNotifier) NewIssue(ctx context.Context, issue *models.Issue) {
-
for _, notifier := range m.notifiers {
-
notifier.NewIssue(ctx, issue)
-
}
+
func (m *mergedNotifier) NewIssue(ctx context.Context, issue *models.Issue, mentions []syntax.DID) {
+
m.fanout("NewIssue", ctx, issue, mentions)
}
-
func (m *mergedNotifier) NewIssueComment(ctx context.Context, comment *models.IssueComment) {
-
for _, notifier := range m.notifiers {
-
notifier.NewIssueComment(ctx, comment)
-
}
+
+
func (m *mergedNotifier) NewIssueComment(ctx context.Context, comment *models.IssueComment, mentions []syntax.DID) {
+
m.fanout("NewIssueComment", ctx, comment, mentions)
}
-
func (m *mergedNotifier) NewIssueClosed(ctx context.Context, issue *models.Issue) {
-
for _, notifier := range m.notifiers {
-
notifier.NewIssueClosed(ctx, issue)
-
}
+
func (m *mergedNotifier) NewIssueState(ctx context.Context, actor syntax.DID, issue *models.Issue) {
+
m.fanout("NewIssueState", ctx, actor, issue)
+
}
+
+
func (m *mergedNotifier) DeleteIssue(ctx context.Context, issue *models.Issue) {
+
m.fanout("DeleteIssue", ctx, issue)
}
func (m *mergedNotifier) NewFollow(ctx context.Context, follow *models.Follow) {
-
for _, notifier := range m.notifiers {
-
notifier.NewFollow(ctx, follow)
-
}
+
m.fanout("NewFollow", ctx, follow)
}
+
func (m *mergedNotifier) DeleteFollow(ctx context.Context, follow *models.Follow) {
-
for _, notifier := range m.notifiers {
-
notifier.DeleteFollow(ctx, follow)
-
}
+
m.fanout("DeleteFollow", ctx, follow)
}
func (m *mergedNotifier) NewPull(ctx context.Context, pull *models.Pull) {
-
for _, notifier := range m.notifiers {
-
notifier.NewPull(ctx, pull)
-
}
-
}
-
func (m *mergedNotifier) NewPullComment(ctx context.Context, comment *models.PullComment) {
-
for _, notifier := range m.notifiers {
-
notifier.NewPullComment(ctx, comment)
-
}
+
m.fanout("NewPull", ctx, pull)
}
-
func (m *mergedNotifier) NewPullMerged(ctx context.Context, pull *models.Pull) {
-
for _, notifier := range m.notifiers {
-
notifier.NewPullMerged(ctx, pull)
-
}
+
func (m *mergedNotifier) NewPullComment(ctx context.Context, comment *models.PullComment, mentions []syntax.DID) {
+
m.fanout("NewPullComment", ctx, comment, mentions)
}
-
func (m *mergedNotifier) NewPullClosed(ctx context.Context, pull *models.Pull) {
-
for _, notifier := range m.notifiers {
-
notifier.NewPullClosed(ctx, pull)
-
}
+
func (m *mergedNotifier) NewPullState(ctx context.Context, actor syntax.DID, pull *models.Pull) {
+
m.fanout("NewPullState", ctx, actor, pull)
}
func (m *mergedNotifier) UpdateProfile(ctx context.Context, profile *models.Profile) {
-
for _, notifier := range m.notifiers {
-
notifier.UpdateProfile(ctx, profile)
-
}
+
m.fanout("UpdateProfile", ctx, profile)
}
-
func (m *mergedNotifier) NewString(ctx context.Context, string *models.String) {
-
for _, notifier := range m.notifiers {
-
notifier.NewString(ctx, string)
-
}
+
func (m *mergedNotifier) NewString(ctx context.Context, s *models.String) {
+
m.fanout("NewString", ctx, s)
}
-
func (m *mergedNotifier) EditString(ctx context.Context, string *models.String) {
-
for _, notifier := range m.notifiers {
-
notifier.EditString(ctx, string)
-
}
+
func (m *mergedNotifier) EditString(ctx context.Context, s *models.String) {
+
m.fanout("EditString", ctx, s)
}
func (m *mergedNotifier) DeleteString(ctx context.Context, did, rkey string) {
-
for _, notifier := range m.notifiers {
-
notifier.DeleteString(ctx, did, rkey)
-
}
+
m.fanout("DeleteString", ctx, did, rkey)
}
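fanout dispatches by method name through reflection, which keeps every wrapper above to a single line at the cost of compile-time checking of names and argument types. A call such as m.fanout("NewIssue", ctx, issue, mentions) is equivalent to the hand-written form below:

```go
// Non-reflective equivalent of m.fanout("NewIssue", ctx, issue, mentions).
var wg sync.WaitGroup
for _, n := range m.notifiers {
	wg.Add(1)
	go func(notifier Notifier) {
		defer wg.Done()
		notifier.NewIssue(ctx, issue, mentions)
	}(n)
}
wg.Wait()
```

As in the reflective version, wg.Wait() means the merged notifier blocks until the slowest backend finishes.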
+16 -13
appview/notify/notifier.go
···
import (
"context"
+
"github.com/bluesky-social/indigo/atproto/syntax"
"tangled.org/core/appview/models"
)
···
NewStar(ctx context.Context, star *models.Star)
DeleteStar(ctx context.Context, star *models.Star)
-
NewIssue(ctx context.Context, issue *models.Issue)
-
NewIssueComment(ctx context.Context, comment *models.IssueComment)
-
NewIssueClosed(ctx context.Context, issue *models.Issue)
+
NewIssue(ctx context.Context, issue *models.Issue, mentions []syntax.DID)
+
NewIssueComment(ctx context.Context, comment *models.IssueComment, mentions []syntax.DID)
+
NewIssueState(ctx context.Context, actor syntax.DID, issue *models.Issue)
+
DeleteIssue(ctx context.Context, issue *models.Issue)
NewFollow(ctx context.Context, follow *models.Follow)
DeleteFollow(ctx context.Context, follow *models.Follow)
NewPull(ctx context.Context, pull *models.Pull)
-
NewPullComment(ctx context.Context, comment *models.PullComment)
-
NewPullMerged(ctx context.Context, pull *models.Pull)
-
NewPullClosed(ctx context.Context, pull *models.Pull)
+
NewPullComment(ctx context.Context, comment *models.PullComment, mentions []syntax.DID)
+
NewPullState(ctx context.Context, actor syntax.DID, pull *models.Pull)
UpdateProfile(ctx context.Context, profile *models.Profile)
···
func (m *BaseNotifier) NewStar(ctx context.Context, star *models.Star) {}
func (m *BaseNotifier) DeleteStar(ctx context.Context, star *models.Star) {}
-
func (m *BaseNotifier) NewIssue(ctx context.Context, issue *models.Issue) {}
-
func (m *BaseNotifier) NewIssueComment(ctx context.Context, comment *models.IssueComment) {}
-
func (m *BaseNotifier) NewIssueClosed(ctx context.Context, issue *models.Issue) {}
+
func (m *BaseNotifier) NewIssue(ctx context.Context, issue *models.Issue, mentions []syntax.DID) {}
+
func (m *BaseNotifier) NewIssueComment(ctx context.Context, comment *models.IssueComment, mentions []syntax.DID) {
+
}
+
func (m *BaseNotifier) NewIssueState(ctx context.Context, actor syntax.DID, issue *models.Issue) {}
+
func (m *BaseNotifier) DeleteIssue(ctx context.Context, issue *models.Issue) {}
func (m *BaseNotifier) NewFollow(ctx context.Context, follow *models.Follow) {}
func (m *BaseNotifier) DeleteFollow(ctx context.Context, follow *models.Follow) {}
-
func (m *BaseNotifier) NewPull(ctx context.Context, pull *models.Pull) {}
-
func (m *BaseNotifier) NewPullComment(ctx context.Context, models *models.PullComment) {}
-
func (m *BaseNotifier) NewPullMerged(ctx context.Context, pull *models.Pull) {}
-
func (m *BaseNotifier) NewPullClosed(ctx context.Context, pull *models.Pull) {}
+
func (m *BaseNotifier) NewPull(ctx context.Context, pull *models.Pull) {}
+
func (m *BaseNotifier) NewPullComment(ctx context.Context, models *models.PullComment, mentions []syntax.DID) {
+
}
+
func (m *BaseNotifier) NewPullState(ctx context.Context, actor syntax.DID, pull *models.Pull) {}
func (m *BaseNotifier) UpdateProfile(ctx context.Context, profile *models.Profile) {}
+35 -11
appview/notify/posthog/notifier.go
···
"context"
"log"
+
"github.com/bluesky-social/indigo/atproto/syntax"
"github.com/posthog/posthog-go"
"tangled.org/core/appview/models"
"tangled.org/core/appview/notify"
···
func (n *posthogNotifier) NewStar(ctx context.Context, star *models.Star) {
err := n.client.Enqueue(posthog.Capture{
-
DistinctId: star.StarredByDid,
+
DistinctId: star.Did,
Event: "star",
Properties: posthog.Properties{"repo_at": star.RepoAt.String()},
})
···
func (n *posthogNotifier) DeleteStar(ctx context.Context, star *models.Star) {
err := n.client.Enqueue(posthog.Capture{
-
DistinctId: star.StarredByDid,
+
DistinctId: star.Did,
Event: "unstar",
Properties: posthog.Properties{"repo_at": star.RepoAt.String()},
})
···
}
}
-
func (n *posthogNotifier) NewIssue(ctx context.Context, issue *models.Issue) {
+
func (n *posthogNotifier) NewIssue(ctx context.Context, issue *models.Issue, mentions []syntax.DID) {
err := n.client.Enqueue(posthog.Capture{
DistinctId: issue.Did,
Event: "new_issue",
Properties: posthog.Properties{
"repo_at": issue.RepoAt.String(),
"issue_id": issue.IssueId,
+
"mentions": mentions,
},
})
if err != nil {
···
}
}
-
func (n *posthogNotifier) NewPullComment(ctx context.Context, comment *models.PullComment) {
+
func (n *posthogNotifier) NewPullComment(ctx context.Context, comment *models.PullComment, mentions []syntax.DID) {
err := n.client.Enqueue(posthog.Capture{
DistinctId: comment.OwnerDid,
Event: "new_pull_comment",
Properties: posthog.Properties{
-
"repo_at": comment.RepoAt,
-
"pull_id": comment.PullId,
+
"repo_at": comment.RepoAt,
+
"pull_id": comment.PullId,
+
"mentions": mentions,
},
})
if err != nil {
···
}
}
-
func (n *posthogNotifier) NewIssueComment(ctx context.Context, comment *models.IssueComment) {
+
func (n *posthogNotifier) NewIssueComment(ctx context.Context, comment *models.IssueComment, mentions []syntax.DID) {
err := n.client.Enqueue(posthog.Capture{
DistinctId: comment.Did,
Event: "new_issue_comment",
Properties: posthog.Properties{
"issue_at": comment.IssueAt,
+
"mentions": mentions,
},
})
if err != nil {
···
}
}
-
func (n *posthogNotifier) NewIssueClosed(ctx context.Context, issue *models.Issue) {
+
func (n *posthogNotifier) NewIssueState(ctx context.Context, actor syntax.DID, issue *models.Issue) {
+
var event string
+
if issue.Open {
+
event = "issue_reopen"
+
} else {
+
event = "issue_closed"
+
}
err := n.client.Enqueue(posthog.Capture{
DistinctId: issue.Did,
-
Event: "issue_closed",
+
Event: event,
Properties: posthog.Properties{
"repo_at": issue.RepoAt.String(),
+
"actor": actor,
"issue_id": issue.IssueId,
},
})
···
}
}
-
func (n *posthogNotifier) NewPullMerged(ctx context.Context, pull *models.Pull) {
+
func (n *posthogNotifier) NewPullState(ctx context.Context, actor syntax.DID, pull *models.Pull) {
+
var event string
+
switch pull.State {
+
case models.PullClosed:
+
event = "pull_closed"
+
case models.PullOpen:
+
event = "pull_reopen"
+
case models.PullMerged:
+
event = "pull_merged"
+
default:
+
log.Println("posthog: unexpected new PR state:", pull.State)
+
return
+
}
err := n.client.Enqueue(posthog.Capture{
DistinctId: pull.OwnerDid,
-
Event: "pull_merged",
+
Event: event,
Properties: posthog.Properties{
"repo_at": pull.RepoAt,
"pull_id": pull.PullId,
+
"actor": actor,
},
})
if err != nil {
+21 -17
appview/oauth/handler.go
···
"bytes"
"context"
"encoding/json"
+
"errors"
"fmt"
"net/http"
"slices"
"time"
+
"github.com/bluesky-social/indigo/atproto/auth/oauth"
"github.com/go-chi/chi/v5"
-
"github.com/lestrrat-go/jwx/v2/jwk"
"github.com/posthog/posthog-go"
"tangled.org/core/api/tangled"
"tangled.org/core/appview/db"
"tangled.org/core/consts"
+
"tangled.org/core/orm"
"tangled.org/core/tid"
)
···
func (o *OAuth) clientMetadata(w http.ResponseWriter, r *http.Request) {
doc := o.ClientApp.Config.ClientMetadata()
doc.JWKSURI = &o.JwksUri
+
doc.ClientName = &o.ClientName
+
doc.ClientURI = &o.ClientUri
w.Header().Set("Content-Type", "application/json")
if err := json.NewEncoder(w).Encode(doc); err != nil {
···
}
func (o *OAuth) jwks(w http.ResponseWriter, r *http.Request) {
-
jwks := o.Config.OAuth.Jwks
-
pubKey, err := pubKeyFromJwk(jwks)
-
if err != nil {
-
o.Logger.Error("error parsing public key", "err", err)
+
w.Header().Set("Content-Type", "application/json")
+
body := o.ClientApp.Config.PublicJWKS()
+
if err := json.NewEncoder(w).Encode(body); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
-
-
response := map[string]any{
-
"keys": []jwk.Key{pubKey},
-
}
-
-
w.Header().Set("Content-Type", "application/json")
-
w.WriteHeader(http.StatusOK)
-
json.NewEncoder(w).Encode(response)
}
func (o *OAuth) callback(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
+
l := o.Logger.With("query", r.URL.Query())
sessData, err := o.ClientApp.ProcessCallback(ctx, r.URL.Query())
if err != nil {
-
http.Error(w, err.Error(), http.StatusInternalServerError)
+
var callbackErr *oauth.AuthRequestCallbackError
+
if errors.As(err, &callbackErr) {
+
l.Debug("callback error", "err", callbackErr)
+
http.Redirect(w, r, fmt.Sprintf("/login?error=%s", callbackErr.ErrorCode), http.StatusFound)
+
return
+
}
+
l.Error("failed to process callback", "err", err)
+
http.Redirect(w, r, "/login?error=oauth", http.StatusFound)
return
}
if err := o.SaveSession(w, r, sessData); err != nil {
-
http.Error(w, err.Error(), http.StatusInternalServerError)
+
l.Error("failed to save session", "data", sessData, "err", err)
+
http.Redirect(w, r, "/login?error=session", http.StatusFound)
return
}
···
// and create an sh.tangled.spindle.member record with that
spindleMembers, err := db.GetSpindleMembers(
o.Db,
-
db.FilterEq("instance", "spindle.tangled.sh"),
-
db.FilterEq("subject", did),
+
orm.FilterEq("instance", "spindle.tangled.sh"),
+
orm.FilterEq("subject", did),
)
if err != nil {
l.Error("failed to get spindle members", "err", err)
+50 -24
appview/oauth/oauth.go
···
comatproto "github.com/bluesky-social/indigo/api/atproto"
"github.com/bluesky-social/indigo/atproto/auth/oauth"
atpclient "github.com/bluesky-social/indigo/atproto/client"
+
atcrypto "github.com/bluesky-social/indigo/atproto/crypto"
"github.com/bluesky-social/indigo/atproto/syntax"
xrpc "github.com/bluesky-social/indigo/xrpc"
"github.com/gorilla/sessions"
-
"github.com/lestrrat-go/jwx/v2/jwk"
"github.com/posthog/posthog-go"
"tangled.org/core/appview/config"
"tangled.org/core/appview/db"
···
SessStore *sessions.CookieStore
Config *config.Config
JwksUri string
+
ClientName string
+
ClientUri string
Posthog posthog.Client
Db *db.DB
Enforcer *rbac.Enforcer
···
}
func New(config *config.Config, ph posthog.Client, db *db.DB, enforcer *rbac.Enforcer, res *idresolver.Resolver, logger *slog.Logger) (*OAuth, error) {
-
var oauthConfig oauth.ClientConfig
var clientUri string
-
if config.Core.Dev {
clientUri = "http://127.0.0.1:3000"
callbackUri := clientUri + "/oauth/callback"
···
oauthConfig = oauth.NewPublicConfig(clientId, callbackUri, []string{"atproto", "transition:generic"})
}
+
// configure client secret
+
priv, err := atcrypto.ParsePrivateMultibase(config.OAuth.ClientSecret)
+
if err != nil {
+
return nil, err
+
}
+
if err := oauthConfig.SetClientSecret(priv, config.OAuth.ClientKid); err != nil {
+
return nil, err
+
}
+
jwksUri := clientUri + "/oauth/jwks.json"
-
authStore, err := NewRedisStore(config.Redis.ToURL())
+
authStore, err := NewRedisStore(&RedisStoreConfig{
+
RedisURL: config.Redis.ToURL(),
+
SessionExpiryDuration: time.Hour * 24 * 90,
+
SessionInactivityDuration: time.Hour * 24 * 14,
+
AuthRequestExpiryDuration: time.Minute * 30,
+
})
if err != nil {
return nil, err
}
sessStore := sessions.NewCookieStore([]byte(config.Core.CookieSecret))
+
clientApp := oauth.NewClientApp(&oauthConfig, authStore)
+
clientApp.Dir = res.Directory()
+
// allow non-public transports in dev mode
+
if config.Core.Dev {
+
clientApp.Resolver.Client.Transport = http.DefaultTransport
+
}
+
+
clientName := config.Core.AppviewName
+
+
logger.Info("oauth setup successfully", "IsConfidential", clientApp.Config.IsConfidential())
return &OAuth{
-
ClientApp: oauth.NewClientApp(&oauthConfig, authStore),
+
ClientApp: clientApp,
Config: config,
SessStore: sessStore,
JwksUri: jwksUri,
+
ClientName: clientName,
+
ClientUri: clientUri,
Posthog: ph,
Db: db,
Enforcer: enforcer,
···
return errors.Join(err1, err2)
}
-
func pubKeyFromJwk(jwks string) (jwk.Key, error) {
-
k, err := jwk.ParseKey([]byte(jwks))
-
if err != nil {
-
return nil, err
-
}
-
pubKey, err := k.PublicKey()
-
if err != nil {
-
return nil, err
-
}
-
return pubKey, nil
-
}
-
type User struct {
Did string
Pds string
}
func (o *OAuth) GetUser(r *http.Request) *User {
-
sess, err := o.SessStore.Get(r, SessionName)
-
-
if err != nil || sess.IsNew {
+
sess, err := o.ResumeSession(r)
+
if err != nil {
return nil
}
return &User{
-
Did: sess.Values[SessionDid].(string),
-
Pds: sess.Values[SessionPds].(string),
+
Did: sess.Data.AccountDID.String(),
+
Pds: sess.Data.HostURL,
}
}
···
exp int64
lxm string
dev bool
+
timeout time.Duration
}
type ServiceClientOpt func(*ServiceClientOpts)
+
func DefaultServiceClientOpts() ServiceClientOpts {
+
return ServiceClientOpts{
+
timeout: time.Second * 5,
+
}
+
}
+
func WithService(service string) ServiceClientOpt {
return func(s *ServiceClientOpts) {
s.service = service
···
}
}
+
func WithTimeout(timeout time.Duration) ServiceClientOpt {
+
return func(s *ServiceClientOpts) {
+
s.timeout = timeout
+
}
+
}
+
func (s *ServiceClientOpts) Audience() string {
return fmt.Sprintf("did:web:%s", s.service)
}
···
}
func (o *OAuth) ServiceClient(r *http.Request, os ...ServiceClientOpt) (*xrpc.Client, error) {
-
opts := ServiceClientOpts{}
+
opts := DefaultServiceClientOpts()
for _, o := range os {
o(&opts)
}
···
},
Host: opts.Host(),
Client: &http.Client{
-
Timeout: time.Second * 5,
+
Timeout: opts.timeout,
},
}, nil
}
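ServiceClient now starts from DefaultServiceClientOpts, so callers only override what they need. A hedged sketch of a caller requesting a longer timeout; the service hostname is hypothetical:

```go
client, err := o.ServiceClient(r,
	oauth.WithService("spindle.example.com"), // hypothetical spindle host
	oauth.WithTimeout(30*time.Second),        // overrides the 5s default
)
if err != nil {
	// handle error
}
_ = client // a regular *xrpc.Client pointed at the service
```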
+110 -11
appview/oauth/store.go
···
"github.com/redis/go-redis/v9"
)
+
type RedisStoreConfig struct {
+
RedisURL string
+
+
// The purpose of these limits is to avoid dead sessions hanging around in the db indefinitely.
+
// The durations here should be *at least as long as* the expected duration of the oauth session itself.
+
SessionExpiryDuration time.Duration // duration since session creation (max TTL)
+
SessionInactivityDuration time.Duration // duration since last session update
+
AuthRequestExpiryDuration time.Duration // duration since auth request creation
+
}
+
// redis-backed implementation of ClientAuthStore.
type RedisStore struct {
-
client *redis.Client
-
SessionTTL time.Duration
-
AuthRequestTTL time.Duration
+
client *redis.Client
+
cfg *RedisStoreConfig
}
var _ oauth.ClientAuthStore = &RedisStore{}
-
func NewRedisStore(redisURL string) (*RedisStore, error) {
-
opts, err := redis.ParseURL(redisURL)
+
type sessionMetadata struct {
+
CreatedAt time.Time `json:"created_at"`
+
UpdatedAt time.Time `json:"updated_at"`
+
}
+
+
func NewRedisStore(cfg *RedisStoreConfig) (*RedisStore, error) {
+
if cfg == nil {
+
return nil, fmt.Errorf("missing cfg")
+
}
+
if cfg.RedisURL == "" {
+
return nil, fmt.Errorf("missing RedisURL")
+
}
+
if cfg.SessionExpiryDuration == 0 {
+
return nil, fmt.Errorf("missing SessionExpiryDuration")
+
}
+
if cfg.SessionInactivityDuration == 0 {
+
return nil, fmt.Errorf("missing SessionInactivityDuration")
+
}
+
if cfg.AuthRequestExpiryDuration == 0 {
+
return nil, fmt.Errorf("missing AuthRequestExpiryDuration")
+
}
+
+
opts, err := redis.ParseURL(cfg.RedisURL)
if err != nil {
return nil, fmt.Errorf("failed to parse redis URL: %w", err)
}
···
}
return &RedisStore{
-
client: client,
-
SessionTTL: 30 * 24 * time.Hour, // 30 days
-
AuthRequestTTL: 10 * time.Minute, // 10 minutes
+
client: client,
+
cfg: cfg,
}, nil
}
···
return fmt.Sprintf("oauth:session:%s:%s", did, sessionID)
}
+
func sessionMetadataKey(did syntax.DID, sessionID string) string {
+
return fmt.Sprintf("oauth:session_meta:%s:%s", did, sessionID)
+
}
+
func authRequestKey(state string) string {
return fmt.Sprintf("oauth:auth_request:%s", state)
}
func (r *RedisStore) GetSession(ctx context.Context, did syntax.DID, sessionID string) (*oauth.ClientSessionData, error) {
key := sessionKey(did, sessionID)
+
metaKey := sessionMetadataKey(did, sessionID)
+
+
// Check metadata for inactivity expiry
+
metaData, err := r.client.Get(ctx, metaKey).Bytes()
+
if err == redis.Nil {
+
return nil, fmt.Errorf("session not found: %s", did)
+
}
+
if err != nil {
+
return nil, fmt.Errorf("failed to get session metadata: %w", err)
+
}
+
+
var meta sessionMetadata
+
if err := json.Unmarshal(metaData, &meta); err != nil {
+
return nil, fmt.Errorf("failed to unmarshal session metadata: %w", err)
+
}
+
+
// Check if session has been inactive for too long
+
inactiveThreshold := time.Now().Add(-r.cfg.SessionInactivityDuration)
+
if meta.UpdatedAt.Before(inactiveThreshold) {
+
// Session is inactive, delete it
+
r.client.Del(ctx, key, metaKey)
+
return nil, fmt.Errorf("session expired due to inactivity: %s", did)
+
}
+
+
// Get the actual session data
data, err := r.client.Get(ctx, key).Bytes()
if err == redis.Nil {
return nil, fmt.Errorf("session not found: %s", did)
···
func (r *RedisStore) SaveSession(ctx context.Context, sess oauth.ClientSessionData) error {
key := sessionKey(sess.AccountDID, sess.SessionID)
+
metaKey := sessionMetadataKey(sess.AccountDID, sess.SessionID)
data, err := json.Marshal(sess)
if err != nil {
return fmt.Errorf("failed to marshal session: %w", err)
}
-
if err := r.client.Set(ctx, key, data, r.SessionTTL).Err(); err != nil {
+
// Check if session already exists to preserve CreatedAt
+
var meta sessionMetadata
+
existingMetaData, err := r.client.Get(ctx, metaKey).Bytes()
+
if err == redis.Nil {
+
// New session
+
meta = sessionMetadata{
+
CreatedAt: time.Now(),
+
UpdatedAt: time.Now(),
+
}
+
} else if err != nil {
+
return fmt.Errorf("failed to check existing session metadata: %w", err)
+
} else {
+
// Existing session - preserve CreatedAt, update UpdatedAt
+
if err := json.Unmarshal(existingMetaData, &meta); err != nil {
+
return fmt.Errorf("failed to unmarshal existing session metadata: %w", err)
+
}
+
meta.UpdatedAt = time.Now()
+
}
+
+
// Calculate remaining TTL based on creation time
+
remainingTTL := r.cfg.SessionExpiryDuration - time.Since(meta.CreatedAt)
+
if remainingTTL <= 0 {
+
return fmt.Errorf("session has expired")
+
}
+
+
// Use the shorter of: remaining TTL or inactivity duration
+
ttl := min(r.cfg.SessionInactivityDuration, remainingTTL)
+
+
// Save session data
+
if err := r.client.Set(ctx, key, data, ttl).Err(); err != nil {
return fmt.Errorf("failed to save session: %w", err)
}
+
// Save metadata
+
metaData, err := json.Marshal(meta)
+
if err != nil {
+
return fmt.Errorf("failed to marshal session metadata: %w", err)
+
}
+
if err := r.client.Set(ctx, metaKey, metaData, ttl).Err(); err != nil {
+
return fmt.Errorf("failed to save session metadata: %w", err)
+
}
+
return nil
}
func (r *RedisStore) DeleteSession(ctx context.Context, did syntax.DID, sessionID string) error {
key := sessionKey(did, sessionID)
-
if err := r.client.Del(ctx, key).Err(); err != nil {
+
metaKey := sessionMetadataKey(did, sessionID)
+
+
if err := r.client.Del(ctx, key, metaKey).Err(); err != nil {
return fmt.Errorf("failed to delete session: %w", err)
}
return nil
···
return fmt.Errorf("failed to marshal auth request: %w", err)
}
-
if err := r.client.Set(ctx, key, data, r.AuthRequestTTL).Err(); err != nil {
+
if err := r.client.Set(ctx, key, data, r.cfg.AuthRequestExpiryDuration).Err(); err != nil {
return fmt.Errorf("failed to save auth request: %w", err)
}
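The session TTL written to Redis is the smaller of the inactivity window and whatever remains of the absolute expiry, both derived from the stored metadata. With the durations configured in oauth.go (90-day expiry, 14-day inactivity), a session refreshed on day 80 only gets a 10-day key TTL. A sketch of that arithmetic:

```go
expiry := 90 * 24 * time.Hour     // SessionExpiryDuration from oauth.go
inactivity := 14 * 24 * time.Hour // SessionInactivityDuration from oauth.go

createdAt := time.Now().Add(-80 * 24 * time.Hour) // session created 80 days ago
remaining := expiry - time.Since(createdAt)       // roughly 10 days left
ttl := min(inactivity, remaining)                 // Redis keys expire in ~10 days
_ = ttl
```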
+59 -10
appview/ogcard/card.go
···
import (
"bytes"
"fmt"
+
"html/template"
"image"
"image/color"
"io"
···
return width, nil
}
-
// DrawSVGIcon draws an SVG icon from the embedded files at the specified position
-
func (c *Card) DrawSVGIcon(svgPath string, x, y, size int, iconColor color.Color) error {
-
svgData, err := pages.Files.ReadFile(svgPath)
-
if err != nil {
-
return fmt.Errorf("failed to read SVG file %s: %w", svgPath, err)
-
}
-
+
func BuildSVGIconFromData(svgData []byte, iconColor color.Color) (*oksvg.SvgIcon, error) {
// Convert color to hex string for SVG
rgba, isRGBA := iconColor.(color.RGBA)
if !isRGBA {
···
// Parse SVG
icon, err := oksvg.ReadIconStream(strings.NewReader(svgString))
if err != nil {
-
return fmt.Errorf("failed to parse SVG %s: %w", svgPath, err)
+
return nil, fmt.Errorf("failed to parse SVG: %w", err)
}
+
return icon, nil
+
}
+
+
func BuildSVGIconFromPath(svgPath string, iconColor color.Color) (*oksvg.SvgIcon, error) {
+
svgData, err := pages.Files.ReadFile(svgPath)
+
if err != nil {
+
return nil, fmt.Errorf("failed to read SVG file %s: %w", svgPath, err)
+
}
+
+
icon, err := BuildSVGIconFromData(svgData, iconColor)
+
if err != nil {
+
return nil, fmt.Errorf("failed to build SVG icon %s: %w", svgPath, err)
+
}
+
+
return icon, nil
+
}
+
+
func BuildLucideIcon(name string, iconColor color.Color) (*oksvg.SvgIcon, error) {
+
return BuildSVGIconFromPath(fmt.Sprintf("static/icons/%s.svg", name), iconColor)
+
}
+
+
func (c *Card) DrawLucideIcon(name string, x, y, size int, iconColor color.Color) error {
+
icon, err := BuildSVGIconFromPath(fmt.Sprintf("static/icons/%s.svg", name), iconColor)
+
if err != nil {
+
return err
+
}
+
+
c.DrawSVGIcon(icon, x, y, size)
+
+
return nil
+
}
+
+
func (c *Card) DrawDollySilhouette(x, y, size int, iconColor color.Color) error {
+
tpl, err := template.New("dolly").
+
ParseFS(pages.Files, "templates/fragments/dolly/silhouette.html")
+
if err != nil {
+
return fmt.Errorf("failed to read dolly silhouette template: %w", err)
+
}
+
+
var svgData bytes.Buffer
+
if err = tpl.ExecuteTemplate(&svgData, "fragments/dolly/silhouette", nil); err != nil {
+
return fmt.Errorf("failed to execute dolly silhouette template: %w", err)
+
}
+
+
icon, err := BuildSVGIconFromData(svgData.Bytes(), iconColor)
+
if err != nil {
+
return err
+
}
+
+
c.DrawSVGIcon(icon, x, y, size)
+
+
return nil
+
}
+
+
// DrawSVGIcon draws an SVG icon from the embedded files at the specified position
+
func (c *Card) DrawSVGIcon(icon *oksvg.SvgIcon, x, y, size int) {
// Set the icon size
w, h := float64(size), float64(size)
icon.SetTarget(0, 0, w, h)
···
}
draw.Draw(c.Img, destRect, iconImg, image.Point{}, draw.Over)
-
-
return nil
}
// DrawImage fills the card with an image, scaled to maintain the original aspect ratio and centered with respect to the non-filled dimension
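Icon drawing is now split into building an *oksvg.SvgIcon (BuildSVGIconFromPath / BuildLucideIcon) and rasterizing it with DrawSVGIcon; DrawLucideIcon wraps both steps. A sketch of tinting and placing a Lucide icon, assuming c is an already-initialized *Card; the icon name and coordinates are illustrative:

```go
// Draw a 24px Lucide icon at (32, 32), tinted dark gray.
gray := color.RGBA{R: 0x44, G: 0x44, B: 0x44, A: 0xff}
if err := c.DrawLucideIcon("star", 32, 32, 24, gray); err != nil {
	// the icon is decorative; log and continue
}
```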
+102 -13
appview/pages/funcmap.go
···
package pages
import (
+
"bytes"
"context"
"crypto/hmac"
"crypto/sha256"
···
"strings"
"time"
+
"github.com/alecthomas/chroma/v2"
+
chromahtml "github.com/alecthomas/chroma/v2/formatters/html"
+
"github.com/alecthomas/chroma/v2/lexers"
+
"github.com/alecthomas/chroma/v2/styles"
"github.com/dustin/go-humanize"
"github.com/go-enry/go-enry/v2"
+
"github.com/yuin/goldmark"
"tangled.org/core/appview/filetree"
+
"tangled.org/core/appview/models"
"tangled.org/core/appview/pages/markup"
"tangled.org/core/crypto"
)
···
"contains": func(s string, target string) bool {
return strings.Contains(s, target)
},
+
"stripPort": func(hostname string) string {
+
if strings.Contains(hostname, ":") {
+
return strings.Split(hostname, ":")[0]
+
}
+
return hostname
+
},
"mapContains": func(m any, key any) bool {
mapValue := reflect.ValueOf(m)
if mapValue.Kind() != reflect.Map {
···
return "handle.invalid"
}
-
return "@" + identity.Handle.String()
+
return identity.Handle.String()
+
},
+
"ownerSlashRepo": func(repo *models.Repo) string {
+
ownerId, err := p.resolver.ResolveIdent(context.Background(), repo.Did)
+
if err != nil {
+
return repo.DidSlashRepo()
+
}
+
handle := ownerId.Handle
+
if handle != "" && !handle.IsInvalidHandle() {
+
return string(handle) + "/" + repo.Name
+
}
+
return repo.DidSlashRepo()
},
"truncateAt30": func(s string) string {
if len(s) <= 30 {
···
},
"splitOn": func(s, sep string) []string {
return strings.Split(s, sep)
+
},
+
"string": func(v any) string {
+
return fmt.Sprint(v)
},
"int64": func(a int) int64 {
return int64(a)
···
"sub": func(a, b int) int {
return a - b
},
+
"mul": func(a, b int) int {
+
return a * b
+
},
+
"div": func(a, b int) int {
+
return a / b
+
},
+
"mod": func(a, b int) int {
+
return a % b
+
},
"f64": func(a int) float64 {
return float64(a)
},
···
return b
},
-
"didOrHandle": func(did, handle string) string {
-
if handle != "" {
-
return fmt.Sprintf("@%s", handle)
-
} else {
-
return did
-
}
-
},
"assoc": func(values ...string) ([][]string, error) {
if len(values)%2 != 0 {
return nil, fmt.Errorf("invalid assoc call, must have an even number of arguments")
···
}
return pairs, nil
},
-
"append": func(s []string, values ...string) []string {
+
"append": func(s []any, values ...any) []any {
s = append(s, values...)
return s
},
···
},
"description": func(text string) template.HTML {
p.rctx.RendererType = markup.RendererTypeDefault
+
htmlString := p.rctx.RenderMarkdownWith(text, goldmark.New())
+
sanitized := p.rctx.SanitizeDescription(htmlString)
+
return template.HTML(sanitized)
+
},
+
"readme": func(text string) template.HTML {
+
p.rctx.RendererType = markup.RendererTypeRepoMarkdown
htmlString := p.rctx.RenderMarkdown(text)
-
sanitized := p.rctx.SanitizeDescription(htmlString)
+
sanitized := p.rctx.SanitizeDefault(htmlString)
return template.HTML(sanitized)
},
+
"code": func(content, path string) string {
+
var style *chroma.Style = styles.Get("catppuccin-latte")
+
formatter := chromahtml.New(
+
chromahtml.InlineCode(false),
+
chromahtml.WithLineNumbers(true),
+
chromahtml.WithLinkableLineNumbers(true, "L"),
+
chromahtml.Standalone(false),
+
chromahtml.WithClasses(true),
+
)
+
+
lexer := lexers.Get(filepath.Base(path))
+
if lexer == nil {
+
lexer = lexers.Fallback
+
}
+
+
iterator, err := lexer.Tokenise(nil, content)
+
if err != nil {
+
p.logger.Error("chroma tokenize", "err", "err")
+
return ""
+
}
+
+
var code bytes.Buffer
+
err = formatter.Format(&code, style, iterator)
+
if err != nil {
+
p.logger.Error("chroma format", "err", "err")
+
return ""
+
}
+
+
return code.String()
+
},
+
"trimUriScheme": func(text string) string {
+
text = strings.TrimPrefix(text, "https://")
+
text = strings.TrimPrefix(text, "http://")
+
return text
+
},
"isNil": func(t any) bool {
// returns false for other "zero" values
return t == nil
···
u, _ := url.PathUnescape(s)
return u
},
-
+
"safeUrl": func(s string) template.URL {
+
return template.URL(s)
+
},
"tinyAvatar": func(handle string) string {
return p.AvatarUrl(handle, "tiny")
},
···
},
"normalizeForHtmlId": func(s string) string {
-
// TODO: extend this to handle other cases?
-
return strings.ReplaceAll(s, ":", "_")
+
normalized := strings.ReplaceAll(s, ":", "_")
+
normalized = strings.ReplaceAll(normalized, ".", "_")
+
return normalized
},
"sshFingerprint": func(pubKey string) string {
fp, err := crypto.SSHFingerprint(pubKey)
···
}
}
+
func (p *Pages) resolveDid(did string) string {
+
identity, err := p.resolver.ResolveIdent(context.Background(), did)
+
+
if err != nil {
+
return did
+
}
+
+
if identity.Handle.IsInvalidHandle() {
+
return "handle.invalid"
+
}
+
+
return identity.Handle.String()
+
}
+
func (p *Pages) AvatarUrl(handle, size string) string {
handle = strings.TrimPrefix(handle, "@")
+
+
handle = p.resolveDid(handle)
secret := p.avatar.SharedSecret
h := hmac.New(sha256.New, []byte(secret))
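Illustrative only: the new helpers above are plain functions in a template FuncMap, so they can be exercised the usual html/template way. A minimal sketch using "stripPort" and "mul" from the diff (hypothetical standalone wiring, not the appview's actual template setup):

```go
package main

import (
	"html/template"
	"os"
	"strings"
)

func main() {
	funcs := template.FuncMap{
		"stripPort": func(hostname string) string {
			if strings.Contains(hostname, ":") {
				return strings.Split(hostname, ":")[0]
			}
			return hostname
		},
		"mul": func(a, b int) int { return a * b },
	}
	tmpl := template.Must(template.New("demo").Funcs(funcs).Parse(
		`host: {{ stripPort .Host }}, area: {{ mul .W .H }}`,
	))
	// Prints: host: knot.example.com, area: 12
	_ = tmpl.Execute(os.Stdout, map[string]any{"Host": "knot.example.com:443", "W": 3, "H": 4})
}
```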
+111
appview/pages/markup/extension/atlink.go
···
+
// heavily inspired by: https://github.com/kaleocheng/goldmark-extensions
+
+
package extension
+
+
import (
+
"regexp"
+
+
"github.com/yuin/goldmark"
+
"github.com/yuin/goldmark/ast"
+
"github.com/yuin/goldmark/parser"
+
"github.com/yuin/goldmark/renderer"
+
"github.com/yuin/goldmark/renderer/html"
+
"github.com/yuin/goldmark/text"
+
"github.com/yuin/goldmark/util"
+
)
+
+
// AtNode is an inline AST node representing an @-mention (handle reference)
+
type AtNode struct {
+
Handle string
+
ast.BaseInline
+
}
+
+
var _ ast.Node = &AtNode{}
+
+
// Dump implements Node.Dump.
+
func (n *AtNode) Dump(source []byte, level int) {
+
ast.DumpHelper(n, source, level, nil, nil)
+
}
+
+
// KindAt is a NodeKind of the At node.
+
var KindAt = ast.NewNodeKind("At")
+
+
// Kind implements Node.Kind.
+
func (n *AtNode) Kind() ast.NodeKind {
+
return KindAt
+
}
+
+
var atRegexp = regexp.MustCompile(`(^|\s|\()(@)([a-zA-Z0-9.-]+)(\b)`)
+
+
type atParser struct{}
+
+
// NewAtParser returns a new InlineParser that parses
+
// at expressions.
+
func NewAtParser() parser.InlineParser {
+
return &atParser{}
+
}
+
+
func (s *atParser) Trigger() []byte {
+
return []byte{'@'}
+
}
+
+
func (s *atParser) Parse(parent ast.Node, block text.Reader, pc parser.Context) ast.Node {
+
line, segment := block.PeekLine()
+
m := atRegexp.FindSubmatchIndex(line)
+
if m == nil {
+
return nil
+
}
+
atSegment := text.NewSegment(segment.Start, segment.Start+m[1])
+
block.Advance(m[1])
+
node := &AtNode{}
+
node.AppendChild(node, ast.NewTextSegment(atSegment))
+
node.Handle = string(atSegment.Value(block.Source())[1:])
+
return node
+
}
+
+
// atHtmlRenderer is a renderer.NodeRenderer implementation that
+
// renders At nodes.
+
type atHtmlRenderer struct {
+
html.Config
+
}
+
+
// NewAtHTMLRenderer returns a new AtHTMLRenderer.
+
func NewAtHTMLRenderer(opts ...html.Option) renderer.NodeRenderer {
+
r := &atHtmlRenderer{
+
Config: html.NewConfig(),
+
}
+
for _, opt := range opts {
+
opt.SetHTMLOption(&r.Config)
+
}
+
return r
+
}
+
+
// RegisterFuncs implements renderer.NodeRenderer.RegisterFuncs.
+
func (r *atHtmlRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
+
reg.Register(KindAt, r.renderAt)
+
}
+
+
func (r *atHtmlRenderer) renderAt(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
+
if entering {
+
w.WriteString(`<a href="/@`)
+
w.WriteString(n.(*AtNode).Handle)
+
w.WriteString(`" class="mention font-bold">`)
+
} else {
+
w.WriteString("</a>")
+
}
+
return ast.WalkContinue, nil
+
}
+
+
type atExt struct{}
+
+
// AtExt is an extension that allows you to use at-mention expressions like '@user.bsky.social'.
+
var AtExt = &atExt{}
+
+
func (e *atExt) Extend(m goldmark.Markdown) {
+
m.Parser().AddOptions(parser.WithInlineParsers(
+
util.Prioritized(NewAtParser(), 500),
+
))
+
m.Renderer().AddOptions(renderer.WithNodeRenderers(
+
util.Prioritized(NewAtHTMLRenderer(), 500),
+
))
+
}
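A short sketch of how the AtExt extension added above plugs into goldmark (import path taken from this diff; the rendered markup comes from renderAt):

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	textension "tangled.org/core/appview/pages/markup/extension"
)

func main() {
	md := goldmark.New(goldmark.WithExtensions(textension.AtExt))
	var buf bytes.Buffer
	if err := md.Convert([]byte("ping @alice.example about this"), &buf); err != nil {
		panic(err)
	}
	// Output should contain something like:
	// <a href="/@alice.example" class="mention font-bold">@alice.example</a>
	fmt.Println(buf.String())
}
```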
+11 -4
appview/pages/markup/markdown.go
···
chromahtml "github.com/alecthomas/chroma/v2/formatters/html"
"github.com/alecthomas/chroma/v2/styles"
-
treeblood "github.com/wyatt915/goldmark-treeblood"
"github.com/yuin/goldmark"
highlighting "github.com/yuin/goldmark-highlighting/v2"
"github.com/yuin/goldmark/ast"
···
htmlparse "golang.org/x/net/html"
"tangled.org/core/api/tangled"
+
textension "tangled.org/core/appview/pages/markup/extension"
"tangled.org/core/appview/pages/repoinfo"
)
···
Files fs.FS
}
-
func (rctx *RenderContext) RenderMarkdown(source string) string {
+
func NewMarkdown() goldmark.Markdown {
md := goldmark.New(
goldmark.WithExtensions(
extension.GFM,
···
extension.NewFootnote(
extension.WithFootnoteIDPrefix([]byte("footnote")),
),
-
treeblood.MathML(),
callout.CalloutExtention,
+
textension.AtExt,
),
goldmark.WithParserOptions(
parser.WithAutoHeadingID(),
),
goldmark.WithRendererOptions(html.WithUnsafe()),
)
+
return md
+
}
+
func (rctx *RenderContext) RenderMarkdown(source string) string {
+
return rctx.RenderMarkdownWith(source, NewMarkdown())
+
}
+
+
func (rctx *RenderContext) RenderMarkdownWith(source string, md goldmark.Markdown) string {
if rctx != nil {
var transformers []util.PrioritizedValue
···
repoName := fmt.Sprintf("%s/%s", rctx.RepoInfo.OwnerDid, rctx.RepoInfo.Name)
query := fmt.Sprintf("repo=%s&ref=%s&path=%s&raw=true",
-
url.PathEscape(repoName), url.PathEscape(rctx.RepoInfo.Ref), actualPath)
+
url.QueryEscape(repoName), url.QueryEscape(rctx.RepoInfo.Ref), actualPath)
parsedURL := &url.URL{
Scheme: scheme,
+124
appview/pages/markup/reference_link.go
···
+
package markup
+
+
import (
+
"maps"
+
"net/url"
+
"path"
+
"slices"
+
"strconv"
+
"strings"
+
+
"github.com/yuin/goldmark/ast"
+
"github.com/yuin/goldmark/text"
+
"tangled.org/core/appview/models"
+
textension "tangled.org/core/appview/pages/markup/extension"
+
)
+
+
// FindReferences collects all links referencing tangled-related objects
+
// like issues, PRs, comments or even @-mentions
+
// This function doesn't actually check for the existence of records in the DB
+
// or the PDS; it merely returns a list of what are presumed to be references.
+
func FindReferences(baseUrl string, source string) ([]string, []models.ReferenceLink) {
+
var (
+
refLinkSet = make(map[models.ReferenceLink]struct{})
+
mentionsSet = make(map[string]struct{})
+
md = NewMarkdown()
+
sourceBytes = []byte(source)
+
root = md.Parser().Parse(text.NewReader(sourceBytes))
+
)
+
// trim the URL scheme; whether the link uses http or https shouldn't matter here
+
baseUrl = strings.TrimPrefix(baseUrl, "https://")
+
baseUrl = strings.TrimPrefix(baseUrl, "http://")
+
+
ast.Walk(root, func(n ast.Node, entering bool) (ast.WalkStatus, error) {
+
if !entering {
+
return ast.WalkContinue, nil
+
}
+
switch n.Kind() {
+
case textension.KindAt:
+
handle := n.(*textension.AtNode).Handle
+
mentionsSet[handle] = struct{}{}
+
return ast.WalkSkipChildren, nil
+
case ast.KindLink:
+
dest := string(n.(*ast.Link).Destination)
+
ref := parseTangledLink(baseUrl, dest)
+
if ref != nil {
+
refLinkSet[*ref] = struct{}{}
+
}
+
return ast.WalkSkipChildren, nil
+
case ast.KindAutoLink:
+
an := n.(*ast.AutoLink)
+
if an.AutoLinkType == ast.AutoLinkURL {
+
dest := string(an.URL(sourceBytes))
+
ref := parseTangledLink(baseUrl, dest)
+
if ref != nil {
+
refLinkSet[*ref] = struct{}{}
+
}
+
}
+
return ast.WalkSkipChildren, nil
+
}
+
return ast.WalkContinue, nil
+
})
+
mentions := slices.Collect(maps.Keys(mentionsSet))
+
references := slices.Collect(maps.Keys(refLinkSet))
+
return mentions, references
+
}
+
+
func parseTangledLink(baseHost string, urlStr string) *models.ReferenceLink {
+
u, err := url.Parse(urlStr)
+
if err != nil {
+
return nil
+
}
+
+
if u.Host != "" && !strings.EqualFold(u.Host, baseHost) {
+
return nil
+
}
+
+
p := path.Clean(u.Path)
+
parts := strings.FieldsFunc(p, func(r rune) bool { return r == '/' })
+
if len(parts) < 4 {
+
// need at least: handle / repo / kind / id
+
return nil
+
}
+
+
var (
+
handle = parts[0]
+
repo = parts[1]
+
kindSeg = parts[2]
+
subjectSeg = parts[3]
+
)
+
+
handle = strings.TrimPrefix(handle, "@")
+
+
var kind models.RefKind
+
switch kindSeg {
+
case "issues":
+
kind = models.RefKindIssue
+
case "pulls":
+
kind = models.RefKindPull
+
default:
+
return nil
+
}
+
+
subjectId, err := strconv.Atoi(subjectSeg)
+
if err != nil {
+
return nil
+
}
+
var commentId *int
+
if u.Fragment != "" {
+
if strings.HasPrefix(u.Fragment, "comment-") {
+
commentIdStr := u.Fragment[len("comment-"):]
+
if id, err := strconv.Atoi(commentIdStr); err == nil {
+
commentId = &id
+
}
+
}
+
}
+
+
return &models.ReferenceLink{
+
Handle: handle,
+
Repo: repo,
+
Kind: kind,
+
SubjectId: subjectId,
+
CommentId: commentId,
+
}
+
}
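Rough usage sketch for FindReferences; the handle, repo, and IDs below are made up, and the exact field values follow the models.ReferenceLink parsing shown above:

```go
package main

import (
	"fmt"

	"tangled.org/core/appview/pages/markup"
)

func main() {
	source := "cc @alice.example, fixed by https://tangled.org/@bob.example/widgets/pulls/7#comment-3"
	mentions, refs := markup.FindReferences("tangled.org", source)
	fmt.Println(mentions) // expected to contain "alice.example"
	fmt.Println(refs)     // expected: one pull reference (bob.example/widgets #7, comment 3)
}
```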
+3
appview/pages/markup/sanitizer.go
···
policy.AllowAttrs("class").Matching(regexp.MustCompile(`heading`)).OnElements("h1", "h2", "h3", "h4", "h5", "h6", "h7", "h8")
policy.AllowAttrs("class").Matching(regexp.MustCompile(strings.Join(slices.Collect(maps.Values(chroma.StandardTypes)), "|"))).OnElements("span")
+
// at-mentions
+
policy.AllowAttrs("class").Matching(regexp.MustCompile(`mention`)).OnElements("a")
+
// centering content
policy.AllowElements("center")
+75 -148
appview/pages/pages.go
···
package pages
import (
-
"bytes"
"crypto/sha256"
"embed"
"encoding/hex"
···
"path/filepath"
"strings"
"sync"
+
"time"
"tangled.org/core/api/tangled"
"tangled.org/core/appview/commitverify"
···
"tangled.org/core/patchutil"
"tangled.org/core/types"
-
"github.com/alecthomas/chroma/v2"
-
chromahtml "github.com/alecthomas/chroma/v2/formatters/html"
-
"github.com/alecthomas/chroma/v2/lexers"
-
"github.com/alecthomas/chroma/v2/styles"
"github.com/bluesky-social/indigo/atproto/identity"
"github.com/bluesky-social/indigo/atproto/syntax"
"github.com/go-git/go-git/v5/plumbing"
-
"github.com/go-git/go-git/v5/plumbing/object"
)
//go:embed templates/* static legal
···
type LoginParams struct {
ReturnUrl string
+
ErrorCode string
}
func (p *Pages) Login(w io.Writer, params LoginParams) error {
···
type KnotsParams struct {
LoggedInUser *oauth.User
Registrations []models.Registration
+
Tabs []map[string]any
+
Tab string
}
func (p *Pages) Knots(w io.Writer, params KnotsParams) error {
···
Members []string
Repos map[string][]models.Repo
IsOwner bool
+
Tabs []map[string]any
+
Tab string
}
func (p *Pages) Knot(w io.Writer, params KnotParams) error {
···
type SpindlesParams struct {
LoggedInUser *oauth.User
Spindles []models.Spindle
+
Tabs []map[string]any
+
Tab string
}
func (p *Pages) Spindles(w io.Writer, params SpindlesParams) error {
···
type SpindleListingParams struct {
models.Spindle
+
Tabs []map[string]any
+
Tab string
}
func (p *Pages) SpindleListing(w io.Writer, params SpindleListingParams) error {
···
Spindle models.Spindle
Members []string
Repos map[string][]models.Repo
+
Tabs []map[string]any
+
Tab string
}
func (p *Pages) SpindleDashboard(w io.Writer, params SpindleDashboardParams) error {
···
type ProfileCard struct {
UserDid string
-
UserHandle string
FollowStatus models.FollowStatus
Punchcard *models.Punchcard
Profile *models.Profile
···
return p.executePlain("user/fragments/editPins", w, params)
}
-
type RepoStarFragmentParams struct {
+
type StarBtnFragmentParams struct {
IsStarred bool
-
RepoAt syntax.ATURI
-
Stats models.RepoStats
-
}
-
-
func (p *Pages) RepoStarFragment(w io.Writer, params RepoStarFragmentParams) error {
-
return p.executePlain("repo/fragments/repoStar", w, params)
-
}
-
-
type RepoDescriptionParams struct {
-
RepoInfo repoinfo.RepoInfo
-
}
-
-
func (p *Pages) EditRepoDescriptionFragment(w io.Writer, params RepoDescriptionParams) error {
-
return p.executePlain("repo/fragments/editRepoDescription", w, params)
+
SubjectAt syntax.ATURI
+
StarCount int
}
-
func (p *Pages) RepoDescriptionFragment(w io.Writer, params RepoDescriptionParams) error {
-
return p.executePlain("repo/fragments/repoDescription", w, params)
+
func (p *Pages) StarBtnFragment(w io.Writer, params StarBtnFragmentParams) error {
+
return p.executePlain("fragments/starBtn", w, params)
}
type RepoIndexParams struct {
···
RepoInfo repoinfo.RepoInfo
Active string
TagMap map[string][]string
-
CommitsTrunc []*object.Commit
+
CommitsTrunc []types.Commit
TagsTrunc []*types.TagReference
BranchesTrunc []types.Branch
// ForkInfo *types.ForkInfo
-
HTMLReadme template.HTML
-
Raw bool
-
EmailToDidOrHandle map[string]string
-
VerifiedCommits commitverify.VerifiedCommits
-
Languages []types.RepoLanguageDetails
-
Pipelines map[string]models.Pipeline
-
NeedsKnotUpgrade bool
+
HTMLReadme template.HTML
+
Raw bool
+
EmailToDid map[string]string
+
VerifiedCommits commitverify.VerifiedCommits
+
Languages []types.RepoLanguageDetails
+
Pipelines map[string]models.Pipeline
+
NeedsKnotUpgrade bool
types.RepoIndexResponse
}
···
}
type RepoLogParams struct {
-
LoggedInUser *oauth.User
-
RepoInfo repoinfo.RepoInfo
-
TagMap map[string][]string
+
LoggedInUser *oauth.User
+
RepoInfo repoinfo.RepoInfo
+
TagMap map[string][]string
+
Active string
+
EmailToDid map[string]string
+
VerifiedCommits commitverify.VerifiedCommits
+
Pipelines map[string]models.Pipeline
+
types.RepoLogResponse
-
Active string
-
EmailToDidOrHandle map[string]string
-
VerifiedCommits commitverify.VerifiedCommits
-
Pipelines map[string]models.Pipeline
}
func (p *Pages) RepoLog(w io.Writer, params RepoLogParams) error {
···
}
type RepoCommitParams struct {
-
LoggedInUser *oauth.User
-
RepoInfo repoinfo.RepoInfo
-
Active string
-
EmailToDidOrHandle map[string]string
-
Pipeline *models.Pipeline
-
DiffOpts types.DiffOpts
+
LoggedInUser *oauth.User
+
RepoInfo repoinfo.RepoInfo
+
Active string
+
EmailToDid map[string]string
+
Pipeline *models.Pipeline
+
DiffOpts types.DiffOpts
// singular because it's always going to be just one
VerifiedCommit commitverify.VerifiedCommits
···
func (r RepoTreeParams) TreeStats() RepoTreeStats {
numFolders, numFiles := 0, 0
for _, f := range r.Files {
-
if !f.IsFile {
+
if !f.IsFile() {
numFolders += 1
-
} else if f.IsFile {
+
} else if f.IsFile() {
numFiles += 1
}
}
···
}
type RepoBlobParams struct {
-
LoggedInUser *oauth.User
-
RepoInfo repoinfo.RepoInfo
-
Active string
-
Unsupported bool
-
IsImage bool
-
IsVideo bool
-
ContentSrc string
-
BreadCrumbs [][]string
-
ShowRendered bool
-
RenderToggle bool
-
RenderedContents template.HTML
+
LoggedInUser *oauth.User
+
RepoInfo repoinfo.RepoInfo
+
Active string
+
BreadCrumbs [][]string
+
BlobView models.BlobView
*tangled.RepoBlob_Output
-
// Computed fields for template compatibility
-
Contents string
-
Lines int
-
SizeHint uint64
-
IsBinary bool
}
func (p *Pages) RepoBlob(w io.Writer, params RepoBlobParams) error {
-
var style *chroma.Style = styles.Get("catpuccin-latte")
-
-
if params.ShowRendered {
-
switch markup.GetFormat(params.Path) {
-
case markup.FormatMarkdown:
-
p.rctx.RepoInfo = params.RepoInfo
-
p.rctx.RendererType = markup.RendererTypeRepoMarkdown
-
htmlString := p.rctx.RenderMarkdown(params.Contents)
-
sanitized := p.rctx.SanitizeDefault(htmlString)
-
params.RenderedContents = template.HTML(sanitized)
-
}
+
switch params.BlobView.ContentType {
+
case models.BlobContentTypeMarkup:
+
p.rctx.RepoInfo = params.RepoInfo
}
-
c := params.Contents
-
formatter := chromahtml.New(
-
chromahtml.InlineCode(false),
-
chromahtml.WithLineNumbers(true),
-
chromahtml.WithLinkableLineNumbers(true, "L"),
-
chromahtml.Standalone(false),
-
chromahtml.WithClasses(true),
-
)
-
-
lexer := lexers.Get(filepath.Base(params.Path))
-
if lexer == nil {
-
lexer = lexers.Fallback
-
}
-
-
iterator, err := lexer.Tokenise(nil, c)
-
if err != nil {
-
return fmt.Errorf("chroma tokenize: %w", err)
-
}
-
-
var code bytes.Buffer
-
err = formatter.Format(&code, style, iterator)
-
if err != nil {
-
return fmt.Errorf("chroma format: %w", err)
-
}
-
-
params.Contents = code.String()
params.Active = "overview"
return p.executeRepo("repo/blob", w, params)
}
type Collaborator struct {
-
Did string
-
Handle string
-
Role string
+
Did string
+
Role string
}
type RepoSettingsParams struct {
···
RepoInfo repoinfo.RepoInfo
Active string
Issues []models.Issue
+
IssueCount int
LabelDefs map[string]*models.LabelDefinition
Page pagination.Page
FilteringByOpen bool
+
FilterQuery string
}
func (p *Pages) RepoIssues(w io.Writer, params RepoIssuesParams) error {
···
Active string
Issue *models.Issue
CommentList []models.CommentListItem
+
Backlinks []models.RichReferenceLink
LabelDefs map[string]*models.LabelDefinition
OrderedReactionKinds []models.ReactionKind
···
Pulls []*models.Pull
Active string
FilteringBy models.PullState
+
FilterQuery string
Stacks map[string]models.Stack
Pipelines map[string]models.Pipeline
LabelDefs map[string]*models.LabelDefinition
···
Pull *models.Pull
Stack models.Stack
AbandonedPulls []*models.Pull
+
Backlinks []models.RichReferenceLink
BranchDeleteStatus *models.BranchDeleteStatus
MergeCheck types.MergeCheckResponse
ResubmitCheck ResubmitResult
···
return p.executePlain("repo/fragments/compareAllowPull", w, params)
-
type RepoCompareDiffParams struct {
-
LoggedInUser *oauth.User
-
RepoInfo repoinfo.RepoInfo
-
Diff types.NiceDiff
+
type RepoCompareDiffFragmentParams struct {
+
Diff types.NiceDiff
+
DiffOpts types.DiffOpts
-
func (p *Pages) RepoCompareDiff(w io.Writer, params RepoCompareDiffParams) error {
-
return p.executePlain("repo/fragments/diff", w, []any{params.RepoInfo.FullName, &params.Diff})
+
func (p *Pages) RepoCompareDiffFragment(w io.Writer, params RepoCompareDiffFragmentParams) error {
+
return p.executePlain("repo/fragments/diff", w, []any{&params.Diff, &params.DiffOpts})
type LabelPanelParams struct {
···
Name string
Command string
Collapsed bool
+
StartTime time.Time
func (p *Pages) LogBlock(w io.Writer, params LogBlockParams) error {
return p.executePlain("repo/pipelines/fragments/logBlock", w, params)
+
}
+
+
type LogBlockEndParams struct {
+
Id int
+
StartTime time.Time
+
EndTime time.Time
+
}
+
+
func (p *Pages) LogBlockEnd(w io.Writer, params LogBlockEndParams) error {
+
return p.executePlain("repo/pipelines/fragments/logBlockEnd", w, params)
type LogLineParams struct {
···
ShowRendered bool
RenderToggle bool
RenderedContents template.HTML
-
String models.String
+
String *models.String
Stats models.StringStats
+
IsStarred bool
+
StarCount int
Owner identity.Identity
func (p *Pages) SingleString(w io.Writer, params SingleStringParams) error {
-
var style *chroma.Style = styles.Get("catpuccin-latte")
-
-
if params.ShowRendered {
-
switch markup.GetFormat(params.String.Filename) {
-
case markup.FormatMarkdown:
-
p.rctx.RendererType = markup.RendererTypeRepoMarkdown
-
htmlString := p.rctx.RenderMarkdown(params.String.Contents)
-
sanitized := p.rctx.SanitizeDefault(htmlString)
-
params.RenderedContents = template.HTML(sanitized)
-
}
-
}
-
-
c := params.String.Contents
-
formatter := chromahtml.New(
-
chromahtml.InlineCode(false),
-
chromahtml.WithLineNumbers(true),
-
chromahtml.WithLinkableLineNumbers(true, "L"),
-
chromahtml.Standalone(false),
-
chromahtml.WithClasses(true),
-
)
-
-
lexer := lexers.Get(filepath.Base(params.String.Filename))
-
if lexer == nil {
-
lexer = lexers.Fallback
-
}
-
-
iterator, err := lexer.Tokenise(nil, c)
-
if err != nil {
-
return fmt.Errorf("chroma tokenize: %w", err)
-
}
-
-
var code bytes.Buffer
-
err = formatter.Format(&code, style, iterator)
-
if err != nil {
-
return fmt.Errorf("chroma format: %w", err)
-
}
-
-
params.String.Contents = code.String()
return p.execute("strings/string", w, params)
+27 -24
appview/pages/repoinfo/repoinfo.go
···
"fmt"
"path"
"slices"
-
"strings"
"github.com/bluesky-social/indigo/atproto/syntax"
+
"tangled.org/core/api/tangled"
"tangled.org/core/appview/models"
"tangled.org/core/appview/state/userutil"
)
-
func (r RepoInfo) OwnerWithAt() string {
+
func (r RepoInfo) owner() string {
if r.OwnerHandle != "" {
-
return fmt.Sprintf("@%s", r.OwnerHandle)
+
return r.OwnerHandle
} else {
return r.OwnerDid
}
}
func (r RepoInfo) FullName() string {
-
return path.Join(r.OwnerWithAt(), r.Name)
+
return path.Join(r.owner(), r.Name)
}
-
func (r RepoInfo) OwnerWithoutAt() string {
-
if after, ok := strings.CutPrefix(r.OwnerWithAt(), "@"); ok {
-
return after
+
func (r RepoInfo) ownerWithoutAt() string {
+
if r.OwnerHandle != "" {
+
return r.OwnerHandle
} else {
return userutil.FlattenDid(r.OwnerDid)
}
}
func (r RepoInfo) FullNameWithoutAt() string {
-
return path.Join(r.OwnerWithoutAt(), r.Name)
+
return path.Join(r.ownerWithoutAt(), r.Name)
}
func (r RepoInfo) GetTabs() [][]string {
···
}
return tabs
+
}
+
+
func (r RepoInfo) RepoAt() syntax.ATURI {
+
return syntax.ATURI(fmt.Sprintf("at://%s/%s/%s", r.OwnerDid, tangled.RepoNSID, r.Rkey))
}
type RepoInfo struct {
-
Name string
-
Rkey string
-
OwnerDid string
-
OwnerHandle string
-
Description string
-
Knot string
-
Spindle string
-
RepoAt syntax.ATURI
-
IsStarred bool
-
Stats models.RepoStats
-
Roles RolesInRepo
-
Source *models.Repo
-
SourceHandle string
-
Ref string
-
DisableFork bool
-
CurrentDir string
+
Name string
+
Rkey string
+
OwnerDid string
+
OwnerHandle string
+
Description string
+
Website string
+
Topics []string
+
Knot string
+
Spindle string
+
IsStarred bool
+
Stats models.RepoStats
+
Roles RolesInRepo
+
Source *models.Repo
+
Ref string
+
CurrentDir string
}
// each tab on a repo could have some metadata:
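For reference, the new RepoAt() helper above composes an AT-URI from the owner DID, the repo record NSID, and the record key. A rough sketch of what it yields (DID and rkey are made up):

```go
package main

import (
	"fmt"

	"tangled.org/core/appview/pages/repoinfo"
)

func main() {
	r := repoinfo.RepoInfo{
		Name:     "widgets",
		Rkey:     "3kabc123xyz",
		OwnerDid: "did:plc:ewvi7nxzyoun6zhxrhs64oiz",
	}
	// Prints at://did:plc:ewvi7nxzyoun6zhxrhs64oiz/<tangled.RepoNSID>/3kabc123xyz
	fmt.Println(r.RepoAt())
}
```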
+82 -54
appview/pages/templates/fragments/dolly/logo.html
···
{{ define "fragments/dolly/logo" }}
-
<svg
-
version="1.1"
-
id="svg1"
-
class="{{.}}"
-
width="25"
-
height="25"
-
viewBox="0 0 25 25"
-
sodipodi:docname="tangled_dolly_face_only.png"
-
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
-
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
-
xmlns:xlink="http://www.w3.org/1999/xlink"
-
xmlns="http://www.w3.org/2000/svg"
-
xmlns:svg="http://www.w3.org/2000/svg">
-
<title>Dolly</title>
-
<defs
-
id="defs1" />
-
<sodipodi:namedview
-
id="namedview1"
-
pagecolor="#ffffff"
-
bordercolor="#000000"
-
borderopacity="0.25"
-
inkscape:showpageshadow="2"
-
inkscape:pageopacity="0.0"
-
inkscape:pagecheckerboard="true"
-
inkscape:deskcolor="#d5d5d5">
-
<inkscape:page
-
x="0"
-
y="0"
-
width="25"
-
height="25"
-
id="page2"
-
margin="0"
-
bleed="0" />
-
</sodipodi:namedview>
-
<g
-
inkscape:groupmode="layer"
-
inkscape:label="Image"
-
id="g1">
-
<image
-
width="252.48"
-
height="248.96001"
-
preserveAspectRatio="none"
-
xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAxUAAAMKCAYAAADznWlEAAABg2lDQ1BJQ0MgcHJvZmlsZQAAKJF9&#10;kT1Iw0AcxV9TpVoqDmYQcchQneyiIo61CkWoEGqFVh1MLv2CJi1Jiouj4Fpw8GOx6uDirKuDqyAI&#10;foC4C06KLlLi/5JCixgPjvvx7t7j7h0gNCtMt3rigG7YZjqZkLK5VSn0in6EISKGsMKs2pwsp+A7&#10;vu4R4OtdjGf5n/tzDGh5iwEBiTjOaqZNvEE8s2nXOO8Ti6ykaMTnxBMmXZD4keuqx2+ciy4LPFM0&#10;M+l5YpFYKnax2sWsZOrE08RRTTcoX8h6rHHe4qxX6qx9T/7CSN5YWeY6zVEksYglyJCgoo4yKrCp&#10;rzIMUiykaT/h4x9x/TK5VHKVwcixgCp0KK4f/A9+d2sVpia9pEgC6H1xnI8xILQLtBqO833sOK0T&#10;IPgMXBkdf7UJzH6S3uho0SNgcBu4uO5o6h5wuQMMP9UUU3GlIE2hUADez+ibcsDQLRBe83pr7+P0&#10;AchQV6kb4OAQGC9S9rrPu/u6e/v3TLu/H4tGcrDFxPPTAAAABmJLR0QAAAAAAAD5Q7t/AAAACXBI&#10;WXMAAC4jAAAuIwF4pT92AAAAB3RJTUUH6QkPFQ8jl/6e6wAAABl0RVh0Q29tbWVudABDcmVhdGVk&#10;IHdpdGggR0lNUFeBDhcAACAASURBVHic7N3nc5xXmqb564UjCXoripShvKNMqVSSynWXmZ6emd39&#10;NhH7R+6HjdiN6d3emZ7uru6ukqpKXUbeUKIMJZKitwBhzn64z1uZpEiJIAGkea9fRAZIgCCTQCLz&#10;3O95nueAJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmS&#10;JEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmS&#10;JEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmS&#10;JEmSJEmSJEmSJEmSJOkONYO+A5IkjYtSSgNM0Ht9LX036vvbj7fvX+77K/pfl0vTNAVJGgGGCkmS&#10;vkUNCjRNU+qvG2ASmKpvJ+rb9n3T9e0EvTCx3Pfr/o8ttf9M/fVS/bOL9bbU97HlvrcT7d/bNE1/&#10;KJGkgTBUSJI6ry8stIGhDQhTwAwJCu1CfhrYCGwGZoENwCZgS9/7pkgAmKuf0+5QTNbPnagfu9L3&#10;7zYkMCwA54BL9deQYHGlvm+eXkBZrH/P1fpnl+mFj4K7HZLWiaFCktQpNUDcuMMwScLCDFn0tyFh&#10;K7Ctvt1Q/9yG+vsd9WNbgO319zvrxxrgInCaLO6n6t87S4JHA5ytN/o+PkMCwTHgDHCNXrg5A5yq&#10;f+9ivf9X6/tPA5dJ8Gjftrsd1+qt3eX4S8gwcEhaLYYKSdLYakuXqnY3YANZ2LeBoD88bCMBof/X&#10;2+vHZ+kFjvbX01y/s9H+GrKIX7zh327DzK0+PkEW/m0IKH0fu0hCROn7d88DJ+pbyA7HMeAreuHi&#10;PAkdZ4EL9e9u+zkWgCXDhaS7ZaiQJI2VvmbpdvehDQJtydJOYD9wD7CL3i5DGyTaALGJBJAN9Pok&#10;2tsk6/8a2u40tOGkIQGhLX1q6IWIy/XPXwC+Bo4DX5LAcY5e6dU5ElYuk7KqtsfDXQxJK2KokCSN&#10;tBuap2folS5tIWFhL7CHXnnSXmAfsJvrdyA2kgAxQy84jJq2vKkNVldIaDhHSqdOkL6Mufq+L0jg&#10;OFk/3n5sjoSMhaZplpCk72CokCSNnL7diP6m6V0kPOyhFyT2kV2JvVzfAzFLr4ToxtfCcXttbMfW&#10;zpOdjXac7QWye3GclEsdIyVS50j4OFH/zEUSMtpGcJu/JX3DuD1xSpLG1E12JNogcS9wCHgBeIIE&#10;ie314+3kprbXYYLeORFdew3sPy8DeqNr5+ntZiyR4PEZ8A4JHZ+R0HGalFbN1c9bBsukJEXXnlAl&#10;SSOmlNI/inUrCQy7gfuAB+vtAeBx0ifR9kBM3Ozv000tcf342kv0+i2OkHDxFfAB8DHZxbhcP+ea&#10;Z2VIMlRIkoZO3ZWYoje+dScJD48CB+mVNe2rH2snNU0P4v6OsWWyQ3GJlEZ9BLxFgsbx+rGv6fVi&#10;LLhzIXWToUKSNDRKKW1p0yzpgdhHQsQDwHPAs6TcqT03Ygpfy9bLItm9+JTsXHxZf/0JCRhfkzMz&#10;2gP6Ft3BkLrDJ2JJ0sDccJL1NNlx2E9CxEP19gBwgISLvSR0tH0RWl9L9EbPXiFh4hgJGJ+TgPEp&#10;mSp1ht4hfDZ3S2POJ2RJ0kDUXon2JOk2TDwBvAQcJiGiHfnaP+rV167BakfWtk3d8/TKo06Sxu4/&#10;AG/XX5+h9l+4cyGNL5+YJUnrrpQyTQLDPWQn4kGyK/EE8BRpwt4ysDuolWpP527Pv/iEhIr3SbA4&#10;Shq9zwPz7lpI48dQIUlac7XMqb9fYjcZA3uY9Ek8SALGbrJr0ZY4afQsk7Knk/RO8X6LhIyP6/vb&#10;xu5FA4Y0HgwVkqQ10xcmNpEpTfvJLsTDwNPA88Aj5EyJ5oabRld74N4SCQ/tzsV7JFgcITsXZ4Gr&#10;wJKlUdJo80lbkrTq+sLEBjLq9SHge8CL9HYldtWPbcLXo3FWSN/FGTKC9jTZufgDKY9qT/K+gjsX&#10;0sjySVyStOpKKRtJaLiPnC3xPAkUT5Eg0Z507etQdyyRSVDXyOF5H5JQ8RE5VO8jUhp11V0LafT4&#10;ZC5JWhV1mtMMabC+D3iG7E48QfonDpKzJ3zt0TJp2j5F+i7eB34PvElG057DcCGNFJ/YJUl3rO+c&#10;iRmyA7GflDo9B7xMQsUeeqNgfd1Rq9CbGnUKeAf4d64vi7qAo2ilkeCTuyTpjtRAMUWarO8lOxOv&#10;0pvmtJeMjfVsCX2bQkqjLpPm7feBN8jOxUfkpO7L2G8hDbWpQd8BSdLoqYFihkx0epT0S7xCeicO&#10;kKAxhWNh9d3acLqNHIS4k4TUg8C79HovTpZSrjRNszSoOyrp1rxyJEm6bTVMbCS9EfeQ0bAvklKn&#10;J8nuxMaB3UGNg/aci6/ILsUR4DXSb/EJmSI1Z0mUNFwMFZKk79RX6rSVNGE/ScqdHqu/PkR6KtyZ&#10;0GppdyTOkhG0v6+3d0i/xWXst5CGhqFCknRLfY3YG8iI2MfIrsSPSTP2TrIzMY2BQmtjmRyQ9yXw&#10;R+B10sz9MWnwvmpJlDR4hgpJ0k3VQDHN9Y3YPy
HlTg+TUqfpgd1Bdc08OTjvE3o7F38CjpIRtDZy&#10;SwNko7akNdN3lfu6d/e/8NezDdo/U/o/3vf5xcXC+qpf+00kTDwCPE3vROwHgFl8DdH62kD6eLYA&#10;+8hAgP0kXLxHGrmv+lwhDYYvCJJWpC424fqwMFFvk/VtU99Ok+eZ6fqxkr+iLJOSBur72ylBi8By&#10;/Xg7w576vkUyz779WPu2v+yhDSXWWN+FUko7iecBUur0A+Dx+vv92IitwZkkj81NpIdnNwkYu4E/&#10;A1+UUi42TbM4uLsodZOhQtI33BAc2sDQkBf0NgRM9n18hrzIz5KridP17eb6vtn68Wv0QkAbCKbq&#10;xydJ3fRC/fhy/fgyKXu4TCbCtMFiAZirn7NE30FapZQ5vhlArgsqXs38prprNE0Oq3uCnDnxH8m5&#10;E1vpPRakQZsiQWKW9PrsI+V4vwc+KqWcIU3c/pxL68RQIQm4rgypf9ehDQdtYNhSb9vq23aHoR0x&#10;urN+bCu5irij/rnZ+nefJiGgrdVvw8gmEgxu9vENwEXgJJkCU+q/t1h/f4IEDurnzdU/ewq4VH9/&#10;pb69Vj9voe58XFdu1WWllEkSAg8CLwH/AXiB7E5swzCh4dJe5NhMTnDfSkLFPaSR+23gy1LKnE3c&#10;0vowVEgdVnckJsnivQ0Ms+SFenP9/VayqNxOLyhsp3dQVbvwn62/33DDrS19gixY252IdrejnRpU&#10;6sfbsoV2R2SKhIHLZMeC+jntCbyX6ue0i97LZLZ9GygukvGTbWCZI2HkNGnuvFJKuVY/d4k0e3aq&#10;fKqUMk0C4ZMkUPyI9E60pU4O9dCwaieT7adXErWX7Fz8O3C0lHK+aZqFW/8VklaDLxTSmOtrdm4D&#10;xBTXL/pnyYLyHvJCvIteeGh3HbbSK2VqdxbaQNH2Taz380lb1gS98izolUq1pU6XyW7GhfqxK8Dx&#10;+r4v6q8vkkBxBThff9+WUC3B+JZLlVJmSLnTU8AvSaB4gjwmZvB1QqOj0Bs9+zbwW+B35ETuk3hg&#10;nrSmfLGQxlANEm0J0wwJABtJMGibG/vDQztJZS+90qY2QGyof0fbQwHXP3cM4/NI6Xvbho/2frY7&#10;FZfJ4uMYCRILZPfiS3KS74n6566SnZB5ej0hy6MeMmq520byPX8K+Cvgb0ig2MJwfl+l71LIz+sZ&#10;4CMSKl4nI2i/AC7ZxC2tDV80pDHRtyPRNk7PksCwh4SG9nYvCRB7SKhodyH6DzDrv91sLOwoaxvE&#10;l0nAmO/72CkSJr4GPiMBoy2TOlE/fp56km/9e0auJ6P2T8ySWvQXyISn75Pyp+2M1/db3bRIdh6P&#10;kbMsXiNN3B8A5yyHklafPRXSCOsLEu2OxGZ6k1AeIVegHyE7EzeWMLU7EG1vQ1c09J77psnXpV1E&#10;byd9HXPkSudFsji5BBwBPidXO4+RwHEGuFxKmWdEDt7q6594GHgF+CkJFnvJ48NAoXEwRX62D5Gd&#10;t930SvreK6Wcbprm2q0/XdJKGSqkEVTDRNsb0TZS7yY7EA/U26PkbIEDJEQ4veebblxAz9TbNrIA&#10;WSaBa5GUBZ2h14vRhotPSNg4U0q5Sp0wNWy1233lTveQg+xeJedPPEMeIz4+NG7aAxzb58B2R3Yr&#10;8FYp5SvSZzH0FwOkUeAVKWkE3GRHoi1tupdciTtQf32QXmnTTvLiabPt6mgXHnOk6fsCKZN6lxy6&#10;9TEplTpNr2djKEqk+sqdHgCeB35MGrIfII+R6UHdN2mdLJOfyY9JA/c/k3KoY8BVx85Kd8+FhjTE&#10;+hqu2/McttNrqn6IHEr2Arn6vIleQ3X/ydb+nK+u/sP0FkiA+Aw4SibMfFJvX9LrwfjLJKn1Dhd1&#10;h2Iz8CBpxv5r4DngPhI03KFQVxSyk3iCNHD/f8BvyM/vJYOFdHdcbEhDqq/EaTuZwf4g2ZV4CLif&#10;3q6EZwkM1iK9A/baxtCvyKjaj4FPyaLlK9LwfXW9SqPqY6htyP4x8F9IQ/ZuUjrnY0Zd1E56e4sE&#10;i38kY2cvDlvZojRK7KmQhki9qjxJr+53H+mNeIHUvt9DFoTbyNXn9nA5F4eDM0Xv4MBCSs8eI83d&#10;X5Hdig9JidSH5JTfc9RRtWu1c1EfS9tIoHiZ3gnZ++lWY750o2nyXDpJLgjMkefQT0op5xw5K90Z&#10;FyLSEKgLwHZayV7SH3E/mdDzNClXeYiUN2k0LJNxtcskXLxDr/fiU7Kj8TW192I1r5DWHortZETs&#10;j0jZ0/NkIbVhtf4dacRdI2WKfyJlUK8D7wGnHTkrrZw7FdKA1NKU9oTrTWQUbHtuwGFS7rSf3jhY&#10;m2lHSzttCfJ93UPG+54g42nfrrejwNlSyhVWoe+ilDJFdigeB34O/C0JpZtxh0LqN01KSGfJYItt&#10;9X1vllJOMyJjoqVhYaiQBqBvvGfbeH0/WXA+Q64oHyIvdG3jddt0rdHSfs8myYJlI5m49Dj5Xr9D&#10;pkcdIbsXx4HzpZT5le5c9PXg7Kh//09IqHiahFIbsqXrNeQ5djcZejFbb9NkV/FkKWXBYCHdHkOF&#10;tE5umOS0g5Q2PUcWfYdIaco+ckV782DupdZQGyQ3ku/vNlLmdpiUYLxHyjCOAMfbvovbCRf1sTVN&#10;FkfPAD8jB9s9TR5rBgrp1tpywSfIrvEW8vP5W+BYKeWawUL6boYKaR307UzsJAvJtvn6ZXJVeTcJ&#10;Gy7+umGC3gnnB0jvw+PkcfEOKYn6EPi4hotr3xEupkj53GHgF6Qp+xBZKFnyJH239mfycXqlgov1&#10;drLuHhospG9hqJDWUA0T0+TK173kyvHzZPH3MFlQbsOfxS5qe2r6DzTcT66WniTlF78h5VEnSymX&#10;uEm4KKVMk0DxDDmDoi152oQlc9JKTZNy1JfJBLdrZAfxZCnF07elb+FCRloDffXt7VjYh0mYeJUs&#10;/vaRheQ07k6oV9vdnoL+KHnMPEom0vwJ+Ag4UUq5TG3mrk3Ze8jZE39F+iiexHNLpLsxSe+wyIl6&#10;+3fS83RtgPdLGmqGCmmV1YXeZrIz8QxZ8D1Hrn4dIDXu/uzpZtqdiw2koXsrecw8AbxBdi+OAudK&#10;KddIedOzwP9CRsceIrtiBlXp7syS4RnT9bYM/K6UctKTt6Wbc2EjrZJa6tRebX6YhImXSe/EIVKO&#10;4mJPt2uanFmylTTxt2eXvAd8AVwkO14/JldUH6Y3wlbS3ZkgAf0hMur5HHAFWC6lnPEcC+mbDBXS&#10;Xeprwm4PrmsDxU/Jycq7sRxFd6YhYfQg2f06SMrovgDOkF6K50mphofaSatvhoT5H5JwAfUcC0/e&#10;lq5nqJDuUN+I2M3AfcBTpEzlyfrrR
+mdD2Cg0N2YJGVzW8mu18V6a6c+2ZQtrY123Oxz5OLQNJkI&#10;9edSyrmVnicjjTNDhXTnZsiC7lFS5tS/M7EFy520uvonRW0kjz3I87iPM2ntTJJA/xT5WbsIXAU+&#10;KKVcsMdCCkOFtAJ9h4y1jdiHySFj3ycvOHtwgae11wYMSeuj7bF4hExZu1Lfb7CQKkOFdJtKKZNk&#10;Isg+0rz3FPADEijurx8zUEjS+NpMJq4tk2DfAO/VYGEplDrNUCF9h7o70W5/Pwi8SHYnnqi/30fK&#10;UQwUkjTepslz/sv0hiNcIefIXLnVJ0ldYKiQvkVfudNuEiJ+RMZ3HibNe+0BdjbJSlI3TJHBCU8C&#10;F4BTwOVSyhdN08wP9J5JA2SokG6hjoqdJWM8XwR+BrxEpu/sxCAhSV01SS42PUuCxWkSLE5aBqWu&#10;MlRIN6i7E1MkOLSTnX5U396PZU6SpJQ/3Uv66r4AzgLzpZTzBgt1kaFC6lMDxQy5AvUM8HPgF/R2&#10;JwwUkqTWRuAA8Cq9U7ffLaXMGSzUNYYKqeo7GXsP8DQJFD8nU548c0KSdKMJYBvwAnCJlEGdBb6q&#10;waIM8s5J68lQIfVsITsSz5PeiVfIYXabsX9CknRz7anbT5Ay2XPAEnAcsHFbnWGoUOeVUqbJlaZH&#10;SJD4KdmpuI+MkTVQSJK+zSQpg3qFBIpFYKGUcsKD8dQVhgp1Vi132gDsJSVOLwM/JNvYe8ioWEmS&#10;bsdWslsxTXYoLpCJUBcsg1IXGCrUSTVQbCa7ES+SsydeJidlbyZXnSRJWolN5FDUHwFfkTMsPiyl&#10;XDNYaNwZKtQ5NVBsAR4mT/y/IIfZHajvt9xJknSnNpELVD8j/RXnSX/FwgDvk7TmDBXqlL4dikOk&#10;1OlvgB+QEbIzGCgkSXdngowgfx44CXxKyqDO21+hcWaoUGeUUiZJzesh0kz3MzLl6R78WZAkrY6G&#10;9FXcA3wP+Bw4A3xQSrloGZTGlQspdUINFNvIiNi/An5MriLtx58DSdLqmwQeBf4D8CUphZoDrg3y&#10;TklrxcWUxl49JbudyvFz4H8l4WI7/gxIktZGQ8ptHyQXsT4CzpRSzrpboXHkgkpjrZ5B0R5K9FPg&#10;l+QMih2DvF+SpM7YBjwJPAOcAK562rbGkaFCY6uUMkPOmzhMpjz9BHiO7FpIkrQeNtA7XPUUmQb1&#10;JTkgTxobhgqNnVruNE0mOh0G/hMJFI+SkbETg7t3kqSOmSb9ez8gDdsngIt1GtTyQO+ZtIoMFRor&#10;NVBMkUDxDJnw9FPSQ7GV1Lg6NlaStJ6mgH1kGtRxEizmgSuDvFPSajJUaNxMAXvJE/dfk7KnR0ig&#10;cIdCkjQIbdP2E2S34h3gRO2tcLdCY8FFlsZGKWWKNGA/Sw61+1vgKdIk52NdkjRIk8AucqHrCbJz&#10;sWGg90haRS60NBbqORTtlKefAb8g/RQ7yRO5JEmD1pDXpadJWe7uekFMGnk+kDXySikTXB8ofkmu&#10;BBmaJUnDZhuZRPglKYW6VJu2HTGrkWao0EirjdmbSYhoz6F4BNg0yPslSdItbCQH4r1MGrZPk7Mr&#10;rhksNMq8kquRVXcoZoEHgJfI2NinyVUgJzxJkobRBBlv/jg5u+KZ+ntftzTSDBUaSX2B4hB5Uv4J&#10;eWLegTtwkqThNkFGnz8LvECatqcHeo+ku2So0MipJU8bgAMkTPxn4FXgHnxSliQNvwaYIa9jT5Gy&#10;3c31gpk0knzwahRN0ztE6JekLvU+Uqfq9rEkaRS0PYEPkxLe+4FN9cKZNHIMFRopdfTebjI54+fA&#10;94F7yRUfH8+SpFEyCRwEfgg8SSYZ+lqmkeQDVyOj7yyKx8lJ2T8mT8aWPEmSRlFDegGfJGcrtRfJ&#10;pJFjqNBIqHWmW0jd6avkqs4j+OQrSRptEyRYPA08Cuy0t0KjyAetRsVGMjr2x6SP4jBp1rb2VJI0&#10;6jaQXfiXyAWzjfZWaNQYKjT0SikzZLLT90ioeJpc1fHxK0kaB9PkwtnLZMysvRUaOT5gNdTqFvBO&#10;MnLvVbJDsYc0t0mSNA76S3yfAPbimUsaMYYKDa0aKNpxez8AXsQmNknSeGqAbWRE+n1kvKzrNI0M&#10;H6waSrWWdIZMd/oeCRUPk5Bhnakkadw0pH/wAPAYGZ/udEONDEOFhlV7HsVhUmP6NKkxtexJkjSu&#10;pkioeJ5cSNtiw7ZGhaFCQ6du924lV2peIU+u+/GKjSRpvDWkn+I54Blycc3eCo0EQ4WG0SbgfjJa&#10;7yXgUH2fV2skSeNuE5kE9TS5oLZhsHdHuj2GCg2VUso0GR/7HDng7gnSuCZJUldsIMHifmCrJVAa&#10;BYYKDY0bxsf+iASLXfg4lSR1Szuo5DFq+a/BQsPOxZqGQg0UW4BHSWO242MlSV01DewjPYXtjr1r&#10;Ng01H6AaFu1VmRfJ+NhHsI9CktRNE2Ti4bP15mF4GnqGCg1cKWUS2AE8SQLFU+TJ1MenJKmL2rOa&#10;7iMlUPfjYXgacj44NVC1RnSW7FK8QM6luAfPo5AkdVsbLO4lUxC34WujhpihQoM2RbZ1D5PSp/vJ&#10;iaKWPUmSuq4hZ1U8RF4r7TPU0DJUaGDqNu42es3ZbdmTgUKSpNhGLrgdICVQvkZqKBkqNBD1SXGG&#10;PEm+QELFAbwKI0lSvy0kVDxC+g8tgdJQMlRoUCbJ1ZengVfIboWnhkqSdL3NpPzpJdJ/6GulhpKh&#10;QoMyRU4LfZkccrcVH4+SJN1oivRVPE6atmctgdIwchGnddc3QvZRMipvDz4WJUm6lQ2kUXs/XoTT&#10;kPJBqXVVr65sIePxnsU+CkmSvsskee28l+xazLhboWFjqNB6mwb2Ac+T+tBDeHK2JEnfZRPwIOmr&#10;2Iyvmxoyhgqtm75digfJxKfHcYSsJEm3Y5LsVNwP7MQpUBoyhgqtixoopslp2YeB75H60KlB3i9J&#10;kkbEJNnpf5C8lk5bAqVhYqjQepkgzdlPAT8g87Y3DfQeSZI0OibJYJPHgYexdFhDxlChNdd30N1B&#10;0kfxIjmjwsefJEm3p70490y9bcUSKA0RF3VaDw0wSw7veZyMxLPsSZKklZkk058OArtIWbE0FAwV&#10;Wg8zpP7zCdJg5patJEkr15DX0D31tsG+Cg0LQ4XWVCllgmzRPkK2a+8luxQ+CUqStHIzZHLiHnIo&#10;nms5DQUfiFpr02TK0zPAk+RJ0BpQSZLu3Bby2jqLazkNCR+IWmubyKnZT9a3TnySJOnubCavqdux&#10;R1FDwlChNVNKmSYNZYdIL8Xmgd4hSZLGwyZSTryTlENJA2eo0JqovRTbSC/F8+Swno0DvVOSJI2H&#10;jaT8aQdOgNKQcMtMq6ZO
oGjI42ozCRQ/IGdT3ItXUyRJWg0zJFBsx9dWDQlDhe5aDROT5PE0S7Zj&#10;7yeB4qfkbAp3KSRJWh1TJFDsBGZLKRNN0ywP+D6p4wwVumN9YWIjKXXaQw64O0zOpHiM9FN4erYk&#10;Satnioxr308OwTtWSplrmqYM9m6pywwVWrFSyiTZbt1CGrHvBe4juxNP0juPYhuZoe3jTJKk1TNB&#10;bwJUO1p2HjBUaGBc7Ok79fVKtLsSO0iIeKjeHgAOklOz9wP7SOOYB9xJkrQ2ZuidrD0LnBvs3VHX&#10;GSp0SzVMTJAnrk1k52E/KWt6md5Up60kbEyR4NGGEEmStDYmyOvvdvIa7euuBspQoZuqgWKKlDjt&#10;J70RD5OJTo/V236y/doGCUmStD4myGv0DurJ2qWUxr4KDYqhQn/RFyQ2kKsfe0mQeL7eHqA3wm5L&#10;/XOGCUmS1l/bV7GjvnUgigbKUKH+KU5bSD/E/fV2iOxIPEPCxdYB3UVJknS9NlRsr2+ngGvYrK0B&#10;MVR02A07EztIf8Tz5LC6h+mNqmt3JSRJ0nBo6A1PaQ/BuzrQe6ROM1R0UA0TkKCwi0xyehx4gQSK&#10;J0lT9mS9TWCZkyRJw6Qhr+O7yfTFHcCVUsqyfRUaBENFx/SdMbGNlDi9ALxCgsQB8uS0FWszJUka&#10;Zg15Pb+HHDh7hJQ/nSqlzBsstN4MFR1RSpmgtzPxAPAo8DQpd3qWPClND+wOSpKklZogQ1WeJ6VP&#10;20m4OFlKOV/ft9A0zfLg7qK6wpKWDiilTJO+iHtJkHgZeI4cXLe3fmwKHw+SJI2aBXLw3RfARyRU&#10;fAwcrbfTwJX65yyN0ppxETmm+g6umyZ1lo8A3wd+SEqeDpIGLw+rkyRpdJV6uwZcIgHjaxIwfge8&#10;QwLH2frxeQwXWgMuJMdQLXXaSEqdDpBA8T2yQ/F4ff8Mfv8lSRony8AiCRjngE+Bd8nOxcfAh8CX&#10;9WPXgGK40GpxUTlG+kbEbiPjYZ8jdZaPkDMn7if1ln7fJUkab0ukp+IU2aX4DPgT8DbwCXAcaPsu&#10;Fg0XulsuLsdEneq0AdhDDqx7CXiVhIo9ZOfCvglJkrqjPyhcIGHiPdJrcQT4gJRGnSF9F4s2detO&#10;ucAccXV3om3E3k8asX8M/IAcYLe7ftzvtSRJ3dX2XZwjAeIY8Md6a8uiTpO+C3cutGIuNEdYDRQb&#10;yVSnJ4DDpAn7BVL+tIFeI7YkSdIS6b2YI6VRR+rtfVIa9X59/xw2dGsFXGyOqFLKFOmPeAj4Edmd&#10;eITsVuwmYUOSJOlWFoGL9XaMTIr6HfAm6cE4A8xZEqXbYagYMX2H2O0jOxM/AP6K7E5sIzsTkiRJ&#10;KzEPnCA9F38kTd3vAZ+TfgwP0dO3MlSMiFrqNEl6Jw6SBuy/AV4kJ2RvxUAhSZLu3BJwmfRXvAf8&#10;HniDNHZ/TfotDBe6KUPFCOgbFbud9E68QkqeXgTuIaVOEwO7g5IkaVws0zul+yjwZ9Jz8SbptzhJ&#10;xtAu2W+hfoaKEVD7J3aTcqdfAj8lY2N34mQnSZK0+tpdizOk/OlN4DVSFvUJmRQ1766FWlODvgO6&#10;tbpDsZnsRjxBmrF/ATxDyqAkSZLWwiTp1dxKxtHuBQ6Qw3R/R5q6vyylXCQlUe5adJyhYgj19U9s&#10;JtOdvkdKda4xXAAAIABJREFUnl4g4cJAIUmS1kNTb/eSkHEvCRcHSWnUx8DXpZQrTdMsDexeauAs&#10;mxkyNVDMADvI4XWvkOlO36c3KtaGbEmStN7aA/TO0Ou3+C2ZFvUZcB4PzussdyqGSB0Xu5HeuNgf&#10;Ay+R3Yl7sH9CkiQNTkPG2u8l1RR7yM7FflIS9T5wspQyb7DoHkPFkKiBYpachP094IfAq/X3W8nu&#10;hSRJ0qBNkVKojaQkexcJGntIQ/fnpZSLNnF3i6FiCNSSpzZQ/IQ0Yz9Hzp+YHeBdkyRJupUZUl2x&#10;hUykbIPFG8CRUspZbOLuDEPFgPUFigfI2RP/iexS7MLvjyRJGm7tYJlHyXla++rtdTIh6kQpZQ5Y&#10;NlyMNxetA1RLnraRCU+vkh2KF0nKtxlbkiSNggnSa7Ef2EQujN5DwsUfSVP3xVKKTdxjzFAxIDVQ&#10;7ACeJIfZ/Qx4mvwQGigkSdKomSJlUBtIP+geEix+D3wAnCqlXDVYjCdDxTrrO4NiG/A4CRR/Czxf&#10;32egkCRJo2qC9FgcIsGibeLeBbwNfFFKueSZFuPHULGOaqCYJjWHj5Meip+R8bHbMVBIkqTx0DZx&#10;z5JztvaRnYvfkSbuC8CSuxbjw1CxvibJtuAzpH/iVVL+tBMDhSRJGi+TZLfiYXLxdDepypgBPgTO&#10;AgsDu3daVYaKdVJKmSRbf4dJoPhb0qC9Fb8PkiRpPLXncE2TXotNZO3zb8CbpZSTwDV3LEafi9l1&#10;UEqZJlt+zwB/TULFk2QEmyRJ0rhr10IvkN2KXWQd9Afgy1LKnIfljTZDxRorpUyRH5zvA39FDrd7&#10;miR1SZKkrmgnXz5T326ut98Cn5VSrhgsRpehYg3VkqcdJET8FxIqHiSBYmKAd02SJGlQZoD7gL8h&#10;66RZ4J9JsLhssBhNhoo1UgPFdlLm9AtySvYh8oPTDO6eSZIkDVRDgsVe4FngMmnYngGOllIuOHJ2&#10;9Bgq1kA92G4rGRv7E7JDcQgDhSRJUmuSjJr9PgkU24DXgPdLKWcNFqPFULHKaqCYJWVOr5DG7KdI&#10;yDBQSJIk9bRrpi0kVGwiYeOdUsq5pmkWB3nndPsMFauoHm63CThIUvdPgOfwHApJkqRbaUuhXiTr&#10;qHYE7dullDPAgiNnh5+hYpXUQLGRBIq25Ok58kPi11mSJOnWpkgv6nOkumMnmQz1BnCilOLp20PO&#10;xe7qmQbuJadk/2+k8Wh/fb8kSZK+XXsC9+Nk92IOmAeWgdOlFHcshpihYhXUw+32kW27/0CCxV4c&#10;GytJkrRSG0mfxY+Aq/V9b5Jg4enbQ8pQcZfq4XY7yc7EL8gPwE4MFJIkSXdqA/AEUOitV/8EnCbj&#10;ZzVkDBV3oZ5FsRV4FPgx8DJwAEueJEmS7kY7nv8ZEjCmSCnUm3UqlONmh4yh4g7VxuwtwMMkUPwQ&#10;eIhs2Tk6VpIk6e5MkLXWIeAl4BRwjd45Fp68PUQMFXfghklPLwE/JWdRbMdAIUmStJo2A4+RnQpI&#10;2Hi7lHLeYDE8DBV3ZgrYQ7bkfkTGn+3BsygkSZJW2zRZZ71ISqEgAeO9Usplg8VwMFSsUF8fxWOk&#10;h+J7ZPKTfRSSJElroz3H4gVSAnWJjJw9arAYDoaKFahlTxvImLMf1NuD9FKzJEmS1sYk6bF4EjhP&#10;pkBNAJ+UUi4
ZLAbLULEy0+RAuxeAV8jhLFtwfKwkSdJ6aEgp1PdJyJis7/uI7F5oQAwVt6mWPe0E&#10;niaTng6TB7WBQpIkaf1sIMNy2kqRq8DlUsqngKduD4ih4jbUsqetwCNkh+JF4F7so5AkSRqEKWAX&#10;OXz4DHCO7FR8DSwO8H51lqHiO/T1UdxHttpeJvOSZwd4tyRJkrquLUt/GbhAAsXVUsoF+yvWn6Hi&#10;u02R6U6HyYP2SWAblj1JkiQNUgNsAu4nVSTHyG7Fh8DFAd6vTjJUfIvaR7GNNGS/DDxPAoZfN0mS&#10;pMFryOF4j5LDiM8Bl0op14Br9lesHxfHt1DLnmbJyNiXSOnTg8AMnpotSZI0LCaAvWS34hRwErgM&#10;fF1KsXF7nRgqbm2aPECfJ83Zj+H4WEmSpGHTkDXtTuA50lsxB/yJhAwbt9eBoeIm+qY9PUzKnp4G&#10;dmOgkCRJGlYbyaTOJRI0loA3Sinnbdxee4aKG9RAsRE4QNLuC2R87Mwg75ckSZK+1QSwg1wMXqY3&#10;YvZD4MoA71cnGCq+aZLMPX6K9FEcwvGxkiRJo2ILKVs/B3wBXCilfNE0zcJg79Z4M1T0KaVMkNFk&#10;7SF33yf1eZODvF+SJElaka1kx+JvSV/FpVLKacug1o6h4nrtmRQvkEBxP5Y9SZIkjZppsqZ7ETgC&#10;nCAH410xWKwNQ0VVdym2kbKnV4EnsOxJkiRpFPX3yP6U7FZcAI6WUuYcM7v6DBU9MyTRPkdOz947&#10;2LsjSZKku7SRrO1OAp8BZ4EFHDO76hyRynW7FA+Sfood2EchSZI06iZIf8Uj5KLxvcDGOu1Tq6jz&#10;OxV9I2TvI3V3z5BQIUkajP6yBF/4Jd2tBrgHeBb4mEyFulZvWiWdDxUkwe7m+hGymwZ5hySpwwq9&#10;0oRJUppqsJB0t3aQC8enydkVToNaZZ0OFX0jZB8Aniejx3bQ8a+LJA3QIjmw6gyZ3rKH7CZbkirp&#10;brRVKS+RsytOk2lQl23aXh1dXzxPkReswyRU3EtexCRJ668A88CXwLskSDxOprdsIc/P47RrUfre&#10;2uMorb2N5LiAHwFfkR2LOWzaXhWdDRV1l2KWnLj4Ehkhuxmf2CV9t1Jvy2SRO8F4LXYHZYFcPfx3&#10;4N/IC/3npN/tEWAXKYcah+fpQv5/banXRsYvNEnDZhLYTi4kfwy8B5wspSy5W3H3OhsquP6gu+eA&#10;/bi9Lun2LANXgctkgTtLSinHYbE7KIV8Tb8A3gBeJ1/fz8koyJdIieoB8vUe5efrZbIjc4b8f78g&#10;YeIZcqHLx5G0dqZJL+2j9XaUhHubtu9SJ0NFnfi0GTgIPElepDYM9E5JGhUFuEIWu0fJ4vYgGUk9&#10;6ovdQVomB1O1Vw8/r78/V29fk8OrnicDNbYzmk3ciyQ8nSAlXm8Ab5MSjJeAvyHhaSs+lqS1MknK&#10;oJ4nZ1dcLaWcbZpmabB3a7R1MlSQlLqLJNQHSK2uJN2Oa2RB2F5Nb8hu5wS9YDFqC91hsECCwxFS&#10;63ylaZrFUsoZsuA+R3YsvgZ+QK7o76G3QzTMX/O2XG4BuEh2Jv4MvAb8DviEBI236q//K3lM7SMX&#10;vIb5/yaNognSR/t94Di1aZvsjuoOdS5U1F2KTWQCwAskWGwd6J2SNCqWyGms7wK/An5d3/c1WfzN&#10;kheqUbyCPkhLJDQcBT4g4eEaQNM0y6WUdmfoEtmtOA68SoZsHCQ7z1Pk6uMwfd37w8Q8eex8QsLo&#10;v5EQcaJ+bJmEjf9Gdmv+K/Az4CHs95PWwjZyceKHZDjE2VLKnLsVd65zoYK84Owk51K8COzFLWZJ&#10;362Qq1gfkUXhG2Shu1g/Nkt2Pb9PDlkal4bitbZMvq5HSIP2e8DF/tnx9ddzpZR2Uku7a/EFKRW6&#10;jzyXb+f68bODChhtmFgkpXKnyH39gOxQ/IH8f880TbPQ93mLpZS2Uf08CSD/kTymduNrlbSaGnKM&#10;wNNkd/RTcoHoyiDv1CjrVKiouxQz5AX/EeylkHR7Fkl9/4dkd+I18gJ0hSyKj5GgMU0Wvd8j9bqz&#10;GCy+TSFfr2NkId0Gtfmb/eFaDnWufs4FeqNnHwEerreDZKEwQ17j1rM0qg0S10hQOkv+bx8B79fb&#10;kXq/L93s0K06geZyKeVNsjNztt5eJeFpZu3/G1JntGvCF8hz0Ed1t8ID8e5Ap0IFeXHZQbaTD2Ht&#10;s6TvtkCm9LxPSlZ+RRayF9tt8lqe8ykp47lIrjK/Sp5nttK959rb1X5t3wR+S3YpzpOgdlN10X21&#10;lPJl/dwjZHrfIXLF8en66z3ka7+JXDxqy6NgdZ73+8+YaMcLz9PbRTlGypg+IKHic+opvsC171q0&#10;NE1TSimfkjrvs/Xz/rr+37wYJq2OhqwFHyDPHW8B50opVw0WK9eZF7q6S7GBXOk5TA5U2obbyZJu&#10;rpCQcIZM5/lHskvxPmnq+8thSXUBeJUEi/bq8nlSE/8YuZjhc831lkkA+wj4DSkLOgEs3s68+KZp&#10;lmqYmyPfo8/JTtKfSK9cu2uxj5QObSe9CTNc33uxkp2M/gCxTB4D82Rn4gp5XBwlofNtEiiOkcfE&#10;Ink8ldudh1//j6fIztil+m/8ov7/NuIumLQaJsh68Fny/P4l8FUp5ZpnV6xMZ0IFedBsJYfcPUtS&#10;qadnS7qZNlCcA94hgeIfyQL4LLBw44tNbSieJ1ep/51cYZ4nV+OfIRPnFO2ZFJ+TsrHf11/Pr+RF&#10;vP7ZJWCplLJASo7aUa27yA7G/fV2oN7uIa8F0yRc9JdJQe8ww/b3y/XfaEPEAgkyV+jtSnxFmse/&#10;rP+PY2RX4jxwV6UUTdMs1AlYf6TX7P0fyVVVL4xJd68hIf0Z8vP7PvnZXqC3I6nb0KVQ0R528jAJ&#10;FNuw9EnSzbXjTd8G/oWUPL0PnGuaZvFWn1QXuQu12fYteiduT5KLGVvweQeyOD9OFsqvkTKhi3e5&#10;+F4CrpRS2p2LL0hp1HZSCrWPhIr95LVgK/l+bKIXKqbJ4mKWhI1FejsEcyQInSf9HOfJzsQxEmTO&#10;kZ2Xi/XPLaxW+UTtJTlLdnPmSHiaI2Nnd2GwkO7WJHmeeIIM8jlKfu49EG8FOhEq+g67u4+Eit24&#10;bSzpm9pJRMdInf+vSa3/B3xHoOhXy1baXY4pUno5Qw7b3Ey3g0U7lvcdEijeIlOQbutr+13qQr7d&#10;VZirV/k/J1/3reSCUntry6GmSKDYRAJFe4jhHAkP7eKi/f0FEjYukjBxZa3rr2tgvVhKeYfsWEzU&#10;+/4sCU5dfkxJq2GSXHRoy6DO1QPx3K24TZ0IFeSJdw95QX+U1DdLUqud2nOe3sjYfyU7FV+RBeRK&#10;Z5
e3J0S/S2+87CR5DurquQPtFK33SaB4g3x9bzrtaTX07R61YeAEee1r6PVHTJFA0R6kt0QvRCyQ&#10;71u7m7HYd1sCltZz0dE0zdVSylHgf5LXslny2ubgEenu7SKh4hPqlDbcrbhtYx8q+nYpHiSzvg/V&#10;30sSZFE5R8px3iSB4nek6fdU/djySheOtXl7kd50o/7a/MdI6U2XgsUC+Xq+A/wTCW2fkJ6DNV+U&#10;13+jANdq/wX9/25ttG+4PmyU+n1s3/+XP37j56+zy2T37P+lF3iexj5B6W5toXcg3pukjNJQcZvG&#10;PlSQF4Lt5OrgUySFdumFXNLNtScdX6B3TsJr5GCyT+v7V9Q4fKO6IG0X02/Vd28ki7/H6q+7cHV5&#10;gV4PxT+T4PYhcH4Qp9fe7Hv6bfejL5AMhfq4ukQWPW153XZS4tuF13VprbSToB4mF6PfLKVctATq&#10;9oz1k0+9ujRNGvQepnfKraS1116Zv3GazjBYJLXwJ8jV8nfI7sTbZMv7MqtU1lL/jmt1NOjb9Gr2&#10;p8jQiHE+IK8Nbl+TUa//QHYoPiWBYlX6KLqoPq7OlVL+TMq2tpFzLO7DcyykuzFFem8fIuvGk46X&#10;vT1jHSqqzWTix/306mUlra1r9CbktFd+trD+Jxy3+seBXiElSR+RMPEOKSU5ShqI59eo6XaRTAv6&#10;I7m4sQS8TK6GbSMXQMZp16ItK/uaXFH/J3J44EfU0DawezZezpNdtkJ+tn5OXu98rZPuzAR5vXqE&#10;XJD+nBvOJtLNjXuoaBczB8ghSF2rYZYGoS11+RNZrG+gd4r9XjKBZ4Ze/fpaLqTbBuzLpATpc7Ko&#10;fY+MMf2C3nkCl7nNg9fuRC1ZuUZ2R35LJgedAH5ESjP3kq/VOASLZRLeviTlZL8i/+cjwKVBlDyN&#10;qzpu9hT5Ou8m02t20d1hANLdashF6CeA56kjr0sp6zqUYRSNbaiopU8bydbVfeTJdmz/v9KQWCYL&#10;yX8B/o4sIjeQMp/H6tt7620fvYDRf/jYalkg5wWcJiVO75Mw8SEpvzlNgsQ1EibWdCQo/CVYzJPQ&#10;NU/v8LQTwAtk12Irox0slslO0PskSLxGyr6OYaBYEzVYnCFf792keftFPMldulNT5ELY86Qf7jPy&#10;nO3z17cY50X2BEmaB8lV0r345CqtlUKecE+RReTfkWBxmvwsfkzKjO4hV1LvIwGjPYxsF72zAWZI&#10;EJmmd7I19WM3/gy3JU3zpNRmjowAPFv/7TNk0X6UBIv2pOOLJEyseKrT3ar/3ny9utyeynyq3s8f&#10;AI+TheGoTfIp5P9znLwIv07vjI9TrF1ZmWKB7MT9igTTfWR33tc9aeX6J4c+SkoMz2Go+FbjHira&#10;for7yGQMt4KltTFPzhv4PRlz+WuySF6oV+fnyMLyCFno7CAL5wOkZvV+Eiw20TukrH8yUnvScduL&#10;0AaJeXqnHJ+jFyI+JaVNp+kdUna1/vlF1vlsgZupV5cvkcBzkYSdkyQQPUvC1gYG04Nyu9qpSIvk&#10;//AZOXvi16T87TPSW7Mw6K/3uOv7OTtCwtzD5HXvIKMXUKVhsZ3sWBwkF0wcL/stxjlUtAfeHSQP&#10;Cklro53u83vg/yELyuP0LSTrCdPLXN8o/Tm5iv0GCRS7SA/UDL3Z+xvpnX68sf57F8nC+wq9CVNt&#10;cLjU9/ELZOdiqd4K9dyBtfkyrFzTNMu1HOoEva/L8fr7F0nYmiXhYi1KxO5G2/x+iQTGT0hd/2vk&#10;wL/j5P808ADXFX2jZt8C/hv5mfkZKTeUtHLbSLXL48DHpZTLTq27tbEMFaWUtnP/MXIg0H7cApbW&#10;Qls//2cyLvR16mFBNy4k+2b9LwOLdTF9lewwHCcL5/bk6XYM7XTf+yfplVnNkQVtewX/Wr21Jx2v&#10;W5/E3apfl8VSygVqQyAJFe+TBu5HSLjYy/CUsyyT791J0qPyLilve5eEizOk3MlSgXVWg+pZ0j/0&#10;Z3La9h7crZDuxCZSAvUyeY47XUq55IWSmxvLUEFedHcBz5FgsYPhLR+QRtlFsnj5Z+A3pNzl6u08&#10;4baL6XqbL6Vc5vqf05udb1H6btzq/aP4hF93c9rG8YtkJ+ctUsbyZL0dImVj/RO01ssSaWw/Q0LP&#10;Z6TU5iMSJNpJWpdYwylaui3tMIA3yONnB+lhGoZAKo2SadIL+DzprfiYugM7yDs1rMY1VGygd+Dd&#10;Hsb3/ykNSiGL36MkTPwrWVhevtPF5C1OLe7UE3ffQXlnSLD4kuxYvE12LZ4kW/EH6ZWLzZIXvrUo&#10;j1oioW+O7Ch9Rm9n4n3y/T9DgsQcKXkb+t2hcVd3Ky6SK6u/IYFiP905wV1aLW3D9v1k13gvcKqU&#10;su5DPkbBuC62N9ObKOMJ2tLqWyRXQn9PDjT7ALjik+zqqIvCa6QHpd0d+JQ0P99HGtwfIAHjfmAn&#10;1weMW51ifuPZIG05Wvu2vS2Rq90XSZj4mgSKd8n3+jOyW3Gh3sdlhqxfRSyQ3qIPSOB/kfE5B0Va&#10;b20Z1EHyXDw/2LsznMYuVJRS2uPVHyEPgNnB3iNp7CyTaUt/JmVPfwLOWz+/uvp7UEop7W7BGbI7&#10;sJWUtOwjAePBemvP/thEQsZmUvKyUP/aDfVjG0hwmCMvjldIgGhPQW9LnI6Svolz9KZrXSBBZx7L&#10;nIZW35kon5LdpdPkMTNMzf7SqJgiz7X3k5+jC3jC9jeMXaggOxM7yTd/FzanSavtMlmk/COZ3X3S&#10;aRhrqy7cF4CFUko7JeorcgV6K7mQso9sze8gwWETeT6cIWUv/e8r5EXxEr2+lkskULTjeU+T3ajz&#10;9BrjF6m7GYaJ4Vf7dL4mfU/vkcfHTgwW0kpNkufY++vbrzxh+5vGKlTUU7Rnyfi8B+mNoJS0OhbI&#10;AXKvkVrtz3Fu97qqPQvL9AJGOz3rQzIdajO9Eijqr7fW97fN3ddIqLhS/8wkvala8323K3jGxEhr&#10;mma+lHKETGY7SCYibh7svZJGzgS5ePMEqYT5iDyPukPfZ6xCBfmmbyW1xg/jLoW0mgpZwL5FTsv+&#10;mLtozNbdq1/7JWCplrpcJgGhv2digt5p5O37276Jtqm64fq+ir9M0/L7OxaOkwsBD5BgYaiQVmaC&#10;7PI9BRwGfkcuzBgq+oxrqNhHvvnj9v+TBmWZXJU5Qp5M38Y+iqHSBox6yOBN/8gNv79pWDBEjKXL&#10;pFTuT8Bfk/JgSbevITu995IBGbvJrr079X3GZtFdS5/aULEFJ1xIq6E9BfsyGW/6Gpn4dJxe86+G&#10;yLeEAsNCRzVNs1jHFL9HJkJJWrl2vOxecvF6UynlqmO0e8YmVJBv9kZyLsUuxuv/Jq2nQmrpT5
GG&#10;4LNkEtDHpI/iPVL25BOpNDqukiurX5CyjS3YsC2t1BSphLmPTNg7T6+MtPPGaeE9Qb7BB8gZFbP4&#10;hCndjrYuf5HsPlwk5xC8RYLEV2Qxcrz++pxlT9LIWSIXCN4mhyg+jofhSSvVkAl7D5GL2Cdx1/4v&#10;xilUTJJv9AGyNeUhP9J3K2TKz1myM/EVOZvgbRIqviRXNS+Q2tFFdyik0VPPrbhMyhcfJa+T+/F1&#10;Ulqp9oTtg8BnpRQHllRjESpqP8VGUuN2kJQ/uUshfbdrZBfi30iQOEpvV+IMKZlY9BwKaSwskmEL&#10;7wLPkddMXyulldlE1pqHyM/SKTwIDxiTUEGvK38nufKyY7B3Rxp6hZy0+y7wDvArcs7BKXqnJXvA&#10;mTRelkh/1IfkjJmnGJ91gLReNpKdiufJRLVPMVQA4/Nk0pDyp81k+pOlT9LNLZKeidPkROx/IguM&#10;T0ivhE+M0pjqK4H6jCyELGWUVm6alA8eJg3bm0opc16EG59QAQkVsyRMLNXfS4r2MLOzwB+AN8ju&#10;xJvkQLs5XGBIY6+Olz1BdirmSCmHpJWZJuWD+8nF7Au4WzE2oWKKTH7aT8bkSeoppHfiHAkR/xfw&#10;OtmduEB6Jjp/hUXqkPNkp+JzUsphsJBWpj0XbS8puT9eSlnq+mvpuISKGfKNfQDPqJButETKHd4A&#10;/hn4F7KgcGKF1E1XSKB4i7x2zuDuvrQSEySQ7wK2k3Vn50/XHpepD5uAe+htQ43L/0u6W4tkktO/&#10;Av8n8Pfk7AkDhdRRTdMsAF8D75P+qs6XbUh3YIqEij3YywuMweK7lDJBein2km/uhsHeI2loFDIW&#10;9jXgv5Edis8AG8okXSTjo0/jFVbpTjTAbnJRezNjsKa+W+PwBZggQWIHaZqxNlRKoDhPxt39H6SH&#10;4uumaTpf8ykJSJP2MXLg5ZUB3xdpFDXkYrZVMtU49B40pAt/Q30rdd0ymfL0R+DvgN8BJx0XK6nP&#10;AtnJ/BpDhXQnGhImdpMhQZY/DfoOrIIpsu20A7+h0jK9HYq/J2dRHG+axvIGSf36z6y5OuD7Io2q&#10;WbL+3MJ4XKi/KyMdKkop7S7FTnIAyQwp+5C6qJDTsD8A/gH4n8BH5HRsSeq3TMLEWQwV0p3aRNag&#10;u4ANdV3aWSMdKqpJMs7rIL3D76QumidjIv+x3j4ArjZN46F2km7UhopzwCWcACWtVENK73cD95Lz&#10;0jq9WzHSoaI2nM6QpDiBgULdVYCTwG+A/0FGRV4yUEi6hWUy9elyvS3gTr+0UlOk/OkA2a2YGezd&#10;GayRDhV1m2kL2anw4B512VnSR/E/gbeBc055knQr9flhgRyOuYBjZaU7NUvOqthBxwcGjfo2zSTZ&#10;btpL70RDqUsKGQ35HvAr4LfAaXcoJN2G5XpbwPIn6U7NkDXoVjoeKkZ2p6LuUrSTn3aTYGGoUNdc&#10;Az4lgeKfgc/qabmSdDsWSbCQdGfaULEdmOlys/bIhoqq6Xs7iT0V6pYlMg7y30hj9vuOjpW0Am2j&#10;6S4cyy7dqWmyS7Ed2Mjor63v2Lj8xwtZYFlDri65ALxJr4/i4mDvjqQRU8jUuPbCnKSVa0PFDgwV&#10;I2+63hYxVKg75oCjpOzp34GvbcyWtEKFXIzwRG3pzk2RZu1t9e1UV0ugRj1UtI3a23DbVt2xDJwA&#10;3gD+BfjSsidJd6AhfVlzpFlb0spNkKMNtpE+3842a49yqGhIc8wu4B56Z1VI46w9NfttskvxQf29&#10;JK1UQ8o1CgkX7nZKK9euR7eRMqgZOnqhe9QX4VPkmzeN9aDqhnngCPBrUvZ0tmmapcHeJUkjbGN9&#10;606FdOemSahom7U7aZRDRUPuf8H52uqGRXJq9mv19ikuBCTduYYc2rWTjp8ELN2lSa6fANXJnYpR&#10;P9eh3Z1wxrbGXSHTnt4G/gl4q2maSwO9R5JG3QQJFHtIg6mkO9MfKjbR0VAx6jsV0+Sb19mtJnXG&#10;PPAZ6aP4PdmxkKS7sQycI31ZBXsqpDs1QYL5LKN/wf6OjXKomCSH9uypt06mQnXCMnCK9FD8DzLt&#10;yRd/SXdrAfiQXKj4CJu1pTs1QS5wT9Ph9eiop6m2MWYbox2QpG+zTKY8/Xfgg6Zprg74/kgaD4vA&#10;x8DfkzDxc+BhYAu+pkor0YaKGbK2niilNF27ADjqoWKy3jqbCjX2CnAM+Ld685AqSauiaZqlUsoZ&#10;4LfkuWUe+CXwOJ7/JK1EO555MynLn6KDg1RGOVQUcv+XyME90rgpwCWyQ/HfSR+F42MlrZoaLM4D&#10;b5Hnl0KqAJ6gw1NspBVq+3y319sGsjZ1p2KETJInQetANY6ukFrnfwDeBa51bStV0trrCxbv0TvE&#10;ayNwiCyODBbSd2snQG2ho6dqj3KoaOs9l/CcCo2fBeBz0pj9R3LInYFC0pqoweIc8CYp4ZgkpVAP&#10;MdprBWm9NGT6U/vz07kwPpJPFKWU9uC7CZIGPbRH46SQaU+/B/4F+MJTsyWttRosTgOvk0XRxnq7&#10;lxFdL0jrqCG7FG2o6JxRfpJoyP1vj0V3UoXGQQGuAu8D/0jKES4P9B5J6oymaUop5RRp3p4iF+5+&#10;Chz6hBLrAAAgAElEQVRgtNcM0lpr6E0k7eRo2VF+gmi/WRvxJFCNjyVS9vRrMu3pvGVPktZTX7B4&#10;nV6J8c+AB+jgQkm6TQ3pqdhGR3uRRjlULJO682vkCc8SKI26ZeACuUL4K+ALOjiSTtLgNU2zUEo5&#10;CbxBqgH2A/vIYsnKAOmbGlL6tIWsSSfp2Gv4SIaKehVlmYSJK/W2iQ6mQo2VeVL29K9kvONVdykk&#10;DdAC8DXwOxIotgKH8XA86VamSPCeAZquHYA3kqGiWiYhYoEsxqRRtkSas/8V+ANwqmma5cHeJUld&#10;Vi/gXQM+IaduT5Fdi4fJhTxJ15snr+ednP40ylcaGvJNW8ZzKjT6LpKm7NeBo03TGJQlDVy9uDEH&#10;HCUT6d4kF0A6VdYh3YZl4AxwnqxJS5d2KWD0Q0VDvomO29QouwocIdOe3gLODfbuSFJPXRhdAt4B&#10;/m9SDnVmoHdKGk6L9E6m71y1wSiXP7XbStN0tMteY2EROE5Knl4HvmqaxiuAkoZK0zTLdSLUG2QK&#10;1P3ADvL6Kykm6Z1R0aldChjRnYp6+F1720zGdxkqNGqWydW/D0lZwUdk10KShlHbuP2HevuSDl6N&#10;lW6hoTf1qZM/F6O8U1HIVd5pPKdCo2mBvCj/EfgzqVNeHOg9kqRbqLsVF8jz1S4yZvZecl6UpPws&#10;TNMbJtQpI7lTUes7C2nQvorTnzR6lslj90MSKj4G5rvW1CVp5CwBZ8lgibeB09i0L
UFCxKZ6mwYm&#10;a2VNZ4xkqKjaqU+XgMt0sHZNI20BOElKCN4FzjZN48ABSUOtXvhYIIdzvknO1rlER8s9pBtMkhKo&#10;KdypGCnLZIfiAhnHaajQqChk5NyfSdPj53ilT9LoWCa7Fe+T57CvsGJAKqQCYW7Qd2RQRjlUtOVP&#10;hgqNmnngUzJC9m3gnGVPkkZF0zSlaZpF4BgZMvEuGTHrbqu6rJCfg7Nkfbrctdf2UQ4VkKsll7H8&#10;SaOjkBGybwB/Iidn+0IsaRSdJxdGXiN9YZcHe3ekgVui16TduZLAUQ4V7fSnKxgqNBoK2Vn7kPRS&#10;fIYjZCWNrnkywe5NcoDnBXwtVrdNkb6KdqBQp4xyqIDUobflT17t1bBreymOkFrks/i4lTSi+k7a&#10;/oSUQH1FBxdSUtV/TkUnX9tHOVQUeo3a50nA8MlMw2yRTHw6QnYp5rpWbylp7CyRA/H+DHxAdl87&#10;V/YhVTNkbd3JM6dGOVRAvmlXSfnTHIYKDa92tvsfSenTcTp6JUPSWGl3K94hz2/HSJOqr8fqogl6&#10;5fmOlB0V9QrvMlmYLZBQ4dURDat58mL7B+AjPOhOY6qUMlFKmay3zr2odk19Hmt3Yf8A/AY4hRdN&#10;1D3L5LHfHgjZudf4qUHfgVXQHoJnqNCwWiS7FO+Q8oAzdPDJRuOpBocpsu2/id72f8mHS0Ovxrgd&#10;rLFEB8ctjqv6fZwrpXxEQsVjwE7GY40h3a5l8vp+hjpSdrB3Z/2Nww/8Anmhso5Tw2qelDv9mexW&#10;2EuhkVZKmSBBYZoEiW3ALmAPsKV+rOn7MxvIhZ8TZFrQReBiKeUysOBY5bHxNZkE9RHwMLB5sHdH&#10;WlcNeb2/ijsVI6k9vfAcToDS8LpKwsQH5FwKT8/WyCqlTAOz5Er0LmA/cC9wsL7dQXYrpugFio3k&#10;4s+X9fY5OdfgM+DrUspF8iLs7sUIa5rmSinlc+At4AUSMkd9nSGtRHtBZRlDxcgp5OrXeQwVGk7t&#10;ZJT3gKOkoVEaSaWUSWA38ARwGHiIhIq9JGBsJ4Fjmry4trcJUg7wJAkXJ8nV7HfJuS2fkuDdnkSr&#10;0XWO9FY8Qx4rBwd7d6R11emRsqMeKqAXKi6QK13SsCikfvwoKX36imyNSiOlljttITsRzwCvAC8C&#10;D9T3byQ7Em2YmOCbk09mSZlUqX/PfcCj5OfjCPkZebuU8gVwxR2LkTVPzq34IymB2kUeHzbta5y1&#10;xxxcJK/7nRwpO+qhopCrWhdJsGjH2PnkpWFQSMPWR2Sn4nzTNPb9DIG6SG5vDb3TT5fp681yYfuX&#10;r9V24BHg5Xp7HjhU33+7z7dtjwXAVtKLsbv+PU/XtzuAX5OgMXf3914DsEim37xJGrYPkgA5M8g7&#10;Ja2xNlScIjuu9lSMqAWSCi/Qa9ae/NbPkNZeIY/Nr0gvxRd09MrFMKgTiNrm4nZK0Sy5ut5OKmoP&#10;LLpWb1dKKdfo1fp3KhD2TW3aRnYUfgz8EniOlDtNc3cXcKbILscsqb3fTa5ozwNXSynHgWsGu9HS&#10;NM1ybcB/H3iDhNHd5Ps9smPspdt0kZQ5W/40apqmKaWURVKje6G+XcRQocFrS5/eAv5E6ow7tSgd&#10;Bn1hYgNZHO8kC+IHSFnGlvqxGXLVfTO5OPExqfc/Tq48nakLpaUOLXLbHYqngB8BPyU7FHvJ12w1&#10;tMFlgvRmvEiexxeA3wJfllIWOvQ1HxeLpJesbcY/TH62DBUaVw250LJERw++gxEPFdUy1zdrL7B6&#10;L3jSnVomi6MjpJTDhdE6K6VMkRDxAPBgvR2kN61oG7ky3vYBtH0B10iQOEF2mN4jZ4x8DBwvpVwa&#10;9wlefT0UjwM/J4HiCVY3UPRr6t97EHiV3k7fNdLU7S7fCKnPdfOllFOkAf88ToLSeGvP67lZP1ln&#10;jMMP+DLZLm/Hyo71i71GxhxZiL6Ph92tuxoo9gEvAT8jtd0HSBnGVlJy0x7S9o1PJ8HjMKmNPUx2&#10;Ld6vtw9qM/HFMT5fYSOpg/8h8Nek5Gkba18Xv4n0VkyS5/SzpBTqwhh/rcfZBTI++Bj5eZzG3QqN&#10;p2Wyy32NXATpZGXCOIQKSKg4T69ZWxqkJXKl+5/IVe55dynWR73Cvgm4n5TS/Cfgb0ip0xS9K0jf&#10;diWpv6F4N7li/wDwfbJb8TppJn6nlHJ23HYt6tdwJwlTP61vd7M+i8GGXrB4hewWnSNB7qo/RyPn&#10;EhkX/C7ZHZytN2nctCV/p0mVQpdKZf9i5ENF7atYIFdEDBUaBhdJmHiXHHZn6cb6mSVnIfzvwC9I&#10;k2h7wvOdaMty9pKF9kESMA4Cfwf8vvz/7L1nkxxXmqX5eKRGJpDQBAGQoGZRgaIUS4vu6p6e6e6x&#10;td39m/tlx2xnp7tna0qwWCSLWpNQhNbIBFJnxt0P5156IAmVQEb4dffzmIUFFAlHhPu997zivCFc&#10;algUfRRldb6P+im2M/jo8gjKLv0MCfSLlBFAUx/mUQnhp6h8bj8WFaaZrKFz6MX43qQ94Z6pvaiI&#10;LKNo1kXUHGtMVSyjxsS/oI30WrWX0w5idH0rOgSn7MSzqIznQUmZi+QcNYS+5xlU5vZxw4TFbuAl&#10;VDq2j2p61AqUXXoeRbrfR2WEFhX1IvUnfYVKoJ5H36sxTaRAZ9DWztlpiqhYQbW3Z9FGv0pz/m2m&#10;XsyjwU9/A846S9F/oqDYBbyCshO/QkO3NkNQrKdAkdbHgZ9T1s1+GEK4XGfb2eiUNYGas3+ERNlU&#10;hZc0jDJETyBxcxTVLJuaEO1lb6BsxVmUxV3DDo2meSQn0jla3NvblIP3Kko3nUXRrGW0aLW2A99U&#10;QhdF5b5EA+88PbvPxIPwJJry/C/A39P/EothFG19Mf44bSYLIYT5GguLJM5eoJxFUfXhL5We7QMm&#10;QwgzbY0A1phlFPRLZSErtNwhxzSOgLLW54iB7RBC0ca1qhGiIvZVLCFBcQVFsyaqvSrTMtJcii9R&#10;qcZ5XKoxCFL9/28oXZ7SpOx+MoRmOLyEvveLSFCeCiEs1nQzGUHNtE+hz7RqQQG6pu3ourYDl0II&#10;HohXI3r252uUfY+2fTdNokBBxJPEadptXaOaZO22hpwmruG+CjN4VlGU4h0kKuZqHLGuBTFLMYXK&#10;dH6GmrIHObW3g7IkT6JyodcobTNrRU9PyjPo89xNHpHkIfQdH0DX1I+SNtN/ltGB6xu0R7fywGUa&#10;z3UU1G7t3t8kUdFFouIySrF60TKDoovuuQ+Ad1H9sLMU/aeDypAOI0emKrKTBfAQ6uf4ITr81jEK&#10;O4wyAa8gkTRBHqICJCT2otKscfK5LnPvrCJR8RXK4np/Nk0izaiYR+V9
FhUNoItU4jmUfvKiZQbF&#10;MnI2eQsNR2ut88OAmUIH4FdQKVJVn/kWNCjuWSQq6miZOYr6Fp5BB/ic9oZhlEWZQlkgP1v1o4sm&#10;o3+JxEVrD12mkSyigHZyBGzt/Z3TxvGgdFHZ0zn05XrjMYPiBhITHwHn7PjUf2Lp0wFUdvQSEhVV&#10;rmejqDxnP7AthJBDP8JGmEQuS3vJL9PSQRmKEaCVzY91J5aC3kDOeCdoqYe/aSwLSDRfQKXPrV2j&#10;miQqAooYp2ZtH+zMIFhD99snqF7Y/Tx9JgqKEVTy9BLVzVLoZQQJm32ojGi02su5d2I/xU7k+rSH&#10;/HpCOuj77Z2IbupHFxkanMVDak1zSEPvziJR0WrXxyaJCijnVVym5SkoMzCuocnZ7wLni6JorT/1&#10;AEmD0Q6hsqMcmneHgG0oe7If2BoP63VgHImJR1CZUW7XPYQyKWNAJ4pKUzNi9PY6ckmbw9kK0wzW&#10;0LnzJLq3W30GyG3zuG/igrWGDnmXUKq1tSkoMxDS9Ox3UKZittrLaQ3D6OCek/VpOvjuQ/an08Bw&#10;TQ7Ak5SN0FVnfG5FcoAawfMN6s4iOnx9iDK8Fham7iQ7+Qvo3Nnqe7oxoiKyRtmsfRYtYMb0g1T2&#10;9CnKUpx1lqL/xOj/BIqqP4MO8TmsYwUqedqBsihT5CF27kgUPVvR57iL/EqfQJ9tatDuYlFRW2K/&#10;2RHgv6MJ6d6jTd3poHL7lH1rdTA7h81404jZikXgOPAesq5z7abZbFL/zgkkKD6l5SnPAVIgUfE4&#10;yggMkc8hcwg5P31bqlPt5dyZKCiSEDqAshU59oKsogz0dfTcuay13qQetOO4B83Um4B6KG4gO9nW&#10;r01Zb3r3yQoSE58BZ2h504zpG2so0vYF6uFp/WIyIEZQ/f/TKLqei6AAXctwfEE90uBpMvhDSAzl&#10;uCcsomftHLDQZmeVhjCPBMWXyILTmDpzBVnKX0bnzVavTzluIA9Kapo5hr7ohWovxzSQNdS38wXa&#10;HBc9PXtgjKOypyeRy1JuomIEZSmGqIfQ7KDPcQ/5uistoQDRRbye155YJnoZlUFZVJg606UUFZeA&#10;5bYHPRonKuIXOod6Kk6itJQxm8kiirJ9jHspBs0W4HuoQTu36cqpnGhLfO9mvsGkUrJdaMbGEPlF&#10;2QKKbKfBUi5nbQaLyOTiatUXYsx9kkqfLqEs6jU8yqB5oiKyjDrxj6Iv2pjN5DpyL/kC318DIw6U&#10;m0a9FDvIr6m4N1MxfJc/mwMFEhT7UXP5CHmJNNDh8wKKBM46I9gYVlGZ8hnkmufv1dSNLrp3T8fX&#10;DXwfN1ZUJBeo0ygS4kiy2SzmUYP2x8AlH3IGyhQaePcIqv/PkQJtLAEYytxStkCzNXagrE+O+0ES&#10;FUdx1rlJrKLv9W20lra+Ft3Uki4KLF7F/btAnpvIAxMPemkzuojKobxgmQeli+6pD5ERgOdSDJYJ&#10;lKV4FJUY5UZyn7tMPe6NDhJq0+jzzE0ApdKnc6iU1f0UzSEgkXgclUHN4z3a1It0v95AQezcy10H&#10;QiNFRSQ1gx1FGQtHQsyDsgKcAv6GshU+5AyW3vr/HIe0rSExcQ5FrtZy3WRiBmUcZSl2ocxPbqKi&#10;iz7HVHvf+nrlphCfi1XU5HqeeCir9KKM2Rir6IyZ7t8s1/pB02RRkRas94GP0KbkRcvcLwGJiOMo&#10;UzGb64GxiYQQRlBEfQ95zlKA0nnuMvlPVh1CQ+/SfIox8hMVq8hw4ytcWtBEuqiS4Bv0zOT8vBiz&#10;niU0o+oYFsXf0mRRkVLn36Av3bMEzIOwhkowPkH3lF1oBsswaih+AUXYc2QFHZKuAkuZi84CCYkD&#10;KPOTG6k85hTKCs67f6lxpD36KnL28vdr6kSaiXYJ2crnvN4PjMaKivgFr6Aa+BPx3ZEQcz90USTi&#10;beCvOOs1UGKpzhga0PYoeWYqUunTGbTRzFd7OXdlCPVT7I7vuRFQA+QF9Lm69KmZLKGKgkt4fzb1&#10;YRGV1R9HAWsHGSONFRWRNRQBOYluAG9M5n5YAr5GguJz8o9CN43kUrQLNRQPVXs5t2SVcujmBfIv&#10;1xkHDqLSp9zmfUBZvnoJN/E2mUUkwk/hg5mpD4sogHQCuFYUhQVxpOmiArQhnUGK0j7CZqOkiZlv&#10;otKnGZdhDJw0T2E3+a5ZKyiyfgxtMrkfgseAp1H2J7d+ioAOmKmJ16UxDSQ+I0vI2OAodmk09WGB&#10;cuhd7lnpgZLrBr0p9JRAXQSOoGyFo15mI8yhDe9d7Pg0cGLp0wgSFQ+hjEWO61Y6BJ9C90zuDKHP&#10;cpL8hggmS/DLaO2+gdfsprKGxPhpJB5dTWByJwnhL6hHVnqg5Lg5byoxLTWDylfeQW4iHoZn7pW0&#10;gBynHhHoprHe+nSK/NatgATFF/E96/UlhDCMnLTSfIocy8lST8UM9n9vLPF7vYGqCS7gEiiTN2so&#10;2PEFqly4hrOoN5Hb5twvFpBjzztEJ5FqL8fUhBW02b2P+nIWq72cVlKgxuyp+Bohr1KdxGVkL1iH&#10;Up1xZM37EJr9kRtdtEafR5u2aTBFUaR+pPN4jTV5E1DZ01eo1NWudOtohaiI2YqrwJdIVFyv9opM&#10;DUiR0o+APwLn4+ZnBk9vqU6OEes0aPMU2mRyvMZehtHnuY88RcUqKns6gyOBbWERPT+z5PmMGxNQ&#10;puISulcvkXlWugpaISoiy6iM5RgqUfDCZW5H6sU5iUTFEdxLURUFOgTvQeVPuRFQwOIc6qWowwE4&#10;9ansIE973jT07hI6bHqtbj7zyFnvBPXoSTLto0vZ/3MWnQns+rSONomKlK34CtXHOyJibkcXRZ4/&#10;RqLC0zKrIx2At5JnPwWoFvwoEhZZR65i4/skEmlTSLDlxhoK/FzD9s1tYQGJik+RmDQmN9IE+GPE&#10;rDQ+Q36HHDfoftFFEZCvgQ9QRCTrA4CpjGQjewT14vhgUx0dVKozgQRGTv0UKaN1FUWurlCPyNU0&#10;8Aj5WclCOWX5W1FR7eWYAbGCnqHjKKDjII7JjWUUQEqlT8s+F3yX1oiK+OUvoxviQ9S979S6WU+g&#10;rO89jhYP91JUxxDl0LscD8BLSFRcBRZz32Ti9U2ifoqxii/nVqR5H+eAWQ+Vag1d5AJ1DomKVbw3&#10;m3xI5hFnUa+XK11uQ2tERSTZyx5DUWiXtZj1rFKWPn2BDotePKojZSq2k2dT8RJyrblY9YXcC7H8&#10;aQwJizHy2wNWUJbiG2yo0SbSoe0cepbsAmVyYpVyuOlZYCH3AFJV5Lah9JWeYXgXUPnTFRyFNjez&#10;jBaNz1BDllOc1dNBPRWT5JWtSG4g59E9k/V90tNPsQ3ZyuZ
[base64-encoded PNG payload of the removed embedded raster image omitted]"
-
id="image1"
-
x="-233.6257"
-
y="10.383364"
-
style="display:none" />
-
<path
-
fill="currentColor"
-
style="stroke-width:0.111183"
-
d="m 16.348974,24.09935 -0.06485,-0.03766 -0.202005,-0.0106 -0.202008,-0.01048 -0.275736,-0.02601 -0.275734,-0.02602 v -0.02649 -0.02648 l -0.204577,-0.04019 -0.204578,-0.04019 -0.167616,-0.08035 -0.167617,-0.08035 -0.0014,-0.04137 -0.0014,-0.04137 -0.266473,-0.143735 -0.266475,-0.143735 -0.276098,-0.20335 -0.2761,-0.203347 -0.262064,-0.251949 -0.262064,-0.25195 -0.22095,-0.284628 -0.220948,-0.284629 -0.170253,-0.284631 -0.170252,-0.284628 -0.01341,-0.0144 -0.0134,-0.0144 -0.141982,0.161297 -0.14198,0.1613 -0.22313,0.21426 -0.223132,0.214264 -0.186025,0.146053 -0.186023,0.14605 -0.252501,0.163342 -0.252502,0.163342 -0.249014,0.115348 -0.249013,0.115336 0.0053,0.03241 0.0053,0.03241 -0.1716725,0.04599 -0.171669,0.046 -0.3379966,0.101058 -0.3379972,0.101058 -0.1778925,0.04506 -0.1778935,0.04508 -0.3913655,0.02601 -0.3913643,0.02603 -0.3557868,-0.03514 -0.3557863,-0.03514 -0.037426,-0.03029 -0.037427,-0.03029 -0.076924,0.02011 -0.076924,0.02011 -0.050508,-0.05051 -0.050405,-0.05056 L 6.6604532,23.110188 6.451745,23.063961 6.1546135,22.960559 5.8574835,22.857156 5.5319879,22.694039 5.2064938,22.530922 4.8793922,22.302961 4.5522905,22.075005 4.247598,21.786585 3.9429055,21.49817 3.7185335,21.208777 3.4941628,20.919385 3.3669822,20.705914 3.239803,20.492443 3.1335213,20.278969 3.0272397,20.065499 2.9015252,19.7275 2.7758105,19.389504 2.6925225,18.998139 2.6092345,18.606774 2.6096814,17.91299 2.6101284,17.219208 2.6744634,16.90029 2.7387984,16.581374 2.8474286,16.242088 2.9560588,15.9028 3.1137374,15.583492 3.2714148,15.264182 3.3415068,15.150766 3.4115988,15.03735 3.3127798,14.96945 3.2139618,14.90157 3.0360685,14.800239 2.8581753,14.698908 2.5913347,14.503228 2.3244955,14.307547 2.0621238,14.055599 1.7997507,13.803651 1.6111953,13.56878 1.4226411,13.333906 1.2632237,13.087474 1.1038089,12.841042 0.97442,12.575195 0.8450307,12.30935 0.724603,11.971351 0.6041766,11.633356 0.52150365,11.241991 0.4388285,10.850626 0.44091592,10.156842 0.44300333,9.4630594 0.54235911,9.0369608 0.6417149,8.6108622 0.7741173,8.2694368 0.9065196,7.9280115 1.0736303,7.6214262 1.2407515,7.3148397 1.45931,7.0191718 1.6778685,6.7235039 1.9300326,6.4611321 2.1821966,6.1987592 2.4134579,6.0137228 2.6447193,5.8286865 2.8759792,5.6776409 3.1072406,5.526594 3.4282004,5.3713977 3.7491603,5.2162016 3.9263009,5.1508695 4.1034416,5.0855373 4.2813348,4.7481598 4.4592292,4.4107823 4.6718,4.108422 4.8843733,3.8060618 5.198353,3.4805372 5.5123313,3.155014 5.7685095,2.9596425 6.0246877,2.7642722 6.329187,2.5851365 6.6336863,2.406002 6.9497657,2.2751596 7.2658453,2.1443184 7.4756394,2.0772947 7.6854348,2.01027 8.0825241,1.931086 8.4796139,1.851902 l 0.5870477,0.00291 0.5870469,0.00291 0.4447315,0.092455 0.444734,0.092455 0.302419,0.1105495 0.302417,0.1105495 0.329929,0.1646046 0.32993,0.1646033 0.239329,-0.2316919 0.239329,-0.2316919 0.160103,-0.1256767 0.160105,-0.1256767 0.160102,-0.1021909 0.160105,-0.1021899 0.142315,-0.082328 0.142314,-0.082328 0.231262,-0.1090091 0.231259,-0.1090091 0.26684,-0.098743 0.266839,-0.098743 0.320208,-0.073514 0.320209,-0.073527 0.355787,-0.041833 0.355785,-0.041834 0.426942,0.023827 0.426945,0.023828 0.355785,0.071179 0.355788,0.0711791 0.284627,0.09267 0.284629,0.09267 0.28514,0.1310267 0.28514,0.1310255 0.238179,0.1446969 0.238174,0.1446979 0.259413,0.1955332 0.259413,0.1955319 0.290757,0.296774 0.290758,0.2967753 0.151736,0.1941581 0.151734,0.1941594 0.135326,0.2149951 0.135327,0.2149952 0.154755,0.3202073 0.154758,0.3202085 0.09409,0.2677358 0.09409,0.267737 0.06948,0.3319087 0.06948,0.3319099 
0.01111,0.00808 0.01111,0.00808 0.444734,0.2173653 0.444734,0.2173665 0.309499,0.2161102 0.309497,0.2161101 0.309694,0.2930023 0.309694,0.2930037 0.18752,0.2348726 0.187524,0.2348727 0.166516,0.2574092 0.166519,0.2574108 0.15273,0.3260252 0.152734,0.3260262 0.08972,0.2668403 0.08971,0.2668391 0.08295,0.3913655 0.08295,0.3913652 -6.21e-4,0.6582049 -6.21e-4,0.658204 -0.06362,0.315725 -0.06362,0.315725 -0.09046,0.289112 -0.09046,0.289112 -0.122759,0.281358 -0.12276,0.281356 -0.146626,0.252323 -0.146629,0.252322 -0.190443,0.258668 -0.190448,0.258671 -0.254911,0.268356 -0.254911,0.268355 -0.286872,0.223127 -0.286874,0.223127 -0.320203,0.187693 -0.320209,0.187693 -0.04347,0.03519 -0.04347,0.03521 0.0564,0.12989 0.0564,0.129892 0.08728,0.213472 0.08728,0.213471 0.189755,0.729363 0.189753,0.729362 0.0652,0.302417 0.0652,0.302419 -0.0018,0.675994 -0.0018,0.675995 -0.0801,0.373573 -0.08009,0.373577 -0.09,0.266839 -0.09,0.26684 -0.190389,0.391364 -0.19039,0.391366 -0.223169,0.320207 -0.223167,0.320209 -0.303585,0.315294 -0.303584,0.315291 -0.284631,0.220665 -0.284629,0.220663 -0.220128,0.132359 -0.220127,0.132358 -0.242395,0.106698 -0.242394,0.106699 -0.08895,0.04734 -0.08895,0.04733 -0.249052,0.07247 -0.24905,0.07247 -0.322042,0.0574 -0.322044,0.0574 -0.282794,-0.003 -0.282795,-0.003 -0.07115,-0.0031 -0.07115,-0.0031 -0.177894,-0.0033 -0.177893,-0.0033 -0.124528,0.02555 -0.124528,0.02555 z m -4.470079,-5.349839 0.214838,-0.01739 0.206601,-0.06782 0.206602,-0.06782 0.244389,-0.117874 0.244393,-0.11786 0.274473,-0.206822 0.27447,-0.20682 0.229308,-0.257201 0.229306,-0.2572 0.219161,-0.28463 0.219159,-0.284629 0.188541,-0.284628 0.188543,-0.28463 0.214594,-0.373574 0.214593,-0.373577 0.133861,-0.312006 0.133865,-0.312007 0.02861,-0.01769 0.02861,-0.01769 0.197275,0.26212 0.197278,0.262119 0.163613,0.150814 0.163614,0.150814 0.201914,0.09276 0.201914,0.09276 0.302417,0.01421 0.302418,0.01421 0.213472,-0.08025 0.213471,-0.08025 0.200606,-0.204641 0.200606,-0.204642 0.09242,-0.278887 0.09241,-0.278888 0.05765,-0.302418 0.05764,-0.302416 L 18.41327,13.768114 18.39502,13.34117 18.31849,12.915185 18.24196,12.4892 18.15595,12.168033 18.06994,11.846867 17.928869,11.444534 17.787801,11.042201 17.621278,10.73296 17.454757,10.423723 17.337388,10.263619 17.220021,10.103516 17.095645,9.9837986 16.971268,9.8640816 16.990048,9.6813736 17.008828,9.4986654 16.947568,9.249616 16.886308,9.0005655 16.752419,8.7159355 16.618521,8.4313217 16.435707,8.2294676 16.252892,8.0276114 16.079629,7.9004245 15.906366,7.773238 l -0.20429,0.1230127 -0.204289,0.1230121 -0.26702,0.059413 -0.267022,0.059413 -0.205761,-0.021508 -0.205766,-0.021508 -0.23495,-0.08844 -0.234953,-0.08844 -0.118429,-0.090334 -0.118428,-0.090333 h -0.03944 -0.03944 L 13.711268,7.8540732 13.655958,7.9706205 13.497227,8.1520709 13.338499,8.3335203 13.168394,8.4419112 12.998289,8.550301 12.777045,8.624223 12.5558,8.698155 H 12.275611 11.995429 L 11.799973,8.6309015 11.604513,8.5636472 11.491311,8.5051061 11.37811,8.446565 11.138172,8.2254579 10.898231,8.0043497 l -0.09565,-0.084618 -0.09565,-0.084613 -0.218822,0.198024 -0.218822,0.1980231 -0.165392,0.078387 -0.1653925,0.078387 -0.177894,0.047948 -0.177892,0.047948 L 9.3635263,8.4842631 9.144328,8.4846889 8.9195029,8.4147138 8.6946778,8.3447386 8.5931214,8.4414036 8.491565,8.5380686 8.3707618,8.7019598 8.2499597,8.8658478 8.0802403,8.9290726 7.9105231,8.9922974 7.7952769,9.0780061 7.6800299,9.1637148 7.5706169,9.2778257 7.4612038,9.3919481 7.1059768,9.9205267 6.7507497,10.449105 l -0.2159851,0.449834 
-0.2159839,0.449834 -0.2216572,0.462522 -0.2216559,0.462523 -0.1459343,0.337996 -0.1459342,0.337998 -0.055483,0.220042 -0.055483,0.220041 -0.015885,0.206903 -0.015872,0.206901 0.034307,0.242939 0.034307,0.24294 0.096281,0.196632 0.096281,0.196634 0.143607,0.125222 0.1436071,0.125222 0.1873143,0.08737 0.1873141,0.08737 0.2752084,0.002 0.2752084,0.002 0.2312297,-0.09773 0.231231,-0.09772 0.1067615,-0.07603 0.1067614,-0.07603 0.3679062,-0.29377 0.3679065,-0.293771 0.026804,0.01656 0.026804,0.01656 0.023626,0.466819 0.023626,0.466815 0.088326,0.513195 0.088326,0.513193 0.08897,0.364413 0.08897,0.364411 0.1315362,0.302418 0.1315352,0.302418 0.1051964,0.160105 0.1051954,0.160103 0.1104741,0.11877 0.1104731,0.118769 0.2846284,0.205644 0.2846305,0.205642 0.144448,0.07312 0.144448,0.07312 0.214787,0.05566 0.214787,0.05566 0.245601,0.03075 0.245602,0.03075 0.204577,-0.0125 0.204578,-0.0125 z m 0.686342,-3.497495 -0.11281,-0.06077 -0.106155,-0.134033 -0.106155,-0.134031 -0.04406,-0.18371 -0.04406,-0.183707 0.02417,-0.553937 0.02417,-0.553936 0.03513,-0.426945 0.03513,-0.426942 0.07225,-0.373576 0.07225,-0.373575 0.05417,-0.211338 0.05417,-0.211339 0.0674,-0.132112 0.0674,-0.132112 0.132437,-0.10916 0.132437,-0.109161 0.187436,-0.04195 0.187438,-0.04195 0.170366,0.06469 0.170364,0.06469 0.114312,0.124073 0.114313,0.124086 0.04139,0.18495 0.04139,0.184951 -0.111218,0.459845 -0.111219,0.459844 -0.03383,0.26584 -0.03382,0.265841 -0.03986,0.818307 -0.03986,0.818309 -0.0378,0.15162 -0.03779,0.151621 -0.11089,0.110562 -0.110891,0.110561 -0.114489,0.04913 -0.114489,0.04913 -0.187932,-0.0016 -0.187929,-0.0016 z m -2.8087655,-0.358124 -0.146445,-0.06848 -0.088025,-0.119502 -0.088024,-0.119502 -0.038581,-0.106736 -0.038581,-0.106736 -0.02237,-0.134956 -0.02239,-0.134957 -0.031955,-0.46988 -0.031955,-0.469881 0.036203,-0.444733 0.036203,-0.444731 0.048862,-0.215257 0.048862,-0.215255 0.076082,-0.203349 0.076081,-0.203348 0.0936,-0.111244 0.0936,-0.111245 0.143787,-0.06531 0.1437865,-0.06532 h 0.142315 0.142314 l 0.142314,0.06588 0.142316,0.06588 0.093,0.102325 0.093,0.102325 0.04042,0.120942 0.04042,0.120942 v 0.152479 0.152477 l -0.03347,0.08804 -0.03347,0.08805 -0.05693,0.275653 -0.05693,0.275651 2.11e-4,0.430246 2.12e-4,0.430243 0.04294,0.392646 0.04295,0.392647 -0.09189,0.200702 -0.09189,0.200702 -0.148688,0.0984 -0.148687,0.0984 -0.20136,0.01212 -0.2013595,0.01212 z"
-
id="path4" />
-
</g>
-
</svg>
+
<svg
+
version="1.1"
+
id="svg1"
+
class="{{ . }}"
+
width="25"
+
height="25"
+
viewBox="0 0 25 25"
+
sodipodi:docname="tangled_dolly_face_only_black_on_trans.svg"
+
inkscape:export-filename="tangled_logotype_black_on_trans.svg"
+
inkscape:export-xdpi="96"
+
inkscape:export-ydpi="96"
+
inkscape:version="1.4 (e7c3feb100, 2024-10-09)"
+
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+
xmlns="http://www.w3.org/2000/svg"
+
xmlns:svg="http://www.w3.org/2000/svg"
+
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+
xmlns:cc="http://creativecommons.org/ns#">
+
<sodipodi:namedview
+
id="namedview1"
+
pagecolor="#ffffff"
+
bordercolor="#000000"
+
borderopacity="0.25"
+
inkscape:showpageshadow="2"
+
inkscape:pageopacity="0.0"
+
inkscape:pagecheckerboard="true"
+
inkscape:deskcolor="#d5d5d5"
+
inkscape:zoom="45.254834"
+
inkscape:cx="3.1377863"
+
inkscape:cy="8.9382717"
+
inkscape:window-width="3840"
+
inkscape:window-height="2160"
+
inkscape:window-x="0"
+
inkscape:window-y="0"
+
inkscape:window-maximized="0"
+
inkscape:current-layer="g1"
+
borderlayer="true">
+
<inkscape:page
+
x="0"
+
y="0"
+
width="25"
+
height="25"
+
id="page2"
+
margin="0"
+
bleed="0" />
+
</sodipodi:namedview>
+
<g
+
inkscape:groupmode="layer"
+
inkscape:label="Image"
+
id="g1"
+
transform="translate(-0.42924038,-0.87777209)">
+
<path
+
fill="currentColor"
+
style="stroke-width:0.111183;"
+
d="m 16.775491,24.987061 c -0.78517,-0.0064 -1.384202,-0.234614 -2.033994,-0.631295 -0.931792,-0.490188 -1.643475,-1.31368 -2.152014,-2.221647 C 11.781409,23.136647 10.701392,23.744942 9.4922931,24.0886 8.9774725,24.238111 8.0757679,24.389777 6.5811304,23.84827 4.4270703,23.124679 2.8580086,20.883331 3.0363279,18.599583 3.0037061,17.652919 3.3488675,16.723769 3.8381157,15.925061 2.5329485,15.224503 1.4686756,14.048584 1.0611184,12.606459 0.81344502,11.816973 0.82385989,10.966486 0.91519098,10.154906 1.2422711,8.2387903 2.6795811,6.5725716 4.5299585,5.9732484 5.2685364,4.290122 6.8802592,3.0349975 8.706276,2.7794663 c 1.2124148,-0.1688264 2.46744,0.084987 3.52811,0.7011837 1.545426,-1.7139736 4.237779,-2.2205077 6.293579,-1.1676231 1.568222,0.7488935 2.689625,2.3113526 2.961888,4.0151464 1.492195,0.5977882 2.749007,1.8168898 3.242225,3.3644951 0.329805,0.9581836 0.340709,2.0135956 0.127128,2.9974286 -0.381606,1.535184 -1.465322,2.842146 -2.868035,3.556463 0.0034,0.273204 0.901506,2.243045 0.751284,3.729647 -0.03281,1.858525 -1.211631,3.619894 -2.846433,4.475452 -0.953967,0.556812 -2.084452,0.546309 -3.120531,0.535398 z m -4.470079,-5.349839 c 1.322246,-0.147248 2.189053,-1.300106 2.862307,-2.338363 0.318287,-0.472954 0.561404,-1.002348 0.803,-1.505815 0.313265,0.287151 0.578698,0.828085 1.074141,0.956909 0.521892,0.162542 1.133743,0.03052 1.45325,-0.443554 0.611414,-1.140449 0.31004,-2.516537 -0.04602,-3.698347 C 18.232844,11.92927 17.945151,11.232927 17.397785,10.751793 17.514522,9.9283111 17.026575,9.0919791 16.332883,8.6609491 15.741721,9.1323278 14.842258,9.1294949 14.271975,8.6252369 13.178927,9.7400102 12.177239,9.7029996 11.209704,8.8195135 10.992255,8.6209543 10.577326,10.031484 9.1211947,9.2324497 8.2846288,9.9333947 7.6359672,10.607693 7.0611981,11.578553 6.5026891,12.62523 5.9177873,13.554793 5.867393,14.69141 c -0.024234,0.66432 0.4948601,1.360337 1.1982269,1.306329 0.702996,0.06277 1.1815208,-0.629091 1.7138087,-0.916491 0.079382,0.927141 0.1688108,1.923227 0.4821259,2.828358 0.3596254,1.171275 1.6262605,1.915695 2.8251855,1.745211 0.08481,-0.0066 0.218672,-0.01769 0.218672,-0.0176 z m 0.686342,-3.497495 c -0.643126,-0.394168 -0.33365,-1.249599 -0.359402,-1.870938 0.064,-0.749774 0.115321,-1.538054 0.452402,-2.221125 0.356724,-0.487008 1.226721,-0.299139 1.265134,0.325689 -0.02558,0.628509 -0.314101,1.25416 -0.279646,1.9057 -0.07482,0.544043 0.05418,1.155133 -0.186476,1.652391 -0.197455,0.275121 -0.599638,0.355105 -0.892012,0.208283 z m -2.808766,-0.358124 c -0.605767,-0.328664 -0.4133176,-1.155655 -0.5083256,-1.73063 0.078762,-0.66567 0.013203,-1.510085 0.5705316,-1.976886 0.545037,-0.380109 1.286917,0.270803 1.029164,0.868384 -0.274913,0.755214 -0.09475,1.580345 -0.08893,2.34609 -0.104009,0.451702 -0.587146,0.691508 -1.002445,0.493042 z"
+
id="path4"
+
sodipodi:nodetypes="sccccccccccccccccccsscccccccccsccccccccccccccccccccccc" />
+
</g>
+
<metadata
+
id="metadata1">
+
<rdf:RDF>
+
<cc:Work
+
rdf:about="">
+
<cc:license
+
rdf:resource="http://creativecommons.org/licenses/by/4.0/" />
+
</cc:Work>
+
<cc:License
+
rdf:about="http://creativecommons.org/licenses/by/4.0/">
+
<cc:permits
+
rdf:resource="http://creativecommons.org/ns#Reproduction" />
+
<cc:permits
+
rdf:resource="http://creativecommons.org/ns#Distribution" />
+
<cc:requires
+
rdf:resource="http://creativecommons.org/ns#Notice" />
+
<cc:requires
+
rdf:resource="http://creativecommons.org/ns#Attribution" />
+
<cc:permits
+
rdf:resource="http://creativecommons.org/ns#DerivativeWorks" />
+
</cc:License>
+
</rdf:RDF>
+
</metadata>
+
</svg>
{{ end }}
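The logo fragment above is parameterized by a single class string: the template dot is interpolated into the root svg element's class attribute (class="{{ . }}"), and the path is filled with currentColor, so the caller controls size and color through utility classes. A minimal call-site sketch; the define name is assumed from the file layout, since the {{ define }} line sits outside this hunk:

{{ template "fragments/dolly/logo" "size-6 text-black dark:text-white" }}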
+60 -22
appview/pages/templates/fragments/dolly/silhouette.html
···
<svg
version="1.1"
id="svg1"
-
width="32"
-
height="32"
+
width="25"
+
height="25"
viewBox="0 0 25 25"
-
sodipodi:docname="tangled_dolly_silhouette.png"
+
sodipodi:docname="tangled_dolly_face_only_black_on_trans.svg"
+
inkscape:export-filename="tangled_dolly_silhouette_black_on_trans.svg"
+
inkscape:export-xdpi="96"
+
inkscape:export-ydpi="96"
+
inkscape:version="1.4 (e7c3feb100, 2024-10-09)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
-
xmlns:svg="http://www.w3.org/2000/svg">
-
<style>
-
.dolly {
-
color: #000000;
-
}
+
xmlns:svg="http://www.w3.org/2000/svg"
+
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+
xmlns:cc="http://creativecommons.org/ns#">
+
<style>
+
.dolly {
+
color: #000000;
+
}
-
@media (prefers-color-scheme: dark) {
-
.dolly {
-
color: #ffffff;
-
}
-
}
-
</style>
-
<title>Dolly</title>
-
<defs
-
id="defs1" />
+
@media (prefers-color-scheme: dark) {
+
.dolly {
+
color: #ffffff;
+
}
+
}
+
</style>
<sodipodi:namedview
id="namedview1"
pagecolor="#ffffff"
···
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="true"
-
inkscape:deskcolor="#d1d1d1">
+
inkscape:deskcolor="#d5d5d5"
+
inkscape:zoom="64"
+
inkscape:cx="4.96875"
+
inkscape:cy="13.429688"
+
inkscape:window-width="3840"
+
inkscape:window-height="2160"
+
inkscape:window-x="0"
+
inkscape:window-y="0"
+
inkscape:window-maximized="0"
+
inkscape:current-layer="g1"
+
borderlayer="true">
<inkscape:page
x="0"
y="0"
···
<g
inkscape:groupmode="layer"
inkscape:label="Image"
-
id="g1">
+
id="g1"
+
transform="translate(-0.42924038,-0.87777209)">
<path
class="dolly"
fill="currentColor"
-
style="stroke-width:1.12248"
-
d="m 16.208435,23.914069 c -0.06147,-0.02273 -0.147027,-0.03034 -0.190158,-0.01691 -0.197279,0.06145 -1.31068,-0.230493 -1.388819,-0.364153 -0.01956,-0.03344 -0.163274,-0.134049 -0.319377,-0.223561 -0.550395,-0.315603 -1.010951,-0.696643 -1.428383,-1.181771 -0.264598,-0.307509 -0.597257,-0.785384 -0.597257,-0.857979 0,-0.0216 -0.02841,-0.06243 -0.06313,-0.0907 -0.04977,-0.04053 -0.160873,0.0436 -0.52488,0.397463 -0.479803,0.466432 -0.78924,0.689475 -1.355603,0.977118 -0.183693,0.0933 -0.323426,0.179989 -0.310516,0.192658 0.02801,0.02748 -0.7656391,0.270031 -1.209129,0.369517 -0.5378332,0.120647 -1.6341809,0.08626 -1.9721503,-0.06186 C 6.7977157,23.031391 6.56735,22.957551 6.3371134,22.889782 4.9717169,22.487902 3.7511914,21.481518 3.1172396,20.234838 2.6890391,19.392772 2.5582276,18.827446 2.5610489,17.831154 2.5639589,16.802192 2.7366641,16.125844 3.2142117,15.273187 3.3040457,15.112788 3.3713143,14.976533 3.3636956,14.9704 3.3560756,14.9643 3.2459634,14.90305 3.1189994,14.834381 1.7582586,14.098312 0.77760984,12.777439 0.44909837,11.23818 0.33531456,10.705039 0.33670119,9.7067968 0.45195381,9.1778795 0.72259241,7.9359287 1.3827188,6.8888436 2.4297498,6.0407205 2.6856126,5.8334648 3.2975489,5.4910878 3.6885849,5.3364049 L 4.0584319,5.190106 4.2333984,4.860432 C 4.8393906,3.7186139 5.8908314,2.7968028 7.1056396,2.3423025 7.7690673,2.0940921 8.2290216,2.0150935 9.01853,2.0137575 c 0.9625627,-0.00163 1.629181,0.1532762 2.485864,0.5776514 l 0.271744,0.1346134 0.42911,-0.3607688 c 1.082666,-0.9102346 2.185531,-1.3136811 3.578383,-1.3090327 0.916696,0.00306 1.573918,0.1517893 2.356121,0.5331927 1.465948,0.7148 2.54506,2.0625628 2.865177,3.57848 l 0.07653,0.362429 0.515095,0.2556611 c 1.022872,0.5076874 1.756122,1.1690944 2.288361,2.0641468 0.401896,0.6758594 0.537303,1.0442682 0.675505,1.8378683 0.288575,1.6570823 -0.266229,3.3548023 -1.490464,4.5608743 -0.371074,0.36557 -0.840205,0.718265 -1.203442,0.904754 -0.144112,0.07398 -0.271303,0.15826 -0.282647,0.187269 -0.01134,0.02901 0.02121,0.142764 0.07234,0.25279 0.184248,0.396467 0.451371,1.331823 0.619371,2.168779 0.463493,2.30908 -0.754646,4.693707 -2.92278,5.721632 -0.479538,0.227352 -0.717629,0.309322 -1.144194,0.39393 -0.321869,0.06383 -1.850573,0.09139 -2.000174,0.03604 z M 12.25443,18.636956 c 0.739923,-0.24652 1.382521,-0.718922 1.874623,-1.37812 0.0752,-0.100718 0.213883,-0.275851 0.308198,-0.389167 0.09432,-0.113318 0.210136,-0.271056 0.257381,-0.350531 0.416347,-0.700389 0.680936,-1.176102 0.766454,-1.378041 0.05594,-0.132087 0.114653,-0.239607 0.130477,-0.238929 0.01583,6.79e-4 0.08126,0.08531 0.145412,0.188069 0.178029,0.285173 0.614305,0.658998 0.868158,0.743878 0.259802,0.08686 0.656158,0.09598 0.911369,0.02095 0.213812,-0.06285 0.507296,-0.298016 0.645179,-0.516947 0.155165,-0.246374 0.327989,-0.989595 0.327989,-1.410501 0,-1.26718 -0.610975,-3.143405 -1.237774,-3.801045 -0.198483,-0.2082486 -0.208557,-0.2319396 -0.208557,-0.4904655 0,-0.2517771 -0.08774,-0.5704927 -0.258476,-0.938956 C 16.694963,8.50313 16.375697,8.1377479 16.135846,7.9543702 L 15.932296,7.7987471 15.683004,7.9356529 C 15.131767,8.2383821 14.435638,8.1945733 13.943459,7.8261812 L 13.782862,7.7059758 13.686773,7.8908012 C 13.338849,8.5600578 12.487087,8.8811064 11.743178,8.6233891 11.487199,8.5347109 11.358897,8.4505994 11.063189,8.1776138 L 10.69871,7.8411436 10.453484,8.0579255 C 10.318608,8.1771557 10.113778,8.3156283 9.9983037,8.3656417 9.7041488,8.4930449 9.1808299,8.5227884 8.8979004,8.4281886 8.7754792,8.3872574 8.6687415,8.3537661 8.6607053,8.3537661 c 
-0.03426,0 -0.3092864,0.3066098 -0.3791974,0.42275 -0.041935,0.069664 -0.1040482,0.1266636 -0.1380294,0.1266636 -0.1316419,0 -0.4197402,0.1843928 -0.6257041,0.4004735 -0.1923125,0.2017571 -0.6853701,0.9036038 -0.8926582,1.2706578 -0.042662,0.07554 -0.1803555,0.353687 -0.3059848,0.618091 -0.1256293,0.264406 -0.3270073,0.686768 -0.4475067,0.938581 -0.1204992,0.251816 -0.2469926,0.519654 -0.2810961,0.595199 -0.2592829,0.574347 -0.285919,1.391094 -0.057822,1.77304 0.1690683,0.283105 0.4224039,0.480895 0.7285507,0.568809 0.487122,0.139885 0.9109638,-0.004 1.6013422,-0.543768 l 0.4560939,-0.356568 0.0036,0.172041 c 0.01635,0.781837 0.1831084,1.813183 0.4016641,2.484154 0.1160449,0.356262 0.3781448,0.83968 0.5614081,1.035462 0.2171883,0.232025 0.7140951,0.577268 1.0100284,0.701749 0.121485,0.0511 0.351032,0.110795 0.510105,0.132647 0.396966,0.05452 1.2105,0.02265 1.448934,-0.05679 z"
-
id="path1" />
+
style="stroke-width:0.111183"
+
d="m 16.775491,24.987061 c -0.78517,-0.0064 -1.384202,-0.234614 -2.033994,-0.631295 -0.931792,-0.490188 -1.643475,-1.31368 -2.152014,-2.221647 C 11.781409,23.136647 10.701392,23.744942 9.4922931,24.0886 8.9774725,24.238111 8.0757679,24.389777 6.5811304,23.84827 4.4270703,23.124679 2.8580086,20.883331 3.0363279,18.599583 3.0037061,17.652919 3.3488675,16.723769 3.8381157,15.925061 2.5329485,15.224503 1.4686756,14.048584 1.0611184,12.606459 0.81344502,11.816973 0.82385989,10.966486 0.91519098,10.154906 1.2422711,8.2387903 2.6795811,6.5725716 4.5299585,5.9732484 5.2685364,4.290122 6.8802592,3.0349975 8.706276,2.7794663 c 1.2124148,-0.1688264 2.46744,0.084987 3.52811,0.7011837 1.545426,-1.7139736 4.237779,-2.2205077 6.293579,-1.1676231 1.568222,0.7488935 2.689625,2.3113526 2.961888,4.0151464 1.492195,0.5977882 2.749007,1.8168898 3.242225,3.3644951 0.329805,0.9581836 0.340709,2.0135956 0.127128,2.9974286 -0.381606,1.535184 -1.465322,2.842146 -2.868035,3.556463 0.0034,0.273204 0.901506,2.243045 0.751284,3.729647 -0.03281,1.858525 -1.211631,3.619894 -2.846433,4.475452 -0.953967,0.556812 -2.084452,0.546309 -3.120531,0.535398 z m -4.470079,-5.349839 c 1.322246,-0.147248 2.189053,-1.300106 2.862307,-2.338363 0.318287,-0.472954 0.561404,-1.002348 0.803,-1.505815 0.313265,0.287151 0.578698,0.828085 1.074141,0.956909 0.521892,0.162542 1.133743,0.03052 1.45325,-0.443554 0.611414,-1.140449 0.31004,-2.516537 -0.04602,-3.698347 C 18.232844,11.92927 17.945151,11.232927 17.397785,10.751793 17.514522,9.9283111 17.026575,9.0919791 16.332883,8.6609491 15.741721,9.1323278 14.842258,9.1294949 14.271975,8.6252369 13.178927,9.7400102 12.177239,9.7029996 11.209704,8.8195135 10.992255,8.6209543 10.577326,10.031484 9.1211947,9.2324497 8.2846288,9.9333947 7.6359672,10.607693 7.0611981,11.578553 6.5026891,12.62523 5.9177873,13.554793 5.867393,14.69141 c -0.024234,0.66432 0.4948601,1.360337 1.1982269,1.306329 0.702996,0.06277 1.1815208,-0.629091 1.7138087,-0.916491 0.079382,0.927141 0.1688108,1.923227 0.4821259,2.828358 0.3596254,1.171275 1.6262605,1.915695 2.8251855,1.745211 0.08481,-0.0066 0.218672,-0.01769 0.218672,-0.0176 z"
+
id="path7"
+
sodipodi:nodetypes="sccccccccccccccccccsscccccccccscccccccsc" />
</g>
+
<metadata
+
id="metadata1">
+
<rdf:RDF>
+
<cc:Work
+
rdf:about="">
+
<cc:license
+
rdf:resource="http://creativecommons.org/licenses/by/4.0/" />
+
</cc:Work>
+
<cc:License
+
rdf:about="http://creativecommons.org/licenses/by/4.0/">
+
<cc:permits
+
rdf:resource="http://creativecommons.org/ns#Reproduction" />
+
<cc:permits
+
rdf:resource="http://creativecommons.org/ns#Distribution" />
+
<cc:requires
+
rdf:resource="http://creativecommons.org/ns#Notice" />
+
<cc:requires
+
rdf:resource="http://creativecommons.org/ns#Attribution" />
+
<cc:permits
+
rdf:resource="http://creativecommons.org/ns#DerivativeWorks" />
+
</cc:License>
+
</rdf:RDF>
+
</metadata>
</svg>
{{ end }}
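Unlike the logo variant, the silhouette fragment keeps its embedded <style> block: the path uses fill="currentColor" together with a .dolly rule that flips between #000000 and #ffffff under a prefers-color-scheme media query, so the mark adapts to dark mode even where no utility classes are applied. A sketch of rendering it from a page template, with the define name assumed to mirror the file path:

{{ template "fragments/dolly/silhouette" }}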
-44
appview/pages/templates/fragments/dolly/silhouette.svg
···
-
<svg
-
version="1.1"
-
id="svg1"
-
width="32"
-
height="32"
-
viewBox="0 0 25 25"
-
sodipodi:docname="tangled_dolly_silhouette.png"
-
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
-
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
-
xmlns="http://www.w3.org/2000/svg"
-
xmlns:svg="http://www.w3.org/2000/svg">
-
<title>Dolly</title>
-
<defs
-
id="defs1" />
-
<sodipodi:namedview
-
id="namedview1"
-
pagecolor="#ffffff"
-
bordercolor="#000000"
-
borderopacity="0.25"
-
inkscape:showpageshadow="2"
-
inkscape:pageopacity="0.0"
-
inkscape:pagecheckerboard="true"
-
inkscape:deskcolor="#d1d1d1">
-
<inkscape:page
-
x="0"
-
y="0"
-
width="25"
-
height="25"
-
id="page2"
-
margin="0"
-
bleed="0" />
-
</sodipodi:namedview>
-
<g
-
inkscape:groupmode="layer"
-
inkscape:label="Image"
-
id="g1">
-
<path
-
class="dolly"
-
fill="currentColor"
-
style="stroke-width:1.12248"
-
d="m 16.208435,23.914069 c -0.06147,-0.02273 -0.147027,-0.03034 -0.190158,-0.01691 -0.197279,0.06145 -1.31068,-0.230493 -1.388819,-0.364153 -0.01956,-0.03344 -0.163274,-0.134049 -0.319377,-0.223561 -0.550395,-0.315603 -1.010951,-0.696643 -1.428383,-1.181771 -0.264598,-0.307509 -0.597257,-0.785384 -0.597257,-0.857979 0,-0.0216 -0.02841,-0.06243 -0.06313,-0.0907 -0.04977,-0.04053 -0.160873,0.0436 -0.52488,0.397463 -0.479803,0.466432 -0.78924,0.689475 -1.355603,0.977118 -0.183693,0.0933 -0.323426,0.179989 -0.310516,0.192658 0.02801,0.02748 -0.7656391,0.270031 -1.209129,0.369517 -0.5378332,0.120647 -1.6341809,0.08626 -1.9721503,-0.06186 C 6.7977157,23.031391 6.56735,22.957551 6.3371134,22.889782 4.9717169,22.487902 3.7511914,21.481518 3.1172396,20.234838 2.6890391,19.392772 2.5582276,18.827446 2.5610489,17.831154 2.5639589,16.802192 2.7366641,16.125844 3.2142117,15.273187 3.3040457,15.112788 3.3713143,14.976533 3.3636956,14.9704 3.3560756,14.9643 3.2459634,14.90305 3.1189994,14.834381 1.7582586,14.098312 0.77760984,12.777439 0.44909837,11.23818 0.33531456,10.705039 0.33670119,9.7067968 0.45195381,9.1778795 0.72259241,7.9359287 1.3827188,6.8888436 2.4297498,6.0407205 2.6856126,5.8334648 3.2975489,5.4910878 3.6885849,5.3364049 L 4.0584319,5.190106 4.2333984,4.860432 C 4.8393906,3.7186139 5.8908314,2.7968028 7.1056396,2.3423025 7.7690673,2.0940921 8.2290216,2.0150935 9.01853,2.0137575 c 0.9625627,-0.00163 1.629181,0.1532762 2.485864,0.5776514 l 0.271744,0.1346134 0.42911,-0.3607688 c 1.082666,-0.9102346 2.185531,-1.3136811 3.578383,-1.3090327 0.916696,0.00306 1.573918,0.1517893 2.356121,0.5331927 1.465948,0.7148 2.54506,2.0625628 2.865177,3.57848 l 0.07653,0.362429 0.515095,0.2556611 c 1.022872,0.5076874 1.756122,1.1690944 2.288361,2.0641468 0.401896,0.6758594 0.537303,1.0442682 0.675505,1.8378683 0.288575,1.6570823 -0.266229,3.3548023 -1.490464,4.5608743 -0.371074,0.36557 -0.840205,0.718265 -1.203442,0.904754 -0.144112,0.07398 -0.271303,0.15826 -0.282647,0.187269 -0.01134,0.02901 0.02121,0.142764 0.07234,0.25279 0.184248,0.396467 0.451371,1.331823 0.619371,2.168779 0.463493,2.30908 -0.754646,4.693707 -2.92278,5.721632 -0.479538,0.227352 -0.717629,0.309322 -1.144194,0.39393 -0.321869,0.06383 -1.850573,0.09139 -2.000174,0.03604 z M 12.25443,18.636956 c 0.739923,-0.24652 1.382521,-0.718922 1.874623,-1.37812 0.0752,-0.100718 0.213883,-0.275851 0.308198,-0.389167 0.09432,-0.113318 0.210136,-0.271056 0.257381,-0.350531 0.416347,-0.700389 0.680936,-1.176102 0.766454,-1.378041 0.05594,-0.132087 0.114653,-0.239607 0.130477,-0.238929 0.01583,6.79e-4 0.08126,0.08531 0.145412,0.188069 0.178029,0.285173 0.614305,0.658998 0.868158,0.743878 0.259802,0.08686 0.656158,0.09598 0.911369,0.02095 0.213812,-0.06285 0.507296,-0.298016 0.645179,-0.516947 0.155165,-0.246374 0.327989,-0.989595 0.327989,-1.410501 0,-1.26718 -0.610975,-3.143405 -1.237774,-3.801045 -0.198483,-0.2082486 -0.208557,-0.2319396 -0.208557,-0.4904655 0,-0.2517771 -0.08774,-0.5704927 -0.258476,-0.938956 C 16.694963,8.50313 16.375697,8.1377479 16.135846,7.9543702 L 15.932296,7.7987471 15.683004,7.9356529 C 15.131767,8.2383821 14.435638,8.1945733 13.943459,7.8261812 L 13.782862,7.7059758 13.686773,7.8908012 C 13.338849,8.5600578 12.487087,8.8811064 11.743178,8.6233891 11.487199,8.5347109 11.358897,8.4505994 11.063189,8.1776138 L 10.69871,7.8411436 10.453484,8.0579255 C 10.318608,8.1771557 10.113778,8.3156283 9.9983037,8.3656417 9.7041488,8.4930449 9.1808299,8.5227884 8.8979004,8.4281886 8.7754792,8.3872574 8.6687415,8.3537661 8.6607053,8.3537661 c 
-0.03426,0 -0.3092864,0.3066098 -0.3791974,0.42275 -0.041935,0.069664 -0.1040482,0.1266636 -0.1380294,0.1266636 -0.1316419,0 -0.4197402,0.1843928 -0.6257041,0.4004735 -0.1923125,0.2017571 -0.6853701,0.9036038 -0.8926582,1.2706578 -0.042662,0.07554 -0.1803555,0.353687 -0.3059848,0.618091 -0.1256293,0.264406 -0.3270073,0.686768 -0.4475067,0.938581 -0.1204992,0.251816 -0.2469926,0.519654 -0.2810961,0.595199 -0.2592829,0.574347 -0.285919,1.391094 -0.057822,1.77304 0.1690683,0.283105 0.4224039,0.480895 0.7285507,0.568809 0.487122,0.139885 0.9109638,-0.004 1.6013422,-0.543768 l 0.4560939,-0.356568 0.0036,0.172041 c 0.01635,0.781837 0.1831084,1.813183 0.4016641,2.484154 0.1160449,0.356262 0.3781448,0.83968 0.5614081,1.035462 0.2171883,0.232025 0.7140951,0.577268 1.0100284,0.701749 0.121485,0.0511 0.351032,0.110795 0.510105,0.132647 0.396966,0.05452 1.2105,0.02265 1.448934,-0.05679 z"
-
id="path1" />
-
</g>
-
</svg>
+28
appview/pages/templates/fragments/starBtn.html
···
+
{{ define "fragments/starBtn" }}
+
<button
+
id="starBtn"
+
class="btn disabled:opacity-50 disabled:cursor-not-allowed flex gap-2 items-center group"
+
data-star-subject-at="{{ .SubjectAt }}"
+
{{ if .IsStarred }}
+
hx-delete="/star?subject={{ .SubjectAt }}&countHint={{ .StarCount }}"
+
{{ else }}
+
hx-post="/star?subject={{ .SubjectAt }}&countHint={{ .StarCount }}"
+
{{ end }}
+
+
hx-trigger="click"
+
hx-target="this"
+
hx-swap="outerHTML"
+
hx-swap-oob='outerHTML:#starBtn[data-star-subject-at="{{ .SubjectAt }}"]'
+
hx-disabled-elt="#starBtn"
+
>
+
{{ if .IsStarred }}
+
{{ i "star" "w-4 h-4 fill-current" }}
+
{{ else }}
+
{{ i "star" "w-4 h-4" }}
+
{{ end }}
+
<span class="text-sm">
+
{{ .StarCount }}
+
</span>
+
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
+
</button>
+
{{ end }}
+33
appview/pages/templates/fragments/tabSelector.html
···
+
{{ define "fragments/tabSelector" }}
+
{{ $name := .Name }}
+
{{ $all := .Values }}
+
{{ $active := .Active }}
+
{{ $include := .Include }}
+
<div class="flex justify-between divide-x divide-gray-200 dark:divide-gray-700 rounded border border-gray-200 dark:border-gray-700 overflow-hidden">
+
{{ $activeTab := "bg-white dark:bg-gray-700 shadow-sm" }}
+
{{ $inactiveTab := "bg-gray-100 dark:bg-gray-800 shadow-inner" }}
+
{{ range $index, $value := $all }}
+
{{ $isActive := eq $value.Key $active }}
+
<a href="?{{ $name }}={{ $value.Key }}"
+
{{ if $include }}
+
hx-get="?{{ $name }}={{ $value.Key }}"
+
hx-include="{{ $include }}"
+
hx-push-url="true"
+
hx-target="body"
+
hx-on:htmx:config-request="if(!event.detail.parameters.q) delete event.detail.parameters.q"
+
{{ end }}
+
class="p-2 whitespace-nowrap flex justify-center items-center gap-2 text-sm w-full block hover:no-underline text-center {{ if $isActive }} {{$activeTab }} {{ else }} {{ $inactiveTab }} {{ end }}">
+
{{ if $value.Icon }}
+
{{ i $value.Icon "size-4" }}
+
{{ end }}
+
+
{{ with $value.Meta }}
+
{{ . }}
+
{{ end }}
+
+
{{ $value.Value }}
+
</a>
+
{{ end }}
+
</div>
+
{{ end }}
+
+22
appview/pages/templates/fragments/tinyAvatarList.html
···
+
{{ define "fragments/tinyAvatarList" }}
+
{{ $all := .all }}
+
{{ $classes := .classes }}
+
{{ $ps := take $all 5 }}
+
<div class="inline-flex items-center -space-x-3">
+
{{ $c := "z-50 z-40 z-30 z-20 z-10" }}
+
{{ range $i, $p := $ps }}
+
<img
+
src="{{ tinyAvatar . }}"
+
alt=""
+
class="rounded-full size-8 mr-1 border-2 border-gray-100 dark:border-gray-900 z-{{sub 5 $i}}0 {{ $classes }}"
+
/>
+
{{ end }}
+
+
{{ if gt (len $all) 5 }}
+
<span class="pl-4 text-gray-500 dark:text-gray-400 text-sm">
+
+{{ sub (len $all) 5 }}
+
</span>
+
{{ end }}
+
</div>
+
{{ end }}
+
+36
appview/pages/templates/fragments/workflow-timers.html
···
+
{{ define "fragments/workflow-timers" }}
+
<script>
+
function formatElapsed(seconds) {
+
if (seconds < 1) return '0s';
+
if (seconds < 60) return `${seconds}s`;
+
const minutes = Math.floor(seconds / 60);
+
const secs = seconds % 60;
+
if (seconds < 3600) return `${minutes}m ${secs}s`;
+
const hours = Math.floor(seconds / 3600);
+
const mins = Math.floor((seconds % 3600) / 60);
+
return `${hours}h ${mins}m`;
+
}
+
+
function updateTimers() {
+
const now = Math.floor(Date.now() / 1000);
+
+
document.querySelectorAll('[data-timer]').forEach(el => {
+
const startTime = parseInt(el.dataset.start);
+
const endTime = el.dataset.end ? parseInt(el.dataset.end) : null;
+
+
if (endTime) {
+
// Step is complete, show final time
+
const elapsed = endTime - startTime;
+
el.textContent = formatElapsed(elapsed);
+
} else {
+
// Step is running, update live
+
const elapsed = now - startTime;
+
el.textContent = formatElapsed(elapsed);
+
}
+
});
+
}
+
+
setInterval(updateTimers, 1000);
+
updateTimers();
+
</script>
+
{{ end }}
+23 -7
appview/pages/templates/knots/dashboard.html
···
-
{{ define "title" }}{{ .Registration.Domain }} &middot; knots{{ end }}
+
{{ define "title" }}{{ .Registration.Domain }} &middot; {{ .Tab }} settings{{ end }}
{{ define "content" }}
-
<div class="px-6 py-4">
+
<div class="p-6">
+
<p class="text-xl font-bold dark:text-white">Settings</p>
+
</div>
+
<div class="bg-white dark:bg-gray-800 p-6 rounded relative w-full mx-auto drop-shadow-sm dark:text-white">
+
<section class="w-full grid grid-cols-1 md:grid-cols-4 gap-6">
+
<div class="col-span-1">
+
{{ template "user/settings/fragments/sidebar" . }}
+
</div>
+
<div class="col-span-1 md:col-span-3 flex flex-col gap-6">
+
{{ template "knotDash" . }}
+
</div>
+
</section>
+
</div>
+
{{ end }}
+
+
{{ define "knotDash" }}
+
<div>
<div class="flex justify-between items-center">
-
<h1 class="text-xl font-bold dark:text-white">{{ .Registration.Domain }}</h1>
+
<h2 class="text-sm pb-2 uppercase font-bold">{{ .Tab }} &middot; {{ .Registration.Domain }}</h2>
<div id="right-side" class="flex gap-2">
{{ $style := "px-2 py-1 rounded flex items-center flex-shrink-0 gap-2" }}
{{ $isOwner := and .LoggedInUser (eq .LoggedInUser.Did .Registration.ByDid) }}
···
</div>
{{ if .Members }}
-
<section class="bg-white dark:bg-gray-800 p-6 rounded relative w-full mx-auto drop-shadow-sm dark:text-white">
+
<section class="bg-white dark:bg-gray-800 rounded relative w-full mx-auto drop-shadow-sm dark:text-white">
<div class="flex flex-col gap-2">
{{ block "member" . }} {{ end }}
</div>
···
<button
class="btn text-red-500 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300 gap-2 group"
title="Delete knot"
-
hx-delete="/knots/{{ .Domain }}"
+
hx-delete="/settings/knots/{{ .Domain }}"
hx-swap="outerHTML"
hx-confirm="Are you sure you want to delete the knot '{{ .Domain }}'?"
hx-headers='{"shouldRedirect": "true"}'
···
<button
class="btn gap-2 group"
title="Retry knot verification"
-
hx-post="/knots/{{ .Domain }}/retry"
+
hx-post="/settings/knots/{{ .Domain }}/retry"
hx-swap="none"
hx-headers='{"shouldRefresh": "true"}'
>
···
<button
class="btn text-red-500 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300 gap-2 group"
title="Remove member"
-
hx-post="/knots/{{ $root.Registration.Domain }}/remove"
+
hx-post="/settings/knots/{{ $root.Registration.Domain }}/remove"
hx-swap="none"
hx-vals='{"member": "{{$member}}" }'
hx-confirm="Are you sure you want to remove {{ $memberHandle }} from this knot?"
+18 -10
appview/pages/templates/knots/fragments/addMemberModal.html
···
<div
id="add-member-{{ .Id }}"
popover
-
class="bg-white w-full md:w-96 dark:bg-gray-800 p-4 rounded border border-gray-200 dark:border-gray-700 drop-shadow dark:text-white backdrop:bg-gray-400/50 dark:backdrop:bg-gray-800/50">
+
class="
+
bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 dark:text-white backdrop:bg-gray-400/50 dark:backdrop:bg-gray-800/50
+
w-full md:w-96 p-4 rounded drop-shadow overflow-visible">
{{ block "addKnotMemberPopover" . }} {{ end }}
</div>
{{ end }}
{{ define "addKnotMemberPopover" }}
<form
-
hx-post="/knots/{{ .Domain }}/add"
+
hx-post="/settings/knots/{{ .Domain }}/add"
hx-indicator="#spinner"
hx-swap="none"
class="flex flex-col gap-2"
···
ADD MEMBER
</label>
<p class="text-sm text-gray-500 dark:text-gray-400">Members can create repositories and run workflows on this knot.</p>
-
<input
-
type="text"
-
id="member-did-{{ .Id }}"
-
name="member"
-
required
-
placeholder="@foo.bsky.social"
-
/>
+
<actor-typeahead>
+
<input
+
autocapitalize="none"
+
autocorrect="off"
+
autocomplete="off"
+
type="text"
+
id="member-did-{{ .Id }}"
+
name="member"
+
required
+
placeholder="user.tngl.sh"
+
class="w-full"
+
/>
+
</actor-typeahead>
<div class="flex gap-2 pt-2">
<button
type="button"
···
</div>
<div id="add-member-error-{{ .Id }}" class="text-red-500 dark:text-red-400"></div>
</form>
-
{{ end }}
+
{{ end }}
+3 -3
appview/pages/templates/knots/fragments/knotListing.html
···
{{ define "knotLeftSide" }}
{{ if .Registered }}
-
<a href="/knots/{{ .Domain }}" class="hover:no-underline flex items-center gap-2 min-w-0 max-w-[60%]">
+
<a href="/settings/knots/{{ .Domain }}" class="hover:no-underline flex items-center gap-2 min-w-0 max-w-[60%]">
{{ i "hard-drive" "w-4 h-4" }}
<span class="hover:underline">
{{ .Domain }}
···
<button
class="btn text-red-500 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300 gap-2 group"
title="Delete knot"
-
hx-delete="/knots/{{ .Domain }}"
+
hx-delete="/settings/knots/{{ .Domain }}"
hx-swap="outerHTML"
hx-target="#knot-{{.Id}}"
hx-confirm="Are you sure you want to delete the knot '{{ .Domain }}'?"
···
<button
class="btn gap-2 group"
title="Retry knot verification"
-
hx-post="/knots/{{ .Domain }}/retry"
+
hx-post="/settings/knots/{{ .Domain }}/retry"
hx-swap="none"
hx-target="#knot-{{.Id}}"
>
+42 -11
appview/pages/templates/knots/index.html
···
-
{{ define "title" }}knots{{ end }}
+
{{ define "title" }}{{ .Tab }} settings{{ end }}
{{ define "content" }}
-
<div class="px-6 py-4 flex items-center justify-between gap-4 align-bottom">
-
<h1 class="text-xl font-bold dark:text-white">Knots</h1>
-
<span class="flex items-center gap-1">
-
{{ i "book" "w-3 h-3" }}
-
<a href="https://tangled.org/@tangled.org/core/blob/master/docs/knot-hosting.md">docs</a>
-
</span>
-
</div>
+
<div class="p-6">
+
<p class="text-xl font-bold dark:text-white">Settings</p>
+
</div>
+
<div class="bg-white dark:bg-gray-800 p-6 rounded relative w-full mx-auto drop-shadow-sm dark:text-white">
+
<section class="w-full grid grid-cols-1 md:grid-cols-4 gap-6">
+
<div class="col-span-1">
+
{{ template "user/settings/fragments/sidebar" . }}
+
</div>
+
<div class="col-span-1 md:col-span-3 flex flex-col gap-6">
+
{{ template "knotsList" . }}
+
</div>
+
</section>
+
</div>
+
{{ end }}
+
+
{{ define "knotsList" }}
+
<div class="grid grid-cols-1 md:grid-cols-3 gap-4 items-center">
+
<div class="col-span-1 md:col-span-2">
+
<h2 class="text-sm pb-2 uppercase font-bold">Knots</h2>
+
{{ block "about" . }} {{ end }}
+
</div>
+
<div class="col-span-1 md:col-span-1 md:justify-self-end">
+
{{ template "docsButton" . }}
+
</div>
+
</div>
-
<section class="bg-white dark:bg-gray-800 p-6 rounded relative w-full mx-auto drop-shadow-sm dark:text-white">
+
<section>
<div class="flex flex-col gap-6">
-
{{ block "about" . }} {{ end }}
{{ block "list" . }} {{ end }}
{{ block "register" . }} {{ end }}
</div>
···
<h2 class="text-sm font-bold py-2 uppercase dark:text-gray-300">register a knot</h2>
<p class="mb-2 dark:text-gray-300">Enter the hostname of your knot to get started.</p>
<form
-
hx-post="/knots/register"
+
hx-post="/settings/knots/register"
class="max-w-2xl mb-2 space-y-4"
hx-indicator="#register-button"
hx-swap="none"
···
</section>
{{ end }}
+
+
{{ define "docsButton" }}
+
<a
+
class="btn flex items-center gap-2"
+
href="https://tangled.org/@tangled.org/core/blob/master/docs/spindle/hosting.md">
+
{{ i "book" "size-4" }}
+
docs
+
</a>
+
<div
+
id="add-email-modal"
+
popover
+
class="bg-white w-full md:w-96 dark:bg-gray-800 p-4 rounded border border-gray-200 dark:border-gray-700 drop-shadow dark:text-white backdrop:bg-gray-400/50 dark:backdrop:bg-gray-800/50">
+
</div>
+
{{ end }}
+1
appview/pages/templates/layouts/base.html
···
<script defer src="/static/htmx.min.js"></script>
<script defer src="/static/htmx-ext-ws.min.js"></script>
+
<script defer src="/static/actor-typeahead.js" type="module"></script>
<!-- preconnect to image cdn -->
<link rel="preconnect" href="https://avatar.tangled.sh" />
+5 -11
appview/pages/templates/layouts/fragments/topbar.html
···
{{ with .LoggedInUser }}
{{ block "newButton" . }} {{ end }}
{{ template "notifications/fragments/bell" }}
-
{{ block "dropDown" . }} {{ end }}
+
{{ block "profileDropdown" . }} {{ end }}
{{ else }}
<a href="/login">login</a>
<span class="text-gray-500 dark:text-gray-400">or</span>
···
<summary class="btn-create py-0 cursor-pointer list-none flex items-center gap-2">
{{ i "plus" "w-4 h-4" }} <span class="hidden md:inline">new</span>
</summary>
-
<div class="absolute flex flex-col right-0 mt-4 p-4 rounded w-48 bg-white dark:bg-gray-800 dark:text-white border border-gray-200 dark:border-gray-700">
+
<div class="absolute flex flex-col right-0 mt-3 p-4 rounded w-48 bg-white dark:bg-gray-800 dark:text-white border border-gray-200 dark:border-gray-700">
<a href="/repo/new" class="flex items-center gap-2">
{{ i "book-plus" "w-4 h-4" }}
new repository
···
</details>
{{ end }}
-
{{ define "dropDown" }}
+
{{ define "profileDropdown" }}
<details class="relative inline-block text-left nav-dropdown">
-
<summary
-
class="cursor-pointer list-none flex items-center gap-1"
-
>
+
<summary class="cursor-pointer list-none flex items-center gap-1">
{{ $user := .Did }}
<img
src="{{ tinyAvatar $user }}"
···
/>
<span class="hidden md:inline">{{ $user | resolve | truncateAt30 }}</span>
</summary>
-
<div
-
class="absolute flex flex-col right-0 mt-4 p-4 rounded w-48 bg-white dark:bg-gray-800 dark:text-white border border-gray-200 dark:border-gray-700"
-
>
+
<div class="absolute flex flex-col right-0 mt-4 p-4 rounded w-48 bg-white dark:bg-gray-800 dark:text-white border border-gray-200 dark:border-gray-700">
<a href="/{{ $user }}">profile</a>
<a href="/{{ $user }}?tab=repos">repositories</a>
<a href="/{{ $user }}?tab=strings">strings</a>
-
<a href="/knots">knots</a>
-
<a href="/spindles">spindles</a>
<a href="/settings">settings</a>
<a href="#"
hx-post="/logout"
+8 -7
appview/pages/templates/layouts/profilebase.html
···
-
{{ define "title" }}{{ or .Card.UserHandle .Card.UserDid }}{{ end }}
+
{{ define "title" }}{{ resolve .Card.UserDid }}{{ end }}
{{ define "extrameta" }}
-
{{ $avatarUrl := fullAvatar .Card.UserHandle }}
-
<meta property="og:title" content="{{ or .Card.UserHandle .Card.UserDid }}" />
+
{{ $handle := resolve .Card.UserDid }}
+
{{ $avatarUrl := fullAvatar $handle }}
+
<meta property="og:title" content="{{ $handle }}" />
<meta property="og:type" content="profile" />
-
<meta property="og:url" content="https://tangled.org/{{ or .Card.UserHandle .Card.UserDid }}?tab={{ .Active }}" />
-
<meta property="og:description" content="{{ or .Card.Profile.Description .Card.UserHandle .Card.UserDid }}" />
+
<meta property="og:url" content="https://tangled.org/{{ $handle }}?tab={{ .Active }}" />
+
<meta property="og:description" content="{{ or .Card.Profile.Description $handle }}" />
<meta property="og:image" content="{{ $avatarUrl }}" />
<meta property="og:image:width" content="512" />
<meta property="og:image:height" content="512" />
<meta name="twitter:card" content="summary" />
-
<meta name="twitter:title" content="{{ or .Card.UserHandle .Card.UserDid }}" />
-
<meta name="twitter:description" content="{{ or .Card.Profile.Description .Card.UserHandle .Card.UserDid }}" />
+
<meta name="twitter:title" content="{{ $handle }}" />
+
<meta name="twitter:description" content="{{ or .Card.Profile.Description $handle }}" />
<meta name="twitter:image" content="{{ $avatarUrl }}" />
{{ end }}
+57 -26
appview/pages/templates/layouts/repobase.html
···
{{ define "title" }}{{ .RepoInfo.FullName }}{{ end }}
{{ define "content" }}
-
<section id="repo-header" class="mb-4 py-2 px-6 dark:text-white">
-
{{ if .RepoInfo.Source }}
-
<p class="text-sm">
-
<div class="flex items-center">
-
{{ i "git-fork" "w-3 h-3 mr-1 shrink-0" }}
-
forked from
-
{{ $sourceOwner := didOrHandle .RepoInfo.Source.Did .RepoInfo.SourceHandle }}
-
<a class="ml-1 underline" href="/{{ $sourceOwner }}/{{ .RepoInfo.Source.Name }}">{{ $sourceOwner }}/{{ .RepoInfo.Source.Name }}</a>
-
</div>
-
</p>
-
{{ end }}
-
<div class="text-lg flex items-center justify-between">
-
<div>
-
<a href="/{{ .RepoInfo.OwnerWithAt }}">{{ .RepoInfo.OwnerWithAt }}</a>
-
<span class="select-none">/</span>
-
<a href="/{{ .RepoInfo.FullName }}" class="font-bold">{{ .RepoInfo.Name }}</a>
+
<section id="repo-header" class="mb-4 p-2 dark:text-white">
+
<div class="text-lg flex flex-col sm:flex-row items-start gap-4 justify-between">
+
<!-- left items -->
+
<div class="flex flex-col gap-2">
+
<!-- repo owner / repo name -->
+
<div class="flex items-center gap-2 flex-wrap">
+
{{ template "user/fragments/picHandleLink" .RepoInfo.OwnerDid }}
+
<span class="select-none">/</span>
+
<a href="/{{ .RepoInfo.FullName }}" class="font-bold">{{ .RepoInfo.Name }}</a>
+
</div>
+
+
{{ if .RepoInfo.Source }}
+
{{ $sourceOwner := resolve .RepoInfo.Source.Did }}
+
<div class="flex items-center gap-1 text-sm flex-wrap">
+
{{ i "git-fork" "w-3 h-3 shrink-0" }}
+
<span>forked from</span>
+
<a class="underline" href="/{{ $sourceOwner }}/{{ .RepoInfo.Source.Name }}">
+
{{ $sourceOwner }}/{{ .RepoInfo.Source.Name }}
+
</a>
+
</div>
+
{{ end }}
+
+
<span class="flex flex-wrap items-center gap-x-4 gap-y-2 text-sm text-gray-600 dark:text-gray-300">
+
{{ if .RepoInfo.Description }}
+
{{ .RepoInfo.Description | description }}
+
{{ else }}
+
<span class="italic">this repo has no description</span>
+
{{ end }}
+
+
{{ with .RepoInfo.Website }}
+
<span class="flex items-center gap-1">
+
<span class="flex-shrink-0">{{ i "globe" "size-4" }}</span>
+
<a href="{{ . }}">{{ . | trimUriScheme }}</a>
+
</span>
+
{{ end }}
+
+
{{ if .RepoInfo.Topics }}
+
<div class="flex items-center gap-1 text-sm text-gray-600 dark:text-gray-300">
+
{{ range .RepoInfo.Topics }}
+
<span class="bg-gray-200 dark:bg-gray-700 rounded py-1/2 px-1 text-sm">{{ . }}</span>
+
{{ end }}
+
</div>
+
{{ end }}
+
+
</span>
</div>
-
<div class="flex items-center gap-2 z-auto">
-
<a
-
class="btn text-sm no-underline hover:no-underline flex items-center gap-2 group"
-
href="/{{ .RepoInfo.FullName }}/feed.atom"
-
>
-
{{ i "rss" "size-4" }}
-
</a>
-
{{ template "repo/fragments/repoStar" .RepoInfo }}
+
<div class="w-full sm:w-fit grid grid-cols-3 gap-2 z-auto">
+
{{ template "fragments/starBtn"
+
(dict "SubjectAt" .RepoInfo.RepoAt
+
"IsStarred" .RepoInfo.IsStarred
+
"StarCount" .RepoInfo.Stats.StarCount) }}
<a
class="btn text-sm no-underline hover:no-underline flex items-center gap-2 group"
hx-boost="true"
···
fork
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
</a>
+
<a
+
class="btn text-sm no-underline hover:no-underline flex items-center gap-2 group"
+
href="/{{ .RepoInfo.FullName }}/feed.atom">
+
{{ i "rss" "size-4" }}
+
<span class="md:hidden">atom</span>
+
</a>
</div>
</div>
-
{{ template "repo/fragments/repoDescription" . }}
</section>
<section class="w-full flex flex-col" >
···
</div>
</nav>
{{ block "repoContentLayout" . }}
-
<section class="bg-white dark:bg-gray-800 p-6 rounded relative w-full mx-auto dark:text-white">
+
<section class="bg-white dark:bg-gray-800 px-6 py-4 rounded relative w-full mx-auto dark:text-white">
{{ block "repoContent" . }}{{ end }}
</section>
{{ block "repoAfter" . }}{{ end }}
+6
appview/pages/templates/notifications/fragments/item.html
···
commented on an issue
{{ else if eq .Type "issue_closed" }}
closed an issue
+
{{ else if eq .Type "issue_reopen" }}
+
reopened an issue
{{ else if eq .Type "pull_created" }}
created a pull request
{{ else if eq .Type "pull_commented" }}
···
merged a pull request
{{ else if eq .Type "pull_closed" }}
closed a pull request
+
{{ else if eq .Type "pull_reopen" }}
+
reopened a pull request
{{ else if eq .Type "followed" }}
followed you
+
{{ else if eq .Type "user_mentioned" }}
+
mentioned you
{{ else }}
{{ end }}
{{ end }}
+64 -39
appview/pages/templates/repo/blob.html
···
{{ end }}
{{ define "repoContent" }}
-
{{ $lines := split .Contents }}
-
{{ $tot_lines := len $lines }}
-
{{ $tot_chars := len (printf "%d" $tot_lines) }}
-
{{ $code_number_style := "text-gray-400 dark:text-gray-500 left-0 bg-white dark:bg-gray-800 text-right mr-6 select-none inline-block w-12" }}
{{ $linkstyle := "no-underline hover:underline" }}
<div class="pb-2 mb-3 text-base border-b border-gray-200 dark:border-gray-700">
<div class="flex flex-col md:flex-row md:justify-between gap-2">
···
</div>
<div id="file-info" class="text-gray-500 dark:text-gray-400 text-xs md:text-sm flex flex-wrap items-center gap-1 md:gap-0">
<span>at <a href="/{{ .RepoInfo.FullName }}/tree/{{ .Ref }}">{{ .Ref }}</a></span>
-
<span class="select-none px-1 md:px-2 [&:before]:content-['·']"></span>
-
<span>{{ .Lines }} lines</span>
-
<span class="select-none px-1 md:px-2 [&:before]:content-['·']"></span>
-
<span>{{ byteFmt .SizeHint }}</span>
-
<span class="select-none px-1 md:px-2 [&:before]:content-['·']"></span>
-
<a href="/{{ .RepoInfo.FullName }}/raw/{{ .Ref }}/{{ .Path }}">view raw</a>
-
{{ if .RenderToggle }}
-
<span class="select-none px-1 md:px-2 [&:before]:content-['·']"></span>
-
<a
-
href="/{{ .RepoInfo.FullName }}/blob/{{ .Ref }}/{{ .Path }}?code={{ .ShowRendered }}"
-
hx-boost="true"
-
>view {{ if .ShowRendered }}code{{ else }}rendered{{ end }}</a>
+
+
{{ if .BlobView.ShowingText }}
+
<span class="select-none px-1 md:px-2 [&:before]:content-['·']"></span>
+
<span>{{ .Lines }} lines</span>
+
{{ end }}
+
+
{{ if .BlobView.SizeHint }}
+
<span class="select-none px-1 md:px-2 [&:before]:content-['·']"></span>
+
<span>{{ byteFmt .BlobView.SizeHint }}</span>
+
{{ end }}
+
+
{{ if .BlobView.HasRawView }}
+
<span class="select-none px-1 md:px-2 [&:before]:content-['·']"></span>
+
<a href="/{{ .RepoInfo.FullName }}/raw/{{ .Ref }}/{{ .Path }}">view raw</a>
+
{{ end }}
+
+
{{ if .BlobView.ShowToggle }}
+
<span class="select-none px-1 md:px-2 [&:before]:content-['·']"></span>
+
<a href="/{{ .RepoInfo.FullName }}/blob/{{ .Ref }}/{{ .Path }}?code={{ .BlobView.ShowingRendered }}" hx-boost="true">
+
view {{ if .BlobView.ShowingRendered }}code{{ else }}rendered{{ end }}
+
</a>
{{ end }}
</div>
</div>
</div>
-
{{ if and .IsBinary .Unsupported }}
-
<p class="text-center text-gray-400 dark:text-gray-500">
-
Previews are not supported for this file type.
-
</p>
-
{{ else if .IsBinary }}
-
<div class="text-center">
-
{{ if .IsImage }}
-
<img src="{{ .ContentSrc }}"
-
alt="{{ .Path }}"
-
class="max-w-full h-auto mx-auto border border-gray-200 dark:border-gray-700 rounded" />
-
{{ else if .IsVideo }}
-
<video controls class="max-w-full h-auto mx-auto border border-gray-200 dark:border-gray-700 rounded">
-
<source src="{{ .ContentSrc }}">
-
Your browser does not support the video tag.
-
</video>
-
{{ end }}
-
</div>
-
{{ else }}
-
<div class="overflow-auto relative">
-
{{ if .ShowRendered }}
-
<div id="blob-contents" class="prose dark:prose-invert">{{ .RenderedContents }}</div>
+
{{ if .BlobView.IsUnsupported }}
+
<p class="text-center text-gray-400 dark:text-gray-500">
+
Previews are not supported for this file type.
+
</p>
+
{{ else if .BlobView.ContentType.IsSubmodule }}
+
<p class="text-center text-gray-400 dark:text-gray-500">
+
This directory is a git submodule of <a href="{{ .BlobView.ContentSrc }}">{{ .BlobView.ContentSrc }}</a>.
+
</p>
+
{{ else if .BlobView.ContentType.IsImage }}
+
<div class="text-center">
+
<img src="{{ .BlobView.ContentSrc }}"
+
alt="{{ .Path }}"
+
class="max-w-full h-auto mx-auto border border-gray-200 dark:border-gray-700 rounded" />
+
</div>
+
{{ else if .BlobView.ContentType.IsVideo }}
+
<div class="text-center">
+
<video controls class="max-w-full h-auto mx-auto border border-gray-200 dark:border-gray-700 rounded">
+
<source src="{{ .BlobView.ContentSrc }}">
+
Your browser does not support the video tag.
+
</video>
+
</div>
+
{{ else if .BlobView.ContentType.IsSvg }}
+
<div class="overflow-auto relative">
+
{{ if .BlobView.ShowingRendered }}
+
<div class="text-center">
+
<img src="{{ .BlobView.ContentSrc }}"
+
alt="{{ .Path }}"
+
class="max-w-full h-auto mx-auto border border-gray-200 dark:border-gray-700 rounded" />
+
</div>
+
{{ else }}
+
<div id="blob-contents" class="whitespace-pre peer-target:bg-yellow-200 dark:peer-target:bg-yellow-900">{{ code .BlobView.Contents .Path | escapeHtml }}</div>
+
{{ end }}
+
</div>
+
{{ else if .BlobView.ContentType.IsMarkup }}
+
<div class="overflow-auto relative">
+
{{ if .BlobView.ShowingRendered }}
+
<div id="blob-contents" class="prose dark:prose-invert">{{ .BlobView.Contents | readme }}</div>
{{ else }}
-
<div id="blob-contents" class="whitespace-pre peer-target:bg-yellow-200 dark:peer-target:bg-yellow-900">{{ $.Contents | escapeHtml }}</div>
+
<div id="blob-contents" class="whitespace-pre peer-target:bg-yellow-200 dark:peer-target:bg-yellow-900">{{ code .BlobView.Contents .Path | escapeHtml }}</div>
{{ end }}
-
</div>
+
</div>
+
{{ else if .BlobView.ContentType.IsCode }}
+
<div class="overflow-auto relative">
+
<div id="blob-contents" class="whitespace-pre peer-target:bg-yellow-200 dark:peer-target:bg-yellow-900">{{ code .BlobView.Contents .Path | escapeHtml }}</div>
+
</div>
{{ end }}
{{ template "fragments/multiline-select" }}
{{ end }}
+41 -16
appview/pages/templates/repo/commit.html
···
</div>
</div>
-
<div class="flex items-center space-x-2">
-
<p class="text-sm text-gray-500 dark:text-gray-300">
-
{{ $didOrHandle := index $.EmailToDidOrHandle $commit.Author.Email }}
+
<div class="flex flex-wrap items-center space-x-2">
+
<p class="flex flex-wrap items-center gap-1 text-sm text-gray-500 dark:text-gray-300">
+
{{ template "attribution" . }}
-
{{ if $didOrHandle }}
-
<a href="/{{ $didOrHandle }}" class="no-underline hover:underline text-gray-500 dark:text-gray-300">{{ $didOrHandle }}</a>
-
{{ else }}
-
<a href="mailto:{{ $commit.Author.Email }}" class="no-underline hover:underline text-gray-500 dark:text-gray-300">{{ $commit.Author.Name }}</a>
-
{{ end }}
<span class="px-1 select-none before:content-['\00B7']"></span>
-
{{ template "repo/fragments/time" $commit.Author.When }}
+
{{ template "repo/fragments/time" $commit.Committer.When }}
<span class="px-1 select-none before:content-['\00B7']"></span>
-
</p>
-
<p class="flex items-center text-sm text-gray-500 dark:text-gray-300">
<a href="/{{ $repo }}/commit/{{ $commit.This }}" class="no-underline hover:underline text-gray-500 dark:text-gray-300">{{ slice $commit.This 0 8 }}</a>
+
{{ if $commit.Parent }}
-
{{ i "arrow-left" "w-3 h-3 mx-1" }}
-
<a href="/{{ $repo }}/commit/{{ $commit.Parent }}" class="no-underline hover:underline text-gray-500 dark:text-gray-300">{{ slice $commit.Parent 0 8 }}</a>
+
{{ i "arrow-left" "w-3 h-3 mx-1" }}
+
<a href="/{{ $repo }}/commit/{{ $commit.Parent }}" class="no-underline hover:underline text-gray-500 dark:text-gray-300">{{ slice $commit.Parent 0 8 }}</a>
{{ end }}
</p>
···
<div class="mb-1">This commit was signed with the committer's <span class="text-green-600 font-semibold">known signature</span>.</div>
<div class="flex items-center gap-2 my-2">
{{ i "user" "w-4 h-4" }}
-
{{ $committerDidOrHandle := index $.EmailToDidOrHandle $commit.Committer.Email }}
-
<a href="/{{ $committerDidOrHandle }}">{{ template "user/fragments/picHandleLink" $committerDidOrHandle }}</a>
+
{{ $committerDid := index $.EmailToDid $commit.Committer.Email }}
+
{{ template "user/fragments/picHandleLink" $committerDid }}
</div>
<div class="my-1 pt-2 text-xs border-t border-gray-200 dark:border-gray-700">
<div class="text-gray-600 dark:text-gray-300">SSH Key Fingerprint:</div>
···
</section>
{{end}}
+
{{ define "attribution" }}
+
{{ $commit := .Diff.Commit }}
+
{{ $showCommitter := true }}
+
{{ if eq $commit.Author.Email $commit.Committer.Email }}
+
{{ $showCommitter = false }}
+
{{ end }}
+
+
{{ if $showCommitter }}
+
authored by {{ template "attributedUser" (list $commit.Author.Email $commit.Author.Name $.EmailToDid) }}
+
{{ range $commit.CoAuthors }}
+
{{ template "attributedUser" (list .Email .Name $.EmailToDid) }}
+
{{ end }}
+
and committed by {{ template "attributedUser" (list $commit.Committer.Email $commit.Committer.Name $.EmailToDid) }}
+
{{ else }}
+
{{ template "attributedUser" (list $commit.Author.Email $commit.Author.Name $.EmailToDid )}}
+
{{ end }}
+
{{ end }}
+
+
{{ define "attributedUser" }}
+
{{ $email := index . 0 }}
+
{{ $name := index . 1 }}
+
{{ $map := index . 2 }}
+
{{ $did := index $map $email }}
+
+
{{ if $did }}
+
{{ template "user/fragments/picHandleLink" $did }}
+
{{ else }}
+
<a href="mailto:{{ $email }}" class="no-underline hover:underline text-gray-500 dark:text-gray-300">{{ $name }}</a>
+
{{ end }}
+
{{ end }}
+
{{ define "topbarLayout" }}
<header class="col-span-full" style="z-index: 20;">
{{ template "layouts/fragments/topbar" . }}
···
{{ end }}
{{ define "contentAfter" }}
-
{{ template "repo/fragments/diff" (list .RepoInfo.FullName .Diff .DiffOpts) }}
+
{{ template "repo/fragments/diff" (list .Diff .DiffOpts) }}
{{end}}
{{ define "contentAfterLeft" }}
+2 -2
appview/pages/templates/repo/compare/compare.html
···
{{ end }}
{{ define "mainLayout" }}
-
<div class="px-1 col-span-full flex flex-col gap-4">
+
<div class="px-1 flex-grow col-span-full flex flex-col gap-4">
{{ block "contentLayout" . }}
{{ block "content" . }}{{ end }}
{{ end }}
···
{{ end }}
{{ define "contentAfter" }}
-
{{ template "repo/fragments/diff" (list .RepoInfo.FullName .Diff .DiffOpts) }}
+
{{ template "repo/fragments/diff" (list .Diff .DiffOpts) }}
{{end}}
{{ define "contentAfterLeft" }}
+1 -1
appview/pages/templates/repo/empty.html
···
<p><span class="{{$bullet}}">1</span>First, generate a new <a href="https://git-scm.com/book/en/v2/Git-on-the-Server-Generating-Your-SSH-Public-Key" class="underline">SSH key pair</a>.</p>
<p><span class="{{$bullet}}">2</span>Then add the public key to your account from the <a href="/settings" class="underline">settings</a> page.</p>
-
<p><span class="{{$bullet}}">3</span>Configure your remote to <code>git@{{ $knot }}:{{ .RepoInfo.OwnerHandle }}/{{ .RepoInfo.Name }}</code></p>
+
<p><span class="{{$bullet}}">3</span>Configure your remote to <code>git@{{ $knot | stripPort }}:{{ resolve .RepoInfo.OwnerDid }}/{{ .RepoInfo.Name }}</code></p>
<p><span class="{{$bullet}}">4</span>Push!</p>
</div>
</div>
+2 -1
appview/pages/templates/repo/fork.html
···
value="{{ . }}"
class="mr-2"
id="domain-{{ . }}"
+
{{if eq (len $.Knots) 1}}checked{{end}}
/>
<label for="domain-{{ . }}" class="dark:text-white">{{ . }}</label>
</div>
···
{{ end }}
</div>
</div>
-
<p class="text-sm text-gray-500 dark:text-gray-400">A knot hosts repository data. <a href="/knots" class="underline">Learn how to register your own knot.</a></p>
+
<p class="text-sm text-gray-500 dark:text-gray-400">A knot hosts repository data. <a href="/settings/knots" class="underline">Learn how to register your own knot.</a></p>
</fieldset>
<div class="space-y-2">
+49
appview/pages/templates/repo/fragments/backlinks.html
···
+
{{ define "repo/fragments/backlinks" }}
+
{{ if .Backlinks }}
+
<div id="at-uri-panel" class="px-2 md:px-0">
+
<div>
+
<span class="text-sm py-1 font-bold text-gray-500 dark:text-gray-400">Referenced by</span>
+
</div>
+
<ul>
+
{{ range .Backlinks }}
+
<li>
+
{{ $repoOwner := resolve .Handle }}
+
{{ $repoName := .Repo }}
+
{{ $repoUrl := printf "%s/%s" $repoOwner $repoName }}
+
<div class="flex flex-col">
+
<div class="flex gap-2 items-center">
+
{{ if .State.IsClosed }}
+
<span class="text-gray-500 dark:text-gray-400">
+
{{ i "ban" "w-4 h-4" }}
+
</span>
+
{{ else if eq .Kind.String "issues" }}
+
<span class="text-green-600 dark:text-green-500">
+
{{ i "circle-dot" "w-4 h-4" }}
+
</span>
+
{{ else if .State.IsOpen }}
+
<span class="text-green-600 dark:text-green-500">
+
{{ i "git-pull-request" "w-4 h-4" }}
+
</span>
+
{{ else if .State.IsMerged }}
+
<span class="text-purple-600 dark:text-purple-500">
+
{{ i "git-merge" "w-4 h-4" }}
+
</span>
+
{{ else }}
+
<span class="text-gray-600 dark:text-gray-300">
+
{{ i "git-pull-request-closed" "w-4 h-4" }}
+
</span>
+
{{ end }}
+
<a href="{{ . }}"><span class="text-gray-500 dark:text-gray-400">#{{ .SubjectId }}</span> {{ .Title }}</a>
+
</div>
+
{{ if not (eq $.RepoInfo.FullName $repoUrl) }}
+
<div>
+
<span>on <a href="/{{ $repoUrl }}">{{ $repoUrl }}</a></span>
+
</div>
+
{{ end }}
+
</div>
+
</li>
+
{{ end }}
+
</ul>
+
</div>
+
{{ end }}
+
{{ end }}
+5 -4
appview/pages/templates/repo/fragments/cloneDropdown.html
···
<code
class="flex-1 px-3 py-2 text-sm bg-gray-50 dark:bg-gray-700 text-gray-900 dark:text-gray-100 rounded-l select-all cursor-pointer whitespace-nowrap overflow-x-auto"
onclick="window.getSelection().selectAllChildren(this)"
-
data-url="https://tangled.org/{{ .RepoInfo.OwnerWithAt }}/{{ .RepoInfo.Name }}"
-
>https://tangled.org/{{ .RepoInfo.OwnerWithAt }}/{{ .RepoInfo.Name }}</code>
+
data-url="https://tangled.org/{{ resolve .RepoInfo.OwnerDid }}/{{ .RepoInfo.Name }}"
+
>https://tangled.org/{{ resolve .RepoInfo.OwnerDid }}/{{ .RepoInfo.Name }}</code>
<button
onclick="copyToClipboard(this, this.previousElementSibling.getAttribute('data-url'))"
class="px-3 py-2 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200 border-l border-gray-300 dark:border-gray-600"
···
<!-- SSH Clone -->
<div class="mb-3">
+
{{ $repoOwnerHandle := resolve .RepoInfo.OwnerDid }}
<label class="block text-xs font-medium text-gray-700 dark:text-gray-300 mb-1">SSH</label>
<div class="flex items-center border border-gray-300 dark:border-gray-600 rounded">
<code
class="flex-1 px-3 py-2 text-sm bg-gray-50 dark:bg-gray-700 text-gray-900 dark:text-gray-100 rounded-l select-all cursor-pointer whitespace-nowrap overflow-x-auto"
onclick="window.getSelection().selectAllChildren(this)"
-
data-url="git@{{ $knot }}:{{ .RepoInfo.OwnerHandle }}/{{ .RepoInfo.Name }}"
-
>git@{{ $knot }}:{{ .RepoInfo.OwnerHandle }}/{{ .RepoInfo.Name }}</code>
+
data-url="git@{{ $knot | stripPort }}:{{ $repoOwnerHandle }}/{{ .RepoInfo.Name }}"
+
>git@{{ $knot | stripPort }}:{{ $repoOwnerHandle }}/{{ .RepoInfo.Name }}</code>
<button
onclick="copyToClipboard(this, this.previousElementSibling.getAttribute('data-url'))"
class="px-3 py-2 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200 border-l border-gray-300 dark:border-gray-600"
+2 -3
appview/pages/templates/repo/fragments/diff.html
···
{{ define "repo/fragments/diff" }}
-
{{ $repo := index . 0 }}
-
{{ $diff := index . 1 }}
-
{{ $opts := index . 2 }}
+
{{ $diff := index . 0 }}
+
{{ $opts := index . 1 }}
{{ $commit := $diff.Commit }}
{{ $diff := $diff.Diff }}
+20 -18
appview/pages/templates/repo/fragments/diffOpts.html
···
{{ if .Split }}
{{ $active = "split" }}
{{ end }}
-
{{ $values := list "unified" "split" }}
-
{{ block "tabSelector" (dict "Name" "diff" "Values" $values "Active" $active) }} {{ end }}
+
+
{{ $unified :=
+
(dict
+
"Key" "unified"
+
"Value" "unified"
+
"Icon" "square-split-vertical"
+
"Meta" "") }}
+
{{ $split :=
+
(dict
+
"Key" "split"
+
"Value" "split"
+
"Icon" "square-split-horizontal"
+
"Meta" "") }}
+
{{ $values := list $unified $split }}
+
+
{{ template "fragments/tabSelector"
+
(dict
+
"Name" "diff"
+
"Values" $values
+
"Active" $active) }}
</section>
{{ end }}
-
{{ define "tabSelector" }}
-
{{ $name := .Name }}
-
{{ $all := .Values }}
-
{{ $active := .Active }}
-
<div class="flex justify-between divide-x divide-gray-200 dark:divide-gray-700 rounded border border-gray-200 dark:border-gray-700 overflow-hidden">
-
{{ $activeTab := "bg-white dark:bg-gray-700 shadow-sm" }}
-
{{ $inactiveTab := "bg-gray-100 dark:bg-gray-800 shadow-inner" }}
-
{{ range $index, $value := $all }}
-
{{ $isActive := eq $value $active }}
-
<a href="?{{ $name }}={{ $value }}"
-
class="py-2 text-sm w-full block hover:no-underline text-center {{ if $isActive }} {{$activeTab }} {{ else }} {{ $inactiveTab }} {{ end }}">
-
{{ $value }}
-
</a>
-
{{ end }}
-
</div>
-
{{ end }}
+15 -1
appview/pages/templates/repo/fragments/editLabelPanel.html
···
{{ $fieldName := $def.AtUri }}
{{ $valueType := $def.ValueType }}
{{ $value := .value }}
+
{{ if $valueType.IsDidFormat }}
{{ $value = trimPrefix (resolve .value) "@" }}
+
<actor-typeahead>
+
<input
+
autocapitalize="none"
+
autocorrect="off"
+
autocomplete="off"
+
placeholder="user.tngl.sh"
+
value="{{$value}}"
+
name="{{$fieldName}}"
+
type="text"
+
class="p-1 w-full text-sm"
+
/>
+
</actor-typeahead>
+
{{ else }}
+
<input class="p-1 w-full" type="text" name="{{$fieldName}}" value="{{$value}}">
{{ end }}
-
<input class="p-1 w-full" type="text" name="{{$fieldName}}" value="{{$value}}">
{{ end }}
{{ define "nullTypeInput" }}
-11
appview/pages/templates/repo/fragments/editRepoDescription.html
···
-
{{ define "repo/fragments/editRepoDescription" }}
-
<form hx-put="/{{ .RepoInfo.FullName }}/description" hx-target="this" hx-swap="outerHTML" class="flex flex-wrap gap-2">
-
<input type="text" class="p-1" name="description" value="{{ .RepoInfo.Description }}">
-
<button type="submit" class="btn p-1 flex items-center gap-2 no-underline text-sm">
-
{{ i "check" "w-3 h-3" }} save
-
</button>
-
<button type="button" class="btn p-1 flex items-center gap-2 no-underline text-sm" hx-get="/{{ .RepoInfo.FullName }}/description" >
-
{{ i "x" "w-3 h-3" }} cancel
-
</button>
-
</form>
-
{{ end }}
+48
appview/pages/templates/repo/fragments/externalLinkPanel.html
···
+
{{ define "repo/fragments/externalLinkPanel" }}
+
<div id="at-uri-panel" class="px-2 md:px-0">
+
<div class="flex justify-between items-center gap-2">
+
<span class="text-sm py-1 font-bold text-gray-500 dark:text-gray-400 capitalize">AT URI</span>
+
<div class="flex items-center gap-2">
+
<button
+
onclick="copyToClipboard(this)"
+
class="text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200"
+
title="Copy to clipboard">
+
{{ i "copy" "w-4 h-4" }}
+
</button>
+
<a
+
href="https://pdsls.dev/{{.}}"
+
class="text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200"
+
title="View in PDSls">
+
{{ i "arrow-up-right" "w-4 h-4" }}
+
</a>
+
</div>
+
</div>
+
<span
+
class="font-mono text-sm select-all cursor-pointer block max-w-full overflow-x-auto whitespace-nowrap scrollbar-thin scrollbar-thumb-gray-300 dark:scrollbar-thumb-gray-600"
+
onclick="window.getSelection().selectAllChildren(this)"
+
title="{{.}}"
+
data-aturi="{{ . | string | safeUrl }}"
+
>{{.}}</span>
+
+
+
</div>
+
+
<script>
+
function copyToClipboard(button) {
+
const container = document.getElementById("at-uri-panel");
+
const urlSpan = container?.querySelector('[data-aturi]');
+
const text = urlSpan?.getAttribute('data-aturi');
+
console.log("copying to clipboard", text)
+
if (!text) return;
+
+
navigator.clipboard.writeText(text).then(() => {
+
const originalContent = button.innerHTML;
+
button.innerHTML = `{{ i "check" "w-4 h-4" }}`;
+
setTimeout(() => {
+
button.innerHTML = originalContent;
+
}, 2000);
+
});
+
}
+
</script>
+
{{ end }}
+
+1 -16
appview/pages/templates/repo/fragments/participants.html
···
<span class="font-bold text-gray-500 dark:text-gray-400 capitalize">Participants</span>
<span class="bg-gray-200 dark:bg-gray-700 rounded py-1/2 px-1 ml-1">{{ len $all }}</span>
</div>
-
<div class="flex items-center -space-x-3 mt-2">
-
{{ $c := "z-50 z-40 z-30 z-20 z-10" }}
-
{{ range $i, $p := $ps }}
-
<img
-
src="{{ tinyAvatar . }}"
-
alt=""
-
class="rounded-full h-8 w-8 mr-1 border-2 border-gray-100 dark:border-gray-900 z-{{sub 5 $i}}0"
-
/>
-
{{ end }}
-
-
{{ if gt (len $all) 5 }}
-
<span class="pl-4 text-gray-500 dark:text-gray-400 text-sm">
-
+{{ sub (len $all) 5 }}
-
</span>
-
{{ end }}
-
</div>
+
{{ template "fragments/tinyAvatarList" (dict "all" $all "classes" "w-8 h-8") }}
</div>
{{ end }}
-15
appview/pages/templates/repo/fragments/repoDescription.html
···
-
{{ define "repo/fragments/repoDescription" }}
-
<span id="repo-description" class="flex flex-wrap items-center gap-2 text-sm" hx-target="this" hx-swap="outerHTML">
-
{{ if .RepoInfo.Description }}
-
{{ .RepoInfo.Description | description }}
-
{{ else }}
-
<span class="italic">this repo has no description</span>
-
{{ end }}
-
-
{{ if .RepoInfo.Roles.IsOwner }}
-
<button class="flex items-center gap-2 no-underline text-sm" hx-get="/{{ .RepoInfo.FullName }}/description/edit">
-
{{ i "pencil" "w-3 h-3" }}
-
</button>
-
{{ end }}
-
</span>
-
{{ end }}
-26
appview/pages/templates/repo/fragments/repoStar.html
···
-
{{ define "repo/fragments/repoStar" }}
-
<button
-
id="starBtn"
-
class="btn disabled:opacity-50 disabled:cursor-not-allowed flex gap-2 items-center group"
-
{{ if .IsStarred }}
-
hx-delete="/star?subject={{ .RepoAt }}&countHint={{ .Stats.StarCount }}"
-
{{ else }}
-
hx-post="/star?subject={{ .RepoAt }}&countHint={{ .Stats.StarCount }}"
-
{{ end }}
-
-
hx-trigger="click"
-
hx-target="this"
-
hx-swap="outerHTML"
-
hx-disabled-elt="#starBtn"
-
>
-
{{ if .IsStarred }}
-
{{ i "star" "w-4 h-4 fill-current" }}
-
{{ else }}
-
{{ i "star" "w-4 h-4" }}
-
{{ end }}
-
<span class="text-sm">
-
{{ .Stats.StarCount }}
-
</span>
-
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
-
</button>
-
{{ end }}
+39 -20
appview/pages/templates/repo/index.html
···
{{ end }}
<div class="flex items-center justify-between pb-5">
{{ block "branchSelector" . }}{{ end }}
-
<div class="flex md:hidden items-center gap-2">
+
<div class="flex md:hidden items-center gap-3">
<a href="/{{ .RepoInfo.FullName }}/commits/{{ .Ref | urlquery }}" class="inline-flex items-center text-sm gap-1 font-bold">
{{ i "git-commit-horizontal" "w-4" "h-4" }} {{ .TotalCommits }}
</a>
···
{{ end }}
{{ define "repoLanguages" }}
-
<details class="group -m-6 mb-4">
+
<details class="group -my-4 -m-6 mb-4">
<summary class="flex gap-[1px] h-4 scale-y-50 hover:scale-y-100 origin-top group-open:scale-y-100 transition-all hover:cursor-pointer overflow-hidden rounded-t">
{{ range $value := .Languages }}
<div
···
<div class="px-4 py-2 border-b border-gray-200 dark:border-gray-600 flex items-center gap-4 flex-wrap">
{{ range $value := .Languages }}
<div
-
class="flex flex-grow items-center gap-2 text-xs align-items-center justify-center"
+
class="flex items-center gap-2 text-xs align-items-center justify-center"
>
{{ template "repo/fragments/colorBall" (dict "color" (langColor $value.Name)) }}
<div>{{ or $value.Name "Other" }}
···
{{ define "branchSelector" }}
<div class="flex gap-2 items-center justify-between w-full">
-
<div class="flex gap-2 items-center">
+
<div class="flex gap-2 items-stretch">
<select
onchange="window.location.href = '/{{ .RepoInfo.FullName }}/tree/' + encodeURIComponent(this.value)"
class="p-1 border max-w-32 border-gray-200 bg-white dark:bg-gray-800 dark:text-white dark:border-gray-700"
···
{{ $icon := "folder" }}
{{ $iconStyle := "size-4 fill-current" }}
+
{{ if .IsSubmodule }}
+
{{ $link = printf "/%s/%s/%s/%s" $.RepoInfo.FullName "blob" (urlquery $.Ref) .Name }}
+
{{ $icon = "folder-input" }}
+
{{ $iconStyle = "size-4" }}
+
{{ end }}
+
{{ if .IsFile }}
{{ $link = printf "/%s/%s/%s/%s" $.RepoInfo.FullName "blob" (urlquery $.Ref) .Name }}
{{ $icon = "file" }}
{{ $iconStyle = "size-4" }}
{{ end }}
+
<a href="{{ $link }}" class="{{ $linkstyle }}">
<div class="flex items-center gap-2">
{{ i $icon $iconStyle "flex-shrink-0" }}
···
<span
class="mx-1 before:content-['·'] before:select-none"
></span>
-
<span>
-
{{ $didOrHandle := index $.EmailToDidOrHandle .Author.Email }}
-
<a
-
href="{{ if $didOrHandle }}
-
/{{ $didOrHandle }}
-
{{ else }}
-
mailto:{{ .Author.Email }}
-
{{ end }}"
-
class="text-gray-500 dark:text-gray-400 no-underline hover:underline"
-
>{{ if $didOrHandle }}
-
{{ template "user/fragments/picHandleLink" $didOrHandle }}
-
{{ else }}
-
{{ .Author.Name }}
-
{{ end }}</a
-
>
-
</span>
+
{{ template "attribution" (list . $.EmailToDid) }}
<div class="inline-block px-1 select-none after:content-['·']"></div>
{{ template "repo/fragments/time" .Committer.When }}
···
{{ end }}
</div>
</div>
+
{{ end }}
+
+
{{ define "attribution" }}
+
{{ $commit := index . 0 }}
+
{{ $map := index . 1 }}
+
<span class="flex items-center">
+
{{ $author := index $map $commit.Author.Email }}
+
{{ $coauthors := $commit.CoAuthors }}
+
{{ $all := list }}
+
+
{{ if $author }}
+
{{ $all = append $all $author }}
+
{{ end }}
+
{{ range $coauthors }}
+
{{ $co := index $map .Email }}
+
{{ if $co }}
+
{{ $all = append $all $co }}
+
{{ end }}
+
{{ end }}
+
+
{{ template "fragments/tinyAvatarList" (dict "all" $all "classes" "size-6") }}
+
<a href="{{ if $author }}/{{ $author }}{{ else }}mailto:{{ $commit.Author.Email }}{{ end }}"
+
class="no-underline hover:underline">
+
{{ if $author }}{{ resolve $author }}{{ else }}{{ $commit.Author.Name }}{{ end }}
+
{{ if $coauthors }} +{{ length $coauthors }}{{ end }}
+
</a>
+
</span>
{{ end }}
{{ define "branchList" }}
+4 -4
appview/pages/templates/repo/issues/fragments/issueCommentHeader.html
···
{{ end }}
{{ define "timestamp" }}
-
<a href="#{{ .Comment.Id }}"
+
<a href="#comment-{{ .Comment.Id }}"
class="text-gray-500 dark:text-gray-400 hover:text-gray-500 dark:hover:text-gray-400 hover:underline no-underline"
-
id="{{ .Comment.Id }}">
+
id="comment-{{ .Comment.Id }}">
{{ if .Comment.Deleted }}
{{ template "repo/fragments/shortTimeAgo" .Comment.Deleted }}
{{ else if .Comment.Edited }}
···
{{ define "editIssueComment" }}
<a
-
class="text-gray-500 dark:text-gray-400 flex gap-1 items-center group"
+
class="text-gray-500 dark:text-gray-400 flex gap-1 items-center group cursor-pointer"
hx-get="/{{ .RepoInfo.FullName }}/issues/{{ .Issue.IssueId }}/comment/{{ .Comment.Id }}/edit"
hx-swap="outerHTML"
hx-target="#comment-body-{{.Comment.Id}}">
···
{{ define "deleteIssueComment" }}
<a
-
class="text-gray-500 dark:text-gray-400 flex gap-1 items-center group"
+
class="text-gray-500 dark:text-gray-400 flex gap-1 items-center group cursor-pointer"
hx-delete="/{{ .RepoInfo.FullName }}/issues/{{ .Issue.IssueId }}/comment/{{ .Comment.Id }}/"
hx-confirm="Are you sure you want to delete your comment?"
hx-swap="outerHTML"
+1 -1
appview/pages/templates/repo/issues/fragments/issueListing.html
···
class="no-underline hover:underline"
>
{{ .Title | description }}
-
<span class="text-gray-500">#{{ .IssueId }}</span>
+
<span class="text-gray-500 dark:text-gray-400">#{{ .IssueId }}</span>
</a>
</div>
<div class="text-sm text-gray-500 dark:text-gray-400 flex flex-wrap items-center gap-1">
+6 -2
appview/pages/templates/repo/issues/issue.html
···
"Subject" $.Issue.AtUri
"State" $.Issue.Labels) }}
{{ template "repo/fragments/participants" $.Issue.Participants }}
+
{{ template "repo/fragments/backlinks"
+
(dict "RepoInfo" $.RepoInfo
+
"Backlinks" $.Backlinks) }}
+
{{ template "repo/fragments/externalLinkPanel" $.Issue.AtUri }}
</div>
</div>
{{ end }}
···
{{ define "editIssue" }}
<a
-
class="text-gray-500 dark:text-gray-400 flex gap-1 items-center group"
+
class="text-gray-500 dark:text-gray-400 flex gap-1 items-center group cursor-pointer"
hx-get="/{{ .RepoInfo.FullName }}/issues/{{ .Issue.IssueId }}/edit"
hx-swap="innerHTML"
hx-target="#issue-{{.Issue.IssueId}}">
···
{{ define "deleteIssue" }}
<a
-
class="text-gray-500 dark:text-gray-400 flex gap-1 items-center group"
+
class="text-gray-500 dark:text-gray-400 flex gap-1 items-center group cursor-pointer"
hx-delete="/{{ .RepoInfo.FullName }}/issues/{{ .Issue.IssueId }}/"
hx-confirm="Are you sure you want to delete your issue?"
hx-swap="none">
+147 -47
appview/pages/templates/repo/issues/issues.html
···
{{ end }}
{{ define "repoContent" }}
-
<div class="flex justify-between items-center gap-4">
-
<div class="flex gap-4">
-
<a
-
href="?state=open"
-
class="flex items-center gap-2 {{ if .FilteringByOpen }}font-bold {{ else }}text-gray-500 dark:text-gray-400{{ end }}"
+
{{ $active := "closed" }}
+
{{ if .FilteringByOpen }}
+
{{ $active = "open" }}
+
{{ end }}
+
+
{{ $open :=
+
(dict
+
"Key" "open"
+
"Value" "open"
+
"Icon" "circle-dot"
+
"Meta" (string .RepoInfo.Stats.IssueCount.Open)) }}
+
{{ $closed :=
+
(dict
+
"Key" "closed"
+
"Value" "closed"
+
"Icon" "ban"
+
"Meta" (string .RepoInfo.Stats.IssueCount.Closed)) }}
+
{{ $values := list $open $closed }}
+
+
<div class="grid gap-2 grid-cols-[auto_1fr_auto] grid-row-2">
+
<form class="flex relative col-span-3 sm:col-span-1 sm:col-start-2" method="GET">
+
<input type="hidden" name="state" value="{{ if .FilteringByOpen }}open{{ else }}closed{{ end }}">
+
<div class="flex-1 flex relative">
+
<input
+
id="search-q"
+
class="flex-1 py-1 pl-2 pr-10 mr-[-1px] rounded-r-none focus:border-0 focus:outline-none focus:ring focus:ring-blue-400 ring-inset peer"
+
type="text"
+
name="q"
+
value="{{ .FilterQuery }}"
+
placeholder=" "
>
-
{{ i "circle-dot" "w-4 h-4" }}
-
<span>{{ .RepoInfo.Stats.IssueCount.Open }} open</span>
-
</a>
+
<a
+
href="?state={{ if .FilteringByOpen }}open{{ else }}closed{{ end }}"
+
class="absolute right-3 top-1/2 -translate-y-1/2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300 hidden peer-[:not(:placeholder-shown)]:block"
+
>
+
{{ i "x" "w-4 h-4" }}
+
</a>
+
</div>
+
<button
+
type="submit"
+
class="p-2 text-gray-400 border rounded-r border-gray-400 dark:border-gray-600"
+
>
+
{{ i "search" "w-4 h-4" }}
+
</button>
+
</form>
+
<div class="sm:row-start-1">
+
{{ template "fragments/tabSelector" (dict "Name" "state" "Values" $values "Active" $active "Include" "#search-q") }}
+
</div>
<a
-
href="?state=closed"
-
class="flex items-center gap-2 {{ if not .FilteringByOpen }}font-bold {{ else }}text-gray-500 dark:text-gray-400{{ end }}"
-
>
-
{{ i "ban" "w-4 h-4" }}
-
<span>{{ .RepoInfo.Stats.IssueCount.Closed }} closed</span>
-
</a>
-
</div>
-
<a
href="/{{ .RepoInfo.FullName }}/issues/new"
-
class="btn-create text-sm flex items-center justify-center gap-2 no-underline hover:no-underline hover:text-white"
-
>
+
class="col-start-3 btn-create text-sm flex items-center justify-center gap-2 no-underline hover:no-underline hover:text-white"
+
>
{{ i "circle-plus" "w-4 h-4" }}
<span>new</span>
-
</a>
-
</div>
-
<div class="error" id="issues"></div>
+
</a>
+
</div>
+
<div class="error" id="issues"></div>
{{ end }}
{{ define "repoAfter" }}
<div class="mt-2">
{{ template "repo/issues/fragments/issueListing" (dict "Issues" .Issues "RepoPrefix" .RepoInfo.FullName "LabelDefs" .LabelDefs) }}
</div>
-
{{ block "pagination" . }} {{ end }}
+
{{if gt .IssueCount .Page.Limit }}
+
{{ block "pagination" . }} {{ end }}
+
{{ end }}
{{ end }}
{{ define "pagination" }}
-
<div class="flex justify-end mt-4 gap-2">
-
{{ $currentState := "closed" }}
-
{{ if .FilteringByOpen }}
-
{{ $currentState = "open" }}
-
{{ end }}
+
<div class="flex justify-center items-center mt-4 gap-2">
+
{{ $currentState := "closed" }}
+
{{ if .FilteringByOpen }}
+
{{ $currentState = "open" }}
+
{{ end }}
+
{{ $prev := .Page.Previous.Offset }}
+
{{ $next := .Page.Next.Offset }}
+
{{ $lastPage := sub .IssueCount (mod .IssueCount .Page.Limit) }}
+
+
<a
+
class="
+
btn flex items-center gap-2 no-underline hover:no-underline
+
dark:text-white dark:hover:bg-gray-700
+
{{ if le .Page.Offset 0 }}
+
cursor-not-allowed opacity-50
+
{{ end }}
+
"
{{ if gt .Page.Offset 0 }}
-
{{ $prev := .Page.Previous }}
-
<a
-
class="btn flex items-center gap-2 no-underline hover:no-underline dark:text-white dark:hover:bg-gray-700"
-
hx-boost="true"
-
href = "/{{ $.RepoInfo.FullName }}/issues?state={{ $currentState }}&offset={{ $prev.Offset }}&limit={{ $prev.Limit }}"
-
>
-
{{ i "chevron-left" "w-4 h-4" }}
-
previous
-
</a>
-
{{ else }}
-
<div></div>
+
hx-boost="true"
+
href = "/{{ $.RepoInfo.FullName }}/issues?state={{ $currentState }}&q={{ .FilterQuery }}&offset={{ $prev }}&limit={{ .Page.Limit }}"
{{ end }}
+
>
+
{{ i "chevron-left" "w-4 h-4" }}
+
previous
+
</a>
+
+
<!-- don't show the first page link if the current page is the first page -->
+
{{ if gt .Page.Offset 0 }}
+
<a
+
hx-boost="true"
+
href = "/{{ $.RepoInfo.FullName }}/issues?state={{ $currentState }}&q={{ .FilterQuery }}&offset=0&limit={{ .Page.Limit }}"
+
>
+
1
+
</a>
+
{{ end }}
+
+
<!-- if previous page is not first or second page (prev > limit) -->
+
{{ if gt $prev .Page.Limit }}
+
<span>...</span>
+
{{ end }}
+
+
<!-- if previous page is not the first page -->
+
{{ if gt $prev 0 }}
+
<a
+
hx-boost="true"
+
href = "/{{ $.RepoInfo.FullName }}/issues?state={{ $currentState }}&q={{ .FilterQuery }}&offset={{ $prev }}&limit={{ .Page.Limit }}"
+
>
+
{{ add (div $prev .Page.Limit) 1 }}
+
</a>
+
{{ end }}
+
+
<!-- current page. this is always visible -->
+
<span class="font-bold">
+
{{ add (div .Page.Offset .Page.Limit) 1 }}
+
</span>
+
<!-- if next page is not last page -->
+
{{ if lt $next $lastPage }}
+
<a
+
hx-boost="true"
+
href = "/{{ $.RepoInfo.FullName }}/issues?state={{ $currentState }}&q={{ .FilterQuery }}&offset={{ $next }}&limit={{ .Page.Limit }}"
+
>
+
{{ add (div $next .Page.Limit) 1 }}
+
</a>
+
{{ end }}
+
+
<!-- if next page is not second last or last page (next < issues - 2 * limit) -->
+
{{ if lt ($next) (sub .IssueCount (mul (2) .Page.Limit)) }}
+
<span>...</span>
+
{{ end }}
+
+
<!-- if it's not the last page -->
+
{{ if lt .Page.Offset $lastPage }}
+
<a
+
hx-boost="true"
+
href = "/{{ $.RepoInfo.FullName }}/issues?state={{ $currentState }}&q={{ .FilterQuery }}&offset={{ $lastPage }}&limit={{ .Page.Limit }}"
+
>
+
{{ add (div $lastPage .Page.Limit) 1 }}
+
</a>
+
{{ end }}
+
+
<a
+
class="
+
btn flex items-center gap-2 no-underline hover:no-underline
+
dark:text-white dark:hover:bg-gray-700
+
{{ if ne (len .Issues) .Page.Limit }}
+
cursor-not-allowed opacity-50
+
{{ end }}
+
"
{{ if eq (len .Issues) .Page.Limit }}
-
{{ $next := .Page.Next }}
-
<a
-
class="btn flex items-center gap-2 no-underline hover:no-underline dark:text-white dark:hover:bg-gray-700"
-
hx-boost="true"
-
href = "/{{ $.RepoInfo.FullName }}/issues?state={{ $currentState }}&offset={{ $next.Offset }}&limit={{ $next.Limit }}"
-
>
-
next
-
{{ i "chevron-right" "w-4 h-4" }}
-
</a>
+
hx-boost="true"
+
href="/{{ $.RepoInfo.FullName }}/issues?state={{ $currentState }}&q={{ .FilterQuery }}&offset={{ $next }}&limit={{ .Page.Limit }}"
{{ end }}
+
>
+
next
+
{{ i "chevron-right" "w-4 h-4" }}
+
</a>
</div>
{{ end }}
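The reworked pagination above derives every link from three numbers: the current offset, the page limit, and the total issue count. A minimal Go sketch of that arithmetic, with illustrative names rather than the appview's actual types:

package main

import "fmt"

// pageNumber turns a zero-based offset into a one-based page number,
// mirroring the template's "add (div .Page.Offset .Page.Limit) 1".
func pageNumber(offset, limit int) int {
	return offset/limit + 1
}

// lastPageOffset mirrors "sub .IssueCount (mod .IssueCount .Page.Limit)".
func lastPageOffset(total, limit int) int {
	return total - total%limit
}

func main() {
	total, limit, offset := 53, 10, 20
	fmt.Println(pageNumber(offset, limit))                       // 3 — current page
	fmt.Println(pageNumber(lastPageOffset(total, limit), limit)) // 6 — last page
}

One edge case worth noting: when the issue count is an exact multiple of the limit, total - total%limit equals the total itself, so the last-page link appears to point one page past the final non-empty page.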
+40 -23
appview/pages/templates/repo/log.html
···
<div class="hidden md:flex md:flex-col divide-y divide-gray-200 dark:divide-gray-700">
{{ $grid := "grid grid-cols-14 gap-4" }}
<div class="{{ $grid }}">
-
<div class="py-2 text-sm text-left text-gray-700 dark:text-gray-300 uppercase font-bold col-span-2">Author</div>
+
<div class="py-2 text-sm text-left text-gray-700 dark:text-gray-300 uppercase font-bold col-span-3">Author</div>
<div class="py-2 text-sm text-left text-gray-700 dark:text-gray-300 uppercase font-bold col-span-3">Commit</div>
<div class="py-2 text-sm text-left text-gray-700 dark:text-gray-300 uppercase font-bold col-span-6">Message</div>
-
<div class="py-2 text-sm text-left text-gray-700 dark:text-gray-300 uppercase font-bold col-span-1"></div>
<div class="py-2 text-sm text-left text-gray-700 dark:text-gray-300 uppercase font-bold col-span-2 justify-self-end">Date</div>
</div>
{{ range $index, $commit := .Commits }}
{{ $messageParts := splitN $commit.Message "\n\n" 2 }}
<div class="{{ $grid }} py-3">
-
<div class="align-top truncate col-span-2">
-
{{ $didOrHandle := index $.EmailToDidOrHandle $commit.Author.Email }}
-
{{ if $didOrHandle }}
-
{{ template "user/fragments/picHandleLink" $didOrHandle }}
-
{{ else }}
-
<a href="mailto:{{ $commit.Author.Email }}" class="text-gray-700 dark:text-gray-300 no-underline hover:underline">{{ $commit.Author.Name }}</a>
-
{{ end }}
+
<div class="align-top col-span-3">
+
{{ template "attribution" (list $commit $.EmailToDid) }}
</div>
<div class="align-top font-mono flex items-start col-span-3">
{{ $verified := $.VerifiedCommits.IsVerified $commit.Hash.String }}
···
<div class="align-top col-span-6">
<div>
<a href="/{{ $.RepoInfo.FullName }}/commit/{{ $commit.Hash.String }}" class="dark:text-white no-underline hover:underline">{{ index $messageParts 0 }}</a>
+
{{ if gt (len $messageParts) 1 }}
<button class="py-1/2 px-1 bg-gray-200 hover:bg-gray-400 dark:bg-gray-700 dark:hover:bg-gray-600 rounded" hx-on:click="this.parentElement.nextElementSibling.classList.toggle('hidden')">{{ i "ellipsis" "w-3 h-3" }}</button>
{{ end }}
···
</span>
{{ end }}
{{ end }}
+
+
<!-- ci status -->
+
<span class="text-xs">
+
{{ $pipeline := index $.Pipelines .Hash.String }}
+
{{ if and $pipeline (gt (len $pipeline.Statuses) 0) }}
+
{{ template "repo/pipelines/fragments/pipelineSymbolLong" (dict "Pipeline" $pipeline "RepoInfo" $.RepoInfo) }}
+
{{ end }}
+
</span>
</div>
{{ if gt (len $messageParts) 1 }}
<p class="hidden mt-1 text-sm text-gray-600 dark:text-gray-400">{{ nl2br (index $messageParts 1) }}</p>
{{ end }}
-
</div>
-
<div class="align-top col-span-1">
-
<!-- ci status -->
-
{{ $pipeline := index $.Pipelines .Hash.String }}
-
{{ if and $pipeline (gt (len $pipeline.Statuses) 0) }}
-
{{ template "repo/pipelines/fragments/pipelineSymbolLong" (dict "Pipeline" $pipeline "RepoInfo" $.RepoInfo) }}
-
{{ end }}
</div>
<div class="align-top justify-self-end text-gray-500 dark:text-gray-400 col-span-2">{{ template "repo/fragments/shortTimeAgo" $commit.Committer.When }}</div>
</div>
···
</a>
</span>
<span class="mx-2 before:content-['·'] before:select-none"></span>
-
<span>
-
{{ $didOrHandle := index $.EmailToDidOrHandle $commit.Author.Email }}
-
<a href="{{ if $didOrHandle }}/{{ $didOrHandle }}{{ else }}mailto:{{ $commit.Author.Email }}{{ end }}"
-
class="text-gray-500 dark:text-gray-400 no-underline hover:underline">
-
{{ if $didOrHandle }}{{ template "user/fragments/picHandleLink" $didOrHandle }}{{ else }}{{ $commit.Author.Name }}{{ end }}
-
</a>
-
</span>
+
{{ template "attribution" (list $commit $.EmailToDid) }}
<div class="inline-block px-1 select-none after:content-['·']"></div>
<span>{{ template "repo/fragments/shortTime" $commit.Committer.When }}</span>
···
</div>
</section>
+
{{ end }}
+
+
{{ define "attribution" }}
+
{{ $commit := index . 0 }}
+
{{ $map := index . 1 }}
+
<span class="flex items-center gap-1">
+
{{ $author := index $map $commit.Author.Email }}
+
{{ $coauthors := $commit.CoAuthors }}
+
{{ $all := list }}
+
+
{{ if $author }}
+
{{ $all = append $all $author }}
+
{{ end }}
+
{{ range $coauthors }}
+
{{ $co := index $map .Email }}
+
{{ if $co }}
+
{{ $all = append $all $co }}
+
{{ end }}
+
{{ end }}
+
+
{{ template "fragments/tinyAvatarList" (dict "all" $all "classes" "size-6") }}
+
<a href="{{ if $author }}/{{ $author }}{{ else }}mailto:{{ $commit.Author.Email }}{{ end }}"
+
class="no-underline hover:underline">
+
{{ if $author }}{{ resolve $author }}{{ else }}{{ $commit.Author.Name }}{{ end }}
+
{{ if $coauthors }} +{{ len $coauthors }}{{ end }}
+
</a>
+
</span>
{{ end }}
{{ define "repoAfter" }}
+2 -1
appview/pages/templates/repo/new.html
···
class="mr-2"
id="domain-{{ . }}"
required
+
{{if eq (len $.Knots) 1}}checked{{end}}
/>
<label for="domain-{{ . }}" class="dark:text-white lowercase">{{ . }}</label>
</div>
···
</div>
<p class="text-sm text-gray-500 dark:text-gray-400 mt-1">
A knot hosts repository data and handles Git operations.
-
You can also <a href="/knots" class="underline">register your own knot</a>.
+
You can also <a href="/settings/knots" class="underline">register your own knot</a>.
</p>
</div>
{{ end }}
+7 -6
appview/pages/templates/repo/pipelines/fragments/logBlock.html
···
<div id="lines" hx-swap-oob="beforeend">
<details id="step-{{ .Id }}" {{if not .Collapsed}}open{{end}} class="group pb-2 rounded-sm border border-gray-200 dark:border-gray-700">
<summary class="sticky top-0 pt-2 px-2 group-open:pb-2 group-open:mb-2 list-none cursor-pointer group-open:border-b border-gray-200 dark:border-gray-700 bg-white dark:bg-gray-800 hover:text-gray-500 hover:dark:text-gray-400">
-
<div class="group-open:hidden flex items-center gap-1">
-
{{ i "chevron-right" "w-4 h-4" }} {{ .Name }}
-
</div>
-
<div class="hidden group-open:flex items-center gap-1">
-
{{ i "chevron-down" "w-4 h-4" }} {{ .Name }}
-
</div>
+
<div class="group-open:hidden flex items-center gap-1">{{ i "chevron-right" "w-4 h-4" }} {{ template "stepHeader" . }}</div>
+
<div class="hidden group-open:flex items-center gap-1">{{ i "chevron-down" "w-4 h-4" }} {{ template "stepHeader" . }}</div>
</summary>
<div class="font-mono whitespace-pre overflow-x-auto px-2"><div class="text-blue-600 dark:text-blue-300">{{ .Command }}</div><div id="step-body-{{ .Id }}"></div></div>
</details>
</div>
{{ end }}
+
+
{{ define "stepHeader" }}
+
{{ .Name }}
+
<span class="ml-auto text-sm text-gray-500 tabular-nums" data-timer="{{ .Id }}" data-start="{{ .StartTime.Unix }}"></span>
+
{{ end }}
+9
appview/pages/templates/repo/pipelines/fragments/logBlockEnd.html
···
+
{{ define "repo/pipelines/fragments/logBlockEnd" }}
+
<span
+
class="ml-auto text-sm text-gray-500 tabular-nums"
+
data-timer="{{ .Id }}"
+
data-start="{{ .StartTime.Unix }}"
+
data-end="{{ .EndTime.Unix }}"
+
hx-swap-oob="outerHTML:[data-timer='{{ .Id }}']"></span>
+
{{ end }}
+
+15 -3
appview/pages/templates/repo/pipelines/pipelines.html
···
{{ range .Pipelines }}
{{ block "pipeline" (list $ .) }} {{ end }}
{{ else }}
-
<p class="text-center pt-5 text-gray-400 dark:text-gray-500">
-
No pipelines run for this repository.
-
</p>
+
<div class="py-6 w-fit flex flex-col gap-4 mx-auto">
+
<p>
+
No pipelines have been run for this repository yet. To get started:
+
</p>
+
{{ $bullet := "mx-2 text-xs bg-gray-200 dark:bg-gray-600 rounded-full size-5 flex items-center justify-center font-mono inline-flex align-middle" }}
+
<p>
+
<span class="{{ $bullet }}">1</span>First, choose a spindle in your
+
<a href="/{{ .RepoInfo.FullName }}/settings?tab=pipelines" class="underline">repository settings</a>.
+
</p>
+
<p>
+
<span class="{{ $bullet }}">2</span>Configure your CI/CD
+
<a href="https://tangled.org/@tangled.org/core/blob/master/docs/spindle/pipeline.md" class="underline">pipeline</a>.
+
</p>
+
<p><span class="{{ $bullet }}">3</span>Trigger a workflow with a push or a pull-request!</p>
+
</div>
{{ end }}
</div>
</div>
+6
appview/pages/templates/repo/pipelines/workflow.html
···
{{ block "logs" . }} {{ end }}
</div>
</section>
+
{{ template "fragments/workflow-timers" }}
{{ end }}
{{ define "sidebar" }}
···
hx-ext="ws"
ws-connect="/{{ $.RepoInfo.FullName }}/pipelines/{{ .Pipeline.Id }}/workflow/{{ .Workflow }}/logs">
<div id="lines" class="flex flex-col gap-2">
+
<div class="text-base text-gray-500 flex items-center justify-center italic p-12 only:flex hidden border border-gray-200 dark:border-gray-700 rounded">
+
<span class="flex items-center gap-2">
+
{{ i "triangle-alert" "size-4" }} No logs for this workflow
+
</span>
+
</div>
</div>
</div>
{{ end }}
+81 -83
appview/pages/templates/repo/pulls/fragments/pullActions.html
···
{{ $isLastRound := eq $roundNumber $lastIdx }}
{{ $isSameRepoBranch := .Pull.IsBranchBased }}
{{ $isUpToDate := .ResubmitCheck.No }}
-
<div class="relative w-fit">
-
<div id="actions-{{$roundNumber}}" class="flex flex-wrap gap-2">
-
<button
-
hx-get="/{{ .RepoInfo.FullName }}/pulls/{{ .Pull.PullId }}/round/{{ $roundNumber }}/comment"
-
hx-target="#actions-{{$roundNumber}}"
-
hx-swap="outerHtml"
-
class="btn p-2 flex items-center gap-2 no-underline hover:no-underline group">
-
{{ i "message-square-plus" "w-4 h-4" }}
-
<span>comment</span>
-
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
-
</button>
-
{{ if .BranchDeleteStatus }}
-
<button
-
hx-delete="/{{ .BranchDeleteStatus.Repo.Did }}/{{ .BranchDeleteStatus.Repo.Name }}/branches"
-
hx-vals='{"branch": "{{ .BranchDeleteStatus.Branch }}" }'
-
hx-swap="none"
-
class="btn p-2 flex items-center gap-2 no-underline hover:no-underline group text-red-500 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300">
-
{{ i "git-branch" "w-4 h-4" }}
-
<span>delete branch</span>
-
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
-
</button>
-
{{ end }}
-
{{ if and $isPushAllowed $isOpen $isLastRound }}
-
{{ $disabled := "" }}
-
{{ if $isConflicted }}
-
{{ $disabled = "disabled" }}
-
{{ end }}
-
<button
-
hx-post="/{{ .RepoInfo.FullName }}/pulls/{{ .Pull.PullId }}/merge"
-
hx-swap="none"
-
hx-confirm="Are you sure you want to merge pull #{{ .Pull.PullId }} into the `{{ .Pull.TargetBranch }}` branch?"
-
class="btn p-2 flex items-center gap-2 group" {{ $disabled }}>
-
{{ i "git-merge" "w-4 h-4" }}
-
<span>merge{{if $stackCount}} {{$stackCount}}{{end}}</span>
-
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
-
</button>
-
{{ end }}
+
<div id="actions-{{$roundNumber}}" class="flex flex-wrap gap-2 relative">
+
<button
+
hx-get="/{{ .RepoInfo.FullName }}/pulls/{{ .Pull.PullId }}/round/{{ $roundNumber }}/comment"
+
hx-target="#actions-{{$roundNumber}}"
+
hx-swap="outerHtml"
+
class="btn p-2 flex items-center gap-2 no-underline hover:no-underline group">
+
{{ i "message-square-plus" "w-4 h-4" }}
+
<span>comment</span>
+
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
+
</button>
+
{{ if .BranchDeleteStatus }}
+
<button
+
hx-delete="/{{ .BranchDeleteStatus.Repo.Did }}/{{ .BranchDeleteStatus.Repo.Name }}/branches"
+
hx-vals='{"branch": "{{ .BranchDeleteStatus.Branch }}" }'
+
hx-swap="none"
+
class="btn p-2 flex items-center gap-2 no-underline hover:no-underline group text-red-500 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300">
+
{{ i "git-branch" "w-4 h-4" }}
+
<span>delete branch</span>
+
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
+
</button>
+
{{ end }}
+
{{ if and $isPushAllowed $isOpen $isLastRound }}
+
{{ $disabled := "" }}
+
{{ if $isConflicted }}
+
{{ $disabled = "disabled" }}
+
{{ end }}
+
<button
+
hx-post="/{{ .RepoInfo.FullName }}/pulls/{{ .Pull.PullId }}/merge"
+
hx-swap="none"
+
hx-confirm="Are you sure you want to merge pull #{{ .Pull.PullId }} into the `{{ .Pull.TargetBranch }}` branch?"
+
class="btn p-2 flex items-center gap-2 group" {{ $disabled }}>
+
{{ i "git-merge" "w-4 h-4" }}
+
<span>merge{{if $stackCount}} {{$stackCount}}{{end}}</span>
+
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
+
</button>
+
{{ end }}
-
{{ if and $isPullAuthor $isOpen $isLastRound }}
-
{{ $disabled := "" }}
-
{{ if $isUpToDate }}
-
{{ $disabled = "disabled" }}
+
{{ if and $isPullAuthor $isOpen $isLastRound }}
+
{{ $disabled := "" }}
+
{{ if $isUpToDate }}
+
{{ $disabled = "disabled" }}
+
{{ end }}
+
<button id="resubmitBtn"
+
{{ if not .Pull.IsPatchBased }}
+
hx-post="/{{ .RepoInfo.FullName }}/pulls/{{ .Pull.PullId }}/resubmit"
+
{{ else }}
+
hx-get="/{{ .RepoInfo.FullName }}/pulls/{{ .Pull.PullId }}/resubmit"
+
hx-target="#actions-{{$roundNumber}}"
+
hx-swap="outerHtml"
{{ end }}
-
<button id="resubmitBtn"
-
{{ if not .Pull.IsPatchBased }}
-
hx-post="/{{ .RepoInfo.FullName }}/pulls/{{ .Pull.PullId }}/resubmit"
-
{{ else }}
-
hx-get="/{{ .RepoInfo.FullName }}/pulls/{{ .Pull.PullId }}/resubmit"
-
hx-target="#actions-{{$roundNumber}}"
-
hx-swap="outerHtml"
-
{{ end }}
-
hx-disabled-elt="#resubmitBtn"
-
class="btn p-2 flex items-center gap-2 disabled:opacity-50 disabled:cursor-not-allowed group" {{ $disabled }}
+
hx-disabled-elt="#resubmitBtn"
+
class="btn p-2 flex items-center gap-2 disabled:opacity-50 disabled:cursor-not-allowed group" {{ $disabled }}
-
{{ if $disabled }}
-
title="Update this branch to resubmit this pull request"
-
{{ else }}
-
title="Resubmit this pull request"
-
{{ end }}
-
>
-
{{ i "rotate-ccw" "w-4 h-4" }}
-
<span>resubmit</span>
-
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
-
</button>
-
{{ end }}
+
{{ if $disabled }}
+
title="Update this branch to resubmit this pull request"
+
{{ else }}
+
title="Resubmit this pull request"
+
{{ end }}
+
>
+
{{ i "rotate-ccw" "w-4 h-4" }}
+
<span>resubmit</span>
+
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
+
</button>
+
{{ end }}
-
{{ if and (or $isPullAuthor $isPushAllowed) $isOpen $isLastRound }}
-
<button
-
hx-post="/{{ .RepoInfo.FullName }}/pulls/{{ .Pull.PullId }}/close"
-
hx-swap="none"
-
class="btn p-2 flex items-center gap-2 group">
-
{{ i "ban" "w-4 h-4" }}
-
<span>close</span>
-
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
-
</button>
-
{{ end }}
+
{{ if and (or $isPullAuthor $isPushAllowed) $isOpen $isLastRound }}
+
<button
+
hx-post="/{{ .RepoInfo.FullName }}/pulls/{{ .Pull.PullId }}/close"
+
hx-swap="none"
+
class="btn p-2 flex items-center gap-2 group">
+
{{ i "ban" "w-4 h-4" }}
+
<span>close</span>
+
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
+
</button>
+
{{ end }}
-
{{ if and (or $isPullAuthor $isPushAllowed) $isClosed $isLastRound }}
-
<button
-
hx-post="/{{ .RepoInfo.FullName }}/pulls/{{ .Pull.PullId }}/reopen"
-
hx-swap="none"
-
class="btn p-2 flex items-center gap-2 group">
-
{{ i "refresh-ccw-dot" "w-4 h-4" }}
-
<span>reopen</span>
-
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
-
</button>
-
{{ end }}
-
</div>
+
{{ if and (or $isPullAuthor $isPushAllowed) $isClosed $isLastRound }}
+
<button
+
hx-post="/{{ .RepoInfo.FullName }}/pulls/{{ .Pull.PullId }}/reopen"
+
hx-swap="none"
+
class="btn p-2 flex items-center gap-2 group">
+
{{ i "refresh-ccw-dot" "w-4 h-4" }}
+
<span>reopen</span>
+
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
+
</button>
+
{{ end }}
</div>
{{ end }}
+1 -1
appview/pages/templates/repo/pulls/fragments/pullHeader.html
···
"Kind" $kind
"Count" $reactionData.Count
"IsReacted" (index $.UserReacted $kind)
-
"ThreadAt" $.Pull.PullAt
+
"ThreadAt" $.Pull.AtUri
"Users" $reactionData.Users)
}}
{{ end }}
+1 -1
appview/pages/templates/repo/pulls/patch.html
···
{{ end }}
{{ define "contentAfter" }}
-
{{ template "repo/fragments/diff" (list .RepoInfo.FullName .Diff .DiffOpts) }}
+
{{ template "repo/fragments/diff" (list .Diff .DiffOpts) }}
{{end}}
{{ define "contentAfterLeft" }}
+5 -1
appview/pages/templates/repo/pulls/pull.html
···
{{ template "repo/fragments/labelPanel"
(dict "RepoInfo" $.RepoInfo
"Defs" $.LabelDefs
-
"Subject" $.Pull.PullAt
+
"Subject" $.Pull.AtUri
"State" $.Pull.Labels) }}
{{ template "repo/fragments/participants" $.Pull.Participants }}
+
{{ template "repo/fragments/backlinks"
+
(dict "RepoInfo" $.RepoInfo
+
"Backlinks" $.Backlinks) }}
+
{{ template "repo/fragments/externalLinkPanel" $.Pull.AtUri }}
</div>
</div>
{{ end }}
+61 -31
appview/pages/templates/repo/pulls/pulls.html
···
{{ end }}
{{ define "repoContent" }}
-
<div class="flex justify-between items-center">
-
<div class="flex gap-4">
-
<a
-
href="?state=open"
-
class="flex items-center gap-2 {{ if .FilteringBy.IsOpen }}font-bold {{ else }}text-gray-500 dark:text-gray-400{{ end }}"
-
>
-
{{ i "git-pull-request" "w-4 h-4" }}
-
<span>{{ .RepoInfo.Stats.PullCount.Open }} open</span>
-
</a>
-
<a
-
href="?state=merged"
-
class="flex items-center gap-2 {{ if .FilteringBy.IsMerged }}font-bold {{ else }}text-gray-500 dark:text-gray-400{{ end }}"
-
>
-
{{ i "git-merge" "w-4 h-4" }}
-
<span>{{ .RepoInfo.Stats.PullCount.Merged }} merged</span>
-
</a>
-
<a
-
href="?state=closed"
-
class="flex items-center gap-2 {{ if .FilteringBy.IsClosed }}font-bold {{ else }}text-gray-500 dark:text-gray-400{{ end }}"
-
>
-
{{ i "ban" "w-4 h-4" }}
-
<span>{{ .RepoInfo.Stats.PullCount.Closed }} closed</span>
-
</a>
-
</div>
+
{{ $active := "closed" }}
+
{{ if .FilteringBy.IsOpen }}
+
{{ $active = "open" }}
+
{{ else if .FilteringBy.IsMerged }}
+
{{ $active = "merged" }}
+
{{ end }}
+
{{ $open :=
+
(dict
+
"Key" "open"
+
"Value" "open"
+
"Icon" "git-pull-request"
+
"Meta" (string .RepoInfo.Stats.PullCount.Open)) }}
+
{{ $merged :=
+
(dict
+
"Key" "merged"
+
"Value" "merged"
+
"Icon" "git-merge"
+
"Meta" (string .RepoInfo.Stats.PullCount.Merged)) }}
+
{{ $closed :=
+
(dict
+
"Key" "closed"
+
"Value" "closed"
+
"Icon" "ban"
+
"Meta" (string .RepoInfo.Stats.PullCount.Closed)) }}
+
{{ $values := list $open $merged $closed }}
+
<div class="grid gap-2 grid-cols-[auto_1fr_auto] grid-row-2">
+
<form class="flex relative col-span-3 sm:col-span-1 sm:col-start-2" method="GET">
+
<input type="hidden" name="state" value="{{ .FilteringBy.String }}">
+
<div class="flex-1 flex relative">
+
<input
+
id="search-q"
+
class="flex-1 py-1 pl-2 pr-10 mr-[-1px] rounded-r-none focus:border-0 focus:outline-none focus:ring focus:ring-blue-400 ring-inset peer"
+
type="text"
+
name="q"
+
value="{{ .FilterQuery }}"
+
placeholder=" "
+
>
<a
-
href="/{{ .RepoInfo.FullName }}/pulls/new"
-
class="btn-create text-sm flex items-center gap-2 no-underline hover:no-underline hover:text-white"
+
href="?state={{ .FilteringBy.String }}"
+
class="absolute right-3 top-1/2 -translate-y-1/2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300 hidden peer-[:not(:placeholder-shown)]:block"
>
-
{{ i "git-pull-request-create" "w-4 h-4" }}
-
<span>new</span>
+
{{ i "x" "w-4 h-4" }}
</a>
+
</div>
+
<button
+
type="submit"
+
class="p-2 text-gray-400 border rounded-r border-gray-400 dark:border-gray-600"
+
>
+
{{ i "search" "w-4 h-4" }}
+
</button>
+
</form>
+
<div class="sm:row-start-1">
+
{{ template "fragments/tabSelector" (dict "Name" "state" "Values" $values "Active" $active "Include" "#search-q") }}
</div>
-
<div class="error" id="pulls"></div>
+
<a
+
href="/{{ .RepoInfo.FullName }}/pulls/new"
+
class="col-start-3 btn-create text-sm flex items-center gap-2 no-underline hover:no-underline hover:text-white"
+
>
+
{{ i "git-pull-request-create" "w-4 h-4" }}
+
<span>new</span>
+
</a>
+
</div>
+
<div class="error" id="pulls"></div>
{{ end }}
{{ define "repoAfter" }}
···
{{ i "chevrons-down-up" "w-4 h-4" }} hide {{ len $otherPulls }} pull{{$s}} in this stack
</div>
</summary>
-
{{ block "pullList" (list $otherPulls $) }} {{ end }}
+
{{ block "stackedPullList" (list $otherPulls $) }} {{ end }}
</details>
{{ end }}
{{ end }}
···
</div>
{{ end }}
-
{{ define "pullList" }}
+
{{ define "stackedPullList" }}
{{ $list := index . 0 }}
{{ $root := index . 1 }}
<div class="grid grid-cols-1 rounded-b border-b border-t border-gray-200 dark:border-gray-900 divide-y divide-gray-200 dark:divide-gray-900">
+22 -12
appview/pages/templates/repo/settings/access.html
···
{{ template "addCollaboratorButton" . }}
{{ end }}
{{ range .Collaborators }}
+
{{ $handle := resolve .Did }}
<div class="border border-gray-200 dark:border-gray-700 rounded p-4">
<div class="flex items-center gap-3">
<img
-
src="{{ fullAvatar .Handle }}"
-
alt="{{ .Handle }}"
+
src="{{ fullAvatar $handle }}"
+
alt="{{ $handle }}"
class="rounded-full h-10 w-10 border border-gray-300 dark:border-gray-600 flex-shrink-0"/>
<div class="flex-1 min-w-0">
-
<a href="/{{ .Handle }}" class="block truncate">
-
{{ didOrHandle .Did .Handle }}
+
<a href="/{{ $handle }}" class="block truncate">
+
{{ $handle }}
</a>
<p class="text-sm text-gray-500 dark:text-gray-400">{{ .Role }}</p>
</div>
···
<div
id="add-collaborator-modal"
popover
-
class="bg-white w-full md:w-96 dark:bg-gray-800 p-4 rounded border border-gray-200 dark:border-gray-700 drop-shadow dark:text-white backdrop:bg-gray-400/50 dark:backdrop:bg-gray-800/50">
+
class="
+
bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700
+
dark:text-white backdrop:bg-gray-400/50 dark:backdrop:bg-gray-800/50
+
w-full md:w-96 p-4 rounded drop-shadow overflow-visible">
{{ template "addCollaboratorModal" . }}
</div>
{{ end }}
···
ADD COLLABORATOR
</label>
<p class="text-sm text-gray-500 dark:text-gray-400">Collaborators can push to this repository.</p>
-
<input
-
type="text"
-
id="add-collaborator"
-
name="collaborator"
-
required
-
placeholder="@foo.bsky.social"
-
/>
+
<actor-typeahead>
+
<input
+
autocapitalize="none"
+
autocorrect="off"
+
autocomplete="off"
+
type="text"
+
id="add-collaborator"
+
name="collaborator"
+
required
+
placeholder="user.tngl.sh"
+
class="w-full"
+
/>
+
</actor-typeahead>
<div class="flex gap-2 pt-2">
<button
type="button"
+47
appview/pages/templates/repo/settings/general.html
···
{{ template "repo/settings/fragments/sidebar" . }}
</div>
<div class="col-span-1 md:col-span-3 flex flex-col gap-6 p-2">
+
{{ template "baseSettings" . }}
{{ template "branchSettings" . }}
{{ template "defaultLabelSettings" . }}
{{ template "customLabelSettings" . }}
···
<div id="operation-error" class="text-red-500 dark:text-red-400"></div>
</div>
</section>
+
{{ end }}
+
+
{{ define "baseSettings" }}
+
<form hx-put="/{{ $.RepoInfo.FullName }}/settings/base" hx-swap="none">
+
<fieldset
+
class=""
+
{{ if not .RepoInfo.Roles.IsOwner }}disabled{{ end }}
+
>
+
<h2 class="text-sm pb-2 uppercase font-bold">Description</h2>
+
<textarea
+
rows="3"
+
class="w-full mb-2"
+
id="base-form-description"
+
name="description"
+
>{{ .RepoInfo.Description }}</textarea>
+
<h2 class="text-sm pb-2 uppercase font-bold">Website URL</h2>
+
<input
+
type="text"
+
class="w-full mb-2"
+
id="base-form-website"
+
name="website"
+
value="{{ .RepoInfo.Website }}"
+
>
+
<h2 class="text-sm pb-2 uppercase font-bold">Topics</h2>
+
<p class="text-gray-500 dark:text-gray-400">
+
List of topics separated by spaces.
+
</p>
+
<textarea
+
rows="2"
+
class="w-full my-2"
+
id="base-form-topics"
+
name="topics"
+
>{{ range $topic := .RepoInfo.Topics }}{{ $topic }} {{ end }}</textarea>
+
<div id="repo-base-settings-error" class="text-red-500 dark:text-red-400"></div>
+
<div class="flex justify-end pt-2">
+
<button
+
type="submit"
+
class="btn-create flex items-center gap-2 group"
+
>
+
{{ i "save" "w-4 h-4" }}
+
save
+
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
+
</button>
+
</div>
+
</fieldset>
+
</form>
{{ end }}
{{ define "branchSettings" }}
+8
appview/pages/templates/repo/tree.html
···
{{ $icon := "folder" }}
{{ $iconStyle := "size-4 fill-current" }}
+
{{ if .IsSubmodule }}
+
{{ $link = printf "/%s/%s/%s/%s/%s" $.RepoInfo.FullName "blob" (urlquery $.Ref) $.TreePath .Name }}
+
{{ $icon = "folder-input" }}
+
{{ $iconStyle = "size-4" }}
+
{{ end }}
+
{{ if .IsFile }}
+
{{ $link = printf "/%s/%s/%s/%s/%s" $.RepoInfo.FullName "blob" (urlquery $.Ref) $.TreePath .Name }}
{{ $icon = "file" }}
{{ $iconStyle = "size-4" }}
{{ end }}
+
<a href="{{ $link }}" class="{{ $linkstyle }}">
<div class="flex items-center gap-2">
{{ i $icon $iconStyle "flex-shrink-0" }}
+22 -6
appview/pages/templates/spindles/dashboard.html
···
-
{{ define "title" }}{{.Spindle.Instance}} &middot; spindles{{ end }}
+
{{ define "title" }}{{.Spindle.Instance}} &middot; {{ .Tab }} settings{{ end }}
{{ define "content" }}
-
<div class="px-6 py-4">
+
<div class="p-6">
+
<p class="text-xl font-bold dark:text-white">Settings</p>
+
</div>
+
<div class="bg-white dark:bg-gray-800 p-6 rounded relative w-full mx-auto drop-shadow-sm dark:text-white">
+
<section class="w-full grid grid-cols-1 md:grid-cols-4 gap-6">
+
<div class="col-span-1">
+
{{ template "user/settings/fragments/sidebar" . }}
+
</div>
+
<div class="col-span-1 md:col-span-3 flex flex-col gap-6">
+
{{ template "spindleDash" . }}
+
</div>
+
</section>
+
</div>
+
{{ end }}
+
+
{{ define "spindleDash" }}
+
<div>
<div class="flex justify-between items-center">
-
<h1 class="text-xl font-bold dark:text-white">{{ .Spindle.Instance }}</h1>
+
<h2 class="text-sm pb-2 uppercase font-bold">{{ .Tab }} &middot; {{ .Spindle.Instance }}</h2>
<div id="right-side" class="flex gap-2">
{{ $style := "px-2 py-1 rounded flex items-center flex-shrink-0 gap-2" }}
{{ $isOwner := and .LoggedInUser (eq .LoggedInUser.Did .Spindle.Owner) }}
···
<button
class="btn text-red-500 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300 gap-2 group"
title="Delete spindle"
-
hx-delete="/spindles/{{ .Instance }}"
+
hx-delete="/settings/spindles/{{ .Instance }}"
hx-swap="outerHTML"
hx-confirm="Are you sure you want to delete the spindle '{{ .Instance }}'?"
hx-headers='{"shouldRedirect": "true"}'
···
<button
class="btn gap-2 group"
title="Retry spindle verification"
-
hx-post="/spindles/{{ .Instance }}/retry"
+
hx-post="/settings/spindles/{{ .Instance }}/retry"
hx-swap="none"
hx-headers='{"shouldRefresh": "true"}'
>
···
<button
class="btn text-red-500 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300 gap-2 group"
title="Remove member"
-
hx-post="/spindles/{{ $root.Spindle.Instance }}/remove"
+
hx-post="/settings/spindles/{{ $root.Spindle.Instance }}/remove"
hx-swap="none"
hx-vals='{"member": "{{$member}}" }'
hx-confirm="Are you sure you want to remove {{ resolve $member }} from this instance?"
+17 -9
appview/pages/templates/spindles/fragments/addMemberModal.html
···
<div
id="add-member-{{ .Instance }}"
popover
-
class="bg-white w-full md:w-96 dark:bg-gray-800 p-4 rounded border border-gray-200 dark:border-gray-700 drop-shadow dark:text-white backdrop:bg-gray-400/50 dark:backdrop:bg-gray-800/50">
+
class="
+
bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 dark:text-white backdrop:bg-gray-400/50 dark:backdrop:bg-gray-800/50
+
w-full md:w-96 p-4 rounded drop-shadow overflow-visible">
{{ block "addSpindleMemberPopover" . }} {{ end }}
</div>
{{ end }}
{{ define "addSpindleMemberPopover" }}
<form
-
hx-post="/spindles/{{ .Instance }}/add"
+
hx-post="/settings/spindles/{{ .Instance }}/add"
hx-indicator="#spinner"
hx-swap="none"
class="flex flex-col gap-2"
···
ADD MEMBER
</label>
<p class="text-sm text-gray-500 dark:text-gray-400">Members can register repositories and run workflows on this spindle.</p>
-
<input
-
type="text"
-
id="member-did-{{ .Id }}"
-
name="member"
-
required
-
placeholder="@foo.bsky.social"
-
/>
+
<actor-typeahead>
+
<input
+
autocapitalize="none"
+
autocorrect="off"
+
autocomplete="off"
+
type="text"
+
id="member-did-{{ .Id }}"
+
name="member"
+
required
+
placeholder="user.tngl.sh"
+
class="w-full"
+
/>
+
</actor-typeahead>
<div class="flex gap-2 pt-2">
<button
type="button"
+3 -3
appview/pages/templates/spindles/fragments/spindleListing.html
···
{{ define "spindleLeftSide" }}
{{ if .Verified }}
-
<a href="/spindles/{{ .Instance }}" class="hover:no-underline flex items-center gap-2 min-w-0 max-w-[60%]">
+
<a href="/settings/spindles/{{ .Instance }}" class="hover:no-underline flex items-center gap-2 min-w-0 max-w-[60%]">
{{ i "hard-drive" "w-4 h-4" }}
<span class="hover:underline">
{{ .Instance }}
···
<button
class="btn text-red-500 hover:text-red-700 dark:text-red-400 dark:hover:text-red-300 gap-2 group"
title="Delete spindle"
-
hx-delete="/spindles/{{ .Instance }}"
+
hx-delete="/settings/spindles/{{ .Instance }}"
hx-swap="outerHTML"
hx-target="#spindle-{{.Id}}"
hx-confirm="Are you sure you want to delete the spindle '{{ .Instance }}'?"
···
<button
class="btn gap-2 group"
title="Retry spindle verification"
-
hx-post="/spindles/{{ .Instance }}/retry"
+
hx-post="/settings/spindles/{{ .Instance }}/retry"
hx-swap="none"
hx-target="#spindle-{{.Id}}"
>
+90 -59
appview/pages/templates/spindles/index.html
···
-
{{ define "title" }}spindles{{ end }}
+
{{ define "title" }}{{ .Tab }} settings{{ end }}
{{ define "content" }}
-
<div class="px-6 py-4 flex items-center justify-between gap-4 align-bottom">
-
<h1 class="text-xl font-bold dark:text-white">Spindles</h1>
-
<span class="flex items-center gap-1">
-
{{ i "book" "w-3 h-3" }}
-
<a href="https://tangled.org/@tangled.org/core/blob/master/docs/spindle/hosting.md">docs</a>
-
</span>
+
<div class="p-6">
+
<p class="text-xl font-bold dark:text-white">Settings</p>
+
</div>
+
<div class="bg-white dark:bg-gray-800 p-6 rounded relative w-full mx-auto drop-shadow-sm dark:text-white">
+
<section class="w-full grid grid-cols-1 md:grid-cols-4 gap-6">
+
<div class="col-span-1">
+
{{ template "user/settings/fragments/sidebar" . }}
+
</div>
+
<div class="col-span-1 md:col-span-3 flex flex-col gap-6">
+
{{ template "spindleList" . }}
+
</div>
+
</section>
+
</div>
+
{{ end }}
+
+
{{ define "spindleList" }}
+
<div class="grid grid-cols-1 md:grid-cols-3 gap-4 items-center">
+
<div class="col-span-1 md:col-span-2">
+
<h2 class="text-sm pb-2 uppercase font-bold">Spindle</h2>
+
{{ block "about" . }} {{ end }}
+
</div>
+
<div class="col-span-1 md:col-span-1 md:justify-self-end">
+
{{ template "docsButton" . }}
+
</div>
</div>
-
<section class="bg-white dark:bg-gray-800 p-6 rounded relative w-full mx-auto drop-shadow-sm dark:text-white">
+
<section>
<div class="flex flex-col gap-6">
-
{{ block "about" . }} {{ end }}
{{ block "list" . }} {{ end }}
{{ block "register" . }} {{ end }}
</div>
···
{{ define "about" }}
<section class="rounded flex items-center gap-2">
-
<p class="text-gray-500 dark:text-gray-400">
-
Spindles are small CI runners.
-
</p>
+
<p class="text-gray-500 dark:text-gray-400">
+
Spindles are small CI runners.
+
</p>
</section>
{{ end }}
{{ define "list" }}
-
<section class="rounded w-full flex flex-col gap-2">
-
<h2 class="text-sm font-bold py-2 uppercase dark:text-gray-300">your spindles</h2>
-
<div class="flex flex-col rounded border border-gray-200 dark:border-gray-700 w-full">
-
{{ range $spindle := .Spindles }}
-
{{ template "spindles/fragments/spindleListing" . }}
-
{{ else }}
-
<div class="flex items-center justify-center p-2 border-b border-gray-200 dark:border-gray-700 text-gray-500">
-
no spindles registered yet
-
</div>
-
{{ end }}
+
<section class="rounded w-full flex flex-col gap-2">
+
<h2 class="text-sm font-bold py-2 uppercase dark:text-gray-300">your spindles</h2>
+
<div class="flex flex-col rounded border border-gray-200 dark:border-gray-700 w-full">
+
{{ range $spindle := .Spindles }}
+
{{ template "spindles/fragments/spindleListing" . }}
+
{{ else }}
+
<div class="flex items-center justify-center p-2 border-b border-gray-200 dark:border-gray-700 text-gray-500">
+
no spindles registered yet
</div>
-
<div id="operation-error" class="text-red-500 dark:text-red-400"></div>
-
</section>
+
{{ end }}
+
</div>
+
<div id="operation-error" class="text-red-500 dark:text-red-400"></div>
+
</section>
{{ end }}
{{ define "register" }}
-
<section class="rounded w-full lg:w-fit flex flex-col gap-2">
-
<h2 class="text-sm font-bold py-2 uppercase dark:text-gray-300">register a spindle</h2>
-
<p class="mb-2 dark:text-gray-300">Enter the hostname of your spindle to get started.</p>
-
<form
-
hx-post="/spindles/register"
-
class="max-w-2xl mb-2 space-y-4"
-
hx-indicator="#register-button"
-
hx-swap="none"
-
>
-
<div class="flex gap-2">
-
<input
-
type="text"
-
id="instance"
-
name="instance"
-
placeholder="spindle.example.com"
-
required
-
class="flex-1 w-full dark:bg-gray-700 dark:text-white dark:border-gray-600 dark:placeholder-gray-400 px-3 py-2 border rounded"
-
>
-
<button
-
type="submit"
-
id="register-button"
-
class="btn rounded flex items-center py-2 dark:bg-gray-700 dark:text-white dark:hover:bg-gray-600 group"
-
>
-
<span class="inline-flex items-center gap-2">
-
{{ i "plus" "w-4 h-4" }}
-
register
-
</span>
-
<span class="pl-2 hidden group-[.htmx-request]:inline">
-
{{ i "loader-circle" "w-4 h-4 animate-spin" }}
-
</span>
-
</button>
-
</div>
+
<section class="rounded w-full lg:w-fit flex flex-col gap-2">
+
<h2 class="text-sm font-bold py-2 uppercase dark:text-gray-300">register a spindle</h2>
+
<p class="mb-2 dark:text-gray-300">Enter the hostname of your spindle to get started.</p>
+
<form
+
hx-post="/settings/spindles/register"
+
class="max-w-2xl mb-2 space-y-4"
+
hx-indicator="#register-button"
+
hx-swap="none"
+
>
+
<div class="flex gap-2">
+
<input
+
type="text"
+
id="instance"
+
name="instance"
+
placeholder="spindle.example.com"
+
required
+
class="flex-1 w-full dark:bg-gray-700 dark:text-white dark:border-gray-600 dark:placeholder-gray-400 px-3 py-2 border rounded"
+
>
+
<button
+
type="submit"
+
id="register-button"
+
class="btn rounded flex items-center py-2 dark:bg-gray-700 dark:text-white dark:hover:bg-gray-600 group"
+
>
+
<span class="inline-flex items-center gap-2">
+
{{ i "plus" "w-4 h-4" }}
+
register
+
</span>
+
<span class="pl-2 hidden group-[.htmx-request]:inline">
+
{{ i "loader-circle" "w-4 h-4 animate-spin" }}
+
</span>
+
</button>
+
</div>
-
<div id="register-error" class="dark:text-red-400"></div>
-
</form>
+
<div id="register-error" class="dark:text-red-400"></div>
+
</form>
+
+
</section>
+
{{ end }}
-
</section>
+
{{ define "docsButton" }}
+
<a
+
class="btn flex items-center gap-2"
+
href="https://tangled.org/@tangled.org/core/blob/master/docs/spindle/hosting.md">
+
{{ i "book" "size-4" }}
+
docs
+
</a>
+
<div
+
id="add-email-modal"
+
popover
+
class="bg-white w-full md:w-96 dark:bg-gray-800 p-4 rounded border border-gray-200 dark:border-gray-700 drop-shadow dark:text-white backdrop:bg-gray-400/50 dark:backdrop:bg-gray-800/50">
+
</div>
{{ end }}
+6 -5
appview/pages/templates/strings/dashboard.html
···
-
{{ define "title" }}strings by {{ or .Card.UserHandle .Card.UserDid }}{{ end }}
+
{{ define "title" }}strings by {{ resolve .Card.UserDid }}{{ end }}
{{ define "extrameta" }}
-
<meta property="og:title" content="{{ or .Card.UserHandle .Card.UserDid }}" />
+
{{ $handle := resolve .Card.UserDid }}
+
<meta property="og:title" content="{{ $handle }}" />
<meta property="og:type" content="profile" />
-
<meta property="og:url" content="https://tangled.org/{{ or .Card.UserHandle .Card.UserDid }}" />
-
<meta property="og:description" content="{{ or .Card.Profile.Description .Card.UserHandle .Card.UserDid }}" />
+
<meta property="og:url" content="https://tangled.org/{{ $handle }}" />
+
<meta property="og:description" content="{{ or .Card.Profile.Description $handle }}" />
{{ end }}
···
{{ $s := index . 1 }}
<div class="py-4 px-6 drop-shadow-sm rounded bg-white dark:bg-gray-800">
<div class="font-medium dark:text-white flex gap-2 items-center">
-
<a href="/strings/{{ or $root.Card.UserHandle $root.Card.UserDid }}/{{ $s.Rkey }}">{{ $s.Filename }}</a>
+
<a href="/strings/{{ resolve $root.Card.UserDid }}/{{ $s.Rkey }}">{{ $s.Filename }}</a>
</div>
{{ with $s.Description }}
<div class="text-gray-600 dark:text-gray-300 text-sm">
+14 -10
appview/pages/templates/strings/string.html
···
-
{{ define "title" }}{{ .String.Filename }} · by {{ didOrHandle .Owner.DID.String .Owner.Handle.String }}{{ end }}
+
{{ define "title" }}{{ .String.Filename }} · by {{ resolve .Owner.DID.String }}{{ end }}
{{ define "extrameta" }}
-
{{ $ownerId := didOrHandle .Owner.DID.String .Owner.Handle.String }}
+
{{ $ownerId := resolve .Owner.DID.String }}
<meta property="og:title" content="{{ .String.Filename }} · by {{ $ownerId }}" />
<meta property="og:type" content="object" />
<meta property="og:url" content="https://tangled.org/strings/{{ $ownerId }}/{{ .String.Rkey }}" />
···
{{ end }}
{{ define "content" }}
-
{{ $ownerId := didOrHandle .Owner.DID.String .Owner.Handle.String }}
+
{{ $ownerId := resolve .Owner.DID.String }}
<section id="string-header" class="mb-4 py-2 px-6 dark:text-white">
<div class="text-lg flex items-center justify-between">
<div>
···
<span class="select-none">/</span>
<a href="/strings/{{ $ownerId }}/{{ .String.Rkey }}" class="font-bold">{{ .String.Filename }}</a>
</div>
-
{{ if and .LoggedInUser (eq .LoggedInUser.Did .String.Did) }}
-
<div class="flex gap-2 text-base">
+
<div class="flex gap-2 text-base">
+
{{ if and .LoggedInUser (eq .LoggedInUser.Did .String.Did) }}
<a class="btn flex items-center gap-2 no-underline hover:no-underline p-2 group"
hx-boost="true"
href="/strings/{{ .String.Did }}/{{ .String.Rkey }}/edit">
···
<span class="hidden md:inline">delete</span>
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
</button>
-
</div>
-
{{ end }}
+
{{ end }}
+
{{ template "fragments/starBtn"
+
(dict "SubjectAt" .String.AtUri
+
"IsStarred" .IsStarred
+
"StarCount" .StarCount) }}
+
</div>
</div>
<span>
{{ with .String.Description }}
···
</span>
</section>
<section class="bg-white dark:bg-gray-800 px-6 py-4 rounded relative w-full dark:text-white">
-
<div class="flex justify-between items-center text-gray-500 dark:text-gray-400 text-sm md:text-base pb-2 mb-3 text-base border-b border-gray-200 dark:border-gray-700">
+
<div class="flex flex-col md:flex-row md:justify-between md:items-center text-gray-500 dark:text-gray-400 text-sm md:text-base pb-2 mb-3 text-base border-b border-gray-200 dark:border-gray-700">
<span>
{{ .String.Filename }}
<span class="select-none px-1 md:px-2 [&:before]:content-['·']"></span>
···
</div>
<div class="overflow-x-auto overflow-y-hidden relative">
{{ if .ShowRendered }}
-
<div id="blob-contents" class="prose dark:prose-invert">{{ .RenderedContents }}</div>
+
<div id="blob-contents" class="prose dark:prose-invert">{{ .String.Contents | readme }}</div>
{{ else }}
-
<div id="blob-contents" class="whitespace-pre peer-target:bg-yellow-200 dark:peer-target:bg-yellow-900">{{ .String.Contents | escapeHtml }}</div>
+
<div id="blob-contents" class="whitespace-pre peer-target:bg-yellow-200 dark:peer-target:bg-yellow-900">{{ code .String.Contents .String.Filename | escapeHtml }}</div>
{{ end }}
</div>
{{ template "fragments/multiline-select" }}
+1 -2
appview/pages/templates/timeline/fragments/goodfirstissues.html
···
<a href="/goodfirstissues" class="no-underline hover:no-underline">
<div class="flex items-center justify-between gap-2 bg-purple-200 dark:bg-purple-900 border border-purple-400 dark:border-purple-500 rounded mb-4 py-4 px-6 ">
<div class="flex-1 flex flex-col gap-2">
-
<div class="text-purple-500 dark:text-purple-400">Oct 2025</div>
<p>
-
Make your first contribution to an open-source project this October.
+
Make your first contribution to an open-source project.
<em>good-first-issue</em> helps new contributors find easy ways to
start contributing to open-source projects.
</p>
+2 -2
appview/pages/templates/timeline/fragments/hero.html
···
<h1 class="font-bold text-4xl">tightly-knit<br>social coding.</h1>
<p class="text-lg">
-
tangled is new social-enabled git collaboration platform built on <a class="underline" href="https://atproto.com/">atproto</a>.
+
Tangled is a decentralized Git hosting and collaboration platform.
</p>
<p class="text-lg">
-
we envision a place where developers have complete ownership of their
+
We envision a place where developers have complete ownership of their
code, open source communities can freely self-govern and most
importantly, coding can be social and fun again.
</p>
+5 -5
appview/pages/templates/timeline/fragments/timeline.html
···
<div class="flex flex-col divide-y divide-gray-200 dark:divide-gray-700 border border-gray-200 dark:border-gray-700 rounded-sm">
{{ if .Repo }}
{{ template "timeline/fragments/repoEvent" (list $ .) }}
-
{{ else if .Star }}
+
{{ else if .RepoStar }}
{{ template "timeline/fragments/starEvent" (list $ .) }}
{{ else if .Follow }}
{{ template "timeline/fragments/followEvent" (list $ .) }}
···
<span class="text-gray-700 dark:text-gray-400 text-xs">{{ template "repo/fragments/time" $repo.Created }}</span>
</div>
{{ with $repo }}
-
{{ template "user/fragments/repoCard" (list $root . true true (dict "IsStarred" $event.IsStarred "RepoAt" .RepoAt "Stats" (dict "StarCount" $event.StarCount))) }}
+
{{ template "user/fragments/repoCard" (list $root . true true (dict "IsStarred" $event.IsStarred "SubjectAt" .RepoAt "StarCount" $event.StarCount)) }}
{{ end }}
{{ end }}
{{ define "timeline/fragments/starEvent" }}
{{ $root := index . 0 }}
{{ $event := index . 1 }}
-
{{ $star := $event.Star }}
+
{{ $star := $event.RepoStar }}
{{ with $star }}
-
{{ $starrerHandle := resolve .StarredByDid }}
+
{{ $starrerHandle := resolve .Did }}
{{ $repoOwnerHandle := resolve .Repo.Did }}
<div class="pl-6 py-2 bg-white dark:bg-gray-800 text-gray-600 dark:text-gray-300 flex flex-wrap items-center gap-2 text-sm">
{{ template "user/fragments/picHandleLink" $starrerHandle }}
···
<span class="text-gray-700 dark:text-gray-400 text-xs">{{ template "repo/fragments/time" .Created }}</span>
</div>
{{ with .Repo }}
-
{{ template "user/fragments/repoCard" (list $root . true true (dict "IsStarred" $event.IsStarred "RepoAt" .RepoAt "Stats" (dict "StarCount" $event.StarCount))) }}
+
{{ template "user/fragments/repoCard" (list $root . true true (dict "IsStarred" $event.IsStarred "SubjectAt" .RepoAt "StarCount" $event.StarCount)) }}
{{ end }}
{{ end }}
{{ end }}
+4 -2
appview/pages/templates/user/followers.html
···
-
{{ define "title" }}{{ or .Card.UserHandle .Card.UserDid }} · followers {{ end }}
+
{{ define "title" }}{{ resolve .Card.UserDid }} · followers {{ end }}
{{ define "profileContent" }}
<div id="all-followers" class="md:col-span-8 order-2 md:order-2">
···
"FollowersCount" .FollowersCount
"FollowingCount" .FollowingCount) }}
{{ else }}
-
<p class="px-6 dark:text-white">This user does not have any followers yet.</p>
+
<div class="text-base text-gray-500 flex items-center justify-center italic p-12 border border-gray-200 dark:border-gray-700 rounded">
+
<span>This user does not have any followers yet.</span>
+
</div>
{{ end }}
</div>
{{ end }}
+4 -2
appview/pages/templates/user/following.html
···
-
{{ define "title" }}{{ or .Card.UserHandle .Card.UserDid }} · following {{ end }}
+
{{ define "title" }}{{ resolve .Card.UserDid }} · following {{ end }}
{{ define "profileContent" }}
<div id="all-following" class="md:col-span-8 order-2 md:order-2">
···
"FollowersCount" .FollowersCount
"FollowingCount" .FollowingCount) }}
{{ else }}
-
<p class="px-6 dark:text-white">This user does not follow anyone yet.</p>
+
<div class="text-base text-gray-500 flex items-center justify-center italic p-12 border border-gray-200 dark:border-gray-700 rounded">
+
<span>This user does not follow anyone yet.</span>
+
</div>
{{ end }}
</div>
{{ end }}
+17
appview/pages/templates/user/fragments/editBio.html
···
</div>
<div class="flex flex-col gap-1">
+
<label class="m-0 p-0" for="pronouns">pronouns</label>
+
<div class="flex items-center gap-2 w-full">
+
{{ $pronouns := "" }}
+
{{ if and .Profile .Profile.Pronouns }}
+
{{ $pronouns = .Profile.Pronouns }}
+
{{ end }}
+
<input
+
type="text"
+
class="py-1 px-1 w-full"
+
name="pronouns"
+
placeholder="they/them"
+
value="{{ $pronouns }}"
+
>
+
</div>
+
</div>
+
+
<div class="flex flex-col gap-1">
<label class="m-0 p-0" for="location">location</label>
<div class="flex items-center gap-2 w-full">
{{ $location := "" }}
+1 -1
appview/pages/templates/user/fragments/followCard.html
···
<div class="flex flex-col divide-y divide-gray-200 dark:divide-gray-700 rounded-sm">
<div class="py-4 px-6 drop-shadow-sm rounded bg-white dark:bg-gray-800 flex items-center gap-4">
<div class="flex-shrink-0 max-h-full w-24 h-24">
-
<img class="object-cover rounded-full p-2" src="{{ fullAvatar $userIdent }}" />
+
<img class="object-cover rounded-full p-2" src="{{ fullAvatar $userIdent }}" alt="{{ $userIdent }}" />
</div>
<div class="flex flex-col md:flex-row md:items-center md:justify-between gap-2 w-full">
+20 -7
appview/pages/templates/user/fragments/profileCard.html
···
{{ define "user/fragments/profileCard" }}
-
{{ $userIdent := didOrHandle .UserDid .UserHandle }}
+
{{ $userIdent := resolve .UserDid }}
<div class="grid grid-cols-3 md:grid-cols-1 gap-1 items-center">
<div id="avatar" class="col-span-1 flex justify-center items-center">
<div class="w-3/4 aspect-square relative">
···
class="text-lg font-bold dark:text-white overflow-hidden text-ellipsis whitespace-nowrap">
{{ $userIdent }}
</p>
-
<a href="/{{ $userIdent }}/feed.atom">{{ i "rss" "size-4" }}</a>
+
{{ with .Profile }}
+
{{ if .Pronouns }}
+
<p class="text-gray-500 dark:text-gray-400">{{ .Pronouns }}</p>
+
{{ end }}
+
{{ end }}
</div>
<div class="md:hidden">
···
{{ end }}
</div>
{{ end }}
-
{{ if ne .FollowStatus.String "IsSelf" }}
-
{{ template "user/fragments/follow" . }}
-
{{ else }}
+
+
<div class="flex mt-2 items-center gap-2">
+
{{ if ne .FollowStatus.String "IsSelf" }}
+
{{ template "user/fragments/follow" . }}
+
{{ else }}
<button id="editBtn"
-
class="btn mt-2 w-full flex items-center gap-2 group"
+
class="btn w-full flex items-center gap-2 group"
hx-target="#profile-bio"
hx-get="/profile/edit-bio"
hx-swap="innerHTML">
···
edit
{{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
</button>
-
{{ end }}
+
{{ end }}
+
+
<a class="btn text-sm no-underline hover:no-underline flex items-center gap-2 group"
+
href="/{{ $userIdent }}/feed.atom">
+
{{ i "rss" "size-4" }}
+
</a>
+
</div>
+
</div>
<div id="update-profile" class="text-red-400 dark:text-red-500"></div>
</div>
+2 -1
appview/pages/templates/user/fragments/repoCard.html
···
{{ define "user/fragments/repoCard" }}
+
{{/* root, repo, fullName [,starButton [,starData]] */}}
{{ $root := index . 0 }}
{{ $repo := index . 1 }}
{{ $fullName := index . 2 }}
···
</div>
{{ if and $starButton $root.LoggedInUser }}
<div class="shrink-0">
-
{{ template "repo/fragments/repoStar" $starData }}
+
{{ template "fragments/starBtn" $starData }}
</div>
{{ end }}
</div>
+23 -2
appview/pages/templates/user/login.html
···
<title>login &middot; tangled</title>
</head>
<body class="flex items-center justify-center min-h-screen">
-
<main class="max-w-md px-6 -mt-4">
+
<main class="max-w-md px-7 mt-4">
<h1 class="flex place-content-center text-3xl font-semibold italic dark:text-white" >
{{ template "fragments/logotype" }}
</h1>
···
tightly-knit social coding.
</h2>
<form
-
class="mt-4 max-w-sm mx-auto"
+
class="mt-4"
hx-post="/login"
hx-swap="none"
hx-disabled-elt="#login-button"
···
<div class="flex flex-col">
<label for="handle">handle</label>
<input
+
autocapitalize="none"
+
autocorrect="off"
+
autocomplete="username"
type="text"
id="handle"
name="handle"
···
<span>login</span>
</button>
</form>
+
{{ if .ErrorCode }}
+
<div class="flex gap-2 my-2 bg-red-50 dark:bg-red-900 border border-red-500 rounded drop-shadow-sm px-3 py-2 text-red-500 dark:text-red-300">
+
<span class="py-1">{{ i "circle-alert" "w-4 h-4" }}</span>
+
<div>
+
<h5 class="font-medium">Login error</h5>
+
<p class="text-sm">
+
{{ if eq .ErrorCode "access_denied" }}
+
You have not authorized the app.
+
{{ else if eq .ErrorCode "session" }}
+
Server failed to create user session.
+
{{ else }}
+
Internal server error.
+
{{ end }}
+
Please try again.
+
</p>
+
</div>
+
</div>
+
{{ end }}
<p class="text-sm text-gray-500">
Don't have an account? <a href="/signup" class="underline">Create an account</a> on Tangled now!
</p>
+22 -4
appview/pages/templates/user/overview.html
···
-
{{ define "title" }}{{ or .Card.UserHandle .Card.UserDid }}{{ end }}
+
{{ define "title" }}{{ resolve .Card.UserDid }}{{ end }}
{{ define "profileContent" }}
<div id="all-repos" class="md:col-span-4 order-2 md:order-2">
···
<p class="text-sm font-bold px-2 pb-4 dark:text-white">ACTIVITY</p>
<div class="flex flex-col gap-4 relative">
{{ if .ProfileTimeline.IsEmpty }}
-
<p class="dark:text-white">This user does not have any activity yet.</p>
+
<div class="text-base text-gray-500 flex items-center justify-center italic p-12 border border-gray-200 dark:border-gray-700 rounded">
+
<span class="flex items-center gap-2">
+
This user does not have any activity yet.
+
</span>
+
</div>
{{ end }}
{{ with .ProfileTimeline }}
···
</p>
<div class="flex flex-col gap-1">
+
{{ block "commits" .Commits }} {{ end }}
{{ block "repoEvents" .RepoEvents }} {{ end }}
{{ block "issueEvents" .IssueEvents }} {{ end }}
{{ block "pullEvents" .PullEvents }} {{ end }}
···
{{ end }}
{{ end }}
</div>
+
{{ end }}
+
+
{{ define "commits" }}
+
{{ if . }}
+
<div class="flex flex-wrap items-center gap-1">
+
{{ i "git-commit-horizontal" "size-5" }}
+
created {{ . }} commits
+
</div>
+
{{ end }}
{{ end }}
{{ define "repoEvents" }}
···
{{ define "ownRepos" }}
<div>
<div class="text-sm font-bold px-2 pb-4 dark:text-white flex items-center gap-2">
-
<a href="/@{{ or $.Card.UserHandle $.Card.UserDid }}?tab=repos"
+
<a href="/{{ resolve $.Card.UserDid }}?tab=repos"
class="flex text-black dark:text-white items-center gap-2 no-underline hover:no-underline group">
<span>PINNED REPOS</span>
</a>
···
{{ template "user/fragments/repoCard" (list $ . false) }}
</div>
{{ else }}
-
<p class="dark:text-white">This user does not have any pinned repos.</p>
+
<div class="text-base text-gray-500 flex items-center justify-center italic p-12 border border-gray-200 dark:border-gray-700 rounded">
+
<span class="flex items-center gap-2">
+
This user does not have any pinned repos.
+
</span>
+
</div>
{{ end }}
</div>
</div>
+4 -2
appview/pages/templates/user/repos.html
···
-
{{ define "title" }}{{ or .Card.UserHandle .Card.UserDid }} · repos {{ end }}
+
{{ define "title" }}{{ resolve .Card.UserDid }} · repos {{ end }}
{{ define "profileContent" }}
<div id="all-repos" class="md:col-span-8 order-2 md:order-2">
···
{{ template "user/fragments/repoCard" (list $ . false) }}
</div>
{{ else }}
-
<p class="px-6 dark:text-white">This user does not have any repos yet.</p>
+
<div class="text-base text-gray-500 flex items-center justify-center italic p-12 border border-gray-200 dark:border-gray-700 rounded">
+
<span>This user does not have any repos yet.</span>
+
</div>
{{ end }}
</div>
{{ end }}
+14
appview/pages/templates/user/settings/notifications.html
···
<div class="flex items-center justify-between p-2">
<div class="flex items-center gap-2">
<div class="flex flex-col gap-1">
+
<span class="font-bold">Mentions</span>
+
<div class="flex text-sm items-center gap-1 text-gray-500 dark:text-gray-400">
+
<span>When someone mentions you.</span>
+
</div>
+
</div>
+
</div>
+
<label class="flex items-center gap-2">
+
<input type="checkbox" name="user_mentioned" {{if .Preferences.UserMentioned}}checked{{end}}>
+
</label>
+
</div>
+
+
<div class="flex items-center justify-between p-2">
+
<div class="flex items-center gap-2">
+
<div class="flex flex-col gap-1">
<span class="font-bold">Email notifications</span>
<div class="flex text-sm items-center gap-1 text-gray-500 dark:text-gray-400">
<span>Receive notifications via email in addition to in-app notifications.</span>
+9 -6
appview/pages/templates/user/signup.html
···
page to complete your registration.
</span>
<div class="w-full mt-4 text-center">
-
<div class="cf-turnstile" data-sitekey="{{ .CloudflareSiteKey }}"></div>
+
<div class="cf-turnstile" data-sitekey="{{ .CloudflareSiteKey }}" data-size="flexible"></div>
</div>
<button class="btn text-base w-full my-2 mt-6" type="submit" id="signup-button" tabindex="7" >
<span>join now</span>
</button>
+
<p class="text-sm text-gray-500">
+
Already have an AT Protocol account? <a href="/login" class="underline">Log in to Tangled</a>.
+
</p>
+
+
<p id="signup-msg" class="error w-full"></p>
+
<p class="text-sm text-gray-500 pt-4">
+
By signing up, you agree to our <a href="/terms" class="underline">Terms of Service</a> and <a href="/privacy" class="underline">Privacy Policy</a>.
+
</p>
</form>
-
<p class="text-sm text-gray-500">
-
Already have an AT Protocol account? <a href="/login" class="underline">Login to Tangled</a>.
-
</p>
-
-
<p id="signup-msg" class="error w-full"></p>
</main>
</body>
</html>
+4 -2
appview/pages/templates/user/starred.html
···
-
{{ define "title" }}{{ or .Card.UserHandle .Card.UserDid }} · repos {{ end }}
+
{{ define "title" }}{{ resolve .Card.UserDid }} · repos {{ end }}
{{ define "profileContent" }}
<div id="all-repos" class="md:col-span-8 order-2 md:order-2">
···
{{ template "user/fragments/repoCard" (list $ . true) }}
</div>
{{ else }}
-
<p class="px-6 dark:text-white">This user does not have any starred repos yet.</p>
+
<div class="text-base text-gray-500 flex items-center justify-center italic p-12 border border-gray-200 dark:border-gray-700 rounded">
+
<span>This user does not have any starred repos yet.</span>
+
</div>
{{ end }}
</div>
{{ end }}
+5 -3
appview/pages/templates/user/strings.html
···
-
{{ define "title" }}{{ or .Card.UserHandle .Card.UserDid }} · strings {{ end }}
+
{{ define "title" }}{{ resolve .Card.UserDid }} · strings {{ end }}
{{ define "profileContent" }}
<div id="all-strings" class="md:col-span-8 order-2 md:order-2">
···
{{ template "singleString" (list $ .) }}
</div>
{{ else }}
-
<p class="px-6 dark:text-white">This user does not have any strings yet.</p>
+
<div class="text-base text-gray-500 flex items-center justify-center italic p-12 border border-gray-200 dark:border-gray-700 rounded">
+
<span>This user does not have any strings yet.</span>
+
</div>
{{ end }}
</div>
{{ end }}
···
{{ $s := index . 1 }}
<div class="py-4 px-6 rounded bg-white dark:bg-gray-800">
<div class="font-medium dark:text-white flex gap-2 items-center">
-
<a href="/strings/{{ or $root.Card.UserHandle $root.Card.UserDid }}/{{ $s.Rkey }}">{{ $s.Filename }}</a>
+
<a href="/strings/{{ resolve $root.Card.UserDid }}/{{ $s.Rkey }}">{{ $s.Filename }}</a>
</div>
{{ with $s.Description }}
<div class="text-gray-600 dark:text-gray-300 text-sm">
+46
appview/pagination/page.go
···
package pagination
+
import "context"
+
type Page struct {
Offset int // where to start from
Limit int // number of items in a page
···
Offset: 0,
Limit: 30,
}
+
}
+
+
type ctxKey struct{}
+
+
func IntoContext(ctx context.Context, page Page) context.Context {
+
return context.WithValue(ctx, ctxKey{}, page)
+
}
+
+
func FromContext(ctx context.Context) Page {
+
if ctx == nil {
+
return FirstPage()
+
}
+
v := ctx.Value(ctxKey{})
+
if v == nil {
+
return FirstPage()
+
}
+
page, ok := v.(Page)
+
if !ok {
+
return FirstPage()
+
}
+
return page
}
func (p Page) Previous() Page {
···
Limit: p.Limit,
}
}
+
+
func IterateAll[T any](
+
fetch func(page Page) ([]T, error),
+
handle func(items []T) error,
+
) error {
+
page := FirstPage()
+
for {
+
items, err := fetch(page)
+
if err != nil {
+
return err
+
}
+
+
err = handle(items)
+
if err != nil {
+
return err
+
}
+
if len(items) < page.Limit {
+
break
+
}
+
page = page.Next()
+
}
+
return nil
+
}
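
For orientation, here is a minimal sketch of how the pagination helpers added above might fit together. It is not part of the diff: listRepos and main are hypothetical, and only Page, FirstPage, IntoContext, FromContext and IterateAll are taken from appview/pagination/page.go.

package main

import (
	"context"
	"fmt"

	"tangled.org/core/appview/pagination"
)

// listRepos is a hypothetical fetcher: it would normally query the
// database using page.Offset and page.Limit and return one page of rows.
func listRepos(page pagination.Page) ([]string, error) {
	return nil, nil // an empty (short) page ends the iteration
}

func main() {
	// Carry the requested page through a context (e.g. set by middleware)
	// and read it back with a safe fallback to the first page.
	ctx := pagination.IntoContext(context.Background(), pagination.FirstPage())
	page := pagination.FromContext(ctx)
	fmt.Println(page.Offset, page.Limit) // 0 30

	// Walk every page; IterateAll stops once a fetch returns fewer
	// items than the page limit.
	err := pagination.IterateAll(listRepos, func(items []string) error {
		for _, name := range items {
			fmt.Println(name)
		}
		return nil
	})
	if err != nil {
		fmt.Println("iteration failed:", err)
	}
}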
+53 -35
appview/pipelines/pipelines.go
···
"tangled.org/core/appview/reporesolver"
"tangled.org/core/eventconsumer"
"tangled.org/core/idresolver"
+
"tangled.org/core/orm"
"tangled.org/core/rbac"
spindlemodel "tangled.org/core/spindle/models"
···
db *db.DB
enforcer *rbac.Enforcer
logger *slog.Logger
+
}
+
+
func (p *Pipelines) Router() http.Handler {
+
r := chi.NewRouter()
+
r.Get("/", p.Index)
+
r.Get("/{pipeline}/workflow/{workflow}", p.Workflow)
+
r.Get("/{pipeline}/workflow/{workflow}/logs", p.Logs)
+
+
return r
}
func New(
···
return
}
-
repoInfo := f.RepoInfo(user)
-
ps, err := db.GetPipelineStatuses(
p.db,
-
db.FilterEq("repo_owner", repoInfo.OwnerDid),
-
db.FilterEq("repo_name", repoInfo.Name),
-
db.FilterEq("knot", repoInfo.Knot),
+
30,
+
orm.FilterEq("repo_owner", f.Did),
+
orm.FilterEq("repo_name", f.Name),
+
orm.FilterEq("knot", f.Knot),
)
if err != nil {
l.Error("failed to query db", "err", err)
···
p.pages.Pipelines(w, pages.PipelinesParams{
LoggedInUser: user,
-
RepoInfo: repoInfo,
+
RepoInfo: p.repoResolver.GetRepoInfo(r, user),
Pipelines: ps,
})
}
···
return
}
-
repoInfo := f.RepoInfo(user)
-
pipelineId := chi.URLParam(r, "pipeline")
if pipelineId == "" {
l.Error("empty pipeline ID")
···
ps, err := db.GetPipelineStatuses(
p.db,
-
db.FilterEq("repo_owner", repoInfo.OwnerDid),
-
db.FilterEq("repo_name", repoInfo.Name),
-
db.FilterEq("knot", repoInfo.Knot),
-
db.FilterEq("id", pipelineId),
+
1,
+
orm.FilterEq("repo_owner", f.Did),
+
orm.FilterEq("repo_name", f.Name),
+
orm.FilterEq("knot", f.Knot),
+
orm.FilterEq("id", pipelineId),
)
if err != nil {
l.Error("failed to query db", "err", err)
···
p.pages.Workflow(w, pages.WorkflowParams{
LoggedInUser: user,
-
RepoInfo: repoInfo,
+
RepoInfo: p.repoResolver.GetRepoInfo(r, user),
Pipeline: singlePipeline,
Workflow: workflow,
})
···
ctx, cancel := context.WithCancel(r.Context())
defer cancel()
-
user := p.oauth.GetUser(r)
f, err := p.repoResolver.Resolve(r)
if err != nil {
l.Error("failed to get repo and knot", "err", err)
http.Error(w, "bad repo/knot", http.StatusBadRequest)
return
}
-
-
repoInfo := f.RepoInfo(user)
pipelineId := chi.URLParam(r, "pipeline")
workflow := chi.URLParam(r, "workflow")
···
ps, err := db.GetPipelineStatuses(
p.db,
-
db.FilterEq("repo_owner", repoInfo.OwnerDid),
-
db.FilterEq("repo_name", repoInfo.Name),
-
db.FilterEq("knot", repoInfo.Knot),
-
db.FilterEq("id", pipelineId),
+
1,
+
orm.FilterEq("repo_owner", f.Did),
+
orm.FilterEq("repo_name", f.Name),
+
orm.FilterEq("knot", f.Knot),
+
orm.FilterEq("id", pipelineId),
)
if err != nil || len(ps) != 1 {
l.Error("pipeline query failed", "err", err, "count", len(ps))
···
}
singlePipeline := ps[0]
-
spindle := repoInfo.Spindle
-
knot := repoInfo.Knot
+
spindle := f.Spindle
+
knot := f.Knot
rkey := singlePipeline.Rkey
if spindle == "" || knot == "" || rkey == "" {
···
// start a goroutine to read from spindle
go readLogs(spindleConn, evChan)
-
stepIdx := 0
+
stepStartTimes := make(map[int]time.Time)
var fragment bytes.Buffer
for {
select {
···
switch logLine.Kind {
case spindlemodel.LogKindControl:
-
// control messages create a new step block
-
stepIdx++
-
collapsed := false
-
if logLine.StepKind == spindlemodel.StepKindSystem {
-
collapsed = true
+
switch logLine.StepStatus {
+
case spindlemodel.StepStatusStart:
+
stepStartTimes[logLine.StepId] = logLine.Time
+
collapsed := false
+
if logLine.StepKind == spindlemodel.StepKindSystem {
+
collapsed = true
+
}
+
err = p.pages.LogBlock(&fragment, pages.LogBlockParams{
+
Id: logLine.StepId,
+
Name: logLine.Content,
+
Command: logLine.StepCommand,
+
Collapsed: collapsed,
+
StartTime: logLine.Time,
+
})
+
case spindlemodel.StepStatusEnd:
+
startTime := stepStartTimes[logLine.StepId]
+
endTime := logLine.Time
+
err = p.pages.LogBlockEnd(&fragment, pages.LogBlockEndParams{
+
Id: logLine.StepId,
+
StartTime: startTime,
+
EndTime: endTime,
+
})
}
-
err = p.pages.LogBlock(&fragment, pages.LogBlockParams{
-
Id: stepIdx,
-
Name: logLine.Content,
-
Command: logLine.StepCommand,
-
Collapsed: collapsed,
-
})
+
case spindlemodel.LogKindData:
// data messages simply insert new log lines into current step
err = p.pages.LogLine(&fragment, pages.LogLineParams{
-
Id: stepIdx,
+
Id: logLine.StepId,
Content: logLine.Content,
})
}
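Aside: the streaming handler above keys log blocks on logLine.StepId and distinguishes control lines (step boundaries, used to open blocks and compute durations from the recorded start times) from data lines (appended to the current block). A minimal sketch of the log-line shape this implies (field names inferred purely from their usage here; the authoritative definition lives in spindle's model package):

	// Sketch only: placeholder types stand in for the real spindle enums,
	// and "time" is assumed to be imported.
	type LogKind int
	type StepStatus int
	type StepKind int

	type LogLine struct {
		Kind        LogKind    // control (step boundary) or data (output)
		StepId      int        // groups lines into a step block
		StepStatus  StepStatus // start or end; only set on control lines
		StepKind    StepKind   // system steps render collapsed by default
		StepCommand string     // command shown in the block header
		Content     string     // step name (control) or raw output (data)
		Time        time.Time  // used to compute per-step durations
	}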
-17
appview/pipelines/router.go
···
-
package pipelines
-
-
import (
-
"net/http"
-
-
"github.com/go-chi/chi/v5"
-
"tangled.org/core/appview/middleware"
-
)
-
-
func (p *Pipelines) Router(mw *middleware.Middleware) http.Handler {
-
r := chi.NewRouter()
-
r.Get("/", p.Index)
-
r.Get("/{pipeline}/workflow/{workflow}", p.Workflow)
-
r.Get("/{pipeline}/workflow/{workflow}/logs", p.Logs)
-
-
return r
-
}
+10 -9
appview/pulls/opengraph.go
···
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
"tangled.org/core/appview/ogcard"
+
"tangled.org/core/orm"
"tangled.org/core/patchutil"
"tangled.org/core/types"
)
···
var statusColor color.RGBA
if pull.State.IsOpen() {
-
statusIcon = "static/icons/git-pull-request.svg"
+
statusIcon = "git-pull-request"
statusText = "open"
statusColor = color.RGBA{34, 139, 34, 255} // green
} else if pull.State.IsMerged() {
-
statusIcon = "static/icons/git-merge.svg"
+
statusIcon = "git-merge"
statusText = "merged"
statusColor = color.RGBA{138, 43, 226, 255} // purple
} else {
-
statusIcon = "static/icons/git-pull-request-closed.svg"
+
statusIcon = "git-pull-request-closed"
statusText = "closed"
statusColor = color.RGBA{128, 128, 128, 255} // gray
}
···
statusIconSize := 36
// Draw icon with status color
-
err = statusStatsArea.DrawSVGIcon(statusIcon, statsX, statsY+iconBaselineOffset-statusIconSize/2+5, statusIconSize, statusColor)
+
err = statusStatsArea.DrawLucideIcon(statusIcon, statsX, statsY+iconBaselineOffset-statusIconSize/2+5, statusIconSize, statusColor)
if err != nil {
log.Printf("failed to draw status icon: %v", err)
}
···
currentX := statsX + statusIconSize + 12 + statusTextWidth + 40
// Draw comment count
-
err = statusStatsArea.DrawSVGIcon("static/icons/message-square.svg", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor)
+
err = statusStatsArea.DrawLucideIcon("message-square", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor)
if err != nil {
log.Printf("failed to draw comment icon: %v", err)
}
···
currentX += commentTextWidth + 40
// Draw files changed
-
err = statusStatsArea.DrawSVGIcon("static/icons/file-diff.svg", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor)
+
err = statusStatsArea.DrawLucideIcon("static/icons/file-diff", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor)
if err != nil {
log.Printf("failed to draw file diff icon: %v", err)
}
···
dollyX := dollyBounds.Min.X + (dollyBounds.Dx() / 2) - (dollySize / 2)
dollyY := statsY + iconBaselineOffset - dollySize/2 + 25
dollyColor := color.RGBA{180, 180, 180, 255} // light gray
-
err = dollyArea.DrawSVGIcon("templates/fragments/dolly/silhouette.svg", dollyX, dollyY, dollySize, dollyColor)
+
err = dollyArea.DrawDollySilhouette(dollyX, dollyY, dollySize, dollyColor)
if err != nil {
log.Printf("dolly silhouette not available (this is ok): %v", err)
}
···
}
// Get comment count from database
-
comments, err := db.GetPullComments(s.db, db.FilterEq("pull_id", pull.ID))
+
comments, err := db.GetPullComments(s.db, orm.FilterEq("pull_id", pull.ID))
if err != nil {
log.Printf("failed to get pull comments: %v", err)
}
···
filesChanged = niceDiff.Stat.FilesChanged
}
-
card, err := s.drawPullSummaryCard(pull, &f.Repo, commentCount, diffStats, filesChanged)
+
card, err := s.drawPullSummaryCard(pull, f, commentCount, diffStats, filesChanged)
if err != nil {
log.Println("failed to draw pull summary card", err)
http.Error(w, "failed to draw pull summary card", http.StatusInternalServerError)
+189 -139
appview/pulls/pulls.go
···
package pulls
import (
+
"context"
"database/sql"
"encoding/json"
"errors"
···
"tangled.org/core/api/tangled"
"tangled.org/core/appview/config"
"tangled.org/core/appview/db"
+
pulls_indexer "tangled.org/core/appview/indexer/pulls"
+
"tangled.org/core/appview/mentions"
"tangled.org/core/appview/models"
"tangled.org/core/appview/notify"
"tangled.org/core/appview/oauth"
"tangled.org/core/appview/pages"
"tangled.org/core/appview/pages/markup"
+
"tangled.org/core/appview/pages/repoinfo"
"tangled.org/core/appview/reporesolver"
"tangled.org/core/appview/validator"
"tangled.org/core/appview/xrpcclient"
"tangled.org/core/idresolver"
+
"tangled.org/core/orm"
"tangled.org/core/patchutil"
"tangled.org/core/rbac"
"tangled.org/core/tid"
"tangled.org/core/types"
comatproto "github.com/bluesky-social/indigo/api/atproto"
+
"github.com/bluesky-social/indigo/atproto/syntax"
lexutil "github.com/bluesky-social/indigo/lex/util"
indigoxrpc "github.com/bluesky-social/indigo/xrpc"
"github.com/go-chi/chi/v5"
···
)
type Pulls struct {
-
oauth *oauth.OAuth
-
repoResolver *reporesolver.RepoResolver
-
pages *pages.Pages
-
idResolver *idresolver.Resolver
-
db *db.DB
-
config *config.Config
-
notifier notify.Notifier
-
enforcer *rbac.Enforcer
-
logger *slog.Logger
-
validator *validator.Validator
+
oauth *oauth.OAuth
+
repoResolver *reporesolver.RepoResolver
+
pages *pages.Pages
+
idResolver *idresolver.Resolver
+
mentionsResolver *mentions.Resolver
+
db *db.DB
+
config *config.Config
+
notifier notify.Notifier
+
enforcer *rbac.Enforcer
+
logger *slog.Logger
+
validator *validator.Validator
+
indexer *pulls_indexer.Indexer
}
func New(
···
repoResolver *reporesolver.RepoResolver,
pages *pages.Pages,
resolver *idresolver.Resolver,
+
mentionsResolver *mentions.Resolver,
db *db.DB,
config *config.Config,
notifier notify.Notifier,
enforcer *rbac.Enforcer,
validator *validator.Validator,
+
indexer *pulls_indexer.Indexer,
logger *slog.Logger,
) *Pulls {
return &Pulls{
-
oauth: oauth,
-
repoResolver: repoResolver,
-
pages: pages,
-
idResolver: resolver,
-
db: db,
-
config: config,
-
notifier: notifier,
-
enforcer: enforcer,
-
logger: logger,
-
validator: validator,
+
oauth: oauth,
+
repoResolver: repoResolver,
+
pages: pages,
+
idResolver: resolver,
+
mentionsResolver: mentionsResolver,
+
db: db,
+
config: config,
+
notifier: notifier,
+
enforcer: enforcer,
+
logger: logger,
+
validator: validator,
+
indexer: indexer,
}
}
···
s.pages.PullActionsFragment(w, pages.PullActionsParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: s.repoResolver.GetRepoInfo(r, user),
Pull: pull,
RoundNumber: roundNumber,
MergeCheck: mergeCheckResponse,
···
return
}
+
backlinks, err := db.GetBacklinks(s.db, pull.AtUri())
+
if err != nil {
+
log.Println("failed to get pull backlinks", err)
+
s.pages.Notice(w, "pull-error", "Failed to get pull. Try again later.")
+
return
+
}
+
// can be nil if this pull is not stacked
stack, _ := r.Context().Value("stack").(models.Stack)
abandonedPulls, _ := r.Context().Value("abandonedPulls").([]*models.Pull)
···
resubmitResult = s.resubmitCheck(r, f, pull, stack)
}
-
repoInfo := f.RepoInfo(user)
-
m := make(map[string]models.Pipeline)
var shas []string
···
ps, err := db.GetPipelineStatuses(
s.db,
-
db.FilterEq("repo_owner", repoInfo.OwnerDid),
-
db.FilterEq("repo_name", repoInfo.Name),
-
db.FilterEq("knot", repoInfo.Knot),
-
db.FilterIn("sha", shas),
+
len(shas),
+
orm.FilterEq("repo_owner", f.Did),
+
orm.FilterEq("repo_name", f.Name),
+
orm.FilterEq("knot", f.Knot),
+
orm.FilterIn("sha", shas),
)
if err != nil {
log.Printf("failed to fetch pipeline statuses: %s", err)
···
m[p.Sha] = p
}
-
reactionMap, err := db.GetReactionMap(s.db, 20, pull.PullAt())
+
reactionMap, err := db.GetReactionMap(s.db, 20, pull.AtUri())
if err != nil {
log.Println("failed to get pull reactions")
s.pages.Notice(w, "pulls", "Failed to load pull. Try again later.")
···
userReactions := map[models.ReactionKind]bool{}
if user != nil {
-
userReactions = db.GetReactionStatusMap(s.db, user.Did, pull.PullAt())
+
userReactions = db.GetReactionStatusMap(s.db, user.Did, pull.AtUri())
}
labelDefs, err := db.GetLabelDefinitions(
s.db,
-
db.FilterIn("at_uri", f.Repo.Labels),
-
db.FilterContains("scope", tangled.RepoPullNSID),
+
orm.FilterIn("at_uri", f.Labels),
+
orm.FilterContains("scope", tangled.RepoPullNSID),
)
if err != nil {
log.Println("failed to fetch labels", err)
···
s.pages.RepoSinglePull(w, pages.RepoSinglePullParams{
LoggedInUser: user,
-
RepoInfo: repoInfo,
+
RepoInfo: s.repoResolver.GetRepoInfo(r, user),
Pull: pull,
Stack: stack,
AbandonedPulls: abandonedPulls,
+
Backlinks: backlinks,
BranchDeleteStatus: branchDeleteStatus,
MergeCheck: mergeCheckResponse,
ResubmitCheck: resubmitResult,
···
})
}
-
func (s *Pulls) mergeCheck(r *http.Request, f *reporesolver.ResolvedRepo, pull *models.Pull, stack models.Stack) types.MergeCheckResponse {
+
func (s *Pulls) mergeCheck(r *http.Request, f *models.Repo, pull *models.Pull, stack models.Stack) types.MergeCheckResponse {
if pull.State == models.PullMerged {
return types.MergeCheckResponse{}
}
···
r.Context(),
&xrpcc,
&tangled.RepoMergeCheck_Input{
-
Did: f.OwnerDid(),
+
Did: f.Did,
Name: f.Name,
Branch: pull.TargetBranch,
Patch: patch,
···
return result
}
-
func (s *Pulls) branchDeleteStatus(r *http.Request, f *reporesolver.ResolvedRepo, pull *models.Pull) *models.BranchDeleteStatus {
+
func (s *Pulls) branchDeleteStatus(r *http.Request, repo *models.Repo, pull *models.Pull) *models.BranchDeleteStatus {
if pull.State != models.PullMerged {
return nil
}
···
}
var branch string
-
var repo *models.Repo
// check if the branch exists
// NOTE: appview could cache branches/tags etc. for every repo by listening for gitRefUpdates
if pull.IsBranchBased() {
branch = pull.PullSource.Branch
-
repo = &f.Repo
} else if pull.IsForkBased() {
branch = pull.PullSource.Branch
repo = pull.PullSource.Repo
···
}
}
-
func (s *Pulls) resubmitCheck(r *http.Request, f *reporesolver.ResolvedRepo, pull *models.Pull, stack models.Stack) pages.ResubmitResult {
+
func (s *Pulls) resubmitCheck(r *http.Request, repo *models.Repo, pull *models.Pull, stack models.Stack) pages.ResubmitResult {
if pull.State == models.PullMerged || pull.State == models.PullDeleted || pull.PullSource == nil {
return pages.Unknown
}
···
repoName = sourceRepo.Name
} else {
// pulls within the same repo
-
knot = f.Knot
-
ownerDid = f.OwnerDid()
-
repoName = f.Name
+
knot = repo.Knot
+
ownerDid = repo.Did
+
repoName = repo.Name
}
scheme := "http"
···
Host: host,
}
-
repo := fmt.Sprintf("%s/%s", ownerDid, repoName)
-
branchResp, err := tangled.RepoBranch(r.Context(), xrpcc, pull.PullSource.Branch, repo)
+
didSlashName := fmt.Sprintf("%s/%s", ownerDid, repoName)
+
branchResp, err := tangled.RepoBranch(r.Context(), xrpcc, pull.PullSource.Branch, didSlashName)
if err != nil {
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
log.Println("failed to call XRPC repo.branches", xrpcerr)
···
func (s *Pulls) RepoPullPatch(w http.ResponseWriter, r *http.Request) {
user := s.oauth.GetUser(r)
-
f, err := s.repoResolver.Resolve(r)
-
if err != nil {
-
log.Println("failed to get repo and knot", err)
-
return
-
}
var diffOpts types.DiffOpts
if d := r.URL.Query().Get("diff"); d == "split" {
···
s.pages.RepoPullPatchPage(w, pages.RepoPullPatchParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: s.repoResolver.GetRepoInfo(r, user),
Pull: pull,
Stack: stack,
Round: roundIdInt,
···
func (s *Pulls) RepoPullInterdiff(w http.ResponseWriter, r *http.Request) {
user := s.oauth.GetUser(r)
-
f, err := s.repoResolver.Resolve(r)
-
if err != nil {
-
log.Println("failed to get repo and knot", err)
-
return
-
}
-
var diffOpts types.DiffOpts
if d := r.URL.Query().Get("diff"); d == "split" {
diffOpts.Split = true
···
s.pages.RepoPullInterdiffPage(w, pages.RepoPullInterdiffParams{
LoggedInUser: s.oauth.GetUser(r),
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: s.repoResolver.GetRepoInfo(r, user),
Pull: pull,
Round: roundIdInt,
Interdiff: interdiff,
···
}
func (s *Pulls) RepoPulls(w http.ResponseWriter, r *http.Request) {
+
l := s.logger.With("handler", "RepoPulls")
+
user := s.oauth.GetUser(r)
params := r.URL.Query()
···
return
}
+
keyword := params.Get("q")
+
+
var ids []int64
+
searchOpts := models.PullSearchOptions{
+
Keyword: keyword,
+
RepoAt: f.RepoAt().String(),
+
State: state,
+
// Page: page,
+
}
+
l.Debug("searching with", "searchOpts", searchOpts)
+
if keyword != "" {
+
res, err := s.indexer.Search(r.Context(), searchOpts)
+
if err != nil {
+
l.Error("failed to search for pulls", "err", err)
+
return
+
}
+
ids = res.Hits
+
l.Debug("searched pulls with indexer", "count", len(ids))
+
} else {
+
ids, err = db.GetPullIDs(s.db, searchOpts)
+
if err != nil {
+
l.Error("failed to get all pull ids", "err", err)
+
return
+
}
+
l.Debug("indexed all pulls from the db", "count", len(ids))
+
}
+
pulls, err := db.GetPulls(
s.db,
-
db.FilterEq("repo_at", f.RepoAt()),
-
db.FilterEq("state", state),
+
orm.FilterIn("id", ids),
)
if err != nil {
log.Println("failed to get pulls", err)
···
}
pulls = pulls[:n]
-
repoInfo := f.RepoInfo(user)
ps, err := db.GetPipelineStatuses(
s.db,
-
db.FilterEq("repo_owner", repoInfo.OwnerDid),
-
db.FilterEq("repo_name", repoInfo.Name),
-
db.FilterEq("knot", repoInfo.Knot),
-
db.FilterIn("sha", shas),
+
len(shas),
+
orm.FilterEq("repo_owner", f.Did),
+
orm.FilterEq("repo_name", f.Name),
+
orm.FilterEq("knot", f.Knot),
+
orm.FilterIn("sha", shas),
)
if err != nil {
log.Printf("failed to fetch pipeline statuses: %s", err)
···
labelDefs, err := db.GetLabelDefinitions(
s.db,
-
db.FilterIn("at_uri", f.Repo.Labels),
-
db.FilterContains("scope", tangled.RepoPullNSID),
+
orm.FilterIn("at_uri", f.Labels),
+
orm.FilterContains("scope", tangled.RepoPullNSID),
)
if err != nil {
log.Println("failed to fetch labels", err)
···
s.pages.RepoPulls(w, pages.RepoPullsParams{
LoggedInUser: s.oauth.GetUser(r),
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: s.repoResolver.GetRepoInfo(r, user),
Pulls: pulls,
LabelDefs: defs,
FilteringBy: state,
+
FilterQuery: keyword,
Stacks: stacks,
Pipelines: m,
})
···
case http.MethodGet:
s.pages.PullNewCommentFragment(w, pages.PullNewCommentParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: s.repoResolver.GetRepoInfo(r, user),
Pull: pull,
RoundNumber: roundNumber,
})
···
return
}
+
mentions, references := s.mentionsResolver.Resolve(r.Context(), body)
+
// Start a transaction
tx, err := s.db.BeginTx(r.Context(), nil)
if err != nil {
···
Rkey: tid.TID(),
Record: &lexutil.LexiconTypeDecoder{
Val: &tangled.RepoPullComment{
-
Pull: pull.PullAt().String(),
+
Pull: pull.AtUri().String(),
Body: body,
CreatedAt: createdAt,
},
···
Body: body,
CommentAt: atResp.Uri,
SubmissionId: pull.Submissions[roundNumber].ID,
+
Mentions: mentions,
+
References: references,
}
// Create the pull comment in the database with the commentAt field
···
return
}
-
s.notifier.NewPullComment(r.Context(), comment)
+
s.notifier.NewPullComment(r.Context(), comment, mentions)
-
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d#comment-%d", f.OwnerSlashRepo(), pull.PullId, commentId))
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, f)
+
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d#comment-%d", ownerSlashRepo, pull.PullId, commentId))
return
}
}
···
Host: host,
}
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
xrpcBytes, err := tangled.RepoBranches(r.Context(), xrpcc, "", 0, repo)
if err != nil {
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
···
s.pages.RepoNewPull(w, pages.RepoNewPullParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: s.repoResolver.GetRepoInfo(r, user),
Branches: result.Branches,
Strategy: strategy,
SourceBranch: sourceBranch,
···
}
// Determine PR type based on input parameters
-
isPushAllowed := f.RepoInfo(user).Roles.IsPushAllowed()
+
roles := repoinfo.RolesInRepo{Roles: s.enforcer.GetPermissionsInRepo(user.Did, f.Knot, f.DidSlashRepo())}
+
isPushAllowed := roles.IsPushAllowed()
isBranchBased := isPushAllowed && sourceBranch != "" && fromFork == ""
isForkBased := fromFork != "" && sourceBranch != ""
isPatchBased := patch != "" && !isBranchBased && !isForkBased
···
func (s *Pulls) handleBranchBasedPull(
w http.ResponseWriter,
r *http.Request,
-
f *reporesolver.ResolvedRepo,
+
repo *models.Repo,
user *oauth.User,
title,
body,
···
if !s.config.Core.Dev {
scheme = "https"
}
-
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
+
host := fmt.Sprintf("%s://%s", scheme, repo.Knot)
xrpcc := &indigoxrpc.Client{
Host: host,
}
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
-
xrpcBytes, err := tangled.RepoCompare(r.Context(), xrpcc, repo, targetBranch, sourceBranch)
+
didSlashRepo := fmt.Sprintf("%s/%s", repo.Did, repo.Name)
+
xrpcBytes, err := tangled.RepoCompare(r.Context(), xrpcc, didSlashRepo, targetBranch, sourceBranch)
if err != nil {
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
log.Println("failed to call XRPC repo.compare", xrpcerr)
···
Sha: comparison.Rev2,
}
-
s.createPullRequest(w, r, f, user, title, body, targetBranch, patch, combined, sourceRev, pullSource, recordPullSource, isStacked)
+
s.createPullRequest(w, r, repo, user, title, body, targetBranch, patch, combined, sourceRev, pullSource, recordPullSource, isStacked)
}
-
func (s *Pulls) handlePatchBasedPull(w http.ResponseWriter, r *http.Request, f *reporesolver.ResolvedRepo, user *oauth.User, title, body, targetBranch, patch string, isStacked bool) {
+
func (s *Pulls) handlePatchBasedPull(w http.ResponseWriter, r *http.Request, repo *models.Repo, user *oauth.User, title, body, targetBranch, patch string, isStacked bool) {
if err := s.validator.ValidatePatch(&patch); err != nil {
s.logger.Error("patch validation failed", "err", err)
s.pages.Notice(w, "pull", "Invalid patch format. Please provide a valid diff.")
return
}
-
s.createPullRequest(w, r, f, user, title, body, targetBranch, patch, "", "", nil, nil, isStacked)
+
s.createPullRequest(w, r, repo, user, title, body, targetBranch, patch, "", "", nil, nil, isStacked)
}
-
func (s *Pulls) handleForkBasedPull(w http.ResponseWriter, r *http.Request, f *reporesolver.ResolvedRepo, user *oauth.User, forkRepo string, title, body, targetBranch, sourceBranch string, isStacked bool) {
+
func (s *Pulls) handleForkBasedPull(w http.ResponseWriter, r *http.Request, repo *models.Repo, user *oauth.User, forkRepo string, title, body, targetBranch, sourceBranch string, isStacked bool) {
repoString := strings.SplitN(forkRepo, "/", 2)
forkOwnerDid := repoString[0]
repoName := repoString[1]
···
Sha: sourceRev,
-
s.createPullRequest(w, r, f, user, title, body, targetBranch, patch, combined, sourceRev, pullSource, recordPullSource, isStacked)
+
s.createPullRequest(w, r, repo, user, title, body, targetBranch, patch, combined, sourceRev, pullSource, recordPullSource, isStacked)
func (s *Pulls) createPullRequest(
w http.ResponseWriter,
r *http.Request,
-
f *reporesolver.ResolvedRepo,
+
repo *models.Repo,
user *oauth.User,
title, body, targetBranch string,
patch string,
···
s.createStackedPullRequest(
w,
r,
-
f,
+
repo,
user,
targetBranch,
patch,
···
+
mentions, references := s.mentionsResolver.Resolve(r.Context(), body)
+
rkey := tid.TID()
initialSubmission := models.PullSubmission{
Patch: patch,
···
Body: body,
TargetBranch: targetBranch,
OwnerDid: user.Did,
-
RepoAt: f.RepoAt(),
+
RepoAt: repo.RepoAt(),
Rkey: rkey,
+
Mentions: mentions,
+
References: references,
Submissions: []*models.PullSubmission{
&initialSubmission,
},
···
s.pages.Notice(w, "pull", "Failed to create pull request. Try again later.")
return
-
pullId, err := db.NextPullId(tx, f.RepoAt())
+
pullId, err := db.NextPullId(tx, repo.RepoAt())
if err != nil {
log.Println("failed to get pull id", err)
s.pages.Notice(w, "pull", "Failed to create pull request. Try again later.")
···
Val: &tangled.RepoPull{
Title: title,
Target: &tangled.RepoPull_Target{
-
Repo: string(f.RepoAt()),
+
Repo: string(repo.RepoAt()),
Branch: targetBranch,
},
Patch: patch,
···
s.notifier.NewPull(r.Context(), pull)
-
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d", f.OwnerSlashRepo(), pullId))
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, repo)
+
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d", ownerSlashRepo, pullId))
func (s *Pulls) createStackedPullRequest(
w http.ResponseWriter,
r *http.Request,
-
f *reporesolver.ResolvedRepo,
+
repo *models.Repo,
user *oauth.User,
targetBranch string,
patch string,
···
// build a stack out of this patch
stackId := uuid.New()
-
stack, err := newStack(f, user, targetBranch, patch, pullSource, stackId.String())
+
stack, err := s.newStack(r.Context(), repo, user, targetBranch, patch, pullSource, stackId.String())
if err != nil {
log.Println("failed to create stack", err)
s.pages.Notice(w, "pull", fmt.Sprintf("Failed to create stack: %v", err))
···
return
-
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls", f.OwnerSlashRepo()))
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, repo)
+
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls", ownerSlashRepo))
func (s *Pulls) ValidatePatch(w http.ResponseWriter, r *http.Request) {
···
func (s *Pulls) PatchUploadFragment(w http.ResponseWriter, r *http.Request) {
user := s.oauth.GetUser(r)
-
f, err := s.repoResolver.Resolve(r)
-
if err != nil {
-
log.Println("failed to get repo and knot", err)
-
return
-
}
s.pages.PullPatchUploadFragment(w, pages.PullPatchUploadParams{
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: s.repoResolver.GetRepoInfo(r, user),
})
···
Host: host,
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
xrpcBytes, err := tangled.RepoBranches(r.Context(), xrpcc, "", 0, repo)
if err != nil {
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
···
s.pages.PullCompareBranchesFragment(w, pages.PullCompareBranchesParams{
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: s.repoResolver.GetRepoInfo(r, user),
Branches: withoutDefault,
})
func (s *Pulls) CompareForksFragment(w http.ResponseWriter, r *http.Request) {
user := s.oauth.GetUser(r)
-
f, err := s.repoResolver.Resolve(r)
-
if err != nil {
-
log.Println("failed to get repo and knot", err)
-
return
-
}
forks, err := db.GetForksByDid(s.db, user.Did)
if err != nil {
···
s.pages.PullCompareForkFragment(w, pages.PullCompareForkParams{
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: s.repoResolver.GetRepoInfo(r, user),
Forks: forks,
Selected: r.URL.Query().Get("fork"),
})
···
// fork repo
repo, err := db.GetRepo(
s.db,
-
db.FilterEq("did", forkOwnerDid),
-
db.FilterEq("name", forkName),
+
orm.FilterEq("did", forkOwnerDid),
+
orm.FilterEq("name", forkName),
if err != nil {
log.Println("failed to get repo", "did", forkOwnerDid, "name", forkName, "err", err)
···
Host: targetHost,
-
targetRepo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
+
targetRepo := fmt.Sprintf("%s/%s", f.Did, f.Name)
targetXrpcBytes, err := tangled.RepoBranches(r.Context(), targetXrpcc, "", 0, targetRepo)
if err != nil {
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
···
})
s.pages.PullCompareForkBranchesFragment(w, pages.PullCompareForkBranchesParams{
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: s.repoResolver.GetRepoInfo(r, user),
SourceBranches: sourceBranches.Branches,
TargetBranches: targetBranches.Branches,
})
···
func (s *Pulls) ResubmitPull(w http.ResponseWriter, r *http.Request) {
user := s.oauth.GetUser(r)
-
f, err := s.repoResolver.Resolve(r)
-
if err != nil {
-
log.Println("failed to get repo and knot", err)
-
return
-
}
pull, ok := r.Context().Value("pull").(*models.Pull)
if !ok {
···
switch r.Method {
case http.MethodGet:
s.pages.PullResubmitFragment(w, pages.PullResubmitParams{
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: s.repoResolver.GetRepoInfo(r, user),
Pull: pull,
})
return
···
return
-
if !f.RepoInfo(user).Roles.IsPushAllowed() {
+
roles := repoinfo.RolesInRepo{Roles: s.enforcer.GetPermissionsInRepo(user.Did, f.Knot, f.DidSlashRepo())}
+
if !roles.IsPushAllowed() {
log.Println("unauthorized user")
w.WriteHeader(http.StatusUnauthorized)
return
···
Host: host,
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
xrpcBytes, err := tangled.RepoCompare(r.Context(), xrpcc, repo, pull.TargetBranch, pull.PullSource.Branch)
if err != nil {
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
···
func (s *Pulls) resubmitPullHelper(
w http.ResponseWriter,
r *http.Request,
-
f *reporesolver.ResolvedRepo,
+
repo *models.Repo,
user *oauth.User,
pull *models.Pull,
patch string,
···
) {
if pull.IsStacked() {
log.Println("resubmitting stacked PR")
-
s.resubmitStackedPullHelper(w, r, f, user, pull, patch, pull.StackId)
+
s.resubmitStackedPullHelper(w, r, repo, user, pull, patch, pull.StackId)
return
···
defer tx.Rollback()
-
pullAt := pull.PullAt()
+
pullAt := pull.AtUri()
newRoundNumber := len(pull.Submissions)
newPatch := patch
newSourceRev := sourceRev
···
Val: &tangled.RepoPull{
Title: pull.Title,
Target: &tangled.RepoPull_Target{
-
Repo: string(f.RepoAt()),
+
Repo: string(repo.RepoAt()),
Branch: pull.TargetBranch,
},
Patch: patch, // new patch
···
return
-
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d", f.OwnerSlashRepo(), pull.PullId))
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, repo)
+
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d", ownerSlashRepo, pull.PullId))
func (s *Pulls) resubmitStackedPullHelper(
w http.ResponseWriter,
r *http.Request,
-
f *reporesolver.ResolvedRepo,
+
repo *models.Repo,
user *oauth.User,
pull *models.Pull,
patch string,
···
targetBranch := pull.TargetBranch
origStack, _ := r.Context().Value("stack").(models.Stack)
-
newStack, err := newStack(f, user, targetBranch, patch, pull.PullSource, stackId)
+
newStack, err := s.newStack(r.Context(), repo, user, targetBranch, patch, pull.PullSource, stackId)
if err != nil {
log.Println("failed to create resubmitted stack", err)
s.pages.Notice(w, "pull-merge-error", "Failed to merge pull request. Try again later.")
···
// resubmit the new pull
-
pullAt := op.PullAt()
+
pullAt := op.AtUri()
newRoundNumber := len(op.Submissions)
newPatch := np.LatestPatch()
combinedPatch := np.LatestSubmission().Combined
···
tx,
p.ParentChangeId,
// these should be enough filters to be unique per-stack
-
db.FilterEq("repo_at", p.RepoAt.String()),
-
db.FilterEq("owner_did", p.OwnerDid),
-
db.FilterEq("change_id", p.ChangeId),
+
orm.FilterEq("repo_at", p.RepoAt.String()),
+
orm.FilterEq("owner_did", p.OwnerDid),
+
orm.FilterEq("change_id", p.ChangeId),
if err != nil {
···
return
-
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d", f.OwnerSlashRepo(), pull.PullId))
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, repo)
+
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d", ownerSlashRepo, pull.PullId))
func (s *Pulls) MergePull(w http.ResponseWriter, r *http.Request) {
+
user := s.oauth.GetUser(r)
f, err := s.repoResolver.Resolve(r)
if err != nil {
log.Println("failed to resolve repo:", err)
···
authorName := ident.Handle.String()
mergeInput := &tangled.RepoMerge_Input{
-
Did: f.OwnerDid(),
+
Did: f.Did,
Name: f.Name,
Branch: pull.TargetBranch,
Patch: patch,
···
s.pages.Notice(w, "pull-merge-error", "Failed to merge pull request. Try again later.")
return
+
p.State = models.PullMerged
err = tx.Commit()
···
// notify about the pull merge
for _, p := range pullsToMerge {
-
s.notifier.NewPullMerged(r.Context(), p)
+
s.notifier.NewPullState(r.Context(), syntax.DID(user.Did), p)
-
s.pages.HxLocation(w, fmt.Sprintf("/@%s/%s/pulls/%d", f.OwnerHandle(), f.Name, pull.PullId))
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, f)
+
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d", ownerSlashRepo, pull.PullId))
func (s *Pulls) ClosePull(w http.ResponseWriter, r *http.Request) {
···
// auth filter: only owner or collaborators can close
-
roles := f.RolesInRepo(user)
+
roles := repoinfo.RolesInRepo{Roles: s.enforcer.GetPermissionsInRepo(user.Did, f.Knot, f.DidSlashRepo())}
isOwner := roles.IsOwner()
isCollaborator := roles.IsCollaborator()
isPullAuthor := user.Did == pull.OwnerDid
···
s.pages.Notice(w, "pull-close", "Failed to close pull.")
return
+
p.State = models.PullClosed
// Commit the transaction
···
for _, p := range pullsToClose {
-
s.notifier.NewPullClosed(r.Context(), p)
+
s.notifier.NewPullState(r.Context(), syntax.DID(user.Did), p)
-
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d", f.OwnerSlashRepo(), pull.PullId))
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, f)
+
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d", ownerSlashRepo, pull.PullId))
func (s *Pulls) ReopenPull(w http.ResponseWriter, r *http.Request) {
···
// auth filter: only owner or collaborators can close
-
roles := f.RolesInRepo(user)
+
roles := repoinfo.RolesInRepo{Roles: s.enforcer.GetPermissionsInRepo(user.Did, f.Knot, f.DidSlashRepo())}
isOwner := roles.IsOwner()
isCollaborator := roles.IsCollaborator()
isPullAuthor := user.Did == pull.OwnerDid
···
s.pages.Notice(w, "pull-close", "Failed to close pull.")
return
+
p.State = models.PullOpen
// Commit the transaction
···
return
-
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d", f.OwnerSlashRepo(), pull.PullId))
+
for _, p := range pullsToReopen {
+
s.notifier.NewPullState(r.Context(), syntax.DID(user.Did), p)
+
}
+
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, f)
+
s.pages.HxLocation(w, fmt.Sprintf("/%s/pulls/%d", ownerSlashRepo, pull.PullId))
-
func newStack(f *reporesolver.ResolvedRepo, user *oauth.User, targetBranch, patch string, pullSource *models.PullSource, stackId string) (models.Stack, error) {
+
func (s *Pulls) newStack(ctx context.Context, repo *models.Repo, user *oauth.User, targetBranch, patch string, pullSource *models.PullSource, stackId string) (models.Stack, error) {
formatPatches, err := patchutil.ExtractPatches(patch)
if err != nil {
return nil, fmt.Errorf("Failed to extract patches: %v", err)
···
body := fp.Body
rkey := tid.TID()
+
mentions, references := s.mentionsResolver.Resolve(ctx, body)
+
initialSubmission := models.PullSubmission{
Patch: fp.Raw,
SourceRev: fp.SHA,
···
Body: body,
TargetBranch: targetBranch,
OwnerDid: user.Did,
-
RepoAt: f.RepoAt(),
+
RepoAt: repo.RepoAt(),
Rkey: rkey,
+
Mentions: mentions,
+
References: references,
Submissions: []*models.PullSubmission{
&initialSubmission,
},
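Note on the stacked flow above: newStack splits the uploaded patch into individual format-patches via patchutil.ExtractPatches and creates one pull per entry, all sharing the generated stackId; resubmission builds a fresh stack the same way and matches old and new entries by change_id (the FilterEq("change_id", ...) lookups earlier in this file).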
+49
appview/repo/archive.go
···
+
package repo
+
+
import (
+
"fmt"
+
"net/http"
+
"net/url"
+
"strings"
+
+
"tangled.org/core/api/tangled"
+
xrpcclient "tangled.org/core/appview/xrpcclient"
+
+
indigoxrpc "github.com/bluesky-social/indigo/xrpc"
+
"github.com/go-chi/chi/v5"
+
"github.com/go-git/go-git/v5/plumbing"
+
)
+
+
func (rp *Repo) DownloadArchive(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "DownloadArchive")
+
ref := chi.URLParam(r, "ref")
+
ref, _ = url.PathUnescape(ref)
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to get repo and knot", "err", err)
+
return
+
}
+
scheme := "http"
+
if !rp.config.Core.Dev {
+
scheme = "https"
+
}
+
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
+
xrpcc := &indigoxrpc.Client{
+
Host: host,
+
}
+
didSlashRepo := f.DidSlashRepo()
+
archiveBytes, err := tangled.RepoArchive(r.Context(), xrpcc, "tar.gz", "", ref, didSlashRepo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.archive", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
// Set headers for the file download; the archive bytes from the knot are passed through as-is
+
safeRefFilename := strings.ReplaceAll(plumbing.ReferenceName(ref).Short(), "/", "-")
+
filename := fmt.Sprintf("%s-%s.tar.gz", f.Name, safeRefFilename)
+
w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
+
w.Header().Set("Content-Type", "application/gzip")
+
w.Header().Set("Content-Length", fmt.Sprintf("%d", len(archiveBytes)))
+
// Write the archive data directly
+
w.Write(archiveBytes)
+
}
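For illustration of the attachment naming above (names made up): with a repo called core and an unescaped ref of refs/heads/release/v1.2, plumbing's Short() drops the refs/heads/ prefix and the remaining slash is replaced with a dash, so the archive downloads as core-release-v1.2.tar.gz.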
+21 -14
appview/repo/artifact.go
···
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
"tangled.org/core/appview/pages"
-
"tangled.org/core/appview/reporesolver"
"tangled.org/core/appview/xrpcclient"
+
"tangled.org/core/orm"
"tangled.org/core/tid"
"tangled.org/core/types"
···
rp.pages.RepoArtifactFragment(w, pages.RepoArtifactParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
Artifact: artifact,
})
}
···
artifacts, err := db.GetArtifact(
rp.db,
-
db.FilterEq("repo_at", f.RepoAt()),
-
db.FilterEq("tag", tag.Tag.Hash[:]),
-
db.FilterEq("name", filename),
+
orm.FilterEq("repo_at", f.RepoAt()),
+
orm.FilterEq("tag", tag.Tag.Hash[:]),
+
orm.FilterEq("name", filename),
)
if err != nil {
log.Println("failed to get artifacts", err)
···
artifact := artifacts[0]
-
ownerPds := f.OwnerId.PDSEndpoint()
+
ownerId, err := rp.idResolver.ResolveIdent(r.Context(), f.Did)
+
if err != nil {
+
log.Println("failed to resolve repo owner did", f.Did, err)
+
http.Error(w, "repository owner not found", http.StatusNotFound)
+
return
+
}
+
+
ownerPds := ownerId.PDSEndpoint()
url, _ := url.Parse(fmt.Sprintf("%s/xrpc/com.atproto.sync.getBlob", ownerPds))
q := url.Query()
q.Set("cid", artifact.BlobCid.String())
···
artifacts, err := db.GetArtifact(
rp.db,
-
db.FilterEq("repo_at", f.RepoAt()),
-
db.FilterEq("tag", tag[:]),
-
db.FilterEq("name", filename),
+
orm.FilterEq("repo_at", f.RepoAt()),
+
orm.FilterEq("tag", tag[:]),
+
orm.FilterEq("name", filename),
)
if err != nil {
log.Println("failed to get artifacts", err)
···
defer tx.Rollback()
err = db.DeleteArtifact(tx,
-
db.FilterEq("repo_at", f.RepoAt()),
-
db.FilterEq("tag", artifact.Tag[:]),
-
db.FilterEq("name", filename),
+
orm.FilterEq("repo_at", f.RepoAt()),
+
orm.FilterEq("tag", artifact.Tag[:]),
+
orm.FilterEq("name", filename),
)
if err != nil {
log.Println("failed to remove artifact record from db", err)
···
w.Write([]byte{})
}
-
func (rp *Repo) resolveTag(ctx context.Context, f *reporesolver.ResolvedRepo, tagParam string) (*types.TagReference, error) {
+
func (rp *Repo) resolveTag(ctx context.Context, f *models.Repo, tagParam string) (*types.TagReference, error) {
tagParam, err := url.QueryUnescape(tagParam)
if err != nil {
return nil, err
···
Host: host,
}
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
xrpcBytes, err := tangled.RepoTags(ctx, xrpcc, "", 0, repo)
if err != nil {
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+293
appview/repo/blob.go
···
+
package repo
+
+
import (
+
"encoding/base64"
+
"fmt"
+
"io"
+
"net/http"
+
"net/url"
+
"path/filepath"
+
"slices"
+
"strings"
+
+
"tangled.org/core/api/tangled"
+
"tangled.org/core/appview/config"
+
"tangled.org/core/appview/models"
+
"tangled.org/core/appview/pages"
+
"tangled.org/core/appview/pages/markup"
+
"tangled.org/core/appview/reporesolver"
+
xrpcclient "tangled.org/core/appview/xrpcclient"
+
+
indigoxrpc "github.com/bluesky-social/indigo/xrpc"
+
"github.com/go-chi/chi/v5"
+
)
+
+
// the content can be one of the following
+
// (available views: text | rendered | raw):
+
//
+
// - code      : text |          | raw
+
// - markup    : text | rendered | raw
+
// - svg       : text | rendered | raw
+
// - png       :      | rendered | raw
+
// - video     :      | rendered | raw
+
// - submodule :      | rendered |
+
// - rest      :      |          |
+
func (rp *Repo) Blob(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "RepoBlob")
+
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to get repo and knot", "err", err)
+
return
+
}
+
+
ref := chi.URLParam(r, "ref")
+
ref, _ = url.PathUnescape(ref)
+
+
filePath := chi.URLParam(r, "*")
+
filePath, _ = url.PathUnescape(filePath)
+
+
scheme := "http"
+
if !rp.config.Core.Dev {
+
scheme = "https"
+
}
+
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
+
xrpcc := &indigoxrpc.Client{
+
Host: host,
+
}
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
+
resp, err := tangled.RepoBlob(r.Context(), xrpcc, filePath, false, ref, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.blob", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, f)
+
+
// Use XRPC response directly instead of converting to internal types
+
var breadcrumbs [][]string
+
breadcrumbs = append(breadcrumbs, []string{f.Name, fmt.Sprintf("/%s/tree/%s", ownerSlashRepo, url.PathEscape(ref))})
+
if filePath != "" {
+
for idx, elem := range strings.Split(filePath, "/") {
+
breadcrumbs = append(breadcrumbs, []string{elem, fmt.Sprintf("%s/%s", breadcrumbs[idx][1], url.PathEscape(elem))})
+
}
+
}
+
+
// Create the blob view
+
blobView := NewBlobView(resp, rp.config, f, ref, filePath, r.URL.Query())
+
+
user := rp.oauth.GetUser(r)
+
+
rp.pages.RepoBlob(w, pages.RepoBlobParams{
+
LoggedInUser: user,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
+
BreadCrumbs: breadcrumbs,
+
BlobView: blobView,
+
RepoBlob_Output: resp,
+
})
+
}
+
+
func (rp *Repo) RepoBlobRaw(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "RepoBlobRaw")
+
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to get repo and knot", "err", err)
+
w.WriteHeader(http.StatusBadRequest)
+
return
+
}
+
+
ref := chi.URLParam(r, "ref")
+
ref, _ = url.PathUnescape(ref)
+
+
filePath := chi.URLParam(r, "*")
+
filePath, _ = url.PathUnescape(filePath)
+
+
scheme := "http"
+
if !rp.config.Core.Dev {
+
scheme = "https"
+
}
+
repo := f.DidSlashRepo()
+
baseURL := &url.URL{
+
Scheme: scheme,
+
Host: f.Knot,
+
Path: "/xrpc/sh.tangled.repo.blob",
+
}
+
query := baseURL.Query()
+
query.Set("repo", repo)
+
query.Set("ref", ref)
+
query.Set("path", filePath)
+
query.Set("raw", "true")
+
baseURL.RawQuery = query.Encode()
+
blobURL := baseURL.String()
+
req, err := http.NewRequest("GET", blobURL, nil)
+
if err != nil {
+
l.Error("failed to create request", "err", err)
+
return
+
}
+
+
// forward the If-None-Match header
+
if clientETag := r.Header.Get("If-None-Match"); clientETag != "" {
+
req.Header.Set("If-None-Match", clientETag)
+
}
+
client := &http.Client{}
+
+
resp, err := client.Do(req)
+
if err != nil {
+
l.Error("failed to reach knotserver", "err", err)
+
rp.pages.Error503(w)
+
return
+
}
+
+
defer resp.Body.Close()
+
+
// forward 304 not modified
+
if resp.StatusCode == http.StatusNotModified {
+
w.WriteHeader(http.StatusNotModified)
+
return
+
}
+
+
if resp.StatusCode != http.StatusOK {
+
l.Error("knotserver returned non-OK status for raw blob", "url", blobURL, "statuscode", resp.StatusCode)
+
w.WriteHeader(resp.StatusCode)
+
_, _ = io.Copy(w, resp.Body)
+
return
+
}
+
+
contentType := resp.Header.Get("Content-Type")
+
body, err := io.ReadAll(resp.Body)
+
if err != nil {
+
l.Error("error reading response body from knotserver", "err", err)
+
w.WriteHeader(http.StatusInternalServerError)
+
return
+
}
+
+
if strings.HasPrefix(contentType, "text/") || isTextualMimeType(contentType) {
+
// serve all textual content as text/plain
+
w.Header().Set("Content-Type", "text/plain; charset=utf-8")
+
w.Write(body)
+
} else if strings.HasPrefix(contentType, "image/") || strings.HasPrefix(contentType, "video/") {
+
// serve images and videos with their original content type
+
w.Header().Set("Content-Type", contentType)
+
w.Write(body)
+
} else {
+
w.WriteHeader(http.StatusUnsupportedMediaType)
+
w.Write([]byte("unsupported content type"))
+
return
+
}
+
}
+
+
// NewBlobView creates a BlobView from the XRPC response
+
func NewBlobView(resp *tangled.RepoBlob_Output, config *config.Config, repo *models.Repo, ref, filePath string, queryParams url.Values) models.BlobView {
+
view := models.BlobView{
+
Contents: "",
+
Lines: 0,
+
}
+
+
// Set size
+
if resp.Size != nil {
+
view.SizeHint = uint64(*resp.Size)
+
} else if resp.Content != nil {
+
view.SizeHint = uint64(len(*resp.Content))
+
}
+
+
if resp.Submodule != nil {
+
view.ContentType = models.BlobContentTypeSubmodule
+
view.HasRenderedView = true
+
view.ContentSrc = resp.Submodule.Url
+
return view
+
}
+
+
// Determine if binary
+
if resp.IsBinary != nil && *resp.IsBinary {
+
view.ContentSrc = generateBlobURL(config, repo, ref, filePath)
+
ext := strings.ToLower(filepath.Ext(resp.Path))
+
+
switch ext {
+
case ".jpg", ".jpeg", ".png", ".gif", ".webp":
+
view.ContentType = models.BlobContentTypeImage
+
view.HasRawView = true
+
view.HasRenderedView = true
+
view.ShowingRendered = true
+
+
case ".svg":
+
view.ContentType = models.BlobContentTypeSvg
+
view.HasRawView = true
+
view.HasTextView = true
+
view.HasRenderedView = true
+
view.ShowingRendered = queryParams.Get("code") != "true"
+
if resp.Content != nil {
+
bytes, _ := base64.StdEncoding.DecodeString(*resp.Content)
+
view.Contents = string(bytes)
+
view.Lines = strings.Count(view.Contents, "\n") + 1
+
}
+
+
case ".mp4", ".webm", ".ogg", ".mov", ".avi":
+
view.ContentType = models.BlobContentTypeVideo
+
view.HasRawView = true
+
view.HasRenderedView = true
+
view.ShowingRendered = true
+
}
+
+
return view
+
}
+
+
// otherwise, we are dealing with text content
+
view.HasRawView = true
+
view.HasTextView = true
+
+
if resp.Content != nil {
+
view.Contents = *resp.Content
+
view.Lines = strings.Count(view.Contents, "\n") + 1
+
}
+
+
// with text, we may be dealing with markdown
+
format := markup.GetFormat(resp.Path)
+
if format == markup.FormatMarkdown {
+
view.ContentType = models.BlobContentTypeMarkup
+
view.HasRenderedView = true
+
view.ShowingRendered = queryParams.Get("code") != "true"
+
}
+
+
return view
+
}
+
+
func generateBlobURL(config *config.Config, repo *models.Repo, ref, filePath string) string {
+
scheme := "http"
+
if !config.Core.Dev {
+
scheme = "https"
+
}
+
+
repoName := fmt.Sprintf("%s/%s", repo.Did, repo.Name)
+
baseURL := &url.URL{
+
Scheme: scheme,
+
Host: repo.Knot,
+
Path: "/xrpc/sh.tangled.repo.blob",
+
}
+
query := baseURL.Query()
+
query.Set("repo", repoName)
+
query.Set("ref", ref)
+
query.Set("path", filePath)
+
query.Set("raw", "true")
+
baseURL.RawQuery = query.Encode()
+
blobURL := baseURL.String()
+
+
if !config.Core.Dev {
+
return markup.GenerateCamoURL(config.Camo.Host, config.Camo.SharedSecret, blobURL)
+
}
+
return blobURL
+
}
+
+
func isTextualMimeType(mimeType string) bool {
+
textualTypes := []string{
+
"application/json",
+
"application/xml",
+
"application/yaml",
+
"application/x-yaml",
+
"application/toml",
+
"application/javascript",
+
"application/ecmascript",
+
"message/",
+
}
+
return slices.Contains(textualTypes, mimeType)
+
}
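For a rough sense of what generateBlobURL produces (identifiers made up, dev mode assumed so no camo wrapping): a repo did:plc:abc/core on knot knot.example.com, ref main, path docs/logo.png yields

	http://knot.example.com/xrpc/sh.tangled.repo.blob?path=docs%2Flogo.png&raw=true&ref=main&repo=did%3Aplc%3Aabc%2Fcore

(url.Values.Encode sorts parameters alphabetically). Outside dev mode the same URL is wrapped by markup.GenerateCamoURL so browsers fetch media through the camo proxy rather than the knot directly.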
+95
appview/repo/branches.go
···
+
package repo
+
+
import (
+
"encoding/json"
+
"fmt"
+
"net/http"
+
+
"tangled.org/core/api/tangled"
+
"tangled.org/core/appview/oauth"
+
"tangled.org/core/appview/pages"
+
xrpcclient "tangled.org/core/appview/xrpcclient"
+
"tangled.org/core/types"
+
+
indigoxrpc "github.com/bluesky-social/indigo/xrpc"
+
)
+
+
func (rp *Repo) Branches(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "RepoBranches")
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to get repo and knot", "err", err)
+
return
+
}
+
scheme := "http"
+
if !rp.config.Core.Dev {
+
scheme = "https"
+
}
+
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
+
xrpcc := &indigoxrpc.Client{
+
Host: host,
+
}
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
+
xrpcBytes, err := tangled.RepoBranches(r.Context(), xrpcc, "", 0, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.branches", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
var result types.RepoBranchesResponse
+
if err := json.Unmarshal(xrpcBytes, &result); err != nil {
+
l.Error("failed to decode XRPC response", "err", err)
+
rp.pages.Error503(w)
+
return
+
}
+
sortBranches(result.Branches)
+
user := rp.oauth.GetUser(r)
+
rp.pages.RepoBranches(w, pages.RepoBranchesParams{
+
LoggedInUser: user,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
+
RepoBranchesResponse: result,
+
})
+
}
+
+
func (rp *Repo) DeleteBranch(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "DeleteBranch")
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to get repo and knot", "err", err)
+
return
+
}
+
noticeId := "delete-branch-error"
+
fail := func(msg string, err error) {
+
l.Error(msg, "err", err)
+
rp.pages.Notice(w, noticeId, msg)
+
}
+
branch := r.FormValue("branch")
+
if branch == "" {
+
fail("No branch provided.", nil)
+
return
+
}
+
client, err := rp.oauth.ServiceClient(
+
r,
+
oauth.WithService(f.Knot),
+
oauth.WithLxm(tangled.RepoDeleteBranchNSID),
+
oauth.WithDev(rp.config.Core.Dev),
+
)
+
if err != nil {
+
fail("Failed to connect to knotserver", nil)
+
return
+
}
+
err = tangled.RepoDeleteBranch(
+
r.Context(),
+
client,
+
&tangled.RepoDeleteBranch_Input{
+
Branch: branch,
+
Repo: f.RepoAt().String(),
+
},
+
)
+
if err := xrpcclient.HandleXrpcErr(err); err != nil {
+
fail(fmt.Sprintf("Failed to delete branch: %s", err), err)
+
return
+
}
+
l.Error("deleted branch from knot", "branch", branch, "repo", f.RepoAt())
+
rp.pages.HxRefresh(w)
+
}
+214
appview/repo/compare.go
···
+
package repo
+
+
import (
+
"encoding/json"
+
"fmt"
+
"net/http"
+
"net/url"
+
"strings"
+
+
"tangled.org/core/api/tangled"
+
"tangled.org/core/appview/pages"
+
xrpcclient "tangled.org/core/appview/xrpcclient"
+
"tangled.org/core/patchutil"
+
"tangled.org/core/types"
+
+
indigoxrpc "github.com/bluesky-social/indigo/xrpc"
+
"github.com/go-chi/chi/v5"
+
)
+
+
func (rp *Repo) CompareNew(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "RepoCompareNew")
+
+
user := rp.oauth.GetUser(r)
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to get repo and knot", "err", err)
+
return
+
}
+
+
scheme := "http"
+
if !rp.config.Core.Dev {
+
scheme = "https"
+
}
+
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
+
xrpcc := &indigoxrpc.Client{
+
Host: host,
+
}
+
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
+
branchBytes, err := tangled.RepoBranches(r.Context(), xrpcc, "", 0, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.branches", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
+
var branchResult types.RepoBranchesResponse
+
if err := json.Unmarshal(branchBytes, &branchResult); err != nil {
+
l.Error("failed to decode XRPC branches response", "err", err)
+
rp.pages.Notice(w, "compare-error", "Failed to produce comparison. Try again later.")
+
return
+
}
+
branches := branchResult.Branches
+
+
sortBranches(branches)
+
+
var defaultBranch string
+
for _, b := range branches {
+
if b.IsDefault {
+
defaultBranch = b.Name
+
}
+
}
+
+
base := defaultBranch
+
head := defaultBranch
+
+
params := r.URL.Query()
+
queryBase := params.Get("base")
+
queryHead := params.Get("head")
+
if queryBase != "" {
+
base = queryBase
+
}
+
if queryHead != "" {
+
head = queryHead
+
}
+
+
tagBytes, err := tangled.RepoTags(r.Context(), xrpcc, "", 0, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.tags", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
+
var tags types.RepoTagsResponse
+
if err := json.Unmarshal(tagBytes, &tags); err != nil {
+
l.Error("failed to decode XRPC tags response", "err", err)
+
rp.pages.Notice(w, "compare-error", "Failed to produce comparison. Try again later.")
+
return
+
}
+
+
rp.pages.RepoCompareNew(w, pages.RepoCompareNewParams{
+
LoggedInUser: user,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
+
Branches: branches,
+
Tags: tags.Tags,
+
Base: base,
+
Head: head,
+
})
+
}
+
+
func (rp *Repo) Compare(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "RepoCompare")
+
+
user := rp.oauth.GetUser(r)
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to get repo and knot", "err", err)
+
return
+
}
+
+
var diffOpts types.DiffOpts
+
if d := r.URL.Query().Get("diff"); d == "split" {
+
diffOpts.Split = true
+
}
+
+
// if user is navigating to one of
+
// /compare/{base}...{head}
+
// /compare/{base}/{head}
+
var base, head string
+
rest := chi.URLParam(r, "*")
+
+
var parts []string
+
if strings.Contains(rest, "...") {
+
parts = strings.SplitN(rest, "...", 2)
+
} else if strings.Contains(rest, "/") {
+
parts = strings.SplitN(rest, "/", 2)
+
}
+
+
if len(parts) == 2 {
+
base = parts[0]
+
head = parts[1]
+
}
+
+
base, _ = url.PathUnescape(base)
+
head, _ = url.PathUnescape(head)
+
+
if base == "" || head == "" {
+
l.Error("invalid comparison")
+
rp.pages.Error404(w)
+
return
+
}
+
+
scheme := "http"
+
if !rp.config.Core.Dev {
+
scheme = "https"
+
}
+
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
+
xrpcc := &indigoxrpc.Client{
+
Host: host,
+
}
+
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
+
+
branchBytes, err := tangled.RepoBranches(r.Context(), xrpcc, "", 0, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.branches", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
+
var branches types.RepoBranchesResponse
+
if err := json.Unmarshal(branchBytes, &branches); err != nil {
+
l.Error("failed to decode XRPC branches response", "err", err)
+
rp.pages.Notice(w, "compare-error", "Failed to produce comparison. Try again later.")
+
return
+
}
+
+
tagBytes, err := tangled.RepoTags(r.Context(), xrpcc, "", 0, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.tags", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
+
var tags types.RepoTagsResponse
+
if err := json.Unmarshal(tagBytes, &tags); err != nil {
+
l.Error("failed to decode XRPC tags response", "err", err)
+
rp.pages.Notice(w, "compare-error", "Failed to produce comparison. Try again later.")
+
return
+
}
+
+
compareBytes, err := tangled.RepoCompare(r.Context(), xrpcc, repo, base, head)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.compare", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
+
var formatPatch types.RepoFormatPatchResponse
+
if err := json.Unmarshal(compareBytes, &formatPatch); err != nil {
+
l.Error("failed to decode XRPC compare response", "err", err)
+
rp.pages.Notice(w, "compare-error", "Failed to produce comparison. Try again later.")
+
return
+
}
+
+
var diff types.NiceDiff
+
if formatPatch.CombinedPatchRaw != "" {
+
diff = patchutil.AsNiceDiff(formatPatch.CombinedPatchRaw, base)
+
} else {
+
diff = patchutil.AsNiceDiff(formatPatch.FormatPatchRaw, base)
+
}
+
+
rp.pages.RepoCompare(w, pages.RepoCompareParams{
+
LoggedInUser: user,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
+
Branches: branches.Branches,
+
Tags: tags.Tags,
+
Base: base,
+
Head: head,
+
Diff: &diff,
+
DiffOpts: diffOpts,
+
})
+
+
}
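A quick note on the comparison routes above: /compare/main...dev and /compare/main/dev both resolve to base=main, head=dev; the three-dot form is checked first, and each side is path-unescaped after splitting so percent-encoded refs work in either form.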
+25 -18
appview/repo/feed.go
···
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
"tangled.org/core/appview/pagination"
-
"tangled.org/core/appview/reporesolver"
+
"tangled.org/core/orm"
+
"github.com/bluesky-social/indigo/atproto/identity"
"github.com/bluesky-social/indigo/atproto/syntax"
"github.com/gorilla/feeds"
)
-
func (rp *Repo) getRepoFeed(ctx context.Context, f *reporesolver.ResolvedRepo) (*feeds.Feed, error) {
+
func (rp *Repo) getRepoFeed(ctx context.Context, repo *models.Repo, ownerSlashRepo string) (*feeds.Feed, error) {
const feedLimitPerType = 100
-
pulls, err := db.GetPullsWithLimit(rp.db, feedLimitPerType, db.FilterEq("repo_at", f.RepoAt()))
+
pulls, err := db.GetPullsWithLimit(rp.db, feedLimitPerType, orm.FilterEq("repo_at", repo.RepoAt()))
if err != nil {
return nil, err
}
···
issues, err := db.GetIssuesPaginated(
rp.db,
pagination.Page{Limit: feedLimitPerType},
-
db.FilterEq("repo_at", f.RepoAt()),
+
orm.FilterEq("repo_at", repo.RepoAt()),
)
if err != nil {
return nil, err
}
feed := &feeds.Feed{
-
Title: fmt.Sprintf("activity feed for %s", f.OwnerSlashRepo()),
-
Link: &feeds.Link{Href: fmt.Sprintf("%s/%s", rp.config.Core.AppviewHost, f.OwnerSlashRepo()), Type: "text/html", Rel: "alternate"},
+
Title: fmt.Sprintf("activity feed for @%s", ownerSlashRepo),
+
Link: &feeds.Link{Href: fmt.Sprintf("%s/%s", rp.config.Core.AppviewHost, ownerSlashRepo), Type: "text/html", Rel: "alternate"},
Items: make([]*feeds.Item, 0),
Updated: time.UnixMilli(0),
}
for _, pull := range pulls {
-
items, err := rp.createPullItems(ctx, pull, f)
+
items, err := rp.createPullItems(ctx, pull, repo, ownerSlashRepo)
if err != nil {
return nil, err
}
···
}
for _, issue := range issues {
-
item, err := rp.createIssueItem(ctx, issue, f)
+
item, err := rp.createIssueItem(ctx, issue, repo, ownerSlashRepo)
if err != nil {
return nil, err
}
···
return feed, nil
}
-
func (rp *Repo) createPullItems(ctx context.Context, pull *models.Pull, f *reporesolver.ResolvedRepo) ([]*feeds.Item, error) {
+
func (rp *Repo) createPullItems(ctx context.Context, pull *models.Pull, repo *models.Repo, ownerSlashRepo string) ([]*feeds.Item, error) {
owner, err := rp.idResolver.ResolveIdent(ctx, pull.OwnerDid)
if err != nil {
return nil, err
···
var items []*feeds.Item
state := rp.getPullState(pull)
-
description := rp.buildPullDescription(owner.Handle, state, pull, f.OwnerSlashRepo())
+
description := rp.buildPullDescription(owner.Handle, state, pull, ownerSlashRepo)
mainItem := &feeds.Item{
Title: fmt.Sprintf("[PR #%d] %s", pull.PullId, pull.Title),
Description: description,
-
Link: &feeds.Link{Href: fmt.Sprintf("%s/%s/pulls/%d", rp.config.Core.AppviewHost, f.OwnerSlashRepo(), pull.PullId)},
+
Link: &feeds.Link{Href: fmt.Sprintf("%s/%s/pulls/%d", rp.config.Core.AppviewHost, ownerSlashRepo, pull.PullId)},
Created: pull.Created,
Author: &feeds.Author{Name: fmt.Sprintf("@%s", owner.Handle)},
}
···
roundItem := &feeds.Item{
Title: fmt.Sprintf("[PR #%d] %s (round #%d)", pull.PullId, pull.Title, round.RoundNumber),
-
Description: fmt.Sprintf("@%s submitted changes (at round #%d) on PR #%d in %s", owner.Handle, round.RoundNumber, pull.PullId, f.OwnerSlashRepo()),
-
Link: &feeds.Link{Href: fmt.Sprintf("%s/%s/pulls/%d/round/%d/", rp.config.Core.AppviewHost, f.OwnerSlashRepo(), pull.PullId, round.RoundNumber)},
+
Description: fmt.Sprintf("@%s submitted changes (at round #%d) on PR #%d in @%s", owner.Handle, round.RoundNumber, pull.PullId, ownerSlashRepo),
+
Link: &feeds.Link{Href: fmt.Sprintf("%s/%s/pulls/%d/round/%d/", rp.config.Core.AppviewHost, ownerSlashRepo, pull.PullId, round.RoundNumber)},
Created: round.Created,
Author: &feeds.Author{Name: fmt.Sprintf("@%s", owner.Handle)},
}
···
return items, nil
}
-
func (rp *Repo) createIssueItem(ctx context.Context, issue models.Issue, f *reporesolver.ResolvedRepo) (*feeds.Item, error) {
+
func (rp *Repo) createIssueItem(ctx context.Context, issue models.Issue, repo *models.Repo, ownerSlashRepo string) (*feeds.Item, error) {
owner, err := rp.idResolver.ResolveIdent(ctx, issue.Did)
if err != nil {
return nil, err
···
return &feeds.Item{
Title: fmt.Sprintf("[Issue #%d] %s", issue.IssueId, issue.Title),
-
Description: fmt.Sprintf("@%s %s issue #%d in %s", owner.Handle, state, issue.IssueId, f.OwnerSlashRepo()),
-
Link: &feeds.Link{Href: fmt.Sprintf("%s/%s/issues/%d", rp.config.Core.AppviewHost, f.OwnerSlashRepo(), issue.IssueId)},
+
Description: fmt.Sprintf("@%s %s issue #%d in @%s", owner.Handle, state, issue.IssueId, ownerSlashRepo),
+
Link: &feeds.Link{Href: fmt.Sprintf("%s/%s/issues/%d", rp.config.Core.AppviewHost, ownerSlashRepo, issue.IssueId)},
Created: issue.Created,
Author: &feeds.Author{Name: fmt.Sprintf("@%s", owner.Handle)},
}, nil
···
return fmt.Sprintf("%s in %s", base, repoName)
}
-
func (rp *Repo) RepoAtomFeed(w http.ResponseWriter, r *http.Request) {
+
func (rp *Repo) AtomFeed(w http.ResponseWriter, r *http.Request) {
f, err := rp.repoResolver.Resolve(r)
if err != nil {
log.Println("failed to fully resolve repo:", err)
return
}
+
repoOwnerId, ok := r.Context().Value("resolvedId").(identity.Identity)
+
if !ok || repoOwnerId.Handle.IsInvalidHandle() {
+
log.Println("failed to get resolved repo owner id")
+
return
+
}
+
ownerSlashRepo := repoOwnerId.Handle.String() + "/" + f.Name
-
feed, err := rp.getRepoFeed(r.Context(), f)
+
feed, err := rp.getRepoFeed(r.Context(), f, ownerSlashRepo)
if err != nil {
log.Println("failed to get repo feed:", err)
rp.pages.Error500(w)
+28 -30
appview/repo/index.go
···
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
"tangled.org/core/appview/pages"
-
"tangled.org/core/appview/reporesolver"
"tangled.org/core/appview/xrpcclient"
+
"tangled.org/core/orm"
"tangled.org/core/types"
"github.com/go-chi/chi/v5"
"github.com/go-enry/go-enry/v2"
)
-
func (rp *Repo) RepoIndex(w http.ResponseWriter, r *http.Request) {
+
func (rp *Repo) Index(w http.ResponseWriter, r *http.Request) {
l := rp.logger.With("handler", "RepoIndex")
ref := chi.URLParam(r, "ref")
···
}
user := rp.oauth.GetUser(r)
-
repoInfo := f.RepoInfo(user)
// Build index response from multiple XRPC calls
result, err := rp.buildIndexResponse(r.Context(), xrpcc, f, ref)
···
rp.pages.RepoIndexPage(w, pages.RepoIndexParams{
LoggedInUser: user,
NeedsKnotUpgrade: true,
-
RepoInfo: repoInfo,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
})
return
}
···
l.Error("failed to get email to did map", "err", err)
}
-
vc, err := commitverify.GetVerifiedObjectCommits(rp.db, emailToDidMap, commitsTrunc)
+
vc, err := commitverify.GetVerifiedCommits(rp.db, emailToDidMap, commitsTrunc)
if err != nil {
l.Error("failed to GetVerifiedObjectCommits", "err", err)
}
···
for _, c := range commitsTrunc {
shas = append(shas, c.Hash.String())
}
-
pipelines, err := getPipelineStatuses(rp.db, repoInfo, shas)
+
pipelines, err := getPipelineStatuses(rp.db, f, shas)
if err != nil {
l.Error("failed to fetch pipeline statuses", "err", err)
// non-fatal
···
rp.pages.RepoIndexPage(w, pages.RepoIndexParams{
LoggedInUser: user,
-
RepoInfo: repoInfo,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
TagMap: tagMap,
RepoIndexResponse: *result,
CommitsTrunc: commitsTrunc,
TagsTrunc: tagsTrunc,
// ForkInfo: forkInfo, // TODO: reinstate this after xrpc properly lands
-
BranchesTrunc: branchesTrunc,
-
EmailToDidOrHandle: emailToDidOrHandle(rp, emailToDidMap),
-
VerifiedCommits: vc,
-
Languages: languageInfo,
-
Pipelines: pipelines,
+
BranchesTrunc: branchesTrunc,
+
EmailToDid: emailToDidMap,
+
VerifiedCommits: vc,
+
Languages: languageInfo,
+
Pipelines: pipelines,
})
}
func (rp *Repo) getLanguageInfo(
ctx context.Context,
l *slog.Logger,
-
f *reporesolver.ResolvedRepo,
+
repo *models.Repo,
xrpcc *indigoxrpc.Client,
currentRef string,
isDefaultRef bool,
···
// first attempt to fetch from db
langs, err := db.GetRepoLanguages(
rp.db,
-
db.FilterEq("repo_at", f.RepoAt()),
-
db.FilterEq("ref", currentRef),
+
orm.FilterEq("repo_at", repo.RepoAt()),
+
orm.FilterEq("ref", currentRef),
)
if err != nil || langs == nil {
// non-fatal, fetch langs from ks via XRPC
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
-
ls, err := tangled.RepoLanguages(ctx, xrpcc, currentRef, repo)
+
didSlashRepo := fmt.Sprintf("%s/%s", repo.Did, repo.Name)
+
ls, err := tangled.RepoLanguages(ctx, xrpcc, currentRef, didSlashRepo)
if err != nil {
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
l.Error("failed to call XRPC repo.languages", "err", xrpcerr)
···
for _, lang := range ls.Languages {
langs = append(langs, models.RepoLanguage{
-
RepoAt: f.RepoAt(),
+
RepoAt: repo.RepoAt(),
Ref: currentRef,
IsDefaultRef: isDefaultRef,
Language: lang.Name,
···
defer tx.Rollback()
// update appview's cache
-
err = db.UpdateRepoLanguages(tx, f.RepoAt(), currentRef, langs)
+
err = db.UpdateRepoLanguages(tx, repo.RepoAt(), currentRef, langs)
if err != nil {
// non-fatal
l.Error("failed to cache lang results", "err", err)
···
}
// buildIndexResponse creates a RepoIndexResponse by combining multiple xrpc calls in parallel
-
func (rp *Repo) buildIndexResponse(ctx context.Context, xrpcc *indigoxrpc.Client, f *reporesolver.ResolvedRepo, ref string) (*types.RepoIndexResponse, error) {
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
+
func (rp *Repo) buildIndexResponse(ctx context.Context, xrpcc *indigoxrpc.Client, repo *models.Repo, ref string) (*types.RepoIndexResponse, error) {
+
didSlashRepo := fmt.Sprintf("%s/%s", repo.Did, repo.Name)
// first get branches to determine the ref if not specified
-
branchesBytes, err := tangled.RepoBranches(ctx, xrpcc, "", 0, repo)
+
branchesBytes, err := tangled.RepoBranches(ctx, xrpcc, "", 0, didSlashRepo)
if err != nil {
return nil, fmt.Errorf("failed to call repoBranches: %w", err)
}
···
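The hunks below keep the fan-out described in the `buildIndexResponse` comment above: each knot XRPC call runs in its own goroutine behind a shared `sync.WaitGroup`, and failures are accumulated with `errors.Join`. A minimal, self-contained sketch of that shape (the `fanOut` helper and the mutex around `errs` are illustrative additions, not part of the handler):

```go
package main

import (
	"errors"
	"fmt"
	"sync"
)

// fanOut runs each named fetch concurrently and joins any failures into a
// single error, mirroring the WaitGroup + errors.Join pattern used by
// buildIndexResponse. The mutex guards the shared errs value.
func fanOut(fetches map[string]func() error) error {
	var (
		wg   sync.WaitGroup
		mu   sync.Mutex
		errs error
	)
	for name, fn := range fetches {
		wg.Add(1)
		go func(name string, fn func() error) {
			defer wg.Done()
			if err := fn(); err != nil {
				mu.Lock()
				errs = errors.Join(errs, fmt.Errorf("failed to call %s: %w", name, err))
				mu.Unlock()
			}
		}(name, fn)
	}
	wg.Wait()
	return errs
}

func main() {
	err := fanOut(map[string]func() error{
		"repoTags": func() error { return nil },
		"repoTree": func() error { return errors.New("boom") },
		"repoLog":  func() error { return nil },
	})
	fmt.Println(err) // failed to call repoTree: boom
}
```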
wg.Add(1)
go func() {
defer wg.Done()
-
tagsBytes, err := tangled.RepoTags(ctx, xrpcc, "", 0, repo)
+
tagsBytes, err := tangled.RepoTags(ctx, xrpcc, "", 0, didSlashRepo)
if err != nil {
errs = errors.Join(errs, fmt.Errorf("failed to call repoTags: %w", err))
return
···
wg.Add(1)
go func() {
defer wg.Done()
-
resp, err := tangled.RepoTree(ctx, xrpcc, "", ref, repo)
+
resp, err := tangled.RepoTree(ctx, xrpcc, "", ref, didSlashRepo)
if err != nil {
errs = errors.Join(errs, fmt.Errorf("failed to call repoTree: %w", err))
return
···
wg.Add(1)
go func() {
defer wg.Done()
-
logBytes, err := tangled.RepoLog(ctx, xrpcc, "", 50, "", ref, repo)
+
logBytes, err := tangled.RepoLog(ctx, xrpcc, "", 50, "", ref, didSlashRepo)
if err != nil {
errs = errors.Join(errs, fmt.Errorf("failed to call repoLog: %w", err))
return
···
if treeResp != nil && treeResp.Files != nil {
for _, file := range treeResp.Files {
niceFile := types.NiceTree{
-
IsFile: file.Is_file,
-
IsSubtree: file.Is_subtree,
-
Name: file.Name,
-
Mode: file.Mode,
-
Size: file.Size,
+
Name: file.Name,
+
Mode: file.Mode,
+
Size: file.Size,
}
+
if file.Last_commit != nil {
when, _ := time.Parse(time.RFC3339, file.Last_commit.When)
niceFile.LastCommit = &types.LastCommitInfo{
+220
appview/repo/log.go
···
+
package repo
+
+
import (
+
"encoding/json"
+
"fmt"
+
"net/http"
+
"net/url"
+
"strconv"
+
+
"tangled.org/core/api/tangled"
+
"tangled.org/core/appview/commitverify"
+
"tangled.org/core/appview/db"
+
"tangled.org/core/appview/models"
+
"tangled.org/core/appview/pages"
+
xrpcclient "tangled.org/core/appview/xrpcclient"
+
"tangled.org/core/types"
+
+
indigoxrpc "github.com/bluesky-social/indigo/xrpc"
+
"github.com/go-chi/chi/v5"
+
"github.com/go-git/go-git/v5/plumbing"
+
)
+
+
func (rp *Repo) Log(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "RepoLog")
+
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to fully resolve repo", "err", err)
+
return
+
}
+
+
page := 1
+
if r.URL.Query().Get("page") != "" {
+
page, err = strconv.Atoi(r.URL.Query().Get("page"))
+
if err != nil {
+
page = 1
+
}
+
}
+
+
ref := chi.URLParam(r, "ref")
+
ref, _ = url.PathUnescape(ref)
+
+
scheme := "http"
+
if !rp.config.Core.Dev {
+
scheme = "https"
+
}
+
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
+
xrpcc := &indigoxrpc.Client{
+
Host: host,
+
}
+
+
limit := int64(60)
+
cursor := ""
+
if page > 1 {
+
// Convert page number to cursor (offset)
+
offset := (page - 1) * int(limit)
+
cursor = strconv.Itoa(offset)
+
}
+
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
+
xrpcBytes, err := tangled.RepoLog(r.Context(), xrpcc, cursor, limit, "", ref, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.log", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
+
var xrpcResp types.RepoLogResponse
+
if err := json.Unmarshal(xrpcBytes, &xrpcResp); err != nil {
+
l.Error("failed to decode XRPC response", "err", err)
+
rp.pages.Error503(w)
+
return
+
}
+
+
tagBytes, err := tangled.RepoTags(r.Context(), xrpcc, "", 0, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.tags", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
+
tagMap := make(map[string][]string)
+
if tagBytes != nil {
+
var tagResp types.RepoTagsResponse
+
if err := json.Unmarshal(tagBytes, &tagResp); err == nil {
+
for _, tag := range tagResp.Tags {
+
hash := tag.Hash
+
if tag.Tag != nil {
+
hash = tag.Tag.Target.String()
+
}
+
tagMap[hash] = append(tagMap[hash], tag.Name)
+
}
+
}
+
}
+
+
branchBytes, err := tangled.RepoBranches(r.Context(), xrpcc, "", 0, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.branches", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
+
if branchBytes != nil {
+
var branchResp types.RepoBranchesResponse
+
if err := json.Unmarshal(branchBytes, &branchResp); err == nil {
+
for _, branch := range branchResp.Branches {
+
tagMap[branch.Hash] = append(tagMap[branch.Hash], branch.Name)
+
}
+
}
+
}
+
+
user := rp.oauth.GetUser(r)
+
+
emailToDidMap, err := db.GetEmailToDid(rp.db, uniqueEmails(xrpcResp.Commits), true)
+
if err != nil {
+
l.Error("failed to fetch email to did mapping", "err", err)
+
}
+
+
vc, err := commitverify.GetVerifiedCommits(rp.db, emailToDidMap, xrpcResp.Commits)
+
if err != nil {
+
l.Error("failed to GetVerifiedObjectCommits", "err", err)
+
}
+
+
var shas []string
+
for _, c := range xrpcResp.Commits {
+
shas = append(shas, c.Hash.String())
+
}
+
pipelines, err := getPipelineStatuses(rp.db, f, shas)
+
if err != nil {
+
l.Error("failed to getPipelineStatuses", "err", err)
+
// non-fatal
+
}
+
+
rp.pages.RepoLog(w, pages.RepoLogParams{
+
LoggedInUser: user,
+
TagMap: tagMap,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
+
RepoLogResponse: xrpcResp,
+
EmailToDid: emailToDidMap,
+
VerifiedCommits: vc,
+
Pipelines: pipelines,
+
})
+
}
+
+
func (rp *Repo) Commit(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "RepoCommit")
+
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to fully resolve repo", "err", err)
+
return
+
}
+
ref := chi.URLParam(r, "ref")
+
ref, _ = url.PathUnescape(ref)
+
+
var diffOpts types.DiffOpts
+
if d := r.URL.Query().Get("diff"); d == "split" {
+
diffOpts.Split = true
+
}
+
+
if !plumbing.IsHash(ref) {
+
rp.pages.Error404(w)
+
return
+
}
+
+
scheme := "http"
+
if !rp.config.Core.Dev {
+
scheme = "https"
+
}
+
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
+
xrpcc := &indigoxrpc.Client{
+
Host: host,
+
}
+
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
+
xrpcBytes, err := tangled.RepoDiff(r.Context(), xrpcc, ref, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.diff", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
+
var result types.RepoCommitResponse
+
if err := json.Unmarshal(xrpcBytes, &result); err != nil {
+
l.Error("failed to decode XRPC response", "err", err)
+
rp.pages.Error503(w)
+
return
+
}
+
+
emailToDidMap, err := db.GetEmailToDid(rp.db, []string{result.Diff.Commit.Committer.Email, result.Diff.Commit.Author.Email}, true)
+
if err != nil {
+
l.Error("failed to get email to did mapping", "err", err)
+
}
+
+
vc, err := commitverify.GetVerifiedCommits(rp.db, emailToDidMap, []types.Commit{result.Diff.Commit})
+
if err != nil {
+
l.Error("failed to GetVerifiedCommits", "err", err)
+
}
+
+
user := rp.oauth.GetUser(r)
+
pipelines, err := getPipelineStatuses(rp.db, f, []string{result.Diff.Commit.This})
+
if err != nil {
+
l.Error("failed to getPipelineStatuses", "err", err)
+
// non-fatal
+
}
+
var pipeline *models.Pipeline
+
if p, ok := pipelines[result.Diff.Commit.This]; ok {
+
pipeline = &p
+
}
+
+
rp.pages.RepoCommit(w, pages.RepoCommitParams{
+
LoggedInUser: user,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
+
RepoCommitResponse: result,
+
EmailToDid: emailToDidMap,
+
VerifiedCommit: vc,
+
Pipeline: pipeline,
+
DiffOpts: diffOpts,
+
})
+
}
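The new `Log` handler above paginates by `?page=N` on the appview side while the knot's `repo.log` XRPC expects an offset-style cursor, so page N becomes offset `(N-1)*limit` with `limit = 60`. A minimal sketch of just that conversion (`pageToCursor` is an illustrative helper, not a function in the handler):

```go
package main

import (
	"fmt"
	"strconv"
)

// pageToCursor maps a 1-based page number onto the offset cursor passed to
// tangled.RepoLog: the first page sends an empty cursor, later pages send
// the number of commits to skip.
func pageToCursor(page int, limit int64) string {
	if page <= 1 {
		return "" // first page: no cursor
	}
	return strconv.Itoa((page - 1) * int(limit))
}

func main() {
	fmt.Println(pageToCursor(1, 60)) // ""
	fmt.Println(pageToCursor(3, 60)) // "120"
}
```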
+9 -8
appview/repo/opengraph.go
···
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
"tangled.org/core/appview/ogcard"
+
"tangled.org/core/orm"
"tangled.org/core/types"
)
···
// Draw star icon, count, and label
// Align icon baseline with text baseline
iconBaselineOffset := int(textSize) / 2
-
err = statsArea.DrawSVGIcon("static/icons/star.svg", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor)
+
err = statsArea.DrawLucideIcon("star", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor)
if err != nil {
log.Printf("failed to draw star icon: %v", err)
}
···
// Draw issues icon, count, and label
issueStartX := currentX
-
err = statsArea.DrawSVGIcon("static/icons/circle-dot.svg", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor)
+
err = statsArea.DrawLucideIcon("circle-dot", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor)
if err != nil {
log.Printf("failed to draw circle-dot icon: %v", err)
}
···
// Draw pull request icon, count, and label
prStartX := currentX
-
err = statsArea.DrawSVGIcon("static/icons/git-pull-request.svg", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor)
+
err = statsArea.DrawLucideIcon("git-pull-request", currentX, statsY+iconBaselineOffset-iconSize/2+5, iconSize, iconColor)
if err != nil {
log.Printf("failed to draw git-pull-request icon: %v", err)
}
···
dollyX := dollyBounds.Min.X + (dollyBounds.Dx() / 2) - (dollySize / 2)
dollyY := statsY + iconBaselineOffset - dollySize/2 + 25
dollyColor := color.RGBA{180, 180, 180, 255} // light gray
-
err = dollyArea.DrawSVGIcon("templates/fragments/dolly/silhouette.svg", dollyX, dollyY, dollySize, dollyColor)
+
err = dollyArea.DrawDollySilhouette(dollyX, dollyY, dollySize, dollyColor)
if err != nil {
log.Printf("dolly silhouette not available (this is ok): %v", err)
}
···
return nil
}
-
func (rp *Repo) RepoOpenGraphSummary(w http.ResponseWriter, r *http.Request) {
+
func (rp *Repo) Opengraph(w http.ResponseWriter, r *http.Request) {
f, err := rp.repoResolver.Resolve(r)
if err != nil {
log.Println("failed to get repo and knot", err)
···
var languageStats []types.RepoLanguageDetails
langs, err := db.GetRepoLanguages(
rp.db,
-
db.FilterEq("repo_at", f.RepoAt()),
-
db.FilterEq("is_default_ref", 1),
+
orm.FilterEq("repo_at", f.RepoAt()),
+
orm.FilterEq("is_default_ref", 1),
)
if err != nil {
log.Printf("failed to get language stats from db: %v", err)
···
})
}
-
card, err := rp.drawRepoSummaryCard(&f.Repo, languageStats)
+
card, err := rp.drawRepoSummaryCard(f, languageStats)
if err != nil {
log.Println("failed to draw repo summary card", err)
http.Error(w, "failed to draw repo summary card", http.StatusInternalServerError)
+39 -1413
appview/repo/repo.go
···
import (
"context"
"database/sql"
-
"encoding/json"
"errors"
"fmt"
-
"io"
"log/slog"
"net/http"
"net/url"
-
"path/filepath"
"slices"
-
"strconv"
"strings"
"time"
"tangled.org/core/api/tangled"
-
"tangled.org/core/appview/commitverify"
"tangled.org/core/appview/config"
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
"tangled.org/core/appview/notify"
"tangled.org/core/appview/oauth"
"tangled.org/core/appview/pages"
-
"tangled.org/core/appview/pages/markup"
"tangled.org/core/appview/reporesolver"
"tangled.org/core/appview/validator"
xrpcclient "tangled.org/core/appview/xrpcclient"
"tangled.org/core/eventconsumer"
"tangled.org/core/idresolver"
-
"tangled.org/core/patchutil"
+
"tangled.org/core/orm"
"tangled.org/core/rbac"
"tangled.org/core/tid"
-
"tangled.org/core/types"
"tangled.org/core/xrpc/serviceauth"
comatproto "github.com/bluesky-social/indigo/api/atproto"
atpclient "github.com/bluesky-social/indigo/atproto/client"
"github.com/bluesky-social/indigo/atproto/syntax"
lexutil "github.com/bluesky-social/indigo/lex/util"
-
indigoxrpc "github.com/bluesky-social/indigo/xrpc"
securejoin "github.com/cyphar/filepath-securejoin"
"github.com/go-chi/chi/v5"
-
"github.com/go-git/go-git/v5/plumbing"
)
type Repo struct {
···
}
}
-
func (rp *Repo) DownloadArchive(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "DownloadArchive")
-
-
ref := chi.URLParam(r, "ref")
-
ref, _ = url.PathUnescape(ref)
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
-
-
scheme := "http"
-
if !rp.config.Core.Dev {
-
scheme = "https"
-
}
-
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
-
xrpcc := &indigoxrpc.Client{
-
Host: host,
-
}
-
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
-
archiveBytes, err := tangled.RepoArchive(r.Context(), xrpcc, "tar.gz", "", ref, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.archive", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
// Set headers for file download, just pass along whatever the knot specifies
-
safeRefFilename := strings.ReplaceAll(plumbing.ReferenceName(ref).Short(), "/", "-")
-
filename := fmt.Sprintf("%s-%s.tar.gz", f.Name, safeRefFilename)
-
w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
-
w.Header().Set("Content-Type", "application/gzip")
-
w.Header().Set("Content-Length", fmt.Sprintf("%d", len(archiveBytes)))
-
-
// Write the archive data directly
-
w.Write(archiveBytes)
-
}
-
-
func (rp *Repo) RepoLog(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "RepoLog")
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to fully resolve repo", "err", err)
-
return
-
}
-
-
page := 1
-
if r.URL.Query().Get("page") != "" {
-
page, err = strconv.Atoi(r.URL.Query().Get("page"))
-
if err != nil {
-
page = 1
-
}
-
}
-
-
ref := chi.URLParam(r, "ref")
-
ref, _ = url.PathUnescape(ref)
-
-
scheme := "http"
-
if !rp.config.Core.Dev {
-
scheme = "https"
-
}
-
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
-
xrpcc := &indigoxrpc.Client{
-
Host: host,
-
}
-
-
limit := int64(60)
-
cursor := ""
-
if page > 1 {
-
// Convert page number to cursor (offset)
-
offset := (page - 1) * int(limit)
-
cursor = strconv.Itoa(offset)
-
}
-
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
-
xrpcBytes, err := tangled.RepoLog(r.Context(), xrpcc, cursor, limit, "", ref, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.log", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
var xrpcResp types.RepoLogResponse
-
if err := json.Unmarshal(xrpcBytes, &xrpcResp); err != nil {
-
l.Error("failed to decode XRPC response", "err", err)
-
rp.pages.Error503(w)
-
return
-
}
-
-
tagBytes, err := tangled.RepoTags(r.Context(), xrpcc, "", 0, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.tags", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
tagMap := make(map[string][]string)
-
if tagBytes != nil {
-
var tagResp types.RepoTagsResponse
-
if err := json.Unmarshal(tagBytes, &tagResp); err == nil {
-
for _, tag := range tagResp.Tags {
-
hash := tag.Hash
-
if tag.Tag != nil {
-
hash = tag.Tag.Target.String()
-
}
-
tagMap[hash] = append(tagMap[hash], tag.Name)
-
}
-
}
-
}
-
-
branchBytes, err := tangled.RepoBranches(r.Context(), xrpcc, "", 0, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.branches", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
if branchBytes != nil {
-
var branchResp types.RepoBranchesResponse
-
if err := json.Unmarshal(branchBytes, &branchResp); err == nil {
-
for _, branch := range branchResp.Branches {
-
tagMap[branch.Hash] = append(tagMap[branch.Hash], branch.Name)
-
}
-
}
-
}
-
-
user := rp.oauth.GetUser(r)
-
-
emailToDidMap, err := db.GetEmailToDid(rp.db, uniqueEmails(xrpcResp.Commits), true)
-
if err != nil {
-
l.Error("failed to fetch email to did mapping", "err", err)
-
}
-
-
vc, err := commitverify.GetVerifiedObjectCommits(rp.db, emailToDidMap, xrpcResp.Commits)
-
if err != nil {
-
l.Error("failed to GetVerifiedObjectCommits", "err", err)
-
}
-
-
repoInfo := f.RepoInfo(user)
-
-
var shas []string
-
for _, c := range xrpcResp.Commits {
-
shas = append(shas, c.Hash.String())
-
}
-
pipelines, err := getPipelineStatuses(rp.db, repoInfo, shas)
-
if err != nil {
-
l.Error("failed to getPipelineStatuses", "err", err)
-
// non-fatal
-
}
-
-
rp.pages.RepoLog(w, pages.RepoLogParams{
-
LoggedInUser: user,
-
TagMap: tagMap,
-
RepoInfo: repoInfo,
-
RepoLogResponse: xrpcResp,
-
EmailToDidOrHandle: emailToDidOrHandle(rp, emailToDidMap),
-
VerifiedCommits: vc,
-
Pipelines: pipelines,
-
})
-
}
-
-
func (rp *Repo) RepoDescriptionEdit(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "RepoDescriptionEdit")
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
w.WriteHeader(http.StatusBadRequest)
-
return
-
}
-
-
user := rp.oauth.GetUser(r)
-
rp.pages.EditRepoDescriptionFragment(w, pages.RepoDescriptionParams{
-
RepoInfo: f.RepoInfo(user),
-
})
-
}
-
-
func (rp *Repo) RepoDescription(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "RepoDescription")
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
w.WriteHeader(http.StatusBadRequest)
-
return
-
}
-
-
repoAt := f.RepoAt()
-
rkey := repoAt.RecordKey().String()
-
if rkey == "" {
-
l.Error("invalid aturi for repo", "err", err)
-
w.WriteHeader(http.StatusInternalServerError)
-
return
-
}
-
-
user := rp.oauth.GetUser(r)
-
-
switch r.Method {
-
case http.MethodGet:
-
rp.pages.RepoDescriptionFragment(w, pages.RepoDescriptionParams{
-
RepoInfo: f.RepoInfo(user),
-
})
-
return
-
case http.MethodPut:
-
newDescription := r.FormValue("description")
-
client, err := rp.oauth.AuthorizedClient(r)
-
if err != nil {
-
l.Error("failed to get client")
-
rp.pages.Notice(w, "repo-notice", "Failed to update description, try again later.")
-
return
-
}
-
-
// optimistic update
-
err = db.UpdateDescription(rp.db, string(repoAt), newDescription)
-
if err != nil {
-
l.Error("failed to perform update-description query", "err", err)
-
rp.pages.Notice(w, "repo-notice", "Failed to update description, try again later.")
-
return
-
}
-
-
newRepo := f.Repo
-
newRepo.Description = newDescription
-
record := newRepo.AsRecord()
-
-
// this is a bit of a pain because the golang atproto impl does not allow nil SwapRecord field
-
//
-
// SwapRecord is optional and should happen automagically, but given that it does not, we have to perform two requests
-
ex, err := comatproto.RepoGetRecord(r.Context(), client, "", tangled.RepoNSID, newRepo.Did, newRepo.Rkey)
-
if err != nil {
-
// failed to get record
-
rp.pages.Notice(w, "repo-notice", "Failed to update description, no record found on PDS.")
-
return
-
}
-
_, err = comatproto.RepoPutRecord(r.Context(), client, &comatproto.RepoPutRecord_Input{
-
Collection: tangled.RepoNSID,
-
Repo: newRepo.Did,
-
Rkey: newRepo.Rkey,
-
SwapRecord: ex.Cid,
-
Record: &lexutil.LexiconTypeDecoder{
-
Val: &record,
-
},
-
})
-
-
if err != nil {
-
l.Error("failed to perferom update-description query", "err", err)
-
// failed to get record
-
rp.pages.Notice(w, "repo-notice", "Failed to update description, unable to save to PDS.")
-
return
-
}
-
-
newRepoInfo := f.RepoInfo(user)
-
newRepoInfo.Description = newDescription
-
-
rp.pages.RepoDescriptionFragment(w, pages.RepoDescriptionParams{
-
RepoInfo: newRepoInfo,
-
})
-
return
-
}
-
}
-
-
func (rp *Repo) RepoCommit(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "RepoCommit")
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to fully resolve repo", "err", err)
-
return
-
}
-
ref := chi.URLParam(r, "ref")
-
ref, _ = url.PathUnescape(ref)
-
-
var diffOpts types.DiffOpts
-
if d := r.URL.Query().Get("diff"); d == "split" {
-
diffOpts.Split = true
-
}
-
-
if !plumbing.IsHash(ref) {
-
rp.pages.Error404(w)
-
return
-
}
-
-
scheme := "http"
-
if !rp.config.Core.Dev {
-
scheme = "https"
-
}
-
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
-
xrpcc := &indigoxrpc.Client{
-
Host: host,
-
}
-
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
-
xrpcBytes, err := tangled.RepoDiff(r.Context(), xrpcc, ref, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.diff", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
var result types.RepoCommitResponse
-
if err := json.Unmarshal(xrpcBytes, &result); err != nil {
-
l.Error("failed to decode XRPC response", "err", err)
-
rp.pages.Error503(w)
-
return
-
}
-
-
emailToDidMap, err := db.GetEmailToDid(rp.db, []string{result.Diff.Commit.Committer.Email, result.Diff.Commit.Author.Email}, true)
-
if err != nil {
-
l.Error("failed to get email to did mapping", "err", err)
-
}
-
-
vc, err := commitverify.GetVerifiedCommits(rp.db, emailToDidMap, []types.NiceDiff{*result.Diff})
-
if err != nil {
-
l.Error("failed to GetVerifiedCommits", "err", err)
-
}
-
-
user := rp.oauth.GetUser(r)
-
repoInfo := f.RepoInfo(user)
-
pipelines, err := getPipelineStatuses(rp.db, repoInfo, []string{result.Diff.Commit.This})
-
if err != nil {
-
l.Error("failed to getPipelineStatuses", "err", err)
-
// non-fatal
-
}
-
var pipeline *models.Pipeline
-
if p, ok := pipelines[result.Diff.Commit.This]; ok {
-
pipeline = &p
-
}
-
-
rp.pages.RepoCommit(w, pages.RepoCommitParams{
-
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
-
RepoCommitResponse: result,
-
EmailToDidOrHandle: emailToDidOrHandle(rp, emailToDidMap),
-
VerifiedCommit: vc,
-
Pipeline: pipeline,
-
DiffOpts: diffOpts,
-
})
-
}
-
-
func (rp *Repo) RepoTree(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "RepoTree")
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to fully resolve repo", "err", err)
-
return
-
}
-
-
ref := chi.URLParam(r, "ref")
-
ref, _ = url.PathUnescape(ref)
-
-
// if the tree path has a trailing slash, let's strip it
-
// so we don't 404
-
treePath := chi.URLParam(r, "*")
-
treePath, _ = url.PathUnescape(treePath)
-
treePath = strings.TrimSuffix(treePath, "/")
-
-
scheme := "http"
-
if !rp.config.Core.Dev {
-
scheme = "https"
-
}
-
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
-
xrpcc := &indigoxrpc.Client{
-
Host: host,
-
}
-
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
-
xrpcResp, err := tangled.RepoTree(r.Context(), xrpcc, treePath, ref, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.tree", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
// Convert XRPC response to internal types.RepoTreeResponse
-
files := make([]types.NiceTree, len(xrpcResp.Files))
-
for i, xrpcFile := range xrpcResp.Files {
-
file := types.NiceTree{
-
Name: xrpcFile.Name,
-
Mode: xrpcFile.Mode,
-
Size: int64(xrpcFile.Size),
-
IsFile: xrpcFile.Is_file,
-
IsSubtree: xrpcFile.Is_subtree,
-
}
-
-
// Convert last commit info if present
-
if xrpcFile.Last_commit != nil {
-
commitWhen, _ := time.Parse(time.RFC3339, xrpcFile.Last_commit.When)
-
file.LastCommit = &types.LastCommitInfo{
-
Hash: plumbing.NewHash(xrpcFile.Last_commit.Hash),
-
Message: xrpcFile.Last_commit.Message,
-
When: commitWhen,
-
}
-
}
-
-
files[i] = file
-
}
-
-
result := types.RepoTreeResponse{
-
Ref: xrpcResp.Ref,
-
Files: files,
-
}
-
-
if xrpcResp.Parent != nil {
-
result.Parent = *xrpcResp.Parent
-
}
-
if xrpcResp.Dotdot != nil {
-
result.DotDot = *xrpcResp.Dotdot
-
}
-
if xrpcResp.Readme != nil {
-
result.ReadmeFileName = xrpcResp.Readme.Filename
-
result.Readme = xrpcResp.Readme.Contents
-
}
-
-
// redirects tree paths trying to access a blob; in this case the result.Files is unpopulated,
-
// so we can safely redirect to the "parent" (which is the same file).
-
if len(result.Files) == 0 && result.Parent == treePath {
-
redirectTo := fmt.Sprintf("/%s/blob/%s/%s", f.OwnerSlashRepo(), url.PathEscape(ref), result.Parent)
-
http.Redirect(w, r, redirectTo, http.StatusFound)
-
return
-
}
-
-
user := rp.oauth.GetUser(r)
-
-
var breadcrumbs [][]string
-
breadcrumbs = append(breadcrumbs, []string{f.Name, fmt.Sprintf("/%s/tree/%s", f.OwnerSlashRepo(), url.PathEscape(ref))})
-
if treePath != "" {
-
for idx, elem := range strings.Split(treePath, "/") {
-
breadcrumbs = append(breadcrumbs, []string{elem, fmt.Sprintf("%s/%s", breadcrumbs[idx][1], url.PathEscape(elem))})
-
}
-
}
-
-
sortFiles(result.Files)
-
-
rp.pages.RepoTree(w, pages.RepoTreeParams{
-
LoggedInUser: user,
-
BreadCrumbs: breadcrumbs,
-
TreePath: treePath,
-
RepoInfo: f.RepoInfo(user),
-
RepoTreeResponse: result,
-
})
-
}
-
-
func (rp *Repo) RepoTags(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "RepoTags")
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
-
-
scheme := "http"
-
if !rp.config.Core.Dev {
-
scheme = "https"
-
}
-
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
-
xrpcc := &indigoxrpc.Client{
-
Host: host,
-
}
-
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
-
xrpcBytes, err := tangled.RepoTags(r.Context(), xrpcc, "", 0, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.tags", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
var result types.RepoTagsResponse
-
if err := json.Unmarshal(xrpcBytes, &result); err != nil {
-
l.Error("failed to decode XRPC response", "err", err)
-
rp.pages.Error503(w)
-
return
-
}
-
-
artifacts, err := db.GetArtifact(rp.db, db.FilterEq("repo_at", f.RepoAt()))
-
if err != nil {
-
l.Error("failed grab artifacts", "err", err)
-
return
-
}
-
-
// convert artifacts to map for easy UI building
-
artifactMap := make(map[plumbing.Hash][]models.Artifact)
-
for _, a := range artifacts {
-
artifactMap[a.Tag] = append(artifactMap[a.Tag], a)
-
}
-
-
var danglingArtifacts []models.Artifact
-
for _, a := range artifacts {
-
found := false
-
for _, t := range result.Tags {
-
if t.Tag != nil {
-
if t.Tag.Hash == a.Tag {
-
found = true
-
}
-
}
-
}
-
-
if !found {
-
danglingArtifacts = append(danglingArtifacts, a)
-
}
-
}
-
-
user := rp.oauth.GetUser(r)
-
rp.pages.RepoTags(w, pages.RepoTagsParams{
-
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
-
RepoTagsResponse: result,
-
ArtifactMap: artifactMap,
-
DanglingArtifacts: danglingArtifacts,
-
})
-
}
-
-
func (rp *Repo) RepoBranches(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "RepoBranches")
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
-
-
scheme := "http"
-
if !rp.config.Core.Dev {
-
scheme = "https"
-
}
-
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
-
xrpcc := &indigoxrpc.Client{
-
Host: host,
-
}
-
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
-
xrpcBytes, err := tangled.RepoBranches(r.Context(), xrpcc, "", 0, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.branches", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
var result types.RepoBranchesResponse
-
if err := json.Unmarshal(xrpcBytes, &result); err != nil {
-
l.Error("failed to decode XRPC response", "err", err)
-
rp.pages.Error503(w)
-
return
-
}
-
-
sortBranches(result.Branches)
-
-
user := rp.oauth.GetUser(r)
-
rp.pages.RepoBranches(w, pages.RepoBranchesParams{
-
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
-
RepoBranchesResponse: result,
-
})
-
}
-
-
func (rp *Repo) DeleteBranch(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "DeleteBranch")
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
-
-
noticeId := "delete-branch-error"
-
fail := func(msg string, err error) {
-
l.Error(msg, "err", err)
-
rp.pages.Notice(w, noticeId, msg)
-
}
-
-
branch := r.FormValue("branch")
-
if branch == "" {
-
fail("No branch provided.", nil)
-
return
-
}
-
-
client, err := rp.oauth.ServiceClient(
-
r,
-
oauth.WithService(f.Knot),
-
oauth.WithLxm(tangled.RepoDeleteBranchNSID),
-
oauth.WithDev(rp.config.Core.Dev),
-
)
-
if err != nil {
-
fail("Failed to connect to knotserver", nil)
-
return
-
}
-
-
err = tangled.RepoDeleteBranch(
-
r.Context(),
-
client,
-
&tangled.RepoDeleteBranch_Input{
-
Branch: branch,
-
Repo: f.RepoAt().String(),
-
},
-
)
-
if err := xrpcclient.HandleXrpcErr(err); err != nil {
-
fail(fmt.Sprintf("Failed to delete branch: %s", err), err)
-
return
-
}
-
l.Error("deleted branch from knot", "branch", branch, "repo", f.RepoAt())
-
-
rp.pages.HxRefresh(w)
-
}
-
-
func (rp *Repo) RepoBlob(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "RepoBlob")
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
-
-
ref := chi.URLParam(r, "ref")
-
ref, _ = url.PathUnescape(ref)
-
-
filePath := chi.URLParam(r, "*")
-
filePath, _ = url.PathUnescape(filePath)
-
-
scheme := "http"
-
if !rp.config.Core.Dev {
-
scheme = "https"
-
}
-
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
-
xrpcc := &indigoxrpc.Client{
-
Host: host,
-
}
-
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Repo.Name)
-
resp, err := tangled.RepoBlob(r.Context(), xrpcc, filePath, false, ref, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.blob", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
// Use XRPC response directly instead of converting to internal types
-
-
var breadcrumbs [][]string
-
breadcrumbs = append(breadcrumbs, []string{f.Name, fmt.Sprintf("/%s/tree/%s", f.OwnerSlashRepo(), url.PathEscape(ref))})
-
if filePath != "" {
-
for idx, elem := range strings.Split(filePath, "/") {
-
breadcrumbs = append(breadcrumbs, []string{elem, fmt.Sprintf("%s/%s", breadcrumbs[idx][1], url.PathEscape(elem))})
-
}
-
}
-
-
showRendered := false
-
renderToggle := false
-
-
if markup.GetFormat(resp.Path) == markup.FormatMarkdown {
-
renderToggle = true
-
showRendered = r.URL.Query().Get("code") != "true"
-
}
-
-
var unsupported bool
-
var isImage bool
-
var isVideo bool
-
var contentSrc string
-
-
if resp.IsBinary != nil && *resp.IsBinary {
-
ext := strings.ToLower(filepath.Ext(resp.Path))
-
switch ext {
-
case ".jpg", ".jpeg", ".png", ".gif", ".svg", ".webp":
-
isImage = true
-
case ".mp4", ".webm", ".ogg", ".mov", ".avi":
-
isVideo = true
-
default:
-
unsupported = true
-
}
-
-
// fetch the raw binary content using sh.tangled.repo.blob xrpc
-
repoName := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
-
-
baseURL := &url.URL{
-
Scheme: scheme,
-
Host: f.Knot,
-
Path: "/xrpc/sh.tangled.repo.blob",
-
}
-
query := baseURL.Query()
-
query.Set("repo", repoName)
-
query.Set("ref", ref)
-
query.Set("path", filePath)
-
query.Set("raw", "true")
-
baseURL.RawQuery = query.Encode()
-
blobURL := baseURL.String()
-
-
contentSrc = blobURL
-
if !rp.config.Core.Dev {
-
contentSrc = markup.GenerateCamoURL(rp.config.Camo.Host, rp.config.Camo.SharedSecret, blobURL)
-
}
-
}
-
-
lines := 0
-
if resp.IsBinary == nil || !*resp.IsBinary {
-
lines = strings.Count(resp.Content, "\n") + 1
-
}
-
-
var sizeHint uint64
-
if resp.Size != nil {
-
sizeHint = uint64(*resp.Size)
-
} else {
-
sizeHint = uint64(len(resp.Content))
-
}
-
-
user := rp.oauth.GetUser(r)
-
-
// Determine if content is binary (dereference pointer)
-
isBinary := false
-
if resp.IsBinary != nil {
-
isBinary = *resp.IsBinary
-
}
-
-
rp.pages.RepoBlob(w, pages.RepoBlobParams{
-
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
-
BreadCrumbs: breadcrumbs,
-
ShowRendered: showRendered,
-
RenderToggle: renderToggle,
-
Unsupported: unsupported,
-
IsImage: isImage,
-
IsVideo: isVideo,
-
ContentSrc: contentSrc,
-
RepoBlob_Output: resp,
-
Contents: resp.Content,
-
Lines: lines,
-
SizeHint: sizeHint,
-
IsBinary: isBinary,
-
})
-
}
-
-
func (rp *Repo) RepoBlobRaw(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "RepoBlobRaw")
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
w.WriteHeader(http.StatusBadRequest)
-
return
-
}
-
-
ref := chi.URLParam(r, "ref")
-
ref, _ = url.PathUnescape(ref)
-
-
filePath := chi.URLParam(r, "*")
-
filePath, _ = url.PathUnescape(filePath)
-
-
scheme := "http"
-
if !rp.config.Core.Dev {
-
scheme = "https"
-
}
-
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Repo.Name)
-
baseURL := &url.URL{
-
Scheme: scheme,
-
Host: f.Knot,
-
Path: "/xrpc/sh.tangled.repo.blob",
-
}
-
query := baseURL.Query()
-
query.Set("repo", repo)
-
query.Set("ref", ref)
-
query.Set("path", filePath)
-
query.Set("raw", "true")
-
baseURL.RawQuery = query.Encode()
-
blobURL := baseURL.String()
-
-
req, err := http.NewRequest("GET", blobURL, nil)
-
if err != nil {
-
l.Error("failed to create request", "err", err)
-
return
-
}
-
-
// forward the If-None-Match header
-
if clientETag := r.Header.Get("If-None-Match"); clientETag != "" {
-
req.Header.Set("If-None-Match", clientETag)
-
}
-
-
client := &http.Client{}
-
resp, err := client.Do(req)
-
if err != nil {
-
l.Error("failed to reach knotserver", "err", err)
-
rp.pages.Error503(w)
-
return
-
}
-
defer resp.Body.Close()
-
-
// forward 304 not modified
-
if resp.StatusCode == http.StatusNotModified {
-
w.WriteHeader(http.StatusNotModified)
-
return
-
}
-
-
if resp.StatusCode != http.StatusOK {
-
l.Error("knotserver returned non-OK status for raw blob", "url", blobURL, "statuscode", resp.StatusCode)
-
w.WriteHeader(resp.StatusCode)
-
_, _ = io.Copy(w, resp.Body)
-
return
-
}
-
-
contentType := resp.Header.Get("Content-Type")
-
body, err := io.ReadAll(resp.Body)
-
if err != nil {
-
l.Error("error reading response body from knotserver", "err", err)
-
w.WriteHeader(http.StatusInternalServerError)
-
return
-
}
-
-
if strings.HasPrefix(contentType, "text/") || isTextualMimeType(contentType) {
-
// serve all textual content as text/plain
-
w.Header().Set("Content-Type", "text/plain; charset=utf-8")
-
w.Write(body)
-
} else if strings.HasPrefix(contentType, "image/") || strings.HasPrefix(contentType, "video/") {
-
// serve images and videos with their original content type
-
w.Header().Set("Content-Type", contentType)
-
w.Write(body)
-
} else {
-
w.WriteHeader(http.StatusUnsupportedMediaType)
-
w.Write([]byte("unsupported content type"))
-
return
-
}
-
}
-
-
// isTextualMimeType returns true if the MIME type represents textual content
-
// that should be served as text/plain
-
func isTextualMimeType(mimeType string) bool {
-
textualTypes := []string{
-
"application/json",
-
"application/xml",
-
"application/yaml",
-
"application/x-yaml",
-
"application/toml",
-
"application/javascript",
-
"application/ecmascript",
-
"message/",
-
}
-
-
return slices.Contains(textualTypes, mimeType)
-
}
-
// modify the spindle configured for this repo
func (rp *Repo) EditSpindle(w http.ResponseWriter, r *http.Request) {
user := rp.oauth.GetUser(r)
···
}
}
-
newRepo := f.Repo
+
newRepo := *f
newRepo.Spindle = newSpindle
record := newRepo.AsRecord()
···
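The handlers in this file follow a copy-then-swap update: copy the resolved repo value (`newRepo := *f`), mutate only the edited field, re-serialize with `AsRecord`, then write it back to the PDS using the current record's CID as `SwapRecord` so a concurrent change rejects the write. A conceptual sketch of that optimistic swap, using stand-in types rather than the indigo/comatproto API:

```go
package main

import (
	"errors"
	"fmt"
)

// repoRecord and pds are stand-ins, only enough structure to show the
// read-CID-then-swap flow; they are NOT the real PDS client types.
type repoRecord struct {
	Did, Rkey, Spindle string
}

type pds struct {
	cid string
	rec repoRecord
}

func (p *pds) getRecord() (repoRecord, string) { return p.rec, p.cid }

func (p *pds) putRecord(rec repoRecord, swapCID string) error {
	if swapCID != p.cid {
		return errors.New("record changed since it was read; retry")
	}
	p.rec, p.cid = rec, swapCID+"+1" // pretend a new CID is minted
	return nil
}

func main() {
	p := &pds{cid: "bafy-1", rec: repoRecord{Did: "did:plc:example", Rkey: "3kabc"}}

	// copy the current record, mutate only the edited field, swap on the CID
	cur, cid := p.getRecord()
	next := cur
	next.Spindle = "spindle.example.com"
	fmt.Println(p.putRecord(next, cid)) // <nil>
}
```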
l.Info("wrote label record to PDS")
// update the repo to subscribe to this label
-
newRepo := f.Repo
+
newRepo := *f
newRepo.Labels = append(newRepo.Labels, aturi)
repoRecord := newRepo.AsRecord()
···
// get form values
labelId := r.FormValue("label-id")
-
label, err := db.GetLabelDefinition(rp.db, db.FilterEq("id", labelId))
+
label, err := db.GetLabelDefinition(rp.db, orm.FilterEq("id", labelId))
if err != nil {
fail("Failed to find label definition.", err)
return
···
}
// update repo record to remove the label reference
-
newRepo := f.Repo
+
newRepo := *f
var updated []string
removedAt := label.AtUri().String()
for _, l := range newRepo.Labels {
···
err = db.UnsubscribeLabel(
tx,
-
db.FilterEq("repo_at", f.RepoAt()),
-
db.FilterEq("label_at", removedAt),
+
orm.FilterEq("repo_at", f.RepoAt()),
+
orm.FilterEq("label_at", removedAt),
)
if err != nil {
fail("Failed to unsubscribe label.", err)
return
}
-
err = db.DeleteLabelDefinition(tx, db.FilterEq("id", label.Id))
+
err = db.DeleteLabelDefinition(tx, orm.FilterEq("id", label.Id))
if err != nil {
fail("Failed to delete label definition.", err)
return
···
}
labelAts := r.Form["label"]
-
_, err = db.GetLabelDefinitions(rp.db, db.FilterIn("at_uri", labelAts))
+
_, err = db.GetLabelDefinitions(rp.db, orm.FilterIn("at_uri", labelAts))
if err != nil {
fail("Failed to subscribe to label.", err)
return
}
-
newRepo := f.Repo
+
newRepo := *f
newRepo.Labels = append(newRepo.Labels, labelAts...)
// dedup
···
return
}
-
ex, err := comatproto.RepoGetRecord(r.Context(), client, "", tangled.RepoNSID, f.Repo.Did, f.Repo.Rkey)
+
ex, err := comatproto.RepoGetRecord(r.Context(), client, "", tangled.RepoNSID, f.Did, f.Rkey)
if err != nil {
fail("Failed to update labels, no record found on PDS.", err)
return
···
}
labelAts := r.Form["label"]
-
_, err = db.GetLabelDefinitions(rp.db, db.FilterIn("at_uri", labelAts))
+
_, err = db.GetLabelDefinitions(rp.db, orm.FilterIn("at_uri", labelAts))
if err != nil {
fail("Failed to unsubscribe to label.", err)
return
}
// update repo record to remove the label reference
-
newRepo := f.Repo
+
newRepo := *f
var updated []string
for _, l := range newRepo.Labels {
if !slices.Contains(labelAts, l) {
···
return
}
-
ex, err := comatproto.RepoGetRecord(r.Context(), client, "", tangled.RepoNSID, f.Repo.Did, f.Repo.Rkey)
+
ex, err := comatproto.RepoGetRecord(r.Context(), client, "", tangled.RepoNSID, f.Did, f.Rkey)
if err != nil {
fail("Failed to update labels, no record found on PDS.", err)
return
···
err = db.UnsubscribeLabel(
rp.db,
-
db.FilterEq("repo_at", f.RepoAt()),
-
db.FilterIn("label_at", labelAts),
+
orm.FilterEq("repo_at", f.RepoAt()),
+
orm.FilterIn("label_at", labelAts),
)
if err != nil {
fail("Failed to unsubscribe label.", err)
···
labelDefs, err := db.GetLabelDefinitions(
rp.db,
-
db.FilterIn("at_uri", f.Repo.Labels),
-
db.FilterContains("scope", subject.Collection().String()),
+
orm.FilterIn("at_uri", f.Labels),
+
orm.FilterContains("scope", subject.Collection().String()),
)
if err != nil {
l.Error("failed to fetch label defs", "err", err)
···
defs[l.AtUri().String()] = &l
}
-
states, err := db.GetLabels(rp.db, db.FilterEq("subject", subject))
+
states, err := db.GetLabels(rp.db, orm.FilterEq("subject", subject))
if err != nil {
l.Error("failed to build label state", "err", err)
return
···
user := rp.oauth.GetUser(r)
rp.pages.LabelPanel(w, pages.LabelPanelParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
Defs: defs,
Subject: subject.String(),
State: state,
···
labelDefs, err := db.GetLabelDefinitions(
rp.db,
-
db.FilterIn("at_uri", f.Repo.Labels),
-
db.FilterContains("scope", subject.Collection().String()),
+
orm.FilterIn("at_uri", f.Labels),
+
orm.FilterContains("scope", subject.Collection().String()),
)
if err != nil {
l.Error("failed to fetch labels", "err", err)
···
defs[l.AtUri().String()] = &l
}
-
states, err := db.GetLabels(rp.db, db.FilterEq("subject", subject))
+
states, err := db.GetLabels(rp.db, orm.FilterEq("subject", subject))
if err != nil {
l.Error("failed to build label state", "err", err)
return
···
user := rp.oauth.GetUser(r)
rp.pages.EditLabelPanel(w, pages.EditLabelPanelParams{
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
Defs: defs,
Subject: subject.String(),
State: state,
···
r.Context(),
client,
&tangled.RepoDelete_Input{
-
Did: f.OwnerDid(),
+
Did: f.Did,
Name: f.Name,
Rkey: f.Rkey,
},
···
l.Info("removed collaborators")
// remove repo RBAC
-
err = rp.enforcer.RemoveRepo(f.OwnerDid(), f.Knot, f.DidSlashRepo())
+
err = rp.enforcer.RemoveRepo(f.Did, f.Knot, f.DidSlashRepo())
if err != nil {
rp.pages.Notice(w, noticeId, "Failed to update RBAC rules")
return
}
// remove repo from db
-
err = db.RemoveRepo(tx, f.OwnerDid(), f.Name)
+
err = db.RemoveRepo(tx, f.Did, f.Name)
if err != nil {
rp.pages.Notice(w, noticeId, "Failed to update appview")
return
···
return
}
-
rp.pages.HxRedirect(w, fmt.Sprintf("/%s", f.OwnerDid()))
-
}
-
-
func (rp *Repo) SetDefaultBranch(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "SetDefaultBranch")
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
-
-
noticeId := "operation-error"
-
branch := r.FormValue("branch")
-
if branch == "" {
-
http.Error(w, "malformed form", http.StatusBadRequest)
-
return
-
}
-
-
client, err := rp.oauth.ServiceClient(
-
r,
-
oauth.WithService(f.Knot),
-
oauth.WithLxm(tangled.RepoSetDefaultBranchNSID),
-
oauth.WithDev(rp.config.Core.Dev),
-
)
-
if err != nil {
-
l.Error("failed to connect to knot server", "err", err)
-
rp.pages.Notice(w, noticeId, "Failed to connect to knot server.")
-
return
-
}
-
-
xe := tangled.RepoSetDefaultBranch(
-
r.Context(),
-
client,
-
&tangled.RepoSetDefaultBranch_Input{
-
Repo: f.RepoAt().String(),
-
DefaultBranch: branch,
-
},
-
)
-
if err := xrpcclient.HandleXrpcErr(xe); err != nil {
-
l.Error("xrpc failed", "err", xe)
-
rp.pages.Notice(w, noticeId, err.Error())
-
return
-
}
-
-
rp.pages.HxRefresh(w)
-
}
-
-
func (rp *Repo) Secrets(w http.ResponseWriter, r *http.Request) {
-
user := rp.oauth.GetUser(r)
-
l := rp.logger.With("handler", "Secrets")
-
l = l.With("did", user.Did)
-
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
-
-
if f.Spindle == "" {
-
l.Error("empty spindle cannot add/rm secret", "err", err)
-
return
-
}
-
-
lxm := tangled.RepoAddSecretNSID
-
if r.Method == http.MethodDelete {
-
lxm = tangled.RepoRemoveSecretNSID
-
}
-
-
spindleClient, err := rp.oauth.ServiceClient(
-
r,
-
oauth.WithService(f.Spindle),
-
oauth.WithLxm(lxm),
-
oauth.WithExp(60),
-
oauth.WithDev(rp.config.Core.Dev),
-
)
-
if err != nil {
-
l.Error("failed to create spindle client", "err", err)
-
return
-
}
-
-
key := r.FormValue("key")
-
if key == "" {
-
w.WriteHeader(http.StatusBadRequest)
-
return
-
}
-
-
switch r.Method {
-
case http.MethodPut:
-
errorId := "add-secret-error"
-
-
value := r.FormValue("value")
-
if value == "" {
-
w.WriteHeader(http.StatusBadRequest)
-
return
-
}
-
-
err = tangled.RepoAddSecret(
-
r.Context(),
-
spindleClient,
-
&tangled.RepoAddSecret_Input{
-
Repo: f.RepoAt().String(),
-
Key: key,
-
Value: value,
-
},
-
)
-
if err != nil {
-
l.Error("Failed to add secret.", "err", err)
-
rp.pages.Notice(w, errorId, "Failed to add secret.")
-
return
-
}
-
-
case http.MethodDelete:
-
errorId := "operation-error"
-
-
err = tangled.RepoRemoveSecret(
-
r.Context(),
-
spindleClient,
-
&tangled.RepoRemoveSecret_Input{
-
Repo: f.RepoAt().String(),
-
Key: key,
-
},
-
)
-
if err != nil {
-
l.Error("Failed to delete secret.", "err", err)
-
rp.pages.Notice(w, errorId, "Failed to delete secret.")
-
return
-
}
-
}
-
-
rp.pages.HxRefresh(w)
-
}
-
-
type tab = map[string]any
-
-
var (
-
// would be great to have ordered maps right about now
-
settingsTabs []tab = []tab{
-
{"Name": "general", "Icon": "sliders-horizontal"},
-
{"Name": "access", "Icon": "users"},
-
{"Name": "pipelines", "Icon": "layers-2"},
-
}
-
)
-
-
func (rp *Repo) RepoSettings(w http.ResponseWriter, r *http.Request) {
-
tabVal := r.URL.Query().Get("tab")
-
if tabVal == "" {
-
tabVal = "general"
-
}
-
-
switch tabVal {
-
case "general":
-
rp.generalSettings(w, r)
-
-
case "access":
-
rp.accessSettings(w, r)
-
-
case "pipelines":
-
rp.pipelineSettings(w, r)
-
}
-
}
-
-
func (rp *Repo) generalSettings(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "generalSettings")
-
-
f, err := rp.repoResolver.Resolve(r)
-
user := rp.oauth.GetUser(r)
-
-
scheme := "http"
-
if !rp.config.Core.Dev {
-
scheme = "https"
-
}
-
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
-
xrpcc := &indigoxrpc.Client{
-
Host: host,
-
}
-
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
-
xrpcBytes, err := tangled.RepoBranches(r.Context(), xrpcc, "", 0, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.branches", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
var result types.RepoBranchesResponse
-
if err := json.Unmarshal(xrpcBytes, &result); err != nil {
-
l.Error("failed to decode XRPC response", "err", err)
-
rp.pages.Error503(w)
-
return
-
}
-
-
defaultLabels, err := db.GetLabelDefinitions(rp.db, db.FilterIn("at_uri", models.DefaultLabelDefs()))
-
if err != nil {
-
l.Error("failed to fetch labels", "err", err)
-
rp.pages.Error503(w)
-
return
-
}
-
-
labels, err := db.GetLabelDefinitions(rp.db, db.FilterIn("at_uri", f.Repo.Labels))
-
if err != nil {
-
l.Error("failed to fetch labels", "err", err)
-
rp.pages.Error503(w)
-
return
-
}
-
// remove default labels from the labels list, if present
-
defaultLabelMap := make(map[string]bool)
-
for _, dl := range defaultLabels {
-
defaultLabelMap[dl.AtUri().String()] = true
-
}
-
n := 0
-
for _, l := range labels {
-
if !defaultLabelMap[l.AtUri().String()] {
-
labels[n] = l
-
n++
-
}
-
}
-
labels = labels[:n]
-
-
subscribedLabels := make(map[string]struct{})
-
for _, l := range f.Repo.Labels {
-
subscribedLabels[l] = struct{}{}
-
}
-
-
// if there is atleast 1 unsubbed default label, show the "subscribe all" button,
-
// if all default labels are subbed, show the "unsubscribe all" button
-
shouldSubscribeAll := false
-
for _, dl := range defaultLabels {
-
if _, ok := subscribedLabels[dl.AtUri().String()]; !ok {
-
// one of the default labels is not subscribed to
-
shouldSubscribeAll = true
-
break
-
}
-
}
-
-
rp.pages.RepoGeneralSettings(w, pages.RepoGeneralSettingsParams{
-
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
-
Branches: result.Branches,
-
Labels: labels,
-
DefaultLabels: defaultLabels,
-
SubscribedLabels: subscribedLabels,
-
ShouldSubscribeAll: shouldSubscribeAll,
-
Tabs: settingsTabs,
-
Tab: "general",
-
})
-
}
-
-
func (rp *Repo) accessSettings(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "accessSettings")
-
-
f, err := rp.repoResolver.Resolve(r)
-
user := rp.oauth.GetUser(r)
-
-
repoCollaborators, err := f.Collaborators(r.Context())
-
if err != nil {
-
l.Error("failed to get collaborators", "err", err)
-
}
-
-
rp.pages.RepoAccessSettings(w, pages.RepoAccessSettingsParams{
-
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
-
Tabs: settingsTabs,
-
Tab: "access",
-
Collaborators: repoCollaborators,
-
})
-
}
-
-
func (rp *Repo) pipelineSettings(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "pipelineSettings")
-
-
f, err := rp.repoResolver.Resolve(r)
-
user := rp.oauth.GetUser(r)
-
-
// all spindles that the repo owner is a member of
-
spindles, err := rp.enforcer.GetSpindlesForUser(f.OwnerDid())
-
if err != nil {
-
l.Error("failed to fetch spindles", "err", err)
-
return
-
}
-
-
var secrets []*tangled.RepoListSecrets_Secret
-
if f.Spindle != "" {
-
if spindleClient, err := rp.oauth.ServiceClient(
-
r,
-
oauth.WithService(f.Spindle),
-
oauth.WithLxm(tangled.RepoListSecretsNSID),
-
oauth.WithExp(60),
-
oauth.WithDev(rp.config.Core.Dev),
-
); err != nil {
-
l.Error("failed to create spindle client", "err", err)
-
} else if resp, err := tangled.RepoListSecrets(r.Context(), spindleClient, f.RepoAt().String()); err != nil {
-
l.Error("failed to fetch secrets", "err", err)
-
} else {
-
secrets = resp.Secrets
-
}
-
}
-
-
slices.SortFunc(secrets, func(a, b *tangled.RepoListSecrets_Secret) int {
-
return strings.Compare(a.Key, b.Key)
-
})
-
-
var dids []string
-
for _, s := range secrets {
-
dids = append(dids, s.CreatedBy)
-
}
-
resolvedIdents := rp.idResolver.ResolveIdents(r.Context(), dids)
-
-
// convert to a more manageable form
-
var niceSecret []map[string]any
-
for id, s := range secrets {
-
when, _ := time.Parse(time.RFC3339, s.CreatedAt)
-
niceSecret = append(niceSecret, map[string]any{
-
"Id": id,
-
"Key": s.Key,
-
"CreatedAt": when,
-
"CreatedBy": resolvedIdents[id].Handle.String(),
-
})
-
}
-
-
rp.pages.RepoPipelineSettings(w, pages.RepoPipelineSettingsParams{
-
LoggedInUser: user,
-
RepoInfo: f.RepoInfo(user),
-
Tabs: settingsTabs,
-
Tab: "pipelines",
-
Spindles: spindles,
-
CurrentSpindle: f.Spindle,
-
Secrets: niceSecret,
-
})
+
rp.pages.HxRedirect(w, fmt.Sprintf("/%s", f.Did))
}
func (rp *Repo) SyncRepoFork(w http.ResponseWriter, r *http.Request) {
···
return
}
-
repoInfo := f.RepoInfo(user)
-
if repoInfo.Source == nil {
+
if f.Source == "" {
rp.pages.Notice(w, "repo", "This repository is not a fork.")
return
}
···
&tangled.RepoForkSync_Input{
Did: user.Did,
Name: f.Name,
-
Source: repoInfo.Source.RepoAt().String(),
+
Source: f.Source,
Branch: ref,
},
)
···
rp.pages.ForkRepo(w, pages.ForkRepoParams{
LoggedInUser: user,
Knots: knots,
-
RepoInfo: f.RepoInfo(user),
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
})
case http.MethodPost:
···
// in the user's account.
existingRepo, err := db.GetRepo(
rp.db,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("name", forkName),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("name", forkName),
if err != nil {
if !errors.Is(err, sql.ErrNoRows) {
···
uri = "http"
-
forkSourceUrl := fmt.Sprintf("%s://%s/%s/%s", uri, f.Knot, f.OwnerDid(), f.Repo.Name)
+
forkSourceUrl := fmt.Sprintf("%s://%s/%s/%s", uri, f.Knot, f.Did, f.Name)
l = l.With("cloneUrl", forkSourceUrl)
sourceAt := f.RepoAt().String()
···
Knot: targetKnot,
Rkey: rkey,
Source: sourceAt,
-
Description: f.Repo.Description,
+
Description: f.Description,
Created: time.Now(),
-
Labels: models.DefaultLabelDefs(),
+
Labels: rp.config.Label.DefaultLabelDefs,
record := repo.AsRecord()
···
defer rollback()
+
// TODO: this could coordinate better with the knot to receive a clone status
client, err := rp.oauth.ServiceClient(
r,
oauth.WithService(targetKnot),
oauth.WithLxm(tangled.RepoCreateNSID),
oauth.WithDev(rp.config.Core.Dev),
+
oauth.WithTimeout(time.Second*20), // big repos take time to clone
if err != nil {
l.Error("could not create service client", "err", err)
···
aturi = ""
rp.notifier.NewRepo(r.Context(), repo)
-
rp.pages.HxLocation(w, fmt.Sprintf("/@%s/%s", user.Did, forkName))
+
rp.pages.HxLocation(w, fmt.Sprintf("/%s/%s", user.Did, forkName))
···
})
return err
-
-
func (rp *Repo) RepoCompareNew(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "RepoCompareNew")
-
-
user := rp.oauth.GetUser(r)
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
-
-
scheme := "http"
-
if !rp.config.Core.Dev {
-
scheme = "https"
-
}
-
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
-
xrpcc := &indigoxrpc.Client{
-
Host: host,
-
}
-
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
-
branchBytes, err := tangled.RepoBranches(r.Context(), xrpcc, "", 0, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.branches", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
var branchResult types.RepoBranchesResponse
-
if err := json.Unmarshal(branchBytes, &branchResult); err != nil {
-
l.Error("failed to decode XRPC branches response", "err", err)
-
rp.pages.Notice(w, "compare-error", "Failed to produce comparison. Try again later.")
-
return
-
}
-
branches := branchResult.Branches
-
-
sortBranches(branches)
-
-
var defaultBranch string
-
for _, b := range branches {
-
if b.IsDefault {
-
defaultBranch = b.Name
-
}
-
}
-
-
base := defaultBranch
-
head := defaultBranch
-
-
params := r.URL.Query()
-
queryBase := params.Get("base")
-
queryHead := params.Get("head")
-
if queryBase != "" {
-
base = queryBase
-
}
-
if queryHead != "" {
-
head = queryHead
-
}
-
-
tagBytes, err := tangled.RepoTags(r.Context(), xrpcc, "", 0, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.tags", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
var tags types.RepoTagsResponse
-
if err := json.Unmarshal(tagBytes, &tags); err != nil {
-
l.Error("failed to decode XRPC tags response", "err", err)
-
rp.pages.Notice(w, "compare-error", "Failed to produce comparison. Try again later.")
-
return
-
}
-
-
repoinfo := f.RepoInfo(user)
-
-
rp.pages.RepoCompareNew(w, pages.RepoCompareNewParams{
-
LoggedInUser: user,
-
RepoInfo: repoinfo,
-
Branches: branches,
-
Tags: tags.Tags,
-
Base: base,
-
Head: head,
-
})
-
}
-
-
func (rp *Repo) RepoCompare(w http.ResponseWriter, r *http.Request) {
-
l := rp.logger.With("handler", "RepoCompare")
-
-
user := rp.oauth.GetUser(r)
-
f, err := rp.repoResolver.Resolve(r)
-
if err != nil {
-
l.Error("failed to get repo and knot", "err", err)
-
return
-
}
-
-
var diffOpts types.DiffOpts
-
if d := r.URL.Query().Get("diff"); d == "split" {
-
diffOpts.Split = true
-
}
-
-
// if user is navigating to one of
-
// /compare/{base}/{head}
-
// /compare/{base}...{head}
-
base := chi.URLParam(r, "base")
-
head := chi.URLParam(r, "head")
-
if base == "" && head == "" {
-
rest := chi.URLParam(r, "*") // master...feature/xyz
-
parts := strings.SplitN(rest, "...", 2)
-
if len(parts) == 2 {
-
base = parts[0]
-
head = parts[1]
-
}
-
}
-
-
base, _ = url.PathUnescape(base)
-
head, _ = url.PathUnescape(head)
-
-
if base == "" || head == "" {
-
l.Error("invalid comparison")
-
rp.pages.Error404(w)
-
return
-
}
-
-
scheme := "http"
-
if !rp.config.Core.Dev {
-
scheme = "https"
-
}
-
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
-
xrpcc := &indigoxrpc.Client{
-
Host: host,
-
}
-
-
repo := fmt.Sprintf("%s/%s", f.OwnerDid(), f.Name)
-
-
branchBytes, err := tangled.RepoBranches(r.Context(), xrpcc, "", 0, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.branches", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
var branches types.RepoBranchesResponse
-
if err := json.Unmarshal(branchBytes, &branches); err != nil {
-
l.Error("failed to decode XRPC branches response", "err", err)
-
rp.pages.Notice(w, "compare-error", "Failed to produce comparison. Try again later.")
-
return
-
}
-
-
tagBytes, err := tangled.RepoTags(r.Context(), xrpcc, "", 0, repo)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.tags", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
var tags types.RepoTagsResponse
-
if err := json.Unmarshal(tagBytes, &tags); err != nil {
-
l.Error("failed to decode XRPC tags response", "err", err)
-
rp.pages.Notice(w, "compare-error", "Failed to produce comparison. Try again later.")
-
return
-
}
-
-
compareBytes, err := tangled.RepoCompare(r.Context(), xrpcc, repo, base, head)
-
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
-
l.Error("failed to call XRPC repo.compare", "err", xrpcerr)
-
rp.pages.Error503(w)
-
return
-
}
-
-
var formatPatch types.RepoFormatPatchResponse
-
if err := json.Unmarshal(compareBytes, &formatPatch); err != nil {
-
l.Error("failed to decode XRPC compare response", "err", err)
-
rp.pages.Notice(w, "compare-error", "Failed to produce comparison. Try again later.")
-
return
-
}
-
-
var diff types.NiceDiff
-
if formatPatch.CombinedPatchRaw != "" {
-
diff = patchutil.AsNiceDiff(formatPatch.CombinedPatchRaw, base)
-
} else {
-
diff = patchutil.AsNiceDiff(formatPatch.FormatPatchRaw, base)
-
}
-
-
repoinfo := f.RepoInfo(user)
-
-
rp.pages.RepoCompare(w, pages.RepoCompareParams{
-
LoggedInUser: user,
-
RepoInfo: repoinfo,
-
Branches: branches.Branches,
-
Tags: tags.Tags,
-
Base: base,
-
Head: head,
-
Diff: &diff,
-
DiffOpts: diffOpts,
-
})
-
-
}
+20 -70
appview/repo/repo_util.go
···
package repo
import (
-
"context"
-
"crypto/rand"
-
"fmt"
-
"math/big"
+
"maps"
"slices"
"sort"
"strings"
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
-
"tangled.org/core/appview/pages/repoinfo"
+
"tangled.org/core/orm"
"tangled.org/core/types"
-
-
"github.com/go-git/go-git/v5/plumbing/object"
)
func sortFiles(files []types.NiceTree) {
sort.Slice(files, func(i, j int) bool {
-
iIsFile := files[i].IsFile
-
jIsFile := files[j].IsFile
+
iIsFile := files[i].IsFile()
+
jIsFile := files[j].IsFile()
if iIsFile != jIsFile {
return !iIsFile
}
···
})
}
-
func uniqueEmails(commits []*object.Commit) []string {
+
func uniqueEmails(commits []types.Commit) []string {
emails := make(map[string]struct{})
for _, commit := range commits {
-
if commit.Author.Email != "" {
-
emails[commit.Author.Email] = struct{}{}
-
}
-
if commit.Committer.Email != "" {
-
emails[commit.Committer.Email] = struct{}{}
+
emails[commit.Author.Email] = struct{}{}
+
emails[commit.Committer.Email] = struct{}{}
+
for _, c := range commit.CoAuthors() {
+
emails[c.Email] = struct{}{}
}
}
-
var uniqueEmails []string
-
for email := range emails {
-
uniqueEmails = append(uniqueEmails, email)
-
}
-
return uniqueEmails
+
+
// delete the empty email, if any, from the set
+
delete(emails, "")
+
+
return slices.Collect(maps.Keys(emails))
}
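The rewritten uniqueEmails above leans on the Go 1.23 iterator helpers; as a minimal standalone illustration (not part of this changeset) of the same set-to-slice idiom:

package main

import (
	"fmt"
	"maps"
	"slices"
)

func main() {
	set := map[string]struct{}{"a@x.dev": {}, "b@y.dev": {}, "": {}}
	delete(set, "") // drop the empty key, as uniqueEmails does
	emails := slices.Collect(maps.Keys(set))
	slices.Sort(emails) // map iteration order is random; sort for stable output
	fmt.Println(emails) // [a@x.dev b@y.dev]
}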
func balanceIndexItems(commitCount, branchCount, tagCount, fileCount int) (commitsTrunc int, branchesTrunc int, tagsTrunc int) {
···
return
}
-
// emailToDidOrHandle takes an emailToDidMap from db.GetEmailToDid
-
// and resolves all dids to handles and returns a new map[string]string
-
func emailToDidOrHandle(r *Repo, emailToDidMap map[string]string) map[string]string {
-
if emailToDidMap == nil {
-
return nil
-
}
-
-
var dids []string
-
for _, v := range emailToDidMap {
-
dids = append(dids, v)
-
}
-
resolvedIdents := r.idResolver.ResolveIdents(context.Background(), dids)
-
-
didHandleMap := make(map[string]string)
-
for _, identity := range resolvedIdents {
-
if !identity.Handle.IsInvalidHandle() {
-
didHandleMap[identity.DID.String()] = fmt.Sprintf("@%s", identity.Handle.String())
-
} else {
-
didHandleMap[identity.DID.String()] = identity.DID.String()
-
}
-
}
-
-
// Create map of email to didOrHandle for commit display
-
emailToDidOrHandle := make(map[string]string)
-
for email, did := range emailToDidMap {
-
if didOrHandle, ok := didHandleMap[did]; ok {
-
emailToDidOrHandle[email] = didOrHandle
-
}
-
}
-
-
return emailToDidOrHandle
-
}
-
-
func randomString(n int) string {
-
const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
-
result := make([]byte, n)
-
-
for i := 0; i < n; i++ {
-
n, _ := rand.Int(rand.Reader, big.NewInt(int64(len(letters))))
-
result[i] = letters[n.Int64()]
-
}
-
-
return string(result)
-
}
-
// grab pipelines from DB and munge that into a hashmap with commit sha as key
//
// golang is so blessed that it requires 35 lines of imperative code for this
func getPipelineStatuses(
d *db.DB,
-
repoInfo repoinfo.RepoInfo,
+
repo *models.Repo,
shas []string,
) (map[string]models.Pipeline, error) {
m := make(map[string]models.Pipeline)
···
ps, err := db.GetPipelineStatuses(
d,
-
db.FilterEq("repo_owner", repoInfo.OwnerDid),
-
db.FilterEq("repo_name", repoInfo.Name),
-
db.FilterEq("knot", repoInfo.Knot),
-
db.FilterIn("sha", shas),
+
len(shas),
+
orm.FilterEq("repo_owner", repo.Did),
+
orm.FilterEq("repo_name", repo.Name),
+
orm.FilterEq("knot", repo.Knot),
+
orm.FilterIn("sha", shas),
)
if err != nil {
return nil, err
+14 -20
appview/repo/router.go
···
func (rp *Repo) Router(mw *middleware.Middleware) http.Handler {
r := chi.NewRouter()
-
r.Get("/", rp.RepoIndex)
-
r.Get("/opengraph", rp.RepoOpenGraphSummary)
-
r.Get("/feed.atom", rp.RepoAtomFeed)
-
r.Get("/commits/{ref}", rp.RepoLog)
+
r.Get("/", rp.Index)
+
r.Get("/opengraph", rp.Opengraph)
+
r.Get("/feed.atom", rp.AtomFeed)
+
r.Get("/commits/{ref}", rp.Log)
r.Route("/tree/{ref}", func(r chi.Router) {
-
r.Get("/", rp.RepoIndex)
-
r.Get("/*", rp.RepoTree)
+
r.Get("/", rp.Index)
+
r.Get("/*", rp.Tree)
})
-
r.Get("/commit/{ref}", rp.RepoCommit)
-
r.Get("/branches", rp.RepoBranches)
+
r.Get("/commit/{ref}", rp.Commit)
+
r.Get("/branches", rp.Branches)
r.Delete("/branches", rp.DeleteBranch)
r.Route("/tags", func(r chi.Router) {
-
r.Get("/", rp.RepoTags)
+
r.Get("/", rp.Tags)
r.Route("/{tag}", func(r chi.Router) {
r.Get("/download/{file}", rp.DownloadArtifact)
···
})
})
})
-
r.Get("/blob/{ref}/*", rp.RepoBlob)
+
r.Get("/blob/{ref}/*", rp.Blob)
r.Get("/raw/{ref}/*", rp.RepoBlobRaw)
// intentionally doesn't use /* as this isn't
···
})
r.Route("/compare", func(r chi.Router) {
-
r.Get("/", rp.RepoCompareNew) // start an new comparison
+
r.Get("/", rp.CompareNew) // start an new comparison
// we have to wildcard here since we want to support GitHub's compare syntax
// /compare/{ref1}...{ref2}
// for example:
// /compare/master...some/feature
// /compare/master...example.com:another/feature <- this is a fork
-
r.Get("/{base}/{head}", rp.RepoCompare)
-
r.Get("/*", rp.RepoCompare)
+
r.Get("/*", rp.Compare)
})
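A minimal sketch (not from this changeset) of how the wildcard captured by the compare route above can be split into base and head, mirroring the strings.SplitN logic in the Compare handler; the helper name splitCompareRefs is illustrative only:

package main

import (
	"fmt"
	"strings"
)

// splitCompareRefs splits a GitHub-style compare path segment such as
// "master...example.com:another/feature" into its base and head refs.
func splitCompareRefs(rest string) (base, head string, ok bool) {
	parts := strings.SplitN(rest, "...", 2)
	if len(parts) != 2 || parts[0] == "" || parts[1] == "" {
		return "", "", false
	}
	return parts[0], parts[1], true
}

func main() {
	base, head, ok := splitCompareRefs("master...example.com:another/feature")
	fmt.Println(base, head, ok) // master example.com:another/feature true
}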
// label panel in issues/pulls/discussions/tasks
···
// settings routes, needs auth
r.Group(func(r chi.Router) {
r.Use(middleware.AuthMiddleware(rp.oauth))
-
// repo description can only be edited by owner
-
r.With(mw.RepoPermissionMiddleware("repo:owner")).Route("/description", func(r chi.Router) {
-
r.Put("/", rp.RepoDescription)
-
r.Get("/", rp.RepoDescription)
-
r.Get("/edit", rp.RepoDescriptionEdit)
-
})
r.With(mw.RepoPermissionMiddleware("repo:settings")).Route("/settings", func(r chi.Router) {
-
r.Get("/", rp.RepoSettings)
+
r.Get("/", rp.Settings)
+
r.With(mw.RepoPermissionMiddleware("repo:owner")).Put("/base", rp.EditBaseSettings)
r.With(mw.RepoPermissionMiddleware("repo:owner")).Post("/spindle", rp.EditSpindle)
r.With(mw.RepoPermissionMiddleware("repo:owner")).Put("/label", rp.AddLabelDef)
r.With(mw.RepoPermissionMiddleware("repo:owner")).Delete("/label", rp.DeleteLabelDef)
+471
appview/repo/settings.go
···
+
package repo
+
+
import (
+
"encoding/json"
+
"fmt"
+
"net/http"
+
"slices"
+
"strings"
+
"time"
+
+
"tangled.org/core/api/tangled"
+
"tangled.org/core/appview/db"
+
"tangled.org/core/appview/models"
+
"tangled.org/core/appview/oauth"
+
"tangled.org/core/appview/pages"
+
xrpcclient "tangled.org/core/appview/xrpcclient"
+
"tangled.org/core/orm"
+
"tangled.org/core/types"
+
+
comatproto "github.com/bluesky-social/indigo/api/atproto"
+
lexutil "github.com/bluesky-social/indigo/lex/util"
+
indigoxrpc "github.com/bluesky-social/indigo/xrpc"
+
)
+
+
type tab = map[string]any
+
+
var (
+
// would be great to have ordered maps right about now
+
settingsTabs []tab = []tab{
+
{"Name": "general", "Icon": "sliders-horizontal"},
+
{"Name": "access", "Icon": "users"},
+
{"Name": "pipelines", "Icon": "layers-2"},
+
}
+
)
+
+
func (rp *Repo) SetDefaultBranch(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "SetDefaultBranch")
+
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to get repo and knot", "err", err)
+
return
+
}
+
+
noticeId := "operation-error"
+
branch := r.FormValue("branch")
+
if branch == "" {
+
http.Error(w, "malformed form", http.StatusBadRequest)
+
return
+
}
+
+
client, err := rp.oauth.ServiceClient(
+
r,
+
oauth.WithService(f.Knot),
+
oauth.WithLxm(tangled.RepoSetDefaultBranchNSID),
+
oauth.WithDev(rp.config.Core.Dev),
+
)
+
if err != nil {
+
l.Error("failed to connect to knot server", "err", err)
+
rp.pages.Notice(w, noticeId, "Failed to connect to knot server.")
+
return
+
}
+
+
xe := tangled.RepoSetDefaultBranch(
+
r.Context(),
+
client,
+
&tangled.RepoSetDefaultBranch_Input{
+
Repo: f.RepoAt().String(),
+
DefaultBranch: branch,
+
},
+
)
+
if err := xrpcclient.HandleXrpcErr(xe); err != nil {
+
l.Error("xrpc failed", "err", xe)
+
rp.pages.Notice(w, noticeId, err.Error())
+
return
+
}
+
+
rp.pages.HxRefresh(w)
+
}
+
+
func (rp *Repo) Secrets(w http.ResponseWriter, r *http.Request) {
+
user := rp.oauth.GetUser(r)
+
l := rp.logger.With("handler", "Secrets")
+
l = l.With("did", user.Did)
+
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to get repo and knot", "err", err)
+
return
+
}
+
+
if f.Spindle == "" {
+
l.Error("empty spindle cannot add/rm secret", "err", err)
+
return
+
}
+
+
lxm := tangled.RepoAddSecretNSID
+
if r.Method == http.MethodDelete {
+
lxm = tangled.RepoRemoveSecretNSID
+
}
+
+
spindleClient, err := rp.oauth.ServiceClient(
+
r,
+
oauth.WithService(f.Spindle),
+
oauth.WithLxm(lxm),
+
oauth.WithExp(60),
+
oauth.WithDev(rp.config.Core.Dev),
+
)
+
if err != nil {
+
l.Error("failed to create spindle client", "err", err)
+
return
+
}
+
+
key := r.FormValue("key")
+
if key == "" {
+
w.WriteHeader(http.StatusBadRequest)
+
return
+
}
+
+
switch r.Method {
+
case http.MethodPut:
+
errorId := "add-secret-error"
+
+
value := r.FormValue("value")
+
if value == "" {
+
w.WriteHeader(http.StatusBadRequest)
+
return
+
}
+
+
err = tangled.RepoAddSecret(
+
r.Context(),
+
spindleClient,
+
&tangled.RepoAddSecret_Input{
+
Repo: f.RepoAt().String(),
+
Key: key,
+
Value: value,
+
},
+
)
+
if err != nil {
+
l.Error("Failed to add secret.", "err", err)
+
rp.pages.Notice(w, errorId, "Failed to add secret.")
+
return
+
}
+
+
case http.MethodDelete:
+
errorId := "operation-error"
+
+
err = tangled.RepoRemoveSecret(
+
r.Context(),
+
spindleClient,
+
&tangled.RepoRemoveSecret_Input{
+
Repo: f.RepoAt().String(),
+
Key: key,
+
},
+
)
+
if err != nil {
+
l.Error("Failed to delete secret.", "err", err)
+
rp.pages.Notice(w, errorId, "Failed to delete secret.")
+
return
+
}
+
}
+
+
rp.pages.HxRefresh(w)
+
}
+
+
func (rp *Repo) Settings(w http.ResponseWriter, r *http.Request) {
+
tabVal := r.URL.Query().Get("tab")
+
if tabVal == "" {
+
tabVal = "general"
+
}
+
+
switch tabVal {
+
case "general":
+
rp.generalSettings(w, r)
+
+
case "access":
+
rp.accessSettings(w, r)
+
+
case "pipelines":
+
rp.pipelineSettings(w, r)
+
}
+
}
+
+
func (rp *Repo) generalSettings(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "generalSettings")
+
+
f, err := rp.repoResolver.Resolve(r)
+
user := rp.oauth.GetUser(r)
+
+
scheme := "http"
+
if !rp.config.Core.Dev {
+
scheme = "https"
+
}
+
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
+
xrpcc := &indigoxrpc.Client{
+
Host: host,
+
}
+
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
+
xrpcBytes, err := tangled.RepoBranches(r.Context(), xrpcc, "", 0, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.branches", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
+
var result types.RepoBranchesResponse
+
if err := json.Unmarshal(xrpcBytes, &result); err != nil {
+
l.Error("failed to decode XRPC response", "err", err)
+
rp.pages.Error503(w)
+
return
+
}
+
+
defaultLabels, err := db.GetLabelDefinitions(rp.db, orm.FilterIn("at_uri", rp.config.Label.DefaultLabelDefs))
+
if err != nil {
+
l.Error("failed to fetch labels", "err", err)
+
rp.pages.Error503(w)
+
return
+
}
+
+
labels, err := db.GetLabelDefinitions(rp.db, orm.FilterIn("at_uri", f.Labels))
+
if err != nil {
+
l.Error("failed to fetch labels", "err", err)
+
rp.pages.Error503(w)
+
return
+
}
+
// remove default labels from the labels list, if present
+
defaultLabelMap := make(map[string]bool)
+
for _, dl := range defaultLabels {
+
defaultLabelMap[dl.AtUri().String()] = true
+
}
+
n := 0
+
for _, l := range labels {
+
if !defaultLabelMap[l.AtUri().String()] {
+
labels[n] = l
+
n++
+
}
+
}
+
labels = labels[:n]
+
+
subscribedLabels := make(map[string]struct{})
+
for _, l := range f.Labels {
+
subscribedLabels[l] = struct{}{}
+
}
+
+
// if there is at least 1 unsubbed default label, show the "subscribe all" button,
+
// if all default labels are subbed, show the "unsubscribe all" button
+
shouldSubscribeAll := false
+
for _, dl := range defaultLabels {
+
if _, ok := subscribedLabels[dl.AtUri().String()]; !ok {
+
// one of the default labels is not subscribed to
+
shouldSubscribeAll = true
+
break
+
}
+
}
+
+
rp.pages.RepoGeneralSettings(w, pages.RepoGeneralSettingsParams{
+
LoggedInUser: user,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
+
Branches: result.Branches,
+
Labels: labels,
+
DefaultLabels: defaultLabels,
+
SubscribedLabels: subscribedLabels,
+
ShouldSubscribeAll: shouldSubscribeAll,
+
Tabs: settingsTabs,
+
Tab: "general",
+
})
+
}
+
+
func (rp *Repo) accessSettings(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "accessSettings")
+
+
f, err := rp.repoResolver.Resolve(r)
+
user := rp.oauth.GetUser(r)
+
+
collaborators, err := func(repo *models.Repo) ([]pages.Collaborator, error) {
+
repoCollaborators, err := rp.enforcer.E.GetImplicitUsersForResourceByDomain(repo.DidSlashRepo(), repo.Knot)
+
if err != nil {
+
return nil, err
+
}
+
var collaborators []pages.Collaborator
+
for _, item := range repoCollaborators {
+
// currently only two roles: owner and collaborator
+
var role string
+
switch item[3] {
+
case "repo:owner":
+
role = "owner"
+
case "repo:collaborator":
+
role = "collaborator"
+
default:
+
continue
+
}
+
+
did := item[0]
+
+
c := pages.Collaborator{
+
Did: did,
+
Role: role,
+
}
+
collaborators = append(collaborators, c)
+
}
+
return collaborators, nil
+
}(f)
+
if err != nil {
+
l.Error("failed to get collaborators", "err", err)
+
}
+
+
rp.pages.RepoAccessSettings(w, pages.RepoAccessSettingsParams{
+
LoggedInUser: user,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
+
Tabs: settingsTabs,
+
Tab: "access",
+
Collaborators: collaborators,
+
})
+
}
+
+
func (rp *Repo) pipelineSettings(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "pipelineSettings")
+
+
f, err := rp.repoResolver.Resolve(r)
+
user := rp.oauth.GetUser(r)
+
+
// all spindles that the repo owner is a member of
+
spindles, err := rp.enforcer.GetSpindlesForUser(f.Did)
+
if err != nil {
+
l.Error("failed to fetch spindles", "err", err)
+
return
+
}
+
+
var secrets []*tangled.RepoListSecrets_Secret
+
if f.Spindle != "" {
+
if spindleClient, err := rp.oauth.ServiceClient(
+
r,
+
oauth.WithService(f.Spindle),
+
oauth.WithLxm(tangled.RepoListSecretsNSID),
+
oauth.WithExp(60),
+
oauth.WithDev(rp.config.Core.Dev),
+
); err != nil {
+
l.Error("failed to create spindle client", "err", err)
+
} else if resp, err := tangled.RepoListSecrets(r.Context(), spindleClient, f.RepoAt().String()); err != nil {
+
l.Error("failed to fetch secrets", "err", err)
+
} else {
+
secrets = resp.Secrets
+
}
+
}
+
+
slices.SortFunc(secrets, func(a, b *tangled.RepoListSecrets_Secret) int {
+
return strings.Compare(a.Key, b.Key)
+
})
+
+
var dids []string
+
for _, s := range secrets {
+
dids = append(dids, s.CreatedBy)
+
}
+
resolvedIdents := rp.idResolver.ResolveIdents(r.Context(), dids)
+
+
// convert to a more manageable form
+
var niceSecret []map[string]any
+
for id, s := range secrets {
+
when, _ := time.Parse(time.RFC3339, s.CreatedAt)
+
niceSecret = append(niceSecret, map[string]any{
+
"Id": id,
+
"Key": s.Key,
+
"CreatedAt": when,
+
"CreatedBy": resolvedIdents[id].Handle.String(),
+
})
+
}
+
+
rp.pages.RepoPipelineSettings(w, pages.RepoPipelineSettingsParams{
+
LoggedInUser: user,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
+
Tabs: settingsTabs,
+
Tab: "pipelines",
+
Spindles: spindles,
+
CurrentSpindle: f.Spindle,
+
Secrets: niceSecret,
+
})
+
}
+
+
func (rp *Repo) EditBaseSettings(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "EditBaseSettings")
+
+
noticeId := "repo-base-settings-error"
+
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to get repo and knot", "err", err)
+
w.WriteHeader(http.StatusBadRequest)
+
return
+
}
+
+
client, err := rp.oauth.AuthorizedClient(r)
+
if err != nil {
+
l.Error("failed to get client")
+
rp.pages.Notice(w, noticeId, "Failed to update repository information, try again later.")
+
return
+
}
+
+
var (
+
description = r.FormValue("description")
+
website = r.FormValue("website")
+
topicStr = r.FormValue("topics")
+
)
+
+
err = rp.validator.ValidateURI(website)
+
if website != "" && err != nil {
+
l.Error("invalid uri", "err", err)
+
rp.pages.Notice(w, noticeId, err.Error())
+
return
+
}
+
+
topics, err := rp.validator.ValidateRepoTopicStr(topicStr)
+
if err != nil {
+
l.Error("invalid topics", "err", err)
+
rp.pages.Notice(w, noticeId, err.Error())
+
return
+
}
+
l.Debug("got", "topicsStr", topicStr, "topics", topics)
+
+
newRepo := *f
+
newRepo.Description = description
+
newRepo.Website = website
+
newRepo.Topics = topics
+
record := newRepo.AsRecord()
+
+
tx, err := rp.db.BeginTx(r.Context(), nil)
+
if err != nil {
+
l.Error("failed to begin transaction", "err", err)
+
rp.pages.Notice(w, noticeId, "Failed to save repository information.")
+
return
+
}
+
defer tx.Rollback()
+
+
err = db.PutRepo(tx, newRepo)
+
if err != nil {
+
l.Error("failed to update repository", "err", err)
+
rp.pages.Notice(w, noticeId, "Failed to save repository information.")
+
return
+
}
+
+
ex, err := comatproto.RepoGetRecord(r.Context(), client, "", tangled.RepoNSID, newRepo.Did, newRepo.Rkey)
+
if err != nil {
+
// failed to get record
+
l.Error("failed to get repo record", "err", err)
+
rp.pages.Notice(w, noticeId, "Failed to save repository information, no record found on PDS.")
+
return
+
}
+
_, err = comatproto.RepoPutRecord(r.Context(), client, &comatproto.RepoPutRecord_Input{
+
Collection: tangled.RepoNSID,
+
Repo: newRepo.Did,
+
Rkey: newRepo.Rkey,
+
SwapRecord: ex.Cid,
+
Record: &lexutil.LexiconTypeDecoder{
+
Val: &record,
+
},
+
})
+
+
if err != nil {
+
l.Error("failed to perferom update-repo query", "err", err)
+
// failed to write the record to the PDS
+
rp.pages.Notice(w, noticeId, "Failed to save repository information, unable to save to PDS.")
+
return
+
}
+
+
err = tx.Commit()
+
if err != nil {
+
l.Error("failed to commit", "err", err)
+
}
+
+
rp.pages.HxRefresh(w)
+
}
+80
appview/repo/tags.go
···
+
package repo
+
+
import (
+
"encoding/json"
+
"fmt"
+
"net/http"
+
+
"tangled.org/core/api/tangled"
+
"tangled.org/core/appview/db"
+
"tangled.org/core/appview/models"
+
"tangled.org/core/appview/pages"
+
xrpcclient "tangled.org/core/appview/xrpcclient"
+
"tangled.org/core/orm"
+
"tangled.org/core/types"
+
+
indigoxrpc "github.com/bluesky-social/indigo/xrpc"
+
"github.com/go-git/go-git/v5/plumbing"
+
)
+
+
func (rp *Repo) Tags(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "RepoTags")
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to get repo and knot", "err", err)
+
return
+
}
+
scheme := "http"
+
if !rp.config.Core.Dev {
+
scheme = "https"
+
}
+
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
+
xrpcc := &indigoxrpc.Client{
+
Host: host,
+
}
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
+
xrpcBytes, err := tangled.RepoTags(r.Context(), xrpcc, "", 0, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.tags", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
var result types.RepoTagsResponse
+
if err := json.Unmarshal(xrpcBytes, &result); err != nil {
+
l.Error("failed to decode XRPC response", "err", err)
+
rp.pages.Error503(w)
+
return
+
}
+
artifacts, err := db.GetArtifact(rp.db, orm.FilterEq("repo_at", f.RepoAt()))
+
if err != nil {
+
l.Error("failed grab artifacts", "err", err)
+
return
+
}
+
// convert artifacts to map for easy UI building
+
artifactMap := make(map[plumbing.Hash][]models.Artifact)
+
for _, a := range artifacts {
+
artifactMap[a.Tag] = append(artifactMap[a.Tag], a)
+
}
+
var danglingArtifacts []models.Artifact
+
for _, a := range artifacts {
+
found := false
+
for _, t := range result.Tags {
+
if t.Tag != nil {
+
if t.Tag.Hash == a.Tag {
+
found = true
+
}
+
}
+
}
+
if !found {
+
danglingArtifacts = append(danglingArtifacts, a)
+
}
+
}
+
user := rp.oauth.GetUser(r)
+
rp.pages.RepoTags(w, pages.RepoTagsParams{
+
LoggedInUser: user,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
+
RepoTagsResponse: result,
+
ArtifactMap: artifactMap,
+
DanglingArtifacts: danglingArtifacts,
+
})
+
}
+108
appview/repo/tree.go
···
+
package repo
+
+
import (
+
"fmt"
+
"net/http"
+
"net/url"
+
"strings"
+
"time"
+
+
"tangled.org/core/api/tangled"
+
"tangled.org/core/appview/pages"
+
"tangled.org/core/appview/reporesolver"
+
xrpcclient "tangled.org/core/appview/xrpcclient"
+
"tangled.org/core/types"
+
+
indigoxrpc "github.com/bluesky-social/indigo/xrpc"
+
"github.com/go-chi/chi/v5"
+
"github.com/go-git/go-git/v5/plumbing"
+
)
+
+
func (rp *Repo) Tree(w http.ResponseWriter, r *http.Request) {
+
l := rp.logger.With("handler", "RepoTree")
+
f, err := rp.repoResolver.Resolve(r)
+
if err != nil {
+
l.Error("failed to fully resolve repo", "err", err)
+
return
+
}
+
ref := chi.URLParam(r, "ref")
+
ref, _ = url.PathUnescape(ref)
+
// if the tree path has a trailing slash, let's strip it
+
// so we don't 404
+
treePath := chi.URLParam(r, "*")
+
treePath, _ = url.PathUnescape(treePath)
+
treePath = strings.TrimSuffix(treePath, "/")
+
scheme := "http"
+
if !rp.config.Core.Dev {
+
scheme = "https"
+
}
+
host := fmt.Sprintf("%s://%s", scheme, f.Knot)
+
xrpcc := &indigoxrpc.Client{
+
Host: host,
+
}
+
repo := fmt.Sprintf("%s/%s", f.Did, f.Name)
+
xrpcResp, err := tangled.RepoTree(r.Context(), xrpcc, treePath, ref, repo)
+
if xrpcerr := xrpcclient.HandleXrpcErr(err); xrpcerr != nil {
+
l.Error("failed to call XRPC repo.tree", "err", xrpcerr)
+
rp.pages.Error503(w)
+
return
+
}
+
// Convert XRPC response to internal types.RepoTreeResponse
+
files := make([]types.NiceTree, len(xrpcResp.Files))
+
for i, xrpcFile := range xrpcResp.Files {
+
file := types.NiceTree{
+
Name: xrpcFile.Name,
+
Mode: xrpcFile.Mode,
+
Size: int64(xrpcFile.Size),
+
}
+
// Convert last commit info if present
+
if xrpcFile.Last_commit != nil {
+
commitWhen, _ := time.Parse(time.RFC3339, xrpcFile.Last_commit.When)
+
file.LastCommit = &types.LastCommitInfo{
+
Hash: plumbing.NewHash(xrpcFile.Last_commit.Hash),
+
Message: xrpcFile.Last_commit.Message,
+
When: commitWhen,
+
}
+
}
+
files[i] = file
+
}
+
result := types.RepoTreeResponse{
+
Ref: xrpcResp.Ref,
+
Files: files,
+
}
+
if xrpcResp.Parent != nil {
+
result.Parent = *xrpcResp.Parent
+
}
+
if xrpcResp.Dotdot != nil {
+
result.DotDot = *xrpcResp.Dotdot
+
}
+
if xrpcResp.Readme != nil {
+
result.ReadmeFileName = xrpcResp.Readme.Filename
+
result.Readme = xrpcResp.Readme.Contents
+
}
+
ownerSlashRepo := reporesolver.GetBaseRepoPath(r, f)
+
// redirects tree paths trying to access a blob; in this case the result.Files is unpopulated,
+
// so we can safely redirect to the "parent" (which is the same file).
+
if len(result.Files) == 0 && result.Parent == treePath {
+
redirectTo := fmt.Sprintf("/%s/blob/%s/%s", ownerSlashRepo, url.PathEscape(ref), result.Parent)
+
http.Redirect(w, r, redirectTo, http.StatusFound)
+
return
+
}
+
user := rp.oauth.GetUser(r)
+
var breadcrumbs [][]string
+
breadcrumbs = append(breadcrumbs, []string{f.Name, fmt.Sprintf("/%s/tree/%s", ownerSlashRepo, url.PathEscape(ref))})
+
if treePath != "" {
+
for idx, elem := range strings.Split(treePath, "/") {
+
breadcrumbs = append(breadcrumbs, []string{elem, fmt.Sprintf("%s/%s", breadcrumbs[idx][1], url.PathEscape(elem))})
+
}
+
}
+
sortFiles(result.Files)
+
+
rp.pages.RepoTree(w, pages.RepoTreeParams{
+
LoggedInUser: user,
+
BreadCrumbs: breadcrumbs,
+
TreePath: treePath,
+
RepoInfo: rp.repoResolver.GetRepoInfo(r, user),
+
RepoTreeResponse: result,
+
})
+
}
+76 -162
appview/reporesolver/resolver.go
···
package reporesolver
import (
-
"context"
-
"database/sql"
-
"errors"
"fmt"
"log"
"net/http"
···
"strings"
"github.com/bluesky-social/indigo/atproto/identity"
-
securejoin "github.com/cyphar/filepath-securejoin"
"github.com/go-chi/chi/v5"
"tangled.org/core/appview/config"
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
"tangled.org/core/appview/oauth"
-
"tangled.org/core/appview/pages"
"tangled.org/core/appview/pages/repoinfo"
-
"tangled.org/core/idresolver"
"tangled.org/core/rbac"
)
-
type ResolvedRepo struct {
-
models.Repo
-
OwnerId identity.Identity
-
CurrentDir string
-
Ref string
-
-
rr *RepoResolver
+
type RepoResolver struct {
+
config *config.Config
+
enforcer *rbac.Enforcer
+
execer db.Execer
}
-
type RepoResolver struct {
-
config *config.Config
-
enforcer *rbac.Enforcer
-
idResolver *idresolver.Resolver
-
execer db.Execer
+
func New(config *config.Config, enforcer *rbac.Enforcer, execer db.Execer) *RepoResolver {
+
return &RepoResolver{config: config, enforcer: enforcer, execer: execer}
}
-
func New(config *config.Config, enforcer *rbac.Enforcer, resolver *idresolver.Resolver, execer db.Execer) *RepoResolver {
-
return &RepoResolver{config: config, enforcer: enforcer, idResolver: resolver, execer: execer}
+
// NOTE: this... should not even be here. the entire package will be removed in future refactor
+
func GetBaseRepoPath(r *http.Request, repo *models.Repo) string {
+
var (
+
user = chi.URLParam(r, "user")
+
name = chi.URLParam(r, "repo")
+
)
+
if user == "" || name == "" {
+
return repo.DidSlashRepo()
+
}
+
return path.Join(user, name)
}
-
func (rr *RepoResolver) Resolve(r *http.Request) (*ResolvedRepo, error) {
+
// TODO: move this out of `RepoResolver` struct
+
func (rr *RepoResolver) Resolve(r *http.Request) (*models.Repo, error) {
repo, ok := r.Context().Value("repo").(*models.Repo)
if !ok {
log.Println("malformed middleware: `repo` not exist in context")
return nil, fmt.Errorf("malformed middleware")
}
-
id, ok := r.Context().Value("resolvedId").(identity.Identity)
-
if !ok {
-
log.Println("malformed middleware")
-
return nil, fmt.Errorf("malformed middleware")
-
}
-
currentDir := path.Dir(extractPathAfterRef(r.URL.EscapedPath()))
-
ref := chi.URLParam(r, "ref")
-
-
return &ResolvedRepo{
-
Repo: *repo,
-
OwnerId: id,
-
CurrentDir: currentDir,
-
Ref: ref,
-
-
rr: rr,
-
}, nil
+
return repo, nil
}
-
func (f *ResolvedRepo) OwnerDid() string {
-
return f.OwnerId.DID.String()
-
}
-
-
func (f *ResolvedRepo) OwnerHandle() string {
-
return f.OwnerId.Handle.String()
-
}
-
-
func (f *ResolvedRepo) OwnerSlashRepo() string {
-
handle := f.OwnerId.Handle
-
-
var p string
-
if handle != "" && !handle.IsInvalidHandle() {
-
p, _ = securejoin.SecureJoin(fmt.Sprintf("@%s", handle), f.Name)
-
} else {
-
p, _ = securejoin.SecureJoin(f.OwnerDid(), f.Name)
+
// 1. [x] replace `RepoInfo` to `reporesolver.GetRepoInfo(r *http.Request, repo, user)`
+
// 2. [x] remove `rr`, `CurrentDir`, `Ref` fields from `ResolvedRepo`
+
// 3. [x] remove `ResolvedRepo`
+
// 4. [ ] replace reporesolver to reposervice
+
func (rr *RepoResolver) GetRepoInfo(r *http.Request, user *oauth.User) repoinfo.RepoInfo {
+
ownerId, ook := r.Context().Value("resolvedId").(identity.Identity)
+
repo, rok := r.Context().Value("repo").(*models.Repo)
+
if !ook || !rok {
+
log.Println("malformed request, failed to get repo from context")
}
-
return p
-
}
+
// get dir/ref
+
currentDir := path.Dir(extractPathAfterRef(r.URL.EscapedPath()))
+
ref := chi.URLParam(r, "ref")
-
func (f *ResolvedRepo) Collaborators(ctx context.Context) ([]pages.Collaborator, error) {
-
repoCollaborators, err := f.rr.enforcer.E.GetImplicitUsersForResourceByDomain(f.DidSlashRepo(), f.Knot)
-
if err != nil {
-
return nil, err
+
repoAt := repo.RepoAt()
+
isStarred := false
+
roles := repoinfo.RolesInRepo{}
+
if user != nil {
+
isStarred = db.GetStarStatus(rr.execer, user.Did, repoAt)
+
roles.Roles = rr.enforcer.GetPermissionsInRepo(user.Did, repo.Knot, repo.DidSlashRepo())
}
-
var collaborators []pages.Collaborator
-
for _, item := range repoCollaborators {
-
// currently only two roles: owner and member
-
var role string
-
switch item[3] {
-
case "repo:owner":
-
role = "owner"
-
case "repo:collaborator":
-
role = "collaborator"
-
default:
-
continue
+
stats := repo.RepoStats
+
if stats == nil {
+
starCount, err := db.GetStarCount(rr.execer, repoAt)
+
if err != nil {
+
log.Println("failed to get star count for ", repoAt)
}
-
-
did := item[0]
-
-
c := pages.Collaborator{
-
Did: did,
-
Handle: "",
-
Role: role,
+
issueCount, err := db.GetIssueCount(rr.execer, repoAt)
+
if err != nil {
+
log.Println("failed to get issue count for ", repoAt)
}
-
collaborators = append(collaborators, c)
-
}
-
-
// populate all collborators with handles
-
identsToResolve := make([]string, len(collaborators))
-
for i, collab := range collaborators {
-
identsToResolve[i] = collab.Did
-
}
-
-
resolvedIdents := f.rr.idResolver.ResolveIdents(ctx, identsToResolve)
-
for i, resolved := range resolvedIdents {
-
if resolved != nil {
-
collaborators[i].Handle = resolved.Handle.String()
+
pullCount, err := db.GetPullCount(rr.execer, repoAt)
+
if err != nil {
+
log.Println("failed to get pull count for ", repoAt)
}
-
}
-
-
return collaborators, nil
-
}
-
-
// this function is a bit weird since it now returns RepoInfo from an entirely different
-
// package. we should refactor this or get rid of RepoInfo entirely.
-
func (f *ResolvedRepo) RepoInfo(user *oauth.User) repoinfo.RepoInfo {
-
repoAt := f.RepoAt()
-
isStarred := false
-
if user != nil {
-
isStarred = db.GetStarStatus(f.rr.execer, user.Did, repoAt)
-
}
-
-
starCount, err := db.GetStarCount(f.rr.execer, repoAt)
-
if err != nil {
-
log.Println("failed to get star count for ", repoAt)
-
}
-
issueCount, err := db.GetIssueCount(f.rr.execer, repoAt)
-
if err != nil {
-
log.Println("failed to get issue count for ", repoAt)
-
}
-
pullCount, err := db.GetPullCount(f.rr.execer, repoAt)
-
if err != nil {
-
log.Println("failed to get issue count for ", repoAt)
-
}
-
source, err := db.GetRepoSource(f.rr.execer, repoAt)
-
if errors.Is(err, sql.ErrNoRows) {
-
source = ""
-
} else if err != nil {
-
log.Println("failed to get repo source for ", repoAt, err)
+
stats = &models.RepoStats{
+
StarCount: starCount,
+
IssueCount: issueCount,
+
PullCount: pullCount,
+
}
}
var sourceRepo *models.Repo
-
if source != "" {
-
sourceRepo, err = db.GetRepoByAtUri(f.rr.execer, source)
+
var err error
+
if repo.Source != "" {
+
sourceRepo, err = db.GetRepoByAtUri(rr.execer, repo.Source)
if err != nil {
log.Println("failed to get repo by at uri", err)
}
}
-
var sourceHandle *identity.Identity
-
if sourceRepo != nil {
-
sourceHandle, err = f.rr.idResolver.ResolveIdent(context.Background(), sourceRepo.Did)
-
if err != nil {
-
log.Println("failed to resolve source repo", err)
-
}
-
}
+
repoInfo := repoinfo.RepoInfo{
+
// this is basically a models.Repo
+
OwnerDid: ownerId.DID.String(),
+
OwnerHandle: ownerId.Handle.String(),
+
Name: repo.Name,
+
Rkey: repo.Rkey,
+
Description: repo.Description,
+
Website: repo.Website,
+
Topics: repo.Topics,
+
Knot: repo.Knot,
+
Spindle: repo.Spindle,
+
Stats: *stats,
-
knot := f.Knot
+
// fork repo upstream
+
Source: sourceRepo,
-
repoInfo := repoinfo.RepoInfo{
-
OwnerDid: f.OwnerDid(),
-
OwnerHandle: f.OwnerHandle(),
-
Name: f.Name,
-
Rkey: f.Repo.Rkey,
-
RepoAt: repoAt,
-
Description: f.Description,
-
IsStarred: isStarred,
-
Knot: knot,
-
Spindle: f.Spindle,
-
Roles: f.RolesInRepo(user),
-
Stats: models.RepoStats{
-
StarCount: starCount,
-
IssueCount: issueCount,
-
PullCount: pullCount,
-
},
-
CurrentDir: f.CurrentDir,
-
Ref: f.Ref,
-
}
+
// page context
+
CurrentDir: currentDir,
+
Ref: ref,
-
if sourceRepo != nil {
-
repoInfo.Source = sourceRepo
-
repoInfo.SourceHandle = sourceHandle.Handle.String()
+
// info related to the session
+
IsStarred: isStarred,
+
Roles: roles,
}
return repoInfo
-
}
-
-
func (f *ResolvedRepo) RolesInRepo(u *oauth.User) repoinfo.RolesInRepo {
-
if u != nil {
-
r := f.rr.enforcer.GetPermissionsInRepo(u.Did, f.Knot, f.DidSlashRepo())
-
return repoinfo.RolesInRepo{Roles: r}
-
} else {
-
return repoinfo.RolesInRepo{}
-
}
}
// extractPathAfterRef gets the actual repository path
+5 -4
appview/serververify/verify.go
···
"tangled.org/core/api/tangled"
"tangled.org/core/appview/db"
"tangled.org/core/appview/xrpcclient"
+
"tangled.org/core/orm"
"tangled.org/core/rbac"
)
···
// mark this spindle as verified in the db
rowId, err := db.VerifySpindle(
tx,
-
db.FilterEq("owner", owner),
-
db.FilterEq("instance", instance),
+
orm.FilterEq("owner", owner),
+
orm.FilterEq("instance", instance),
)
if err != nil {
return 0, fmt.Errorf("failed to write to DB: %w", err)
···
// mark as registered
err = db.MarkRegistered(
tx,
-
db.FilterEq("did", owner),
-
db.FilterEq("domain", domain),
+
orm.FilterEq("did", owner),
+
orm.FilterEq("domain", domain),
)
if err != nil {
return fmt.Errorf("failed to register domain: %w", err)
+3
appview/settings/settings.go
···
{"Name": "keys", "Icon": "key"},
{"Name": "emails", "Icon": "mail"},
{"Name": "notifications", "Icon": "bell"},
+
{"Name": "knots", "Icon": "volleyball"},
+
{"Name": "spindles", "Icon": "spool"},
}
)
···
PullCommented: r.FormValue("pull_commented") == "on",
PullMerged: r.FormValue("pull_merged") == "on",
Followed: r.FormValue("followed") == "on",
+
UserMentioned: r.FormValue("user_mentioned") == "on",
EmailNotifications: r.FormValue("email_notifications") == "on",
}
+53 -26
appview/spindles/spindles.go
···
"log/slog"
"net/http"
"slices"
+
"strings"
"time"
"github.com/go-chi/chi/v5"
···
"tangled.org/core/appview/serververify"
"tangled.org/core/appview/xrpcclient"
"tangled.org/core/idresolver"
+
"tangled.org/core/orm"
"tangled.org/core/rbac"
"tangled.org/core/tid"
···
Logger *slog.Logger
}
+
type tab = map[string]any
+
+
var (
+
spindlesTabs []tab = []tab{
+
{"Name": "profile", "Icon": "user"},
+
{"Name": "keys", "Icon": "key"},
+
{"Name": "emails", "Icon": "mail"},
+
{"Name": "notifications", "Icon": "bell"},
+
{"Name": "knots", "Icon": "volleyball"},
+
{"Name": "spindles", "Icon": "spool"},
+
}
+
)
+
func (s *Spindles) Router() http.Handler {
r := chi.NewRouter()
···
user := s.OAuth.GetUser(r)
all, err := db.GetSpindles(
s.Db,
-
db.FilterEq("owner", user.Did),
+
orm.FilterEq("owner", user.Did),
)
if err != nil {
s.Logger.Error("failed to fetch spindles", "err", err)
···
s.Pages.Spindles(w, pages.SpindlesParams{
LoggedInUser: user,
Spindles: all,
+
Tabs: spindlesTabs,
+
Tab: "spindles",
})
}
···
spindles, err := db.GetSpindles(
s.Db,
-
db.FilterEq("instance", instance),
-
db.FilterEq("owner", user.Did),
-
db.FilterIsNot("verified", "null"),
+
orm.FilterEq("instance", instance),
+
orm.FilterEq("owner", user.Did),
+
orm.FilterIsNot("verified", "null"),
)
if err != nil || len(spindles) != 1 {
l.Error("failed to get spindle", "err", err, "len(spindles)", len(spindles))
···
repos, err := db.GetRepos(
s.Db,
0,
-
db.FilterEq("spindle", instance),
+
orm.FilterEq("spindle", instance),
)
if err != nil {
l.Error("failed to get spindle repos", "err", err)
···
Spindle: spindle,
Members: members,
Repos: repoMap,
+
Tabs: spindlesTabs,
+
Tab: "spindles",
})
}
···
}
instance := r.FormValue("instance")
+
// Strip protocol, trailing slashes, and whitespace
+
// Rkey cannot contain slashes
+
instance = strings.TrimSpace(instance)
+
instance = strings.TrimPrefix(instance, "https://")
+
instance = strings.TrimPrefix(instance, "http://")
+
instance = strings.TrimSuffix(instance, "/")
if instance == "" {
s.Pages.Notice(w, noticeId, "Incomplete form.")
return
···
spindles, err := db.GetSpindles(
s.Db,
-
db.FilterEq("owner", user.Did),
-
db.FilterEq("instance", instance),
+
orm.FilterEq("owner", user.Did),
+
orm.FilterEq("instance", instance),
)
if err != nil || len(spindles) != 1 {
l.Error("failed to retrieve instance", "err", err, "len(spindles)", len(spindles))
···
// remove spindle members first
err = db.RemoveSpindleMember(
tx,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("instance", instance),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("instance", instance),
)
if err != nil {
l.Error("failed to remove spindle members", "err", err)
···
err = db.DeleteSpindle(
tx,
-
db.FilterEq("owner", user.Did),
-
db.FilterEq("instance", instance),
+
orm.FilterEq("owner", user.Did),
+
orm.FilterEq("instance", instance),
)
if err != nil {
l.Error("failed to delete spindle", "err", err)
···
shouldRedirect := r.Header.Get("shouldRedirect")
if shouldRedirect == "true" {
-
s.Pages.HxRedirect(w, "/spindles")
+
s.Pages.HxRedirect(w, "/settings/spindles")
return
}
···
spindles, err := db.GetSpindles(
s.Db,
-
db.FilterEq("owner", user.Did),
-
db.FilterEq("instance", instance),
+
orm.FilterEq("owner", user.Did),
+
orm.FilterEq("instance", instance),
)
if err != nil || len(spindles) != 1 {
l.Error("failed to retrieve instance", "err", err, "len(spindles)", len(spindles))
···
verifiedSpindle, err := db.GetSpindles(
s.Db,
-
db.FilterEq("id", rowId),
+
orm.FilterEq("id", rowId),
)
if err != nil || len(verifiedSpindle) != 1 {
l.Error("failed get new spindle", "err", err)
···
spindles, err := db.GetSpindles(
s.Db,
-
db.FilterEq("owner", user.Did),
-
db.FilterEq("instance", instance),
+
orm.FilterEq("owner", user.Did),
+
orm.FilterEq("instance", instance),
)
if err != nil || len(spindles) != 1 {
l.Error("failed to retrieve instance", "err", err, "len(spindles)", len(spindles))
···
}
member := r.FormValue("member")
+
member = strings.TrimPrefix(member, "@")
if member == "" {
l.Error("empty member")
s.Pages.Notice(w, noticeId, "Failed to add member, empty form.")
···
}
// success
-
s.Pages.HxRedirect(w, fmt.Sprintf("/spindles/%s", instance))
+
s.Pages.HxRedirect(w, fmt.Sprintf("/settings/spindles/%s", instance))
}
func (s *Spindles) removeMember(w http.ResponseWriter, r *http.Request) {
···
spindles, err := db.GetSpindles(
s.Db,
-
db.FilterEq("owner", user.Did),
-
db.FilterEq("instance", instance),
+
orm.FilterEq("owner", user.Did),
+
orm.FilterEq("instance", instance),
)
if err != nil || len(spindles) != 1 {
l.Error("failed to retrieve instance", "err", err, "len(spindles)", len(spindles))
···
}
member := r.FormValue("member")
+
member = strings.TrimPrefix(member, "@")
if member == "" {
l.Error("empty member")
s.Pages.Notice(w, noticeId, "Failed to remove member, empty form.")
···
// get the record from the DB first:
members, err := db.GetSpindleMembers(
s.Db,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("instance", instance),
-
db.FilterEq("subject", memberId.DID),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("instance", instance),
+
orm.FilterEq("subject", memberId.DID),
)
if err != nil || len(members) != 1 {
l.Error("failed to get member", "err", err)
···
// remove from db
if err = db.RemoveSpindleMember(
tx,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("instance", instance),
-
db.FilterEq("subject", memberId.DID),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("instance", instance),
+
orm.FilterEq("subject", memberId.DID),
); err != nil {
l.Error("failed to remove spindle member", "err", err)
fail()
+1
appview/state/follow.go
···
subjectIdent, err := s.idResolver.ResolveIdent(r.Context(), subject)
if err != nil {
log.Println("failed to follow, invalid did")
+
return
}
if currentUser.Did == subjectIdent.DID.String() {
+16 -12
appview/state/gfi.go
···
package state
import (
-
"fmt"
"log"
"net/http"
"sort"
"github.com/bluesky-social/indigo/atproto/syntax"
-
"tangled.org/core/api/tangled"
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
"tangled.org/core/appview/pages"
"tangled.org/core/appview/pagination"
"tangled.org/core/consts"
+
"tangled.org/core/orm"
)
func (s *State) GoodFirstIssues(w http.ResponseWriter, r *http.Request) {
user := s.oauth.GetUser(r)
-
page, ok := r.Context().Value("page").(pagination.Page)
-
if !ok {
-
page = pagination.FirstPage()
-
}
+
page := pagination.FromContext(r.Context())
-
goodFirstIssueLabel := fmt.Sprintf("at://%s/%s/%s", consts.TangledDid, tangled.LabelDefinitionNSID, "good-first-issue")
+
goodFirstIssueLabel := s.config.Label.GoodFirstIssue
-
repoLabels, err := db.GetRepoLabels(s.db, db.FilterEq("label_at", goodFirstIssueLabel))
+
gfiLabelDef, err := db.GetLabelDefinition(s.db, orm.FilterEq("at_uri", goodFirstIssueLabel))
+
if err != nil {
+
log.Println("failed to get gfi label def", err)
+
s.pages.Error500(w)
+
return
+
}
+
+
repoLabels, err := db.GetRepoLabels(s.db, orm.FilterEq("label_at", goodFirstIssueLabel))
if err != nil {
log.Println("failed to get repo labels", err)
s.pages.Error503(w)
···
RepoGroups: []*models.RepoGroup{},
LabelDefs: make(map[string]*models.LabelDefinition),
Page: page,
+
GfiLabel: gfiLabelDef,
})
return
}
···
pagination.Page{
Limit: 500,
},
-
db.FilterIn("repo_at", repoUris),
-
db.FilterEq("open", 1),
+
orm.FilterIn("repo_at", repoUris),
+
orm.FilterEq("open", 1),
)
if err != nil {
log.Println("failed to get issues", err)
···
}
if len(uriList) > 0 {
-
allLabelDefs, err = db.GetLabelDefinitions(s.db, db.FilterIn("at_uri", uriList))
+
allLabelDefs, err = db.GetLabelDefinitions(s.db, orm.FilterIn("at_uri", uriList))
if err != nil {
log.Println("failed to fetch labels", err)
}
···
RepoGroups: paginatedGroups,
LabelDefs: labelDefsMap,
Page: page,
-
GfiLabel: labelDefsMap[goodFirstIssueLabel],
+
GfiLabel: gfiLabelDef,
})
}
+6 -5
appview/state/knotstream.go
···
ec "tangled.org/core/eventconsumer"
"tangled.org/core/eventconsumer/cursor"
"tangled.org/core/log"
+
"tangled.org/core/orm"
"tangled.org/core/rbac"
"tangled.org/core/workflow"
···
knots, err := db.GetRegistrations(
d,
-
db.FilterIsNot("registered", "null"),
+
orm.FilterIsNot("registered", "null"),
)
if err != nil {
return nil, err
···
repos, err := db.GetRepos(
d,
0,
-
db.FilterEq("did", record.RepoDid),
-
db.FilterEq("name", record.RepoName),
+
orm.FilterEq("did", record.RepoDid),
+
orm.FilterEq("name", record.RepoName),
)
if err != nil {
return fmt.Errorf("failed to look for repo in DB (%s/%s): %w", record.RepoDid, record.RepoName, err)
···
repos, err := db.GetRepos(
d,
0,
-
db.FilterEq("did", record.TriggerMetadata.Repo.Did),
-
db.FilterEq("name", record.TriggerMetadata.Repo.Repo),
+
orm.FilterEq("did", record.TriggerMetadata.Repo.Did),
+
orm.FilterEq("name", record.TriggerMetadata.Repo.Repo),
)
if err != nil {
return fmt.Errorf("failed to look for repo in DB: nsid %s, rkey %s, %w", msg.Nsid, msg.Rkey, err)
+3
appview/state/login.go
···
switch r.Method {
case http.MethodGet:
returnURL := r.URL.Query().Get("return_url")
+
errorCode := r.URL.Query().Get("error")
s.pages.Login(w, pages.LoginParams{
ReturnUrl: returnURL,
+
ErrorCode: errorCode,
})
case http.MethodPost:
handle := r.FormValue("handle")
···
redirectURL, err := s.oauth.ClientApp.StartAuthFlow(r.Context(), handle)
if err != nil {
+
l.Error("failed to start auth", "err", err)
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
+30 -21
appview/state/profile.go
···
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
"tangled.org/core/appview/pages"
+
"tangled.org/core/orm"
)
func (s *State) Profile(w http.ResponseWriter, r *http.Request) {
···
return nil, fmt.Errorf("failed to get profile: %w", err)
}
-
repoCount, err := db.CountRepos(s.db, db.FilterEq("did", did))
+
repoCount, err := db.CountRepos(s.db, orm.FilterEq("did", did))
if err != nil {
return nil, fmt.Errorf("failed to get repo count: %w", err)
}
-
stringCount, err := db.CountStrings(s.db, db.FilterEq("did", did))
+
stringCount, err := db.CountStrings(s.db, orm.FilterEq("did", did))
if err != nil {
return nil, fmt.Errorf("failed to get string count: %w", err)
}
-
starredCount, err := db.CountStars(s.db, db.FilterEq("starred_by_did", did))
+
starredCount, err := db.CountStars(s.db, orm.FilterEq("did", did))
if err != nil {
return nil, fmt.Errorf("failed to get starred repo count: %w", err)
}
···
startOfYear := time.Date(now.Year(), 1, 1, 0, 0, 0, 0, time.UTC)
punchcard, err := db.MakePunchcard(
s.db,
-
db.FilterEq("did", did),
-
db.FilterGte("date", startOfYear.Format(time.DateOnly)),
-
db.FilterLte("date", now.Format(time.DateOnly)),
+
orm.FilterEq("did", did),
+
orm.FilterGte("date", startOfYear.Format(time.DateOnly)),
+
orm.FilterLte("date", now.Format(time.DateOnly)),
)
if err != nil {
return nil, fmt.Errorf("failed to get punchcard for %s: %w", did, err)
···
return &pages.ProfileCard{
UserDid: did,
-
UserHandle: ident.Handle.String(),
Profile: profile,
FollowStatus: followStatus,
Stats: pages.ProfileStats{
···
s.pages.Error500(w)
return
}
-
l = l.With("profileDid", profile.UserDid, "profileHandle", profile.UserHandle)
+
l = l.With("profileDid", profile.UserDid)
repos, err := db.GetRepos(
s.db,
0,
-
db.FilterEq("did", profile.UserDid),
+
orm.FilterEq("did", profile.UserDid),
)
if err != nil {
l.Error("failed to fetch repos", "err", err)
···
l.Error("failed to create timeline", "err", err)
}
+
// populate commit counts in the timeline, using the punchcard
+
currentMonth := time.Now().Month()
+
for _, p := range profile.Punchcard.Punches {
+
idx := currentMonth - p.Date.Month()
+
if int(idx) < len(timeline.ByMonth) {
+
timeline.ByMonth[idx].Commits += p.Count
+
}
+
}
+
s.pages.ProfileOverview(w, pages.ProfileOverviewParams{
LoggedInUser: s.oauth.GetUser(r),
Card: profile,
···
s.pages.Error500(w)
return
}
-
l = l.With("profileDid", profile.UserDid, "profileHandle", profile.UserHandle)
+
l = l.With("profileDid", profile.UserDid)
repos, err := db.GetRepos(
s.db,
0,
-
db.FilterEq("did", profile.UserDid),
+
orm.FilterEq("did", profile.UserDid),
)
if err != nil {
l.Error("failed to get repos", "err", err)
···
s.pages.Error500(w)
return
}
-
l = l.With("profileDid", profile.UserDid, "profileHandle", profile.UserHandle)
+
l = l.With("profileDid", profile.UserDid)
-
stars, err := db.GetStars(s.db, 0, db.FilterEq("starred_by_did", profile.UserDid))
+
stars, err := db.GetRepoStars(s.db, 0, orm.FilterEq("did", profile.UserDid))
if err != nil {
l.Error("failed to get stars", "err", err)
s.pages.Error500(w)
···
}
var repos []models.Repo
for _, s := range stars {
-
if s.Repo != nil {
-
repos = append(repos, *s.Repo)
-
}
+
repos = append(repos, *s.Repo)
}
err = s.pages.ProfileStarred(w, pages.ProfileStarredParams{
···
s.pages.Error500(w)
return
}
-
l = l.With("profileDid", profile.UserDid, "profileHandle", profile.UserHandle)
+
l = l.With("profileDid", profile.UserDid)
-
strings, err := db.GetStrings(s.db, 0, db.FilterEq("did", profile.UserDid))
+
strings, err := db.GetStrings(s.db, 0, orm.FilterEq("did", profile.UserDid))
if err != nil {
l.Error("failed to get strings", "err", err)
s.pages.Error500(w)
···
if err != nil {
return nil, err
}
-
l = l.With("profileDid", profile.UserDid, "profileHandle", profile.UserHandle)
+
l = l.With("profileDid", profile.UserDid)
loggedInUser := s.oauth.GetUser(r)
params := FollowsPageParams{
···
followDids = append(followDids, extractDid(follow))
}
-
profiles, err := db.GetProfiles(s.db, db.FilterIn("did", followDids))
+
profiles, err := db.GetProfiles(s.db, orm.FilterIn("did", followDids))
if err != nil {
l.Error("failed to get profiles", "followDids", followDids, "err", err)
return &params, err
···
profile.Description = r.FormValue("description")
profile.IncludeBluesky = r.FormValue("includeBluesky") == "on"
profile.Location = r.FormValue("location")
+
profile.Pronouns = r.FormValue("pronouns")
var links [5]string
for i := range 5 {
···
Location: &profile.Location,
PinnedRepositories: pinnedRepoStrings,
Stats: vanityStats[:],
+
Pronouns: &profile.Pronouns,
}},
SwapRecord: cid,
})
···
log.Printf("getting profile data for %s: %s", user.Did, err)
}
-
repos, err := db.GetRepos(s.db, 0, db.FilterEq("did", user.Did))
+
repos, err := db.GetRepos(s.db, 0, orm.FilterEq("did", user.Did))
if err != nil {
log.Printf("getting repos for %s: %s", user.Did, err)
}
+51 -39
appview/state/router.go
···
router.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) {
pat := chi.URLParam(r, "*")
-
if strings.HasPrefix(pat, "did:") || strings.HasPrefix(pat, "@") {
-
userRouter.ServeHTTP(w, r)
-
} else {
-
// Check if the first path element is a valid handle without '@' or a flattened DID
-
pathParts := strings.SplitN(pat, "/", 2)
-
if len(pathParts) > 0 {
-
if userutil.IsHandleNoAt(pathParts[0]) {
-
// Redirect to the same path but with '@' prefixed to the handle
-
redirectPath := "@" + pat
-
http.Redirect(w, r, "/"+redirectPath, http.StatusFound)
-
return
-
} else if userutil.IsFlattenedDid(pathParts[0]) {
-
// Redirect to the unflattened DID version
-
unflattenedDid := userutil.UnflattenDid(pathParts[0])
-
var redirectPath string
-
if len(pathParts) > 1 {
-
redirectPath = unflattenedDid + "/" + pathParts[1]
-
} else {
-
redirectPath = unflattenedDid
-
}
-
http.Redirect(w, r, "/"+redirectPath, http.StatusFound)
-
return
-
}
+
pathParts := strings.SplitN(pat, "/", 2)
+
+
if len(pathParts) > 0 {
+
firstPart := pathParts[0]
+
+
// if using a DID or handle, just continue as per usual
+
if userutil.IsDid(firstPart) || userutil.IsHandle(firstPart) {
+
userRouter.ServeHTTP(w, r)
+
return
+
}
+
+
// if using a flattened DID (like you would in go modules), unflatten
+
if userutil.IsFlattenedDid(firstPart) {
+
unflattenedDid := userutil.UnflattenDid(firstPart)
+
redirectPath := strings.Join(append([]string{unflattenedDid}, pathParts[1:]...), "/")
+
+
redirectURL := *r.URL
+
redirectURL.Path = "/" + redirectPath
+
+
http.Redirect(w, r, redirectURL.String(), http.StatusFound)
+
return
}
-
standardRouter.ServeHTTP(w, r)
+
+
// if using a handle prefixed with @, redirect to the same path without the @
+
if normalized := strings.TrimPrefix(firstPart, "@"); userutil.IsHandle(normalized) {
+
redirectPath := strings.Join(append([]string{normalized}, pathParts[1:]...), "/")
+
+
redirectURL := *r.URL
+
redirectURL.Path = "/" + redirectPath
+
+
http.Redirect(w, r, redirectURL.String(), http.StatusFound)
+
return
+
}
+
}
+
+
standardRouter.ServeHTTP(w, r)
})
return router
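For context on the flattened-DID redirect above: a DID such as did:plc:abc123 is flattened to did-plc-abc123 wherever colons are not allowed (Go module paths, for instance), and unflattening restores only the first two separators. A standalone sketch of that mapping, assuming the usual did:method:identifier shape; the real helper is userutil.UnflattenDid and may differ in detail:

package main

import (
	"fmt"
	"strings"
)

// unflatten restores the first two ':' separators of a flattened DID,
// leaving any '-' inside the identifier itself untouched. Illustrative only.
func unflatten(s string) string {
	return strings.Replace(s, "-", ":", 2)
}

func main() {
	fmt.Println(unflatten("did-plc-abc123"))      // did:plc:abc123
	fmt.Println(unflatten("did-web-foo-bar.com")) // did:web:foo-bar.com
}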
···
r.Get("/", s.Profile)
r.Get("/feed.atom", s.AtomFeedPage)
-
// redirect /@handle/repo.git -> /@handle/repo
-
r.Get("/{repo}.git", func(w http.ResponseWriter, r *http.Request) {
-
nonDotGitPath := strings.TrimSuffix(r.URL.Path, ".git")
-
http.Redirect(w, r, nonDotGitPath, http.StatusMovedPermanently)
-
})
-
r.With(mw.ResolveRepo()).Route("/{repo}", func(r chi.Router) {
r.Use(mw.GoImport())
r.Mount("/", s.RepoRouter(mw))
r.Mount("/issues", s.IssuesRouter(mw))
r.Mount("/pulls", s.PullsRouter(mw))
-
r.Mount("/pipelines", s.PipelinesRouter(mw))
-
r.Mount("/labels", s.LabelsRouter(mw))
+
r.Mount("/pipelines", s.PipelinesRouter())
+
r.Mount("/labels", s.LabelsRouter())
// These routes get proxied to the knot
r.Get("/info/refs", s.InfoRefs)
···
// r.Post("/import", s.ImportRepo)
})
-
r.Get("/goodfirstissues", s.GoodFirstIssues)
+
r.With(middleware.Paginate).Get("/goodfirstissues", s.GoodFirstIssues)
r.With(middleware.AuthMiddleware(s.oauth)).Route("/follow", func(r chi.Router) {
r.Post("/", s.Follow)
···
r.Mount("/settings", s.SettingsRouter())
r.Mount("/strings", s.StringsRouter(mw))
-
r.Mount("/knots", s.KnotsRouter())
-
r.Mount("/spindles", s.SpindlesRouter())
+
+
r.Mount("/settings/knots", s.KnotsRouter())
+
r.Mount("/settings/spindles", s.SpindlesRouter())
+
r.Mount("/notifications", s.NotificationsRouter(mw))
r.Mount("/signup", s.SignupRouter())
···
issues := issues.New(
s.oauth,
s.repoResolver,
+
s.enforcer,
s.pages,
s.idResolver,
+
s.mentionsResolver,
s.db,
s.config,
s.notifier,
s.validator,
+
s.indexer.Issues,
log.SubLogger(s.logger, "issues"),
)
return issues.Router(mw)
···
s.repoResolver,
s.pages,
s.idResolver,
+
s.mentionsResolver,
s.db,
s.config,
s.notifier,
s.enforcer,
s.validator,
+
s.indexer.Pulls,
log.SubLogger(s.logger, "pulls"),
)
return pulls.Router(mw)
···
return repo.Router(mw)
}
-
func (s *State) PipelinesRouter(mw *middleware.Middleware) http.Handler {
+
func (s *State) PipelinesRouter() http.Handler {
pipes := pipelines.New(
s.oauth,
s.repoResolver,
···
s.enforcer,
log.SubLogger(s.logger, "pipelines"),
)
-
return pipes.Router(mw)
+
return pipes.Router()
}
-
func (s *State) LabelsRouter(mw *middleware.Middleware) http.Handler {
+
func (s *State) LabelsRouter() http.Handler {
ls := labels.New(
s.oauth,
s.pages,
···
s.enforcer,
log.SubLogger(s.logger, "labels"),
)
-
return ls.Router(mw)
+
return ls.Router()
}
func (s *State) NotificationsRouter(mw *middleware.Middleware) http.Handler {
+2 -1
appview/state/spindlestream.go
···
ec "tangled.org/core/eventconsumer"
"tangled.org/core/eventconsumer/cursor"
"tangled.org/core/log"
+
"tangled.org/core/orm"
"tangled.org/core/rbac"
spindle "tangled.org/core/spindle/models"
)
···
spindles, err := db.GetSpindles(
d,
-
db.FilterIsNot("verified", "null"),
+
orm.FilterIsNot("verified", "null"),
)
if err != nil {
return nil, err
+9 -13
appview/state/star.go
···
log.Println("created atproto record: ", resp.Uri)
star := &models.Star{
-
StarredByDid: currentUser.Did,
-
RepoAt: subjectUri,
-
Rkey: rkey,
+
Did: currentUser.Did,
+
RepoAt: subjectUri,
+
Rkey: rkey,
}
err = db.AddStar(s.db, star)
···
s.notifier.NewStar(r.Context(), star)
-
s.pages.RepoStarFragment(w, pages.RepoStarFragmentParams{
+
s.pages.StarBtnFragment(w, pages.StarBtnFragmentParams{
IsStarred: true,
-
RepoAt: subjectUri,
-
Stats: models.RepoStats{
-
StarCount: starCount,
-
},
+
SubjectAt: subjectUri,
+
StarCount: starCount,
})
return
···
s.notifier.DeleteStar(r.Context(), star)
-
s.pages.RepoStarFragment(w, pages.RepoStarFragmentParams{
+
s.pages.StarBtnFragment(w, pages.StarBtnFragmentParams{
IsStarred: false,
-
RepoAt: subjectUri,
-
Stats: models.RepoStats{
-
StarCount: starCount,
-
},
+
SubjectAt: subjectUri,
+
StarCount: starCount,
})
return
+50 -34
appview/state/state.go
···
"tangled.org/core/appview"
"tangled.org/core/appview/config"
"tangled.org/core/appview/db"
+
"tangled.org/core/appview/indexer"
+
"tangled.org/core/appview/mentions"
"tangled.org/core/appview/models"
"tangled.org/core/appview/notify"
dbnotify "tangled.org/core/appview/notify/db"
···
"tangled.org/core/jetstream"
"tangled.org/core/log"
tlog "tangled.org/core/log"
+
"tangled.org/core/orm"
"tangled.org/core/rbac"
"tangled.org/core/tid"
···
)
type State struct {
-
db *db.DB
-
notifier notify.Notifier
-
oauth *oauth.OAuth
-
enforcer *rbac.Enforcer
-
pages *pages.Pages
-
idResolver *idresolver.Resolver
-
posthog posthog.Client
-
jc *jetstream.JetstreamClient
-
config *config.Config
-
repoResolver *reporesolver.RepoResolver
-
knotstream *eventconsumer.Consumer
-
spindlestream *eventconsumer.Consumer
-
logger *slog.Logger
-
validator *validator.Validator
+
db *db.DB
+
notifier notify.Notifier
+
indexer *indexer.Indexer
+
oauth *oauth.OAuth
+
enforcer *rbac.Enforcer
+
pages *pages.Pages
+
idResolver *idresolver.Resolver
+
mentionsResolver *mentions.Resolver
+
posthog posthog.Client
+
jc *jetstream.JetstreamClient
+
config *config.Config
+
repoResolver *reporesolver.RepoResolver
+
knotstream *eventconsumer.Consumer
+
spindlestream *eventconsumer.Consumer
+
logger *slog.Logger
+
validator *validator.Validator
}
func Make(ctx context.Context, config *config.Config) (*State, error) {
···
return nil, fmt.Errorf("failed to create db: %w", err)
}
+
indexer := indexer.New(log.SubLogger(logger, "indexer"))
+
err = indexer.Init(ctx, d)
+
if err != nil {
+
return nil, fmt.Errorf("failed to create indexer: %w", err)
+
}
+
enforcer, err := rbac.NewEnforcer(config.Core.DbPath)
if err != nil {
return nil, fmt.Errorf("failed to create enforcer: %w", err)
}
-
res, err := idresolver.RedisResolver(config.Redis.ToURL())
+
res, err := idresolver.RedisResolver(config.Redis.ToURL(), config.Plc.PLCURL)
if err != nil {
logger.Error("failed to create redis resolver", "err", err)
-
res = idresolver.DefaultResolver()
+
res = idresolver.DefaultResolver(config.Plc.PLCURL)
}
posthog, err := posthog.NewWithConfig(config.Posthog.ApiKey, posthog.Config{Endpoint: config.Posthog.Endpoint})
···
}
validator := validator.New(d, res, enforcer)
-
repoResolver := reporesolver.New(config, enforcer, res, d)
+
repoResolver := reporesolver.New(config, enforcer, d)
+
+
mentionsResolver := mentions.New(config, res, d, log.SubLogger(logger, "mentionsResolver"))
wrapper := db.DbWrapper{Execer: d}
jc, err := jetstream.NewJetstreamClient(
···
return nil, fmt.Errorf("failed to create jetstream client: %w", err)
}
-
if err := BackfillDefaultDefs(d, res); err != nil {
+
if err := BackfillDefaultDefs(d, res, config.Label.DefaultLabelDefs); err != nil {
return nil, fmt.Errorf("failed to backfill default label defs: %w", err)
}
···
if !config.Core.Dev {
notifiers = append(notifiers, phnotify.NewPosthogNotifier(posthog))
}
-
notifier := notify.NewMergedNotifier(notifiers...)
+
notifiers = append(notifiers, indexer)
+
notifier := notify.NewMergedNotifier(notifiers, tlog.SubLogger(logger, "notify"))
state := &State{
d,
notifier,
+
indexer,
oauth,
enforcer,
pages,
res,
+
mentionsResolver,
posthog,
jc,
config,
···
return
}
-
gfiLabel, err := db.GetLabelDefinition(s.db, db.FilterEq("at_uri", models.LabelGoodFirstIssue))
+
gfiLabel, err := db.GetLabelDefinition(s.db, orm.FilterEq("at_uri", s.config.Label.GoodFirstIssue))
if err != nil {
// non-fatal
}
···
regs, err := db.GetRegistrations(
s.db,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("needs_upgrade", 1),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("needs_upgrade", 1),
)
if err != nil {
l.Error("non-fatal: failed to get registrations", "err", err)
···
spindles, err := db.GetSpindles(
s.db,
-
db.FilterEq("owner", user.Did),
-
db.FilterEq("needs_upgrade", 1),
+
orm.FilterEq("owner", user.Did),
+
orm.FilterEq("needs_upgrade", 1),
)
if err != nil {
l.Error("non-fatal: failed to get spindles", "err", err)
···
pubKeys, err := db.GetPublicKeysForDid(s.db, id.DID.String())
if err != nil {
-
w.WriteHeader(http.StatusNotFound)
+
s.logger.Error("failed to get public keys", "err", err)
+
http.Error(w, "failed to get public keys", http.StatusInternalServerError)
return
}
if len(pubKeys) == 0 {
-
w.WriteHeader(http.StatusNotFound)
+
w.WriteHeader(http.StatusNoContent)
return
}
···
// Check for existing repos
existingRepo, err := db.GetRepo(
s.db,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("name", repoName),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("name", repoName),
)
if err == nil && existingRepo != nil {
l.Info("repo exists")
···
Rkey: rkey,
Description: description,
Created: time.Now(),
-
Labels: models.DefaultLabelDefs(),
+
Labels: s.config.Label.DefaultLabelDefs,
}
record := repo.AsRecord()
···
aturi = ""
s.notifier.NewRepo(r.Context(), repo)
-
s.pages.HxLocation(w, fmt.Sprintf("/@%s/%s", user.Did, repoName))
+
s.pages.HxLocation(w, fmt.Sprintf("/%s/%s", user.Did, repoName))
}
}
···
return err
}
-
func BackfillDefaultDefs(e db.Execer, r *idresolver.Resolver) error {
-
defaults := models.DefaultLabelDefs()
-
defaultLabels, err := db.GetLabelDefinitions(e, db.FilterIn("at_uri", defaults))
+
func BackfillDefaultDefs(e db.Execer, r *idresolver.Resolver, defaults []string) error {
+
defaultLabels, err := db.GetLabelDefinitions(e, orm.FilterIn("at_uri", defaults))
if err != nil {
return err
}
···
return nil
}
-
labelDefs, err := models.FetchDefaultDefs(r)
+
labelDefs, err := models.FetchLabelDefs(r, defaults)
if err != nil {
return err
}
+6 -6
appview/state/userutil/userutil.go
···
didRegex = regexp.MustCompile(`^did:[a-z]+:[a-zA-Z0-9._:%-]*[a-zA-Z0-9._-]$`)
)
-
func IsHandleNoAt(s string) bool {
+
func IsHandle(s string) bool {
// ref: https://atproto.com/specs/handle
return handleRegex.MatchString(s)
+
}
+
+
// IsDid checks if the given string is a standard DID.
+
func IsDid(s string) bool {
+
return didRegex.MatchString(s)
}
func UnflattenDid(s string) string {
···
return strings.Replace(s, ":", "-", 2)
}
return s
-
}
-
-
// IsDid checks if the given string is a standard DID.
-
func IsDid(s string) bool {
-
return didRegex.MatchString(s)
}
var subdomainRegex = regexp.MustCompile(`^[a-z0-9]([a-z0-9-]{2,61}[a-z0-9])?$`)
+21 -8
appview/strings/strings.go
···
"tangled.org/core/appview/pages"
"tangled.org/core/appview/pages/markup"
"tangled.org/core/idresolver"
+
"tangled.org/core/orm"
"tangled.org/core/tid"
"github.com/bluesky-social/indigo/api/atproto"
···
strings, err := db.GetStrings(
s.Db,
0,
-
db.FilterEq("did", id.DID),
-
db.FilterEq("rkey", rkey),
+
orm.FilterEq("did", id.DID),
+
orm.FilterEq("rkey", rkey),
)
if err != nil {
l.Error("failed to fetch string", "err", err)
···
showRendered = r.URL.Query().Get("code") != "true"
}
+
starCount, err := db.GetStarCount(s.Db, string.AtUri())
+
if err != nil {
+
l.Error("failed to get star count", "err", err)
+
}
+
user := s.OAuth.GetUser(r)
+
isStarred := false
+
if user != nil {
+
isStarred = db.GetStarStatus(s.Db, user.Did, string.AtUri())
+
}
+
s.Pages.SingleString(w, pages.SingleStringParams{
-
LoggedInUser: s.OAuth.GetUser(r),
+
LoggedInUser: user,
RenderToggle: renderToggle,
ShowRendered: showRendered,
-
String: string,
+
String: &string,
Stats: string.Stats(),
+
IsStarred: isStarred,
+
StarCount: starCount,
Owner: id,
})
}
···
all, err := db.GetStrings(
s.Db,
0,
-
db.FilterEq("did", id.DID),
-
db.FilterEq("rkey", rkey),
+
orm.FilterEq("did", id.DID),
+
orm.FilterEq("rkey", rkey),
)
if err != nil {
l.Error("failed to fetch string", "err", err)
···
if err := db.DeleteString(
s.Db,
-
db.FilterEq("did", user.Did),
-
db.FilterEq("rkey", rkey),
+
orm.FilterEq("did", user.Did),
+
orm.FilterEq("rkey", rkey),
); err != nil {
fail("Failed to delete string.", err)
return
+2 -1
appview/validator/issue.go
···
"tangled.org/core/appview/db"
"tangled.org/core/appview/models"
+
"tangled.org/core/orm"
)
func (v *Validator) ValidateIssueComment(comment *models.IssueComment) error {
// if comments have parents, only ingest ones that are 1 level deep
if comment.ReplyTo != nil {
-
parents, err := db.GetIssueComments(v.db, db.FilterEq("at_uri", *comment.ReplyTo))
+
parents, err := db.GetIssueComments(v.db, orm.FilterEq("at_uri", *comment.ReplyTo))
if err != nil {
return fmt.Errorf("failed to fetch parent comment: %w", err)
}
+53
appview/validator/repo_topics.go
···
+
package validator
+
+
import (
+
"fmt"
+
"maps"
+
"regexp"
+
"slices"
+
"strings"
+
)
+
+
const (
+
maxTopicLen = 50
+
maxTopics = 20
+
)
+
+
var (
+
topicRE = regexp.MustCompile(`\A[a-z0-9-]+\z`)
+
)
+
+
// ValidateRepoTopicStr parses and validates a whitespace-separated topic string.
+
//
+
// Rules:
+
// - topics are separated by whitespace
+
// - each topic may contain lowercase letters, digits, and hyphens only
+
// - each topic must be <= 50 characters long
+
// - no more than 20 topics allowed
+
// - duplicates are removed
+
func (v *Validator) ValidateRepoTopicStr(topicsStr string) ([]string, error) {
+
topicsStr = strings.TrimSpace(topicsStr)
+
if topicsStr == "" {
+
return nil, nil
+
}
+
parts := strings.Fields(topicsStr)
+
if len(parts) > maxTopics {
+
return nil, fmt.Errorf("too many topics: %d (maximum %d)", len(parts), maxTopics)
+
}
+
+
topicSet := make(map[string]struct{})
+
+
for _, t := range parts {
+
if _, exists := topicSet[t]; exists {
+
continue
+
}
+
if len(t) > maxTopicLen {
+
return nil, fmt.Errorf("topic '%s' is too long (maximum %d characters)", t, maxTopics)
+
}
+
if !topicRE.MatchString(t) {
+
return nil, fmt.Errorf("topic '%s' contains invalid characters (allowed: lowercase letters, digits, hyphens)", t)
+
}
+
topicSet[t] = struct{}{}
+
}
+
return slices.Collect(maps.Keys(topicSet)), nil
+
}
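For context on the rules above, here is a minimal usage sketch of the new topic validator. It assumes an already-constructed `*Validator` named `v` (construction is unchanged by this diff); the input string and the `saveTopics` helper are made up for illustration.

```go
// Hypothetical call site; v is an already-constructed *Validator.
topics, err := v.ValidateRepoTopicStr("go atproto self-hosting go")
if err != nil {
	// more than 20 topics, a topic over 50 characters, or disallowed characters
	return err
}
// the duplicate "go" is dropped; ordering is not guaranteed (set semantics)
saveTopics(topics) // saveTopics is a stand-in for whatever persists the result
```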
+17
appview/validator/uri.go
···
+
package validator
+
+
import (
+
"fmt"
+
"net/url"
+
)
+
+
func (v *Validator) ValidateURI(uri string) error {
+
parsed, err := url.Parse(uri)
+
if err != nil {
+
return fmt.Errorf("invalid uri format")
+
}
+
if parsed.Scheme == "" {
+
return fmt.Errorf("uri scheme missing")
+
}
+
return nil
+
}
-43
cmd/genjwks/main.go
···
-
// adapted from https://tangled.org/anirudh.fi/atproto-oauth
-
-
package main
-
-
import (
-
"crypto/ecdsa"
-
"crypto/elliptic"
-
"crypto/rand"
-
"encoding/json"
-
"fmt"
-
"time"
-
-
"github.com/lestrrat-go/jwx/v2/jwk"
-
)
-
-
func main() {
-
privKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
-
if err != nil {
-
panic(err)
-
}
-
-
key, err := jwk.FromRaw(privKey)
-
if err != nil {
-
panic(err)
-
}
-
-
kid := fmt.Sprintf("%d", time.Now().Unix())
-
-
if err := key.Set(jwk.KeyIDKey, kid); err != nil {
-
panic(err)
-
}
-
-
if err := key.Set("use", "sig"); err != nil {
-
panic(err)
-
}
-
-
b, err := json.Marshal(key)
-
if err != nil {
-
panic(err)
-
}
-
-
fmt.Println(string(b))
-
}
+1 -34
crypto/verify.go
···
"crypto/sha256"
"encoding/base64"
"fmt"
-
"strings"
"github.com/hiddeco/sshsig"
"golang.org/x/crypto/ssh"
-
"tangled.org/core/types"
)
func VerifySignature(pubKey, signature, payload []byte) (error, bool) {
···
// multiple algorithms but sha-512 is most secure, and git's ssh signing defaults
// to sha-512 for all key types anyway.
err = sshsig.Verify(buf, sig, pub, sshsig.HashSHA512, "git")
-
return err, err == nil
-
}
-
// VerifyCommitSignature reconstructs the payload used to sign a commit. This is
-
// essentially the git cat-file output but without the gpgsig header.
-
//
-
// Caveats: signature verification will fail on commits with more than one parent,
-
// i.e. merge commits, because types.NiceDiff doesn't carry more than one Parent field
-
// and we are unable to reconstruct the payload correctly.
-
//
-
// Ideally this should directly operate on an *object.Commit.
-
func VerifyCommitSignature(pubKey string, commit types.NiceDiff) (error, bool) {
-
signature := commit.Commit.PGPSignature
-
-
author := bytes.NewBuffer([]byte{})
-
committer := bytes.NewBuffer([]byte{})
-
commit.Commit.Author.Encode(author)
-
commit.Commit.Committer.Encode(committer)
-
-
payload := strings.Builder{}
-
-
fmt.Fprintf(&payload, "tree %s\n", commit.Commit.Tree)
-
if commit.Commit.Parent != "" {
-
fmt.Fprintf(&payload, "parent %s\n", commit.Commit.Parent)
-
}
-
fmt.Fprintf(&payload, "author %s\n", author.String())
-
fmt.Fprintf(&payload, "committer %s\n", committer.String())
-
if commit.Commit.ChangedId != "" {
-
fmt.Fprintf(&payload, "change-id %s\n", commit.Commit.ChangedId)
-
}
-
fmt.Fprintf(&payload, "\n%s", commit.Commit.Message)
-
-
return VerifySignature([]byte(pubKey), []byte(signature), []byte(payload.String()))
+
return err, err == nil
}
// SSHFingerprint computes the fingerprint of the supplied ssh pubkey.
+19 -9
docs/hacking.md
···
```
# oauth jwks should already be setup by the nix devshell:
-
echo $TANGLED_OAUTH_JWKS
-
{"crv":"P-256","d":"tELKHYH-Dko6qo4ozYcVPE1ah6LvXHFV2wpcWpi8ab4","kid":"1753352226","kty":"EC","x":"mRzYpLzAGq74kJez9UbgGfV040DxgsXpMbaVsdy8RZs","y":"azqqXzUYywMlLb2Uc5AVG18nuLXyPnXr4kI4T39eeIc"}
+
echo $TANGLED_OAUTH_CLIENT_SECRET
+
z42ty4RT1ovnTopY8B8ekz9NuziF2CuMkZ7rbRFpAR9jBqMc
+
+
echo $TANGLED_OAUTH_CLIENT_KID
+
1761667908
# if not, you can set it up yourself:
-
go build -o genjwks.out ./cmd/genjwks
-
export TANGLED_OAUTH_JWKS="$(./genjwks.out)"
+
goat key generate -t P-256
+
Key Type: P-256 / secp256r1 / ES256 private key
+
Secret Key (Multibase Syntax): save this securely (eg, add to password manager)
+
z42tuPDKRfM2mz2Kv953ARen2jmrPA8S9LX9tRq4RVcUMwwL
+
Public Key (DID Key Syntax): share or publish this (eg, in DID document)
+
did:key:zDnaeUBxtG6Xuv3ATJE4GaWeyXM3jyamJsZw3bSPpxx4bNXDR
+
+
# the secret key from above
+
export TANGLED_OAUTH_CLIENT_SECRET="z42tuP..."
# run redis in at a new shell to store oauth sessions
redis-server
···
# type `poweroff` at the shell to exit the VM
```
-
This starts a knot on port 6000, a spindle on port 6555
+
This starts a knot on port 6444 and a spindle on port 6555
with `ssh` exposed on port 2222.
Once the services are running, head to
-
http://localhost:3000/knots and hit verify. It should
+
http://localhost:3000/settings/knots and hit verify. It should
verify the ownership of the services instantly if everything
went smoothly.
···
### running a spindle
The above VM should already be running a spindle on
-
`localhost:6555`. Head to http://localhost:3000/spindles and
+
`localhost:6555`. Head to http://localhost:3000/settings/spindles and
hit verify. You can then configure each repository to use
this spindle and run CI jobs.
···
If for any reason you wish to disable either one of the
services in the VM, modify [nix/vm.nix](/nix/vm.nix) and set
-
`services.tangled-spindle.enable` (or
-
`services.tangled-knot.enable`) to `false`.
+
`services.tangled.spindle.enable` (or
+
`services.tangled.knot.enable`) to `false`.
+1 -1
docs/knot-hosting.md
···
You should now have a running knot server! You can finalize
your registration by hitting the `verify` button on the
-
[/knots](https://tangled.org/knots) page. This simply creates
+
[/settings/knots](https://tangled.org/settings/knots) page. This simply creates
a record on your PDS to announce the existence of the knot.
### custom paths
+4 -4
docs/migrations.md
···
For knots:
- Upgrade to latest tag (v1.9.0 or above)
-
- Head to the [knot dashboard](https://tangled.org/knots) and
+
- Head to the [knot dashboard](https://tangled.org/settings/knots) and
hit the "retry" button to verify your knot
For spindles:
- Upgrade to latest tag (v1.9.0 or above)
- Head to the [spindle
-
dashboard](https://tangled.org/spindles) and hit the
+
dashboard](https://tangled.org/settings/spindles) and hit the
"retry" button to verify your spindle
## Upgrading from v1.7.x
···
[settings](https://tangled.org/settings) page.
- Restart your knot once you have replaced the environment
variable
-
- Head to the [knot dashboard](https://tangled.org/knots) and
+
- Head to the [knot dashboard](https://tangled.org/settings/knots) and
hit the "retry" button to verify your knot. This simply
writes a `sh.tangled.knot` record to your PDS.
···
latest revision, and change your config block like so:
```diff
-
services.tangled-knot = {
+
services.tangled.knot = {
enable = true;
server = {
- secretFile = /path/to/secret;
+19 -1
docs/spindle/pipeline.md
···
- `push`: The workflow should run every time a commit is pushed to the repository.
- `pull_request`: The workflow should run every time a pull request is made or updated.
- `manual`: The workflow can be triggered manually.
-
- `branch`: This is a **required** field that defines which branches the workflow should run for. If used with the `push` event, commits to the branch(es) listed here will trigger the workflow. If used with the `pull_request` event, updates to pull requests targeting the branch(es) listed here will trigger the workflow. This field has no effect with the `manual` event.
+
- `branch`: Defines which branches the workflow should run for. If used with the `push` event, commits to the branch(es) listed here will trigger the workflow. If used with the `pull_request` event, updates to pull requests targeting the branch(es) listed here will trigger the workflow. This field has no effect with the `manual` event. Supports glob patterns using `*` and `**` (e.g., `main`, `develop`, `release-*`). Either `branch` or `tag` (or both) must be specified for `push` events.
+
- `tag`: Defines which tags the workflow should run for. Only used with the `push` event - when tags matching the pattern(s) listed here are pushed, the workflow will trigger. This field has no effect with `pull_request` or `manual` events. Supports glob patterns using `*` and `**` (e.g., `v*`, `v1.*`, `release-**`). Either `branch` or `tag` (or both) must be specified for `push` events.
For example, if you'd like to define a workflow that runs when commits are pushed to the `main` and `develop` branches, or when pull requests that target the `main` branch are updated, or manually, you can do so with:
···
branch: ["main", "develop"]
- event: ["pull_request"]
branch: ["main"]
+
```
+
+
You can also trigger workflows on tag pushes. For instance, to run a deployment workflow when tags matching `v*` are pushed:
+
+
```yaml
+
when:
+
- event: ["push"]
+
tag: ["v*"]
+
```
+
+
You can even combine branch and tag patterns in a single constraint (the workflow triggers if either matches):
+
+
```yaml
+
when:
+
- event: ["push"]
+
branch: ["main", "release-*"]
+
tag: ["v*", "stable"]
```
## Engine
+20 -3
flake.lock
···
{
"nodes": {
+
"actor-typeahead-src": {
+
"flake": false,
+
"locked": {
+
"lastModified": 1762835797,
+
"narHash": "sha256-heizoWUKDdar6ymfZTnj3ytcEv/L4d4fzSmtr0HlXsQ=",
+
"ref": "refs/heads/main",
+
"rev": "677fe7f743050a4e7f09d4a6f87bbf1325a06f6b",
+
"revCount": 6,
+
"type": "git",
+
"url": "https://tangled.org/@jakelazaroff.com/actor-typeahead"
+
},
+
"original": {
+
"type": "git",
+
"url": "https://tangled.org/@jakelazaroff.com/actor-typeahead"
+
}
+
},
"flake-compat": {
"flake": false,
"locked": {
···
},
"nixpkgs": {
"locked": {
-
"lastModified": 1751984180,
-
"narHash": "sha256-LwWRsENAZJKUdD3SpLluwDmdXY9F45ZEgCb0X+xgOL0=",
+
"lastModified": 1765186076,
+
"narHash": "sha256-hM20uyap1a0M9d344I692r+ik4gTMyj60cQWO+hAYP8=",
"owner": "nixos",
"repo": "nixpkgs",
-
"rev": "9807714d6944a957c2e036f84b0ff8caf9930bc0",
+
"rev": "addf7cf5f383a3101ecfba091b98d0a1263dc9b8",
"type": "github"
},
"original": {
···
},
"root": {
"inputs": {
+
"actor-typeahead-src": "actor-typeahead-src",
"flake-compat": "flake-compat",
"gomod2nix": "gomod2nix",
"htmx-src": "htmx-src",
+21 -16
flake.nix
···
url = "https://github.com/rsms/inter/releases/download/v4.1/Inter-4.1.zip";
flake = false;
};
+
actor-typeahead-src = {
+
url = "git+https://tangled.org/@jakelazaroff.com/actor-typeahead";
+
flake = false;
+
};
ibm-plex-mono-src = {
url = "https://github.com/IBM/plex/releases/download/%40ibm%2Fplex-mono%401.1.0/ibm-plex-mono.zip";
flake = false;
···
inter-fonts-src,
sqlite-lib-src,
ibm-plex-mono-src,
+
actor-typeahead-src,
...
}: let
supportedSystems = ["x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin"];
···
}).buildGoApplication;
modules = ./nix/gomod2nix.toml;
sqlite-lib = self.callPackage ./nix/pkgs/sqlite-lib.nix {
-
inherit (pkgs) gcc;
inherit sqlite-lib-src;
};
-
genjwks = self.callPackage ./nix/pkgs/genjwks.nix {};
lexgen = self.callPackage ./nix/pkgs/lexgen.nix {inherit indigo;};
+
goat = self.callPackage ./nix/pkgs/goat.nix {inherit indigo;};
appview-static-files = self.callPackage ./nix/pkgs/appview-static-files.nix {
-
inherit htmx-src htmx-ws-src lucide-src inter-fonts-src ibm-plex-mono-src;
+
inherit htmx-src htmx-ws-src lucide-src inter-fonts-src ibm-plex-mono-src actor-typeahead-src;
};
appview = self.callPackage ./nix/pkgs/appview.nix {};
spindle = self.callPackage ./nix/pkgs/spindle.nix {};
···
});
in {
overlays.default = final: prev: {
-
inherit (mkPackageSet final) lexgen sqlite-lib genjwks spindle knot-unwrapped knot appview;
+
inherit (mkPackageSet final) lexgen goat sqlite-lib spindle knot-unwrapped knot appview;
};
packages = forAllSystems (system: let
···
staticPackages = mkPackageSet pkgs.pkgsStatic;
crossPackages = mkPackageSet pkgs.pkgsCross.gnu64.pkgsStatic;
in {
-
inherit (packages) appview appview-static-files lexgen genjwks spindle knot knot-unwrapped sqlite-lib;
+
inherit (packages) appview appview-static-files lexgen goat spindle knot knot-unwrapped sqlite-lib;
pkgsStatic-appview = staticPackages.appview;
pkgsStatic-knot = staticPackages.knot;
···
nativeBuildInputs = [
pkgs.go
pkgs.air
-
pkgs.tilt
pkgs.gopls
pkgs.httpie
pkgs.litecli
···
mkdir -p appview/pages/static
# no preserve is needed because watch-tailwind will want to be able to overwrite
cp -fr --no-preserve=ownership ${packages'.appview-static-files}/* appview/pages/static
-
export TANGLED_OAUTH_JWKS="$(${packages'.genjwks}/bin/genjwks)"
+
export TANGLED_OAUTH_CLIENT_KID="$(date +%s)"
+
export TANGLED_OAUTH_CLIENT_SECRET="$(${packages'.goat}/bin/goat key generate -t P-256 | grep -A1 "Secret Key" | tail -n1 | awk '{print $1}')"
'';
env.CGO_ENABLED = 1;
};
···
air-watcher = name: arg:
pkgs.writeShellScriptBin "run"
''
-
${pkgs.air}/bin/air -c /dev/null \
-
-build.cmd "${pkgs.go}/bin/go build -o ./out/${name}.out ./cmd/${name}/main.go" \
-
-build.bin "./out/${name}.out" \
-
-build.args_bin "${arg}" \
-
-build.stop_on_error "true" \
-
-build.include_ext "go"
+
export PATH=${pkgs.go}/bin:$PATH
+
${pkgs.air}/bin/air -c ./.air/${name}.toml \
+
-build.args_bin "${arg}"
'';
tailwind-watcher =
pkgs.writeShellScriptBin "run"
···
watch-knot = {
type = "app";
program = ''${air-watcher "knot" "server"}/bin/run'';
+
};
+
watch-spindle = {
+
type = "app";
+
program = ''${air-watcher "spindle" ""}/bin/run'';
};
watch-tailwind = {
type = "app";
···
}: {
imports = [./nix/modules/appview.nix];
-
services.tangled-appview.package = lib.mkDefault self.packages.${pkgs.system}.appview;
+
services.tangled.appview.package = lib.mkDefault self.packages.${pkgs.stdenv.hostPlatform.system}.appview;
};
nixosModules.knot = {
lib,
···
}: {
imports = [./nix/modules/knot.nix];
-
services.tangled-knot.package = lib.mkDefault self.packages.${pkgs.system}.knot;
+
services.tangled.knot.package = lib.mkDefault self.packages.${pkgs.stdenv.hostPlatform.system}.knot;
};
nixosModules.spindle = {
lib,
···
}: {
imports = [./nix/modules/spindle.nix];
-
services.tangled-spindle.package = lib.mkDefault self.packages.${pkgs.system}.spindle;
+
services.tangled.spindle.package = lib.mkDefault self.packages.${pkgs.stdenv.hostPlatform.system}.spindle;
};
};
}
+31 -14
go.mod
···
module tangled.org/core
-
go 1.24.4
+
go 1.25.0
require (
github.com/Blank-Xu/sql-adapter v1.1.1
github.com/alecthomas/assert/v2 v2.11.0
github.com/alecthomas/chroma/v2 v2.15.0
github.com/avast/retry-go/v4 v4.6.1
+
github.com/blevesearch/bleve/v2 v2.5.3
github.com/bluekeyes/go-gitdiff v0.8.1
github.com/bluesky-social/indigo v0.0.0-20251003000214-3259b215110e
github.com/bluesky-social/jetstream v0.0.0-20241210005130-ea96859b93d1
+
github.com/bmatcuk/doublestar/v4 v4.9.1
github.com/carlmjohnson/versioninfo v0.22.5
github.com/casbin/casbin/v2 v2.103.0
+
github.com/charmbracelet/log v0.4.2
github.com/cloudflare/cloudflare-go v0.115.0
github.com/cyphar/filepath-securejoin v0.4.1
github.com/dgraph-io/ristretto v0.2.0
···
github.com/hiddeco/sshsig v0.2.0
github.com/hpcloud/tail v1.0.0
github.com/ipfs/go-cid v0.5.0
-
github.com/lestrrat-go/jwx/v2 v2.1.6
github.com/mattn/go-sqlite3 v1.14.24
github.com/microcosm-cc/bluemonday v1.0.27
github.com/openbao/openbao/api/v2 v2.3.0
···
github.com/stretchr/testify v1.10.0
github.com/urfave/cli/v3 v3.3.3
github.com/whyrusleeping/cbor-gen v0.3.1
-
github.com/wyatt915/goldmark-treeblood v0.0.0-20250825231212-5dcbdb2f4b57
github.com/yuin/goldmark v1.7.13
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc
+
gitlab.com/staticnoise/goldmark-callout v0.0.0-20240609120641-6366b799e4ab
golang.org/x/crypto v0.40.0
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
golang.org/x/image v0.31.0
···
dario.cat/mergo v1.0.1 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/ProtonMail/go-crypto v1.3.0 // indirect
+
github.com/RoaringBitmap/roaring/v2 v2.4.5 // indirect
github.com/alecthomas/repr v0.4.0 // indirect
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
-
github.com/bmatcuk/doublestar/v4 v4.7.1 // indirect
+
github.com/bits-and-blooms/bitset v1.22.0 // indirect
+
github.com/blevesearch/bleve_index_api v1.2.8 // indirect
+
github.com/blevesearch/geo v0.2.4 // indirect
+
github.com/blevesearch/go-faiss v1.0.25 // indirect
+
github.com/blevesearch/go-porterstemmer v1.0.3 // indirect
+
github.com/blevesearch/gtreap v0.1.1 // indirect
+
github.com/blevesearch/mmap-go v1.0.4 // indirect
+
github.com/blevesearch/scorch_segment_api/v2 v2.3.10 // indirect
+
github.com/blevesearch/segment v0.9.1 // indirect
+
github.com/blevesearch/snowballstem v0.9.0 // indirect
+
github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect
+
github.com/blevesearch/vellum v1.1.0 // indirect
+
github.com/blevesearch/zapx/v11 v11.4.2 // indirect
+
github.com/blevesearch/zapx/v12 v12.4.2 // indirect
+
github.com/blevesearch/zapx/v13 v13.4.2 // indirect
+
github.com/blevesearch/zapx/v14 v14.4.2 // indirect
+
github.com/blevesearch/zapx/v15 v15.4.2 // indirect
+
github.com/blevesearch/zapx/v16 v16.2.4 // indirect
github.com/casbin/govaluate v1.3.0 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
github.com/charmbracelet/lipgloss v1.1.0 // indirect
-
github.com/charmbracelet/log v0.4.2 // indirect
github.com/charmbracelet/x/ansi v0.8.0 // indirect
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
···
github.com/containerd/errdefs/pkg v0.3.0 // indirect
github.com/containerd/log v0.1.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
-
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/distribution/reference v0.6.0 // indirect
github.com/dlclark/regexp2 v1.11.5 // indirect
···
github.com/golang-jwt/jwt/v5 v5.2.3 // indirect
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
github.com/golang/mock v1.6.0 // indirect
+
github.com/golang/protobuf v1.5.4 // indirect
+
github.com/golang/snappy v0.0.4 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/gorilla/securecookie v1.1.2 // indirect
···
github.com/ipfs/go-log v1.0.5 // indirect
github.com/ipfs/go-log/v2 v2.6.0 // indirect
github.com/ipfs/go-metrics-interface v0.3.0 // indirect
+
github.com/json-iterator/go v1.1.12 // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
-
github.com/lestrrat-go/blackmagic v1.0.4 // indirect
-
github.com/lestrrat-go/httpcc v1.0.1 // indirect
-
github.com/lestrrat-go/httprc v1.0.6 // indirect
-
github.com/lestrrat-go/iter v1.0.2 // indirect
-
github.com/lestrrat-go/option v1.0.1 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
···
github.com/moby/docker-image-spec v1.3.1 // indirect
github.com/moby/sys/atomicwriter v0.1.0 // indirect
github.com/moby/term v0.5.2 // indirect
+
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/morikuni/aec v1.0.0 // indirect
github.com/mr-tron/base58 v1.2.0 // indirect
+
github.com/mschoch/smat v0.2.0 // indirect
github.com/muesli/termenv v0.16.0 // indirect
github.com/multiformats/go-base32 v0.1.0 // indirect
github.com/multiformats/go-base36 v0.2.0 // indirect
···
github.com/prometheus/procfs v0.16.1 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/ryanuber/go-glob v1.0.0 // indirect
-
github.com/segmentio/asm v1.2.0 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/spaolacci/murmur3 v1.1.0 // indirect
github.com/vmihailenco/go-tinylfu v0.2.2 // indirect
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
-
github.com/wyatt915/treeblood v0.1.15 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
-
gitlab.com/staticnoise/goldmark-callout v0.0.0-20240609120641-6366b799e4ab // indirect
gitlab.com/yawning/secp256k1-voi v0.0.0-20230925100816-f2616030848b // indirect
gitlab.com/yawning/tuplehash v0.0.0-20230713102510-df83abbf9a02 // indirect
+
go.etcd.io/bbolt v1.4.0 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 // indirect
go.opentelemetry.io/otel v1.37.0 // indirect
+60 -21
go.sum
···
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw=
github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE=
+
github.com/RoaringBitmap/roaring/v2 v2.4.5 h1:uGrrMreGjvAtTBobc0g5IrW1D5ldxDQYe2JW2gggRdg=
+
github.com/RoaringBitmap/roaring/v2 v2.4.5/go.mod h1:FiJcsfkGje/nZBZgCu0ZxCPOKD/hVXDS2dXi7/eUFE0=
github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
···
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
+
github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
+
github.com/bits-and-blooms/bitset v1.22.0 h1:Tquv9S8+SGaS3EhyA+up3FXzmkhxPGjQQCkcs2uw7w4=
+
github.com/bits-and-blooms/bitset v1.22.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
+
github.com/blevesearch/bleve/v2 v2.5.3 h1:9l1xtKaETv64SZc1jc4Sy0N804laSa/LeMbYddq1YEM=
+
github.com/blevesearch/bleve/v2 v2.5.3/go.mod h1:Z/e8aWjiq8HeX+nW8qROSxiE0830yQA071dwR3yoMzw=
+
github.com/blevesearch/bleve_index_api v1.2.8 h1:Y98Pu5/MdlkRyLM0qDHostYo7i+Vv1cDNhqTeR4Sy6Y=
+
github.com/blevesearch/bleve_index_api v1.2.8/go.mod h1:rKQDl4u51uwafZxFrPD1R7xFOwKnzZW7s/LSeK4lgo0=
+
github.com/blevesearch/geo v0.2.4 h1:ECIGQhw+QALCZaDcogRTNSJYQXRtC8/m8IKiA706cqk=
+
github.com/blevesearch/geo v0.2.4/go.mod h1:K56Q33AzXt2YExVHGObtmRSFYZKYGv0JEN5mdacJJR8=
+
github.com/blevesearch/go-faiss v1.0.25 h1:lel1rkOUGbT1CJ0YgzKwC7k+XH0XVBHnCVWahdCXk4U=
+
github.com/blevesearch/go-faiss v1.0.25/go.mod h1:OMGQwOaRRYxrmeNdMrXJPvVx8gBnvE5RYrr0BahNnkk=
+
github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo=
+
github.com/blevesearch/go-porterstemmer v1.0.3/go.mod h1:angGc5Ht+k2xhJdZi511LtmxuEf0OVpvUUNrwmM1P7M=
+
github.com/blevesearch/gtreap v0.1.1 h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZGW8Y=
+
github.com/blevesearch/gtreap v0.1.1/go.mod h1:QaQyDRAT51sotthUWAH4Sj08awFSSWzgYICSZ3w0tYk=
+
github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc=
+
github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs=
+
github.com/blevesearch/scorch_segment_api/v2 v2.3.10 h1:Yqk0XD1mE0fDZAJXTjawJ8If/85JxnLd8v5vG/jWE/s=
+
github.com/blevesearch/scorch_segment_api/v2 v2.3.10/go.mod h1:Z3e6ChN3qyN35yaQpl00MfI5s8AxUJbpTR/DL8QOQ+8=
+
github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU=
+
github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw=
+
github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
+
github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs=
+
github.com/blevesearch/upsidedown_store_api v1.0.2 h1:U53Q6YoWEARVLd1OYNc9kvhBMGZzVrdmaozG2MfoB+A=
+
github.com/blevesearch/upsidedown_store_api v1.0.2/go.mod h1:M01mh3Gpfy56Ps/UXHjEO/knbqyQ1Oamg8If49gRwrQ=
+
github.com/blevesearch/vellum v1.1.0 h1:CinkGyIsgVlYf8Y2LUQHvdelgXr6PYuvoDIajq6yR9w=
+
github.com/blevesearch/vellum v1.1.0/go.mod h1:QgwWryE8ThtNPxtgWJof5ndPfx0/YMBh+W2weHKPw8Y=
+
github.com/blevesearch/zapx/v11 v11.4.2 h1:l46SV+b0gFN+Rw3wUI1YdMWdSAVhskYuvxlcgpQFljs=
+
github.com/blevesearch/zapx/v11 v11.4.2/go.mod h1:4gdeyy9oGa/lLa6D34R9daXNUvfMPZqUYjPwiLmekwc=
+
github.com/blevesearch/zapx/v12 v12.4.2 h1:fzRbhllQmEMUuAQ7zBuMvKRlcPA5ESTgWlDEoB9uQNE=
+
github.com/blevesearch/zapx/v12 v12.4.2/go.mod h1:TdFmr7afSz1hFh/SIBCCZvcLfzYvievIH6aEISCte58=
+
github.com/blevesearch/zapx/v13 v13.4.2 h1:46PIZCO/ZuKZYgxI8Y7lOJqX3Irkc3N8W82QTK3MVks=
+
github.com/blevesearch/zapx/v13 v13.4.2/go.mod h1:knK8z2NdQHlb5ot/uj8wuvOq5PhDGjNYQQy0QDnopZk=
+
github.com/blevesearch/zapx/v14 v14.4.2 h1:2SGHakVKd+TrtEqpfeq8X+So5PShQ5nW6GNxT7fWYz0=
+
github.com/blevesearch/zapx/v14 v14.4.2/go.mod h1:rz0XNb/OZSMjNorufDGSpFpjoFKhXmppH9Hi7a877D8=
+
github.com/blevesearch/zapx/v15 v15.4.2 h1:sWxpDE0QQOTjyxYbAVjt3+0ieu8NCE0fDRaFxEsp31k=
+
github.com/blevesearch/zapx/v15 v15.4.2/go.mod h1:1pssev/59FsuWcgSnTa0OeEpOzmhtmr/0/11H0Z8+Nw=
+
github.com/blevesearch/zapx/v16 v16.2.4 h1:tGgfvleXTAkwsD5mEzgM3zCS/7pgocTCnO1oyAUjlww=
+
github.com/blevesearch/zapx/v16 v16.2.4/go.mod h1:Rti/REtuuMmzwsI8/C/qIzRaEoSK/wiFYw5e5ctUKKs=
github.com/bluesky-social/indigo v0.0.0-20251003000214-3259b215110e h1:IutKPwmbU0LrYqw03EuwJtMdAe67rDTrL1U8S8dicRU=
github.com/bluesky-social/indigo v0.0.0-20251003000214-3259b215110e/go.mod h1:n6QE1NDPFoi7PRbMUZmc2y7FibCqiVU4ePpsvhHUBR8=
github.com/bluesky-social/jetstream v0.0.0-20241210005130-ea96859b93d1 h1:CFvRtYNSnWRAi/98M3O466t9dYuwtesNbu6FVPymRrA=
github.com/bluesky-social/jetstream v0.0.0-20241210005130-ea96859b93d1/go.mod h1:WiYEeyJSdUwqoaZ71KJSpTblemUCpwJfh5oVXplK6T4=
github.com/bmatcuk/doublestar/v4 v4.6.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
-
github.com/bmatcuk/doublestar/v4 v4.7.1 h1:fdDeAqgT47acgwd9bd9HxJRDmc9UAmPpc+2m0CXv75Q=
github.com/bmatcuk/doublestar/v4 v4.7.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
+
github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE=
+
github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
···
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc=
-
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40=
github.com/dgraph-io/ristretto v0.2.0 h1:XAfl+7cmoUDWW/2Lx8TGZQjjxIQ2Ley9DSf52dru4WE=
github.com/dgraph-io/ristretto v0.2.0/go.mod h1:8uBHCU/PBV4Ag0CJrP47b9Ofby5dqWNh4FicAdoqFNU=
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y=
···
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
+
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
+
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
+
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
···
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
+
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
···
github.com/ipfs/go-log/v2 v2.6.0/go.mod h1:p+Efr3qaY5YXpx9TX7MoLCSEZX5boSWj9wh86P5HJa8=
github.com/ipfs/go-metrics-interface v0.3.0 h1:YwG7/Cy4R94mYDUuwsBfeziJCVm9pBMJ6q/JR9V40TU=
github.com/ipfs/go-metrics-interface v0.3.0/go.mod h1:OxxQjZDGocXVdyTPocns6cOLwHieqej/jos7H4POwoY=
+
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
+
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
···
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
-
github.com/lestrrat-go/blackmagic v1.0.4 h1:IwQibdnf8l2KoO+qC3uT4OaTWsW7tuRQXy9TRN9QanA=
-
github.com/lestrrat-go/blackmagic v1.0.4/go.mod h1:6AWFyKNNj0zEXQYfTMPfZrAXUWUfTIZ5ECEUEJaijtw=
-
github.com/lestrrat-go/httpcc v1.0.1 h1:ydWCStUeJLkpYyjLDHihupbn2tYmZ7m22BGkcvZZrIE=
-
github.com/lestrrat-go/httpcc v1.0.1/go.mod h1:qiltp3Mt56+55GPVCbTdM9MlqhvzyuL6W/NMDA8vA5E=
-
github.com/lestrrat-go/httprc v1.0.6 h1:qgmgIRhpvBqexMJjA/PmwSvhNk679oqD1RbovdCGW8k=
-
github.com/lestrrat-go/httprc v1.0.6/go.mod h1:mwwz3JMTPBjHUkkDv/IGJ39aALInZLrhBp0X7KGUZlo=
-
github.com/lestrrat-go/iter v1.0.2 h1:gMXo1q4c2pHmC3dn8LzRhJfP1ceCbgSiT9lUydIzltI=
-
github.com/lestrrat-go/iter v1.0.2/go.mod h1:Momfcq3AnRlRjI5b5O8/G5/BvpzrhoFTZcn06fEOPt4=
-
github.com/lestrrat-go/jwx/v2 v2.1.6 h1:hxM1gfDILk/l5ylers6BX/Eq1m/pnxe9NBwW6lVfecA=
-
github.com/lestrrat-go/jwx/v2 v2.1.6/go.mod h1:Y722kU5r/8mV7fYDifjug0r8FK8mZdw0K0GpJw/l8pU=
-
github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU=
-
github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
···
github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko=
github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ=
github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc=
+
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
+
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o=
github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc=
+
github.com/mschoch/smat v0.2.0 h1:8imxQsjDm8yFEAVBe7azKmKSgzSkZXDuKkSq9374khM=
+
github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
github.com/multiformats/go-base32 v0.1.0 h1:pVx9xoSPqEIQG8o+UbAe7DNi51oej1NtK+aGkbLYxPE=
···
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk=
github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc=
-
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
-
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sethvargo/go-envconfig v1.1.0 h1:cWZiJxeTm7AlCvzGXrEXaSTCNgip5oJepekh/BOQuog=
···
github.com/warpfork/go-wish v0.0.0-20220906213052-39a1cc7a02d0/go.mod h1:x6AKhvSSexNrVSrViXSHUEbICjmGXhtgABaHIySUSGw=
github.com/whyrusleeping/cbor-gen v0.3.1 h1:82ioxmhEYut7LBVGhGq8xoRkXPLElVuh5mV67AFfdv0=
github.com/whyrusleeping/cbor-gen v0.3.1/go.mod h1:pM99HXyEbSQHcosHc0iW7YFmwnscr+t9Te4ibko05so=
-
github.com/wyatt915/goldmark-treeblood v0.0.0-20250825231212-5dcbdb2f4b57 h1:UqtQdzLXnvdBdqn/go53qGyncw1wJ7Mq5SQdieM1/Ew=
-
github.com/wyatt915/goldmark-treeblood v0.0.0-20250825231212-5dcbdb2f4b57/go.mod h1:BxSCWByWSRSuembL3cDG1IBUbkBoO/oW/6tF19aA4hs=
-
github.com/wyatt915/treeblood v0.1.15 h1:3KZ3o2LpcKZAzOLqMoW9qeUzKEaKArKpbcPpTkNfQC8=
-
github.com/wyatt915/treeblood v0.1.15/go.mod h1:i7+yhhmzdDP17/97pIsOSffw74EK/xk+qJ0029cSXUY=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
···
gitlab.com/yawning/secp256k1-voi v0.0.0-20230925100816-f2616030848b/go.mod h1:/y/V339mxv2sZmYYR64O07VuCpdNZqCTwO8ZcouTMI8=
gitlab.com/yawning/tuplehash v0.0.0-20230713102510-df83abbf9a02 h1:qwDnMxjkyLmAFgcfgTnfJrmYKWhHnci3GjDqcZp1M3Q=
gitlab.com/yawning/tuplehash v0.0.0-20230713102510-df83abbf9a02/go.mod h1:JTnUj0mpYiAsuZLmKjTx/ex3AtMowcCgnE7YNyCEP0I=
+
go.etcd.io/bbolt v1.4.0 h1:TU77id3TnN/zKr7CO/uk+fBCwF2jGcMuw2B/FMAzYIk=
+
go.etcd.io/bbolt v1.4.0/go.mod h1:AsD+OCi/qPN1giOX1aiLAha3o1U8rAz65bvN4j0sRuk=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 h1:Hf9xI/XLML9ElpiHVDNwvqI0hIFlzV8dgIr35kV1kRU=
···
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+
gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
+36 -61
guard/guard.go
···
"os/exec"
"strings"
-
"github.com/bluesky-social/indigo/atproto/identity"
securejoin "github.com/cyphar/filepath-securejoin"
"github.com/urfave/cli/v3"
-
"tangled.org/core/idresolver"
"tangled.org/core/log"
)
···
"command", sshCommand,
"client", clientIP)
+
// TODO: greet user with their resolved handle instead of did
if sshCommand == "" {
l.Info("access denied: no interactive shells", "user", incomingUser)
fmt.Fprintf(os.Stderr, "Hi @%s! You've successfully authenticated.\n", incomingUser)
···
}
gitCommand := cmdParts[0]
-
-
// did:foo/repo-name or
-
// handle/repo-name or
-
// any of the above with a leading slash (/)
-
-
components := strings.Split(strings.TrimPrefix(strings.Trim(cmdParts[1], "'"), "/"), "/")
-
l.Info("command components", "components", components)
-
-
if len(components) != 2 {
-
l.Error("invalid repo format", "components", components)
-
fmt.Fprintln(os.Stderr, "invalid repo format, needs <user>/<repo> or /<user>/<repo>")
-
os.Exit(-1)
-
}
-
-
didOrHandle := components[0]
-
identity := resolveIdentity(ctx, l, didOrHandle)
-
did := identity.DID.String()
-
repoName := components[1]
-
qualifiedRepoName, _ := securejoin.SecureJoin(did, repoName)
+
repoPath := cmdParts[1]
validCommands := map[string]bool{
"git-receive-pack": true,
···
return fmt.Errorf("access denied: invalid git command")
}
-
if gitCommand != "git-upload-pack" {
-
if !isPushPermitted(l, incomingUser, qualifiedRepoName, endpoint) {
-
l.Error("access denied: user not allowed",
-
"did", incomingUser,
-
"reponame", qualifiedRepoName)
-
fmt.Fprintln(os.Stderr, "access denied: user not allowed")
-
os.Exit(-1)
-
}
+
// qualify repo path from internal server which holds the knot config
+
qualifiedRepoPath, err := guardAndQualifyRepo(l, endpoint, incomingUser, repoPath, gitCommand)
+
if err != nil {
+
l.Error("failed to run guard", "err", err)
+
fmt.Fprintln(os.Stderr, err)
+
os.Exit(1)
}
-
fullPath, _ := securejoin.SecureJoin(gitDir, qualifiedRepoName)
+
fullPath, _ := securejoin.SecureJoin(gitDir, qualifiedRepoPath)
l.Info("processing command",
"user", incomingUser,
"command", gitCommand,
-
"repo", repoName,
+
"repo", repoPath,
"fullPath", fullPath,
"client", clientIP)
···
gitCmd.Stdin = os.Stdin
gitCmd.Env = append(os.Environ(),
fmt.Sprintf("GIT_USER_DID=%s", incomingUser),
-
fmt.Sprintf("GIT_USER_PDS_ENDPOINT=%s", identity.PDSEndpoint()),
)
if err := gitCmd.Run(); err != nil {
···
l.Info("command completed",
"user", incomingUser,
"command", gitCommand,
-
"repo", repoName,
+
"repo", repoPath,
"success", true)
return nil
}
-
func resolveIdentity(ctx context.Context, l *slog.Logger, didOrHandle string) *identity.Identity {
-
resolver := idresolver.DefaultResolver()
-
ident, err := resolver.ResolveIdent(ctx, didOrHandle)
+
// guardAndQualifyRepo asks the knot's internal /guard endpoint whether
// incomingUser may run gitCommand against repo, and returns the qualified repo path.
+
func guardAndQualifyRepo(l *slog.Logger, endpoint, incomingUser, repo, gitCommand string) (string, error) {
+
u, _ := url.Parse(endpoint + "/guard")
+
q := u.Query()
+
q.Add("user", incomingUser)
+
q.Add("repo", repo)
+
q.Add("gitCmd", gitCommand)
+
u.RawQuery = q.Encode()
+
+
resp, err := http.Get(u.String())
if err != nil {
-
l.Error("Error resolving handle", "error", err, "handle", didOrHandle)
-
fmt.Fprintf(os.Stderr, "error resolving handle: %v\n", err)
-
os.Exit(1)
-
}
-
if ident.Handle.IsInvalidHandle() {
-
l.Error("Error resolving handle", "invalid handle", didOrHandle)
-
fmt.Fprintf(os.Stderr, "error resolving handle: invalid handle\n")
-
os.Exit(1)
+
return "", err
}
-
return ident
-
}
+
defer resp.Body.Close()
-
func isPushPermitted(l *slog.Logger, user, qualifiedRepoName, endpoint string) bool {
-
u, _ := url.Parse(endpoint + "/push-allowed")
-
q := u.Query()
-
q.Add("user", user)
-
q.Add("repo", qualifiedRepoName)
-
u.RawQuery = q.Encode()
+
l.Info("Running guard", "url", u.String(), "status", resp.Status)
-
req, err := http.Get(u.String())
+
body, err := io.ReadAll(resp.Body)
if err != nil {
-
l.Error("Error verifying permissions", "error", err)
-
fmt.Fprintf(os.Stderr, "error verifying permissions: %v\n", err)
-
os.Exit(1)
+
return "", err
}
-
-
l.Info("Checking push permission",
-
"url", u.String(),
-
"status", req.Status)
+
text := string(body)
-
return req.StatusCode == http.StatusNoContent
+
switch resp.StatusCode {
+
case http.StatusOK:
+
return text, nil
+
case http.StatusForbidden:
+
l.Error("access denied: user not allowed", "did", incomingUser, "reponame", text)
+
return text, errors.New("access denied: user not allowed")
+
default:
+
return "", errors.New(text)
+
}
}
+17 -8
idresolver/resolver.go
···
directory identity.Directory
}
-
func BaseDirectory() identity.Directory {
+
func BaseDirectory(plcUrl string) identity.Directory {
base := identity.BaseDirectory{
-
PLCURL: identity.DefaultPLCURL,
+
PLCURL: plcUrl,
HTTPClient: http.Client{
Timeout: time.Second * 10,
Transport: &http.Transport{
···
return &base
}
-
func RedisDirectory(url string) (identity.Directory, error) {
+
func RedisDirectory(url, plcUrl string) (identity.Directory, error) {
hitTTL := time.Hour * 24
errTTL := time.Second * 30
invalidHandleTTL := time.Minute * 5
-
return redisdir.NewRedisDirectory(BaseDirectory(), url, hitTTL, errTTL, invalidHandleTTL, 10000)
+
return redisdir.NewRedisDirectory(
+
BaseDirectory(plcUrl),
+
url,
+
hitTTL,
+
errTTL,
+
invalidHandleTTL,
+
10000,
+
)
}
-
func DefaultResolver() *Resolver {
+
func DefaultResolver(plcUrl string) *Resolver {
+
base := BaseDirectory(plcUrl)
+
cached := identity.NewCacheDirectory(base, 250_000, time.Hour*24, time.Minute*2, time.Minute*5)
return &Resolver{
-
directory: identity.DefaultDirectory(),
+
directory: &cached,
}
}
-
func RedisResolver(redisUrl string) (*Resolver, error) {
-
directory, err := RedisDirectory(redisUrl)
+
func RedisResolver(redisUrl, plcUrl string) (*Resolver, error) {
+
directory, err := RedisDirectory(redisUrl, plcUrl)
if err != nil {
return nil, err
}
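Since both constructors now take a PLC URL, call sites pass it through from config. A minimal sketch mirroring the appview wiring earlier in this diff; `cfg.Redis.ToURL()`, `cfg.Plc.PLCURL`, and `logger` stand in for the caller's existing config and logger.

```go
// Prefer the Redis-backed directory; fall back to the in-process cache on error.
res, err := idresolver.RedisResolver(cfg.Redis.ToURL(), cfg.Plc.PLCURL)
if err != nil {
	logger.Error("failed to create redis resolver", "err", err)
	res = idresolver.DefaultResolver(cfg.Plc.PLCURL)
}
```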
+38
input.css
···
@apply no-underline;
}
+
.prose a.mention {
+
@apply no-underline hover:underline;
+
}
+
.prose li {
@apply my-0 py-0;
}
···
details[data-callout] > summary::-webkit-details-marker {
display: none;
}
+
}
@layer utilities {
.error {
···
text-decoration: underline;
}
}
+
+
actor-typeahead {
+
--color-background: #ffffff;
+
--color-border: #d1d5db;
+
--color-shadow: #000000;
+
--color-hover: #f9fafb;
+
--color-avatar-fallback: #e5e7eb;
+
--radius: 0.0;
+
--padding-menu: 0.0rem;
+
z-index: 1000;
+
}
+
+
actor-typeahead::part(handle) {
+
color: #111827;
+
}
+
+
actor-typeahead::part(menu) {
+
box-shadow: 0 4px 6px -1px rgb(0 0 0 / 0.1), 0 2px 4px -2px rgb(0 0 0 / 0.1);
+
}
+
+
@media (prefers-color-scheme: dark) {
+
actor-typeahead {
+
--color-background: #1f2937;
+
--color-border: #4b5563;
+
--color-shadow: #000000;
+
--color-hover: #374151;
+
--color-avatar-fallback: #4b5563;
+
}
+
+
actor-typeahead::part(handle) {
+
color: #f9fafb;
+
}
+
}
+15 -4
jetstream/jetstream.go
···
// existing instances of the closure when j.WantedDids is mutated
return func(ctx context.Context, evt *models.Event) error {
+
j.mu.RLock()
// empty filter => all dids allowed
-
if len(j.wantedDids) == 0 {
-
return processFunc(ctx, evt)
+
matches := len(j.wantedDids) == 0
+
if !matches {
+
if _, ok := j.wantedDids[evt.Did]; ok {
+
matches = true
+
}
}
+
j.mu.RUnlock()
-
if _, ok := j.wantedDids[evt.Did]; ok {
+
if matches {
return processFunc(ctx, evt)
} else {
return nil
···
go func() {
if j.waitForDid {
-
for len(j.wantedDids) == 0 {
+
for {
+
j.mu.RLock()
+
hasDid := len(j.wantedDids) != 0
+
j.mu.RUnlock()
+
if hasDid {
+
break
+
}
time.Sleep(time.Second)
}
}
+1
knotserver/config/config.go
···
InternalListenAddr string `env:"INTERNAL_LISTEN_ADDR, default=127.0.0.1:5444"`
DBPath string `env:"DB_PATH, default=knotserver.db"`
Hostname string `env:"HOSTNAME, required"`
+
PlcUrl string `env:"PLC_URL, default=https://plc.directory"`
JetstreamEndpoint string `env:"JETSTREAM_ENDPOINT, default=wss://jetstream1.us-west.bsky.network/subscribe"`
Owner string `env:"OWNER, required"`
LogDids bool `env:"LOG_DIDS, default=true"`
+81
knotserver/db/db.go
···
+
package db
+
+
import (
+
"context"
+
"database/sql"
+
"log/slog"
+
"strings"
+
+
_ "github.com/mattn/go-sqlite3"
+
"tangled.org/core/log"
+
)
+
+
type DB struct {
+
db *sql.DB
+
logger *slog.Logger
+
}
+
+
func Setup(ctx context.Context, dbPath string) (*DB, error) {
+
// https://github.com/mattn/go-sqlite3#connection-string
+
opts := []string{
+
"_foreign_keys=1",
+
"_journal_mode=WAL",
+
"_synchronous=NORMAL",
+
"_auto_vacuum=incremental",
+
}
+
+
logger := log.FromContext(ctx)
+
logger = log.SubLogger(logger, "db")
+
+
db, err := sql.Open("sqlite3", dbPath+"?"+strings.Join(opts, "&"))
+
if err != nil {
+
return nil, err
+
}
+
+
conn, err := db.Conn(ctx)
+
if err != nil {
+
return nil, err
+
}
+
defer conn.Close()
+
+
_, err = conn.ExecContext(ctx, `
+
create table if not exists known_dids (
+
did text primary key
+
);
+
+
create table if not exists public_keys (
+
id integer primary key autoincrement,
+
did text not null,
+
key text not null,
+
created text not null default (strftime('%Y-%m-%dT%H:%M:%SZ', 'now')),
+
unique(did, key),
+
foreign key (did) references known_dids(did) on delete cascade
+
);
+
+
create table if not exists _jetstream (
+
id integer primary key autoincrement,
+
last_time_us integer not null
+
);
+
+
create table if not exists events (
+
rkey text not null,
+
nsid text not null,
+
event text not null, -- json
+
created integer not null default (strftime('%s', 'now')),
+
primary key (rkey, nsid)
+
);
+
+
create table if not exists migrations (
+
id integer primary key autoincrement,
+
name text unique
+
);
+
`)
+
if err != nil {
+
return nil, err
+
}
+
+
return &DB{
+
db: db,
+
logger: logger,
+
}, nil
+
}
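A short sketch of opening the knot database with the context-aware `Setup`; the path is illustrative (the knot config default is `knotserver.db`, per `DB_PATH` above), and the logger is taken from the context as the code shows.

```go
// Setup derives its logger from the context and bootstraps the schema.
ctx := context.Background()
database, err := db.Setup(ctx, "knotserver.db")
if err != nil {
	return fmt.Errorf("open knot db: %w", err)
}
_ = database
```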
-64
knotserver/db/init.go
···
-
package db
-
-
import (
-
"database/sql"
-
"strings"
-
-
_ "github.com/mattn/go-sqlite3"
-
)
-
-
type DB struct {
-
db *sql.DB
-
}
-
-
func Setup(dbPath string) (*DB, error) {
-
// https://github.com/mattn/go-sqlite3#connection-string
-
opts := []string{
-
"_foreign_keys=1",
-
"_journal_mode=WAL",
-
"_synchronous=NORMAL",
-
"_auto_vacuum=incremental",
-
}
-
-
db, err := sql.Open("sqlite3", dbPath+"?"+strings.Join(opts, "&"))
-
if err != nil {
-
return nil, err
-
}
-
-
// NOTE: If any other migration is added here, you MUST
-
// copy the pattern in appview: use a single sql.Conn
-
// for every migration.
-
-
_, err = db.Exec(`
-
create table if not exists known_dids (
-
did text primary key
-
);
-
-
create table if not exists public_keys (
-
id integer primary key autoincrement,
-
did text not null,
-
key text not null,
-
created text not null default (strftime('%Y-%m-%dT%H:%M:%SZ', 'now')),
-
unique(did, key),
-
foreign key (did) references known_dids(did) on delete cascade
-
);
-
-
create table if not exists _jetstream (
-
id integer primary key autoincrement,
-
last_time_us integer not null
-
);
-
-
create table if not exists events (
-
rkey text not null,
-
nsid text not null,
-
event text not null, -- json
-
created integer not null default (strftime('%s', 'now')),
-
primary key (rkey, nsid)
-
);
-
`)
-
if err != nil {
-
return nil, err
-
}
-
-
return &DB{db: db}, nil
-
}
+1 -17
knotserver/git/diff.go
···
nd.Diff = append(nd.Diff, ndiff)
}
-
nd.Stat.FilesChanged = len(diffs)
-
nd.Commit.This = c.Hash.String()
-
nd.Commit.PGPSignature = c.PGPSignature
-
nd.Commit.Committer = c.Committer
-
nd.Commit.Tree = c.TreeHash.String()
-
-
if parent.Hash.IsZero() {
-
nd.Commit.Parent = ""
-
} else {
-
nd.Commit.Parent = parent.Hash.String()
-
}
-
nd.Commit.Author = c.Author
-
nd.Commit.Message = c.Message
-
-
if v, ok := c.ExtraHeaders["change-id"]; ok {
-
nd.Commit.ChangedId = string(v)
-
}
+
nd.Commit.FromGoGitCommit(c)
return &nd, nil
}
+38 -2
knotserver/git/fork.go
···
import (
"errors"
"fmt"
+
"log/slog"
+
"net/url"
"os/exec"
+
"path/filepath"
"github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/config"
+
knotconfig "tangled.org/core/knotserver/config"
)
-
func Fork(repoPath, source string) error {
-
cloneCmd := exec.Command("git", "clone", "--bare", source, repoPath)
+
func Fork(repoPath, source string, cfg *knotconfig.Config) error {
+
u, err := url.Parse(source)
+
if err != nil {
+
return fmt.Errorf("failed to parse source URL: %w", err)
+
}
+
+
if o := optimizeClone(u, cfg); o != nil {
+
u = o
+
}
+
+
cloneCmd := exec.Command("git", "clone", "--bare", u.String(), repoPath)
if err := cloneCmd.Run(); err != nil {
return fmt.Errorf("failed to bare clone repository: %w", err)
}
···
}
return nil
+
}
+
+
func optimizeClone(u *url.URL, cfg *knotconfig.Config) *url.URL {
+
// only optimize if it's the same host
+
if u.Host != cfg.Server.Hostname {
+
return nil
+
}
+
+
local := filepath.Join(cfg.Repo.ScanPath, u.Path)
+
+
// sanity check: is there a git repo there?
+
if _, err := PlainOpen(local); err != nil {
+
return nil
+
}
+
+
// create optimized file:// URL
+
optimized := &url.URL{
+
Scheme: "file",
+
Path: local,
+
}
+
+
slog.Debug("performing local clone", "url", optimized.String())
+
return optimized
}
func (g *GitRepo) Sync() error {
+60 -2
knotserver/git/git.go
···
import (
"archive/tar"
"bytes"
+
"errors"
"fmt"
"io"
"io/fs"
···
"time"
"github.com/go-git/go-git/v5"
+
"github.com/go-git/go-git/v5/config"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/object"
)
var (
-
ErrBinaryFile = fmt.Errorf("binary file")
-
ErrNotBinaryFile = fmt.Errorf("not binary file")
+
ErrBinaryFile = errors.New("binary file")
+
ErrNotBinaryFile = errors.New("not binary file")
+
ErrMissingGitModules = errors.New("no .gitmodules file found")
+
ErrInvalidGitModules = errors.New("invalid .gitmodules file")
+
ErrNotSubmodule = errors.New("path is not a submodule")
)
type GitRepo struct {
···
defer reader.Close()
return io.ReadAll(reader)
+
}
+
+
// read and parse .gitmodules
+
func (g *GitRepo) Submodules() (*config.Modules, error) {
+
c, err := g.r.CommitObject(g.h)
+
if err != nil {
+
return nil, fmt.Errorf("commit object: %w", err)
+
}
+
+
tree, err := c.Tree()
+
if err != nil {
+
return nil, fmt.Errorf("tree: %w", err)
+
}
+
+
// read .gitmodules file
+
modulesEntry, err := tree.FindEntry(".gitmodules")
+
if err != nil {
+
return nil, fmt.Errorf("%w: %w", ErrMissingGitModules, err)
+
}
+
+
modulesFile, err := tree.TreeEntryFile(modulesEntry)
+
if err != nil {
+
return nil, fmt.Errorf("%w: failed to read file: %w", ErrInvalidGitModules, err)
+
}
+
+
content, err := modulesFile.Contents()
+
if err != nil {
+
return nil, fmt.Errorf("%w: failed to read contents: %w", ErrInvalidGitModules, err)
+
}
+
+
// parse .gitmodules
+
modules := config.NewModules()
+
if err = modules.Unmarshal([]byte(content)); err != nil {
+
return nil, fmt.Errorf("%w: failed to parse: %w", ErrInvalidGitModules, err)
+
}
+
+
return modules, nil
+
}
+
+
func (g *GitRepo) Submodule(path string) (*config.Submodule, error) {
+
modules, err := g.Submodules()
+
if err != nil {
+
return nil, err
+
}
+
+
for _, submodule := range modules.Submodules {
+
if submodule.Path == path {
+
return submodule, nil
+
}
+
}
+
+
// path is not a submodule
+
return nil, ErrNotSubmodule
}
func (g *GitRepo) Branch(name string) (*plumbing.Reference, error) {
+4 -13
knotserver/git/tree.go
···
"path"
"time"
+
"github.com/go-git/go-git/v5/plumbing/filemode"
"github.com/go-git/go-git/v5/plumbing/object"
"tangled.org/core/types"
)
···
}
for _, e := range subtree.Entries {
-
mode, _ := e.Mode.ToOSFileMode()
sz, _ := subtree.Size(e.Name)
-
fpath := path.Join(parent, e.Name)
var lastCommit *types.LastCommitInfo
···
nts = append(nts, types.NiceTree{
Name: e.Name,
-
Mode: mode.String(),
-
IsFile: e.Mode.IsFile(),
+
Mode: e.Mode.String(),
Size: sz,
LastCommit: lastCommit,
})
···
default:
}
-
mode, err := e.Mode.ToOSFileMode()
-
if err != nil {
-
// TODO: log this
-
continue
-
}
-
if e.Mode.IsFile() {
-
err = cb(e, currentTree, root)
-
if errors.Is(err, TerminateWalk) {
+
if err := cb(e, currentTree, root); errors.Is(err, TerminateWalk) {
return err
}
}
// e is a directory
-
if mode.IsDir() {
+
if e.Mode == filemode.Dir {
subtree, err := currentTree.Tree(e.Name)
if err != nil {
return fmt.Errorf("sub tree %s: %w", e.Name, err)
+4 -8
knotserver/ingester.go
···
"github.com/bluesky-social/jetstream/pkg/models"
securejoin "github.com/cyphar/filepath-securejoin"
"tangled.org/core/api/tangled"
-
"tangled.org/core/idresolver"
"tangled.org/core/knotserver/db"
"tangled.org/core/knotserver/git"
"tangled.org/core/log"
···
}
// resolve this aturi to extract the repo record
-
resolver := idresolver.DefaultResolver()
-
ident, err := resolver.ResolveIdent(ctx, repoAt.Authority().String())
+
ident, err := h.resolver.ResolveIdent(ctx, repoAt.Authority().String())
if err != nil || ident.Handle.IsInvalidHandle() {
return fmt.Errorf("failed to resolve handle: %w", err)
}
···
var pipeline workflow.RawPipeline
for _, e := range workflowDir {
-
if !e.IsFile {
+
if !e.IsFile() {
continue
}
···
return err
}
-
resolver := idresolver.DefaultResolver()
-
-
subjectId, err := resolver.ResolveIdent(ctx, record.Subject)
+
subjectId, err := h.resolver.ResolveIdent(ctx, record.Subject)
if err != nil || subjectId.Handle.IsInvalidHandle() {
return err
}
// TODO: fix this for good, we need to fetch the record here unfortunately
// resolve this aturi to extract the repo record
-
owner, err := resolver.ResolveIdent(ctx, repoAt.Authority().String())
+
owner, err := h.resolver.ResolveIdent(ctx, repoAt.Authority().String())
if err != nil || owner.Handle.IsInvalidHandle() {
return fmt.Errorf("failed to resolve handle: %w", err)
}
+146 -49
knotserver/internal.go
···
)
type InternalHandle struct {
-
db *db.DB
-
c *config.Config
-
e *rbac.Enforcer
-
l *slog.Logger
-
n *notifier.Notifier
+
db *db.DB
+
c *config.Config
+
e *rbac.Enforcer
+
l *slog.Logger
+
n *notifier.Notifier
+
res *idresolver.Resolver
}
func (h *InternalHandle) PushAllowed(w http.ResponseWriter, r *http.Request) {
···
writeJSON(w, data)
}
+
// response in text/plain format
+
// the body will be the qualified repository path on success or when the push is denied,
+
// or an error message when processing fails
+
func (h *InternalHandle) Guard(w http.ResponseWriter, r *http.Request) {
+
l := h.l.With("handler", "Guard")
+
+
var (
+
incomingUser = r.URL.Query().Get("user")
+
repo = r.URL.Query().Get("repo")
+
gitCommand = r.URL.Query().Get("gitCmd")
+
)
+
+
if incomingUser == "" || repo == "" || gitCommand == "" {
+
w.WriteHeader(http.StatusBadRequest)
+
l.Error("invalid request", "incomingUser", incomingUser, "repo", repo, "gitCommand", gitCommand)
+
fmt.Fprintln(w, "invalid internal request")
+
return
+
}
+
+
// did:foo/repo-name or
+
// handle/repo-name or
+
// any of the above with a leading slash (/)
+
components := strings.Split(strings.TrimPrefix(strings.Trim(repo, "'"), "/"), "/")
+
l.Info("command components", "components", components)
+
+
if len(components) != 2 {
+
w.WriteHeader(http.StatusBadRequest)
+
l.Error("invalid repo format", "components", components)
+
fmt.Fprintln(w, "invalid repo format, needs <user>/<repo> or /<user>/<repo>")
+
return
+
}
+
repoOwner := components[0]
+
repoName := components[1]
+
+
resolver := idresolver.DefaultResolver(h.c.Server.PlcUrl)
+
+
repoOwnerIdent, err := resolver.ResolveIdent(r.Context(), repoOwner)
+
if err != nil || repoOwnerIdent.Handle.IsInvalidHandle() {
+
l.Error("Error resolving handle", "handle", repoOwner, "err", err)
+
w.WriteHeader(http.StatusInternalServerError)
+
fmt.Fprintf(w, "error resolving handle: invalid handle\n")
+
return
+
}
+
repoOwnerDid := repoOwnerIdent.DID.String()
+
+
qualifiedRepo, _ := securejoin.SecureJoin(repoOwnerDid, repoName)
+
+
if gitCommand == "git-receive-pack" {
+
ok, err := h.e.IsPushAllowed(incomingUser, rbac.ThisServer, qualifiedRepo)
+
if err != nil || !ok {
+
w.WriteHeader(http.StatusForbidden)
+
fmt.Fprint(w, repo)
+
return
+
}
+
}
+
+
w.WriteHeader(http.StatusOK)
+
fmt.Fprint(w, qualifiedRepo)
+
}
+
type PushOptions struct {
skipCi bool
verboseCi bool
···
// non-fatal
}
-
if (line.NewSha.String() != line.OldSha.String()) && line.OldSha.IsZero() {
-
msg, err := h.replyCompare(line, repoDid, gitRelativeDir, repoName, r.Context())
-
if err != nil {
-
l.Error("failed to reply with compare link", "err", err, "line", line, "did", gitUserDid, "repo", gitRelativeDir)
-
// non-fatal
-
} else {
-
for msgLine := range msg {
-
resp.Messages = append(resp.Messages, msg[msgLine])
-
}
-
}
+
err = h.emitCompareLink(&resp.Messages, line, repoDid, repoName)
+
if err != nil {
+
l.Error("failed to reply with compare link", "err", err, "line", line, "did", gitUserDid, "repo", gitRelativeDir)
+
// non-fatal
}
err = h.triggerPipeline(&resp.Messages, line, gitUserDid, repoDid, repoName, pushOptions)
···
writeJSON(w, resp)
}
-
func (h *InternalHandle) replyCompare(line git.PostReceiveLine, repoOwner string, gitRelativeDir string, repoName string, ctx context.Context) ([]string, error) {
-
l := h.l.With("handler", "replyCompare")
-
userIdent, err := idresolver.DefaultResolver().ResolveIdent(ctx, repoOwner)
-
user := repoOwner
-
if err != nil {
-
l.Error("Failed to fetch user identity", "err", err)
-
// non-fatal
-
} else {
-
user = userIdent.Handle.String()
-
}
-
gr, err := git.PlainOpen(gitRelativeDir)
-
if err != nil {
-
l.Error("Failed to open git repository", "err", err)
-
return []string{}, err
-
}
-
defaultBranch, err := gr.FindMainBranch()
-
if err != nil {
-
l.Error("Failed to fetch default branch", "err", err)
-
return []string{}, err
-
}
-
if line.Ref == plumbing.NewBranchReferenceName(defaultBranch).String() {
-
return []string{}, nil
-
}
-
ZWS := "\u200B"
-
var msg []string
-
msg = append(msg, ZWS)
-
msg = append(msg, fmt.Sprintf("Create a PR pointing to %s", defaultBranch))
-
msg = append(msg, fmt.Sprintf("\t%s/%s/%s/compare/%s...%s", h.c.AppViewEndpoint, user, repoName, defaultBranch, strings.TrimPrefix(line.Ref, "refs/heads/")))
-
msg = append(msg, ZWS)
-
return msg, nil
-
}
-
func (h *InternalHandle) insertRefUpdate(line git.PostReceiveLine, gitUserDid, repoDid, repoName string) error {
didSlashRepo, err := securejoin.SecureJoin(repoDid, repoName)
if err != nil {
···
return errors.Join(errs, h.db.InsertEvent(event, h.n))
}
-
func (h *InternalHandle) triggerPipeline(clientMsgs *[]string, line git.PostReceiveLine, gitUserDid, repoDid, repoName string, pushOptions PushOptions) error {
+
func (h *InternalHandle) triggerPipeline(
+
clientMsgs *[]string,
+
line git.PostReceiveLine,
+
gitUserDid string,
+
repoDid string,
+
repoName string,
+
pushOptions PushOptions,
+
) error {
if pushOptions.skipCi {
return nil
}
···
var pipeline workflow.RawPipeline
for _, e := range workflowDir {
-
if !e.IsFile {
+
if !e.IsFile() {
continue
}
···
return h.db.InsertEvent(event, h.n)
}
+
func (h *InternalHandle) emitCompareLink(
+
clientMsgs *[]string,
+
line git.PostReceiveLine,
+
repoDid string,
+
repoName string,
+
) error {
+
// this is a second push to a branch, don't reply with the link again
+
if !line.OldSha.IsZero() {
+
return nil
+
}
+
+
// the ref was not updated to a new hash, don't reply with the link
+
//
+
// NOTE: do we need this?
+
if line.NewSha.String() == line.OldSha.String() {
+
return nil
+
}
+
+
pushedRef := plumbing.ReferenceName(line.Ref)
+
+
userIdent, err := h.res.ResolveIdent(context.Background(), repoDid)
+
user := repoDid
+
if err == nil {
+
user = userIdent.Handle.String()
+
}
+
+
didSlashRepo, err := securejoin.SecureJoin(repoDid, repoName)
+
if err != nil {
+
return err
+
}
+
+
repoPath, err := securejoin.SecureJoin(h.c.Repo.ScanPath, didSlashRepo)
+
if err != nil {
+
return err
+
}
+
+
gr, err := git.PlainOpen(repoPath)
+
if err != nil {
+
return err
+
}
+
+
defaultBranch, err := gr.FindMainBranch()
+
if err != nil {
+
return err
+
}
+
+
// pushing to default branch
+
if pushedRef == plumbing.NewBranchReferenceName(defaultBranch) {
+
return nil
+
}
+
+
// pushing a tag, don't prompt the user to open a PR
+
if pushedRef.IsTag() {
+
return nil
+
}
+
+
ZWS := "\u200B"
+
*clientMsgs = append(*clientMsgs, ZWS)
+
*clientMsgs = append(*clientMsgs, fmt.Sprintf("Create a PR pointing to %s", defaultBranch))
+
*clientMsgs = append(*clientMsgs, fmt.Sprintf("\t%s/%s/%s/compare/%s...%s", h.c.AppViewEndpoint, user, repoName, defaultBranch, strings.TrimPrefix(line.Ref, "refs/heads/")))
+
*clientMsgs = append(*clientMsgs, ZWS)
+
return nil
+
}
+
func Internal(ctx context.Context, c *config.Config, db *db.DB, e *rbac.Enforcer, n *notifier.Notifier) http.Handler {
r := chi.NewRouter()
l := log.FromContext(ctx)
l = log.SubLogger(l, "internal")
+
res := idresolver.DefaultResolver(c.Server.PlcUrl)
h := InternalHandle{
db,
···
e,
l,
n,
+
res,
}
r.Get("/push-allowed", h.PushAllowed)
r.Get("/keys", h.InternalKeys)
+
r.Get("/guard", h.Guard)
r.Post("/hooks/post-receive", h.PostReceiveHook)
r.Mount("/debug", middleware.Profiler())
+18
knotserver/middleware.go
···
)
})
}
+
+
func (h *Knot) CORS(next http.Handler) http.Handler {
+
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+
// Set CORS headers
+
w.Header().Set("Access-Control-Allow-Origin", "*")
+
w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
+
w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Authorization")
+
w.Header().Set("Access-Control-Max-Age", "86400")
+
+
// Handle preflight requests
+
if r.Method == "OPTIONS" {
+
w.WriteHeader(http.StatusOK)
+
return
+
}
+
+
next.ServeHTTP(w, r)
+
})
+
}
+2 -1
knotserver/router.go
···
l: log.FromContext(ctx),
jc: jc,
n: n,
-
resolver: idresolver.DefaultResolver(),
+
resolver: idresolver.DefaultResolver(c.Server.PlcUrl),
}
err := e.AddKnot(rbac.ThisServer)
···
func (h *Knot) Router() http.Handler {
r := chi.NewRouter()
+
r.Use(h.CORS)
r.Use(h.RequestLogger)
r.Get("/", func(w http.ResponseWriter, r *http.Request) {
+1 -1
knotserver/server.go
···
logger.Info("running in dev mode, signature verification is disabled")
}
-
db, err := db.Setup(c.Server.DBPath)
+
db, err := db.Setup(ctx, c.Server.DBPath)
if err != nil {
return fmt.Errorf("failed to load db: %w", err)
}
+1 -1
knotserver/xrpc/create_repo.go
···
repoPath, _ := securejoin.SecureJoin(h.Config.Repo.ScanPath, relativeRepoPath)
if data.Source != nil && *data.Source != "" {
-
err = git.Fork(repoPath, *data.Source)
+
err = git.Fork(repoPath, *data.Source, h.Config)
if err != nil {
l.Error("forking repo", "error", err.Error())
writeError(w, xrpcerr.GenericError(err), http.StatusInternalServerError)
+21 -2
knotserver/xrpc/repo_blob.go
···
return
}
+
// first check if this path is a submodule
+
submodule, err := gr.Submodule(treePath)
+
if err != nil {
+
// this is okay, continue and try to treat it as a regular file
+
} else {
+
response := tangled.RepoBlob_Output{
+
Ref: ref,
+
Path: treePath,
+
Submodule: &tangled.RepoBlob_Submodule{
+
Name: submodule.Name,
+
Url: submodule.URL,
+
Branch: &submodule.Branch,
+
},
+
}
+
writeJson(w, response)
+
return
+
}
+
contents, err := gr.RawContent(treePath)
if err != nil {
x.Logger.Error("file content", "error", err.Error(), "treePath", treePath)
···
var encoding string
isBinary := !isTextual(mimeType)
+
size := int64(len(contents))
if isBinary {
content = base64.StdEncoding.EncodeToString(contents)
···
response := tangled.RepoBlob_Output{
Ref: ref,
Path: treePath,
-
Content: content,
+
Content: &content,
Encoding: &encoding,
-
Size: &[]int64{int64(len(contents))}[0],
+
Size: &size,
IsBinary: &isBinary,
}
+6 -1
knotserver/xrpc/repo_log.go
···
return
}
+
tcommits := make([]types.Commit, len(commits))
+
for i, c := range commits {
+
tcommits[i].FromGoGitCommit(c)
+
}
+
// Create response using existing types.RepoLogResponse
response := types.RepoLogResponse{
-
Commits: commits,
+
Commits: tcommits,
Ref: ref,
Page: (offset / limit) + 1,
PerPage: limit,
+3 -5
knotserver/xrpc/repo_tree.go
···
treeEntries := make([]*tangled.RepoTree_TreeEntry, len(files))
for i, file := range files {
entry := &tangled.RepoTree_TreeEntry{
-
Name: file.Name,
-
Mode: file.Mode,
-
Size: file.Size,
-
Is_file: file.IsFile,
-
Is_subtree: file.IsSubtree,
+
Name: file.Name,
+
Mode: file.Mode,
+
Size: file.Size,
}
if file.LastCommit != nil {
+5
lexicons/actor/profile.json
···
"type": "string",
"format": "at-uri"
}
+
},
+
"pronouns": {
+
"type": "string",
+
"description": "Preferred gender pronouns.",
+
"maxLength": 40
}
}
}
+14
lexicons/issue/comment.json
···
"replyTo": {
"type": "string",
"format": "at-uri"
+
},
+
"mentions": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "did"
+
}
+
},
+
"references": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "at-uri"
+
}
}
}
}
+14
lexicons/issue/issue.json
···
"createdAt": {
"type": "string",
"format": "datetime"
+
},
+
"mentions": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "did"
+
}
+
},
+
"references": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "at-uri"
+
}
}
}
}
+14
lexicons/pulls/comment.json
···
"createdAt": {
"type": "string",
"format": "datetime"
+
},
+
"mentions": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "did"
+
}
+
},
+
"references": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "at-uri"
+
}
}
}
}
+14
lexicons/pulls/pull.json
···
"createdAt": {
"type": "string",
"format": "datetime"
+
},
+
"mentions": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "did"
+
}
+
},
+
"references": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "at-uri"
+
}
}
}
}
+49 -5
lexicons/repo/blob.json
···
"type": "query",
"parameters": {
"type": "params",
-
"required": ["repo", "ref", "path"],
+
"required": [
+
"repo",
+
"ref",
+
"path"
+
],
"properties": {
"repo": {
"type": "string",
···
"encoding": "application/json",
"schema": {
"type": "object",
-
"required": ["ref", "path", "content"],
+
"required": [
+
"ref",
+
"path"
+
],
"properties": {
"ref": {
"type": "string",
···
"encoding": {
"type": "string",
"description": "Content encoding",
-
"enum": ["utf-8", "base64"]
+
"enum": [
+
"utf-8",
+
"base64"
+
]
},
"size": {
"type": "integer",
···
"mimeType": {
"type": "string",
"description": "MIME type of the file"
+
},
+
"submodule": {
+
"type": "ref",
+
"ref": "#submodule",
+
"description": "Submodule information if path is a submodule"
},
"lastCommit": {
"type": "ref",
···
},
"lastCommit": {
"type": "object",
-
"required": ["hash", "message", "when"],
+
"required": [
+
"hash",
+
"message",
+
"when"
+
],
"properties": {
"hash": {
"type": "string",
···
},
"signature": {
"type": "object",
-
"required": ["name", "email", "when"],
+
"required": [
+
"name",
+
"email",
+
"when"
+
],
"properties": {
"name": {
"type": "string",
···
"type": "string",
"format": "datetime",
"description": "Author timestamp"
+
}
+
}
+
},
+
"submodule": {
+
"type": "object",
+
"required": [
+
"name",
+
"url"
+
],
+
"properties": {
+
"name": {
+
"type": "string",
+
"description": "Submodule name"
+
},
+
"url": {
+
"type": "string",
+
"description": "Submodule repository URL"
+
},
+
"branch": {
+
"type": "string",
+
"description": "Branch to track in the submodule"
}
}
}
+15
lexicons/repo/repo.json
···
"minGraphemes": 1,
"maxGraphemes": 140
},
+
"website": {
+
"type": "string",
+
"format": "uri",
+
"description": "Any URI related to the repo"
+
},
+
"topics": {
+
"type": "array",
+
"description": "Topics related to the repo",
+
"items": {
+
"type": "string",
+
"minLength": 1,
+
"maxLength": 50
+
},
+
"maxLength": 50
+
},
"source": {
"type": "string",
"format": "uri",
+1 -9
lexicons/repo/tree.json
···
},
"treeEntry": {
"type": "object",
-
"required": ["name", "mode", "size", "is_file", "is_subtree"],
+
"required": ["name", "mode", "size"],
"properties": {
"name": {
"type": "string",
···
"size": {
"type": "integer",
"description": "File size in bytes"
-
},
-
"is_file": {
-
"type": "boolean",
-
"description": "Whether this entry is a file"
-
},
-
"is_subtree": {
-
"type": "boolean",
-
"description": "Whether this entry is a directory/subtree"
},
"last_commit": {
"type": "ref",
+83 -32
nix/gomod2nix.toml
···
[mod."github.com/ProtonMail/go-crypto"]
version = "v1.3.0"
hash = "sha256-TUG+C4MyeWglOmiwiW2/NUVurFHXLgEPRd3X9uQ1NGI="
+
[mod."github.com/RoaringBitmap/roaring/v2"]
+
version = "v2.4.5"
+
hash = "sha256-igWY0S1PTolQkfctYcmVJioJyV1pk2V81X6o6BA1XQA="
[mod."github.com/alecthomas/assert/v2"]
version = "v2.11.0"
hash = "sha256-tDJCDKZ0R4qNA7hgMKWrpDyogt1802LCJDBCExxdqaU="
···
[mod."github.com/beorn7/perks"]
version = "v1.0.1"
hash = "sha256-h75GUqfwJKngCJQVE5Ao5wnO3cfKD9lSIteoLp/3xJ4="
+
[mod."github.com/bits-and-blooms/bitset"]
+
version = "v1.22.0"
+
hash = "sha256-lY1K29h4vlAmJVvwKgbTG8BTACYGjFaginCszN+ST6w="
+
[mod."github.com/blevesearch/bleve/v2"]
+
version = "v2.5.3"
+
hash = "sha256-DkpX43WMpB8+9KCibdNjyf6N/1a51xJTfGF97xdoCAQ="
+
[mod."github.com/blevesearch/bleve_index_api"]
+
version = "v1.2.8"
+
hash = "sha256-LyGDBRvK2GThgUFLZoAbDOOKP1M9Z8oy0E2M6bHZdrk="
+
[mod."github.com/blevesearch/geo"]
+
version = "v0.2.4"
+
hash = "sha256-W1OV/pvqzJC28VJomGnIU/HeBZ689+p54vWdZ1z/bxc="
+
[mod."github.com/blevesearch/go-faiss"]
+
version = "v1.0.25"
+
hash = "sha256-bcm976UX22aNIuSjBxFaYMKTltO9lbqyeG4Z3KVG3/Y="
+
[mod."github.com/blevesearch/go-porterstemmer"]
+
version = "v1.0.3"
+
hash = "sha256-hUjo6g1ehUD1awBmta0ji/xoooD2qG7O22HIeSQiRFo="
+
[mod."github.com/blevesearch/gtreap"]
+
version = "v0.1.1"
+
hash = "sha256-B4p/5RnECRfV4yOiSQDLMHb23uI7lsQDePhNK+zjbF4="
+
[mod."github.com/blevesearch/mmap-go"]
+
version = "v1.0.4"
+
hash = "sha256-8y0nMAE9goKjYhR/FFEvtbP7cvM46xneE461L1Jn2Pg="
+
[mod."github.com/blevesearch/scorch_segment_api/v2"]
+
version = "v2.3.10"
+
hash = "sha256-BcBRjVOrsYySdsdgEjS3qHFm/c58KUNJepRPUO0lFmY="
+
[mod."github.com/blevesearch/segment"]
+
version = "v0.9.1"
+
hash = "sha256-0EAT737kNxl8IJFGl2SD9mOzxolONGgpfaYEGr7JXkQ="
+
[mod."github.com/blevesearch/snowballstem"]
+
version = "v0.9.0"
+
hash = "sha256-NQsXrhXcYXn4jQcvwjwLc96SGMRcqVlrR6hYKWGk7/s="
+
[mod."github.com/blevesearch/upsidedown_store_api"]
+
version = "v1.0.2"
+
hash = "sha256-P69Mnh6YR5RI73bD6L7BYDxkVmaqPMNUrjbfSJoKWuo="
+
[mod."github.com/blevesearch/vellum"]
+
version = "v1.1.0"
+
hash = "sha256-GJ1wslEJEZhPbMiANw0W4Dgb1ZouiILbWEaIUfxZTkw="
+
[mod."github.com/blevesearch/zapx/v11"]
+
version = "v11.4.2"
+
hash = "sha256-YzRcc2GwV4VL2Bc+tXOOUL6xNi8LWS76DXEcTkFPTaQ="
+
[mod."github.com/blevesearch/zapx/v12"]
+
version = "v12.4.2"
+
hash = "sha256-yqyzkMWpyXZSF9KLjtiuOmnRUfhaZImk27mU8lsMyJY="
+
[mod."github.com/blevesearch/zapx/v13"]
+
version = "v13.4.2"
+
hash = "sha256-VSS2fI7YUkeGMBH89TB9yW5qG8MWjM6zKbl8DboHsB4="
+
[mod."github.com/blevesearch/zapx/v14"]
+
version = "v14.4.2"
+
hash = "sha256-mAWr+vK0uZWMUaJfGfchzQo4dzMdBbD3Z7F84Jn/ktg="
+
[mod."github.com/blevesearch/zapx/v15"]
+
version = "v15.4.2"
+
hash = "sha256-R8Eh3N4e8CDXiW47J8ZBnfMY1TTnX1SJPwQc4gYChi8="
+
[mod."github.com/blevesearch/zapx/v16"]
+
version = "v16.2.4"
+
hash = "sha256-Jo5k7DflV/ghszOWJTCOGVyyLMvlvSYyxRrmSIFjyEE="
[mod."github.com/bluekeyes/go-gitdiff"]
version = "v0.8.2"
hash = "sha256-GWm5i1ukuBukV0GMF1rffpbOSSXZdfg6/0pABMiGzLQ="
···
version = "v0.0.0-20241210005130-ea96859b93d1"
hash = "sha256-AiapbrkjXboIKc5QNiWH0KyNs0zKnn6UlGwWFlkUfm0="
[mod."github.com/bmatcuk/doublestar/v4"]
-
version = "v4.7.1"
-
hash = "sha256-idO38nWZtmxvE0CgdziepwFM+Xc70Isr6NiKEZjHOjA="
+
version = "v4.9.1"
+
hash = "sha256-0iyHjyTAsfhgYSsE+NKxSNGBuM3Id615VWeQhssTShE="
[mod."github.com/carlmjohnson/versioninfo"]
version = "v0.22.5"
hash = "sha256-tf7yKVFTUPmGKBLK43bjyIRQUboCYduh3I5HXE5+LPw="
···
[mod."github.com/davecgh/go-spew"]
version = "v1.1.2-0.20180830191138-d8f796af33cc"
hash = "sha256-fV9oI51xjHdOmEx6+dlq7Ku2Ag+m/bmbzPo6A4Y74qc="
-
[mod."github.com/decred/dcrd/dcrec/secp256k1/v4"]
-
version = "v4.4.0"
-
hash = "sha256-qrhEIwhDll3cxoVpMbm1NQ9/HTI42S7ms8Buzlo5HCg="
[mod."github.com/dgraph-io/ristretto"]
version = "v0.2.0"
hash = "sha256-bnpxX+oO/Qf7IJevA0gsbloVoqRx+5bh7RQ9d9eLNYw="
···
[mod."github.com/golang/mock"]
version = "v1.6.0"
hash = "sha256-fWdnMQisRbiRzGT3ISrUHovquzLRHWvcv1JEsJFZRno="
+
[mod."github.com/golang/protobuf"]
+
version = "v1.5.4"
+
hash = "sha256-N3+Lv9lEZjrdOWdQhFj6Y3Iap4rVLEQeI8/eFFyAMZ0="
+
[mod."github.com/golang/snappy"]
+
version = "v0.0.4"
+
hash = "sha256-Umx+5xHAQCN/Gi4HbtMhnDCSPFAXSsjVbXd8n5LhjAA="
[mod."github.com/google/go-querystring"]
version = "v1.1.0"
hash = "sha256-itsKgKghuX26czU79cK6C2n+lc27jm5Dw1XbIRgwZJY="
···
[mod."github.com/ipfs/go-metrics-interface"]
version = "v0.3.0"
hash = "sha256-b3tp3jxecLmJEGx2kW7MiKGlAKPEWg/LJ7hXylSC8jQ="
+
[mod."github.com/json-iterator/go"]
+
version = "v1.1.12"
+
hash = "sha256-To8A0h+lbfZ/6zM+2PpRpY3+L6725OPC66lffq6fUoM="
[mod."github.com/kevinburke/ssh_config"]
version = "v1.2.0"
hash = "sha256-Ta7ZOmyX8gG5tzWbY2oES70EJPfI90U7CIJS9EAce0s="
···
[mod."github.com/klauspost/cpuid/v2"]
version = "v2.3.0"
hash = "sha256-50JhbQyT67BK38HIdJihPtjV7orYp96HknI2VP7A9Yc="
-
[mod."github.com/lestrrat-go/blackmagic"]
-
version = "v1.0.4"
-
hash = "sha256-HmWOpwoPDNMwLdOi7onNn3Sb+ZsAa3Ai3gVBbXmQ0e8="
-
[mod."github.com/lestrrat-go/httpcc"]
-
version = "v1.0.1"
-
hash = "sha256-SMRSwJpqDIs/xL0l2e8vP0W65qtCHX2wigcOeqPJmos="
-
[mod."github.com/lestrrat-go/httprc"]
-
version = "v1.0.6"
-
hash = "sha256-mfZzePEhrmyyu/avEBd2MsDXyto8dq5+fyu5lA8GUWM="
-
[mod."github.com/lestrrat-go/iter"]
-
version = "v1.0.2"
-
hash = "sha256-30tErRf7Qu/NOAt1YURXY/XJSA6sCr6hYQfO8QqHrtw="
-
[mod."github.com/lestrrat-go/jwx/v2"]
-
version = "v2.1.6"
-
hash = "sha256-0LszXRZIba+X8AOrs3T4uanAUafBdlVB8/MpUNEFpbc="
-
[mod."github.com/lestrrat-go/option"]
-
version = "v1.0.1"
-
hash = "sha256-jVcIYYVsxElIS/l2akEw32vdEPR8+anR6oeT1FoYULI="
[mod."github.com/lucasb-eyer/go-colorful"]
version = "v1.2.0"
hash = "sha256-Gg9dDJFCTaHrKHRR1SrJgZ8fWieJkybljybkI9x0gyE="
···
[mod."github.com/moby/term"]
version = "v0.5.2"
hash = "sha256-/G20jUZKx36ktmPU/nEw/gX7kRTl1Dbu7zvNBYNt4xU="
+
[mod."github.com/modern-go/concurrent"]
+
version = "v0.0.0-20180306012644-bacd9c7ef1dd"
+
hash = "sha256-OTySieAgPWR4oJnlohaFTeK1tRaVp/b0d1rYY8xKMzo="
+
[mod."github.com/modern-go/reflect2"]
+
version = "v1.0.2"
+
hash = "sha256-+W9EIW7okXIXjWEgOaMh58eLvBZ7OshW2EhaIpNLSBU="
[mod."github.com/morikuni/aec"]
version = "v1.0.0"
hash = "sha256-5zYgLeGr3K+uhGKlN3xv0PO67V+2Zw+cezjzNCmAWOE="
[mod."github.com/mr-tron/base58"]
version = "v1.2.0"
hash = "sha256-8FzMu3kHUbBX10pUdtGf59Ag7BNupx8ZHeUaodR1/Vk="
+
[mod."github.com/mschoch/smat"]
+
version = "v0.2.0"
+
hash = "sha256-DZvUJXjIcta3U+zxzgU3wpoGn/V4lpBY7Xme8aQUi+E="
[mod."github.com/muesli/termenv"]
version = "v0.16.0"
hash = "sha256-hGo275DJlyLtcifSLpWnk8jardOksdeX9lH4lBeE3gI="
···
[mod."github.com/ryanuber/go-glob"]
version = "v1.0.0"
hash = "sha256-YkMl1utwUhi3E0sHK23ISpAsPyj4+KeXyXKoFYGXGVY="
-
[mod."github.com/segmentio/asm"]
-
version = "v1.2.0"
-
hash = "sha256-zbNuKxNrUDUc6IlmRQNuJQzVe5Ol/mqp7srDg9IMMqs="
[mod."github.com/sergi/go-diff"]
version = "v1.1.0"
hash = "sha256-8NJMabldpf40uwQN20T6QXx5KORDibCBJL02KD661xY="
···
[mod."github.com/whyrusleeping/cbor-gen"]
version = "v0.3.1"
hash = "sha256-PAd8M2Z8t6rVRBII+Rg8Bz+QaJIwbW64bfyqsv31kgc="
-
[mod."github.com/wyatt915/goldmark-treeblood"]
-
version = "v0.0.0-20250825231212-5dcbdb2f4b57"
-
hash = "sha256-IZEsUXTBTsNgWoD7vqRUc9aFCCHNjzk1IUmI9O+NCnM="
-
[mod."github.com/wyatt915/treeblood"]
-
version = "v0.1.15"
-
hash = "sha256-hb99exdkoY2Qv8WdDxhwgPXGbEYimUr6wFtPXEvcO9g="
[mod."github.com/xo/terminfo"]
version = "v0.0.0-20220910002029-abceb7e1c41e"
hash = "sha256-GyCDxxMQhXA3Pi/TsWXpA8cX5akEoZV7CFx4RO3rARU="
···
[mod."gitlab.com/yawning/tuplehash"]
version = "v0.0.0-20230713102510-df83abbf9a02"
hash = "sha256-pehQduoaJRLchebhgvMYacVvbuNIBA++XkiqCuqdato="
+
[mod."go.etcd.io/bbolt"]
+
version = "v1.4.0"
+
hash = "sha256-nR/YGQjwz6ue99IFbgw/01Pl8PhoOjpKiwVy5sJxlps="
[mod."go.opentelemetry.io/auto/sdk"]
version = "v1.1.0"
hash = "sha256-cA9qCCu8P1NSJRxgmpfkfa5rKyn9X+Y/9FSmSd5xjyo="
+285 -18
nix/modules/appview.nix
···
lib,
...
}: let
-
cfg = config.services.tangled-appview;
+
cfg = config.services.tangled.appview;
in
with lib; {
options = {
-
services.tangled-appview = {
+
services.tangled.appview = {
enable = mkOption {
type = types.bool;
default = false;
description = "Enable tangled appview";
};
+
package = mkOption {
type = types.package;
description = "Package to use for the appview";
};
+
+
# core configuration
port = mkOption {
-
type = types.int;
+
type = types.port;
default = 3000;
description = "Port to run the appview on";
};
-
cookie_secret = mkOption {
+
+
listenAddr = mkOption {
+
type = types.str;
+
default = "0.0.0.0:${toString cfg.port}";
+
description = "Listen address for the appview service";
+
};
+
+
dbPath = mkOption {
type = types.str;
-
default = "00000000000000000000000000000000";
-
description = "Cookie secret";
+
default = "/var/lib/appview/appview.db";
+
description = "Path to the SQLite database file";
+
};
+
+
appviewHost = mkOption {
+
type = types.str;
+
default = "https://tangled.org";
+
example = "https://example.com";
+
description = "Public host URL for the appview instance";
+
};
+
+
appviewName = mkOption {
+
type = types.str;
+
default = "Tangled";
+
description = "Display name for the appview instance";
+
};
+
+
dev = mkOption {
+
type = types.bool;
+
default = false;
+
description = "Enable development mode";
+
};
+
+
disallowedNicknamesFile = mkOption {
+
type = types.nullOr types.path;
+
default = null;
+
description = "Path to file containing disallowed nicknames";
+
};
+
+
# redis configuration
+
redis = {
+
addr = mkOption {
+
type = types.str;
+
default = "localhost:6379";
+
description = "Redis server address";
+
};
+
+
db = mkOption {
+
type = types.int;
+
default = 0;
+
description = "Redis database number";
+
};
+
};
+
+
# jetstream configuration
+
jetstream = {
+
endpoint = mkOption {
+
type = types.str;
+
default = "wss://jetstream1.us-east.bsky.network/subscribe";
+
description = "Jetstream WebSocket endpoint";
+
};
+
};
+
+
# knotstream consumer configuration
+
knotstream = {
+
retryInterval = mkOption {
+
type = types.str;
+
default = "60s";
+
description = "Initial retry interval for knotstream consumer";
+
};
+
+
maxRetryInterval = mkOption {
+
type = types.str;
+
default = "120m";
+
description = "Maximum retry interval for knotstream consumer";
+
};
+
+
connectionTimeout = mkOption {
+
type = types.str;
+
default = "5s";
+
description = "Connection timeout for knotstream consumer";
+
};
+
+
workerCount = mkOption {
+
type = types.int;
+
default = 64;
+
description = "Number of workers for knotstream consumer";
+
};
+
+
queueSize = mkOption {
+
type = types.int;
+
default = 100;
+
description = "Queue size for knotstream consumer";
+
};
+
};
+
+
# spindlestream consumer configuration
+
spindlestream = {
+
retryInterval = mkOption {
+
type = types.str;
+
default = "60s";
+
description = "Initial retry interval for spindlestream consumer";
+
};
+
+
maxRetryInterval = mkOption {
+
type = types.str;
+
default = "120m";
+
description = "Maximum retry interval for spindlestream consumer";
+
};
+
+
connectionTimeout = mkOption {
+
type = types.str;
+
default = "5s";
+
description = "Connection timeout for spindlestream consumer";
+
};
+
+
workerCount = mkOption {
+
type = types.int;
+
default = 64;
+
description = "Number of workers for spindlestream consumer";
+
};
+
+
queueSize = mkOption {
+
type = types.int;
+
default = 100;
+
description = "Queue size for spindlestream consumer";
+
};
+
};
+
+
# resend configuration
+
resend = {
+
sentFrom = mkOption {
+
type = types.str;
+
default = "noreply@notifs.tangled.sh";
+
description = "Email address to send notifications from";
+
};
+
};
+
+
# posthog configuration
+
posthog = {
+
endpoint = mkOption {
+
type = types.str;
+
default = "https://eu.i.posthog.com";
+
description = "PostHog API endpoint";
+
};
+
};
+
+
# camo configuration
+
camo = {
+
host = mkOption {
+
type = types.str;
+
default = "https://camo.tangled.sh";
+
description = "Camo proxy host URL";
+
};
};
+
+
# avatar configuration
+
avatar = {
+
host = mkOption {
+
type = types.str;
+
default = "https://avatar.tangled.sh";
+
description = "Avatar service host URL";
+
};
+
};
+
+
plc = {
+
url = mkOption {
+
type = types.str;
+
default = "https://plc.directory";
+
description = "PLC directory URL";
+
};
+
};
+
+
pds = {
+
host = mkOption {
+
type = types.str;
+
default = "https://tngl.sh";
+
description = "PDS host URL";
+
};
+
};
+
+
label = {
+
defaults = mkOption {
+
type = types.listOf types.str;
+
default = [
+
"at://did:plc:wshs7t2adsemcrrd4snkeqli/sh.tangled.label.definition/wontfix"
+
"at://did:plc:wshs7t2adsemcrrd4snkeqli/sh.tangled.label.definition/good-first-issue"
+
"at://did:plc:wshs7t2adsemcrrd4snkeqli/sh.tangled.label.definition/duplicate"
+
"at://did:plc:wshs7t2adsemcrrd4snkeqli/sh.tangled.label.definition/documentation"
+
"at://did:plc:wshs7t2adsemcrrd4snkeqli/sh.tangled.label.definition/assignee"
+
];
+
description = "Default label definitions";
+
};
+
+
goodFirstIssue = mkOption {
+
type = types.str;
+
default = "at://did:plc:wshs7t2adsemcrrd4snkeqli/sh.tangled.label.definition/good-first-issue";
+
description = "Good first issue label definition";
+
};
+
};
+
environmentFile = mkOption {
type = with types; nullOr path;
default = null;
-
example = "/etc/tangled-appview.env";
+
example = "/etc/appview.env";
description = ''
Additional environment file as defined in {manpage}`systemd.exec(5)`.
-
Sensitive secrets such as {env}`TANGLED_COOKIE_SECRET` may be
-
passed to the service without makeing them world readable in the
-
nix store.
-
+
Sensitive secrets such as {env}`TANGLED_COOKIE_SECRET`,
+
{env}`TANGLED_OAUTH_CLIENT_SECRET`, {env}`TANGLED_RESEND_API_KEY`,
+
{env}`TANGLED_CAMO_SHARED_SECRET`, {env}`TANGLED_AVATAR_SHARED_SECRET`,
+
{env}`TANGLED_REDIS_PASS`, {env}`TANGLED_PDS_ADMIN_SECRET`,
+
{env}`TANGLED_CLOUDFLARE_API_TOKEN`, {env}`TANGLED_CLOUDFLARE_ZONE_ID`,
+
{env}`TANGLED_CLOUDFLARE_TURNSTILE_SITE_KEY`,
+
{env}`TANGLED_CLOUDFLARE_TURNSTILE_SECRET_KEY`,
+
{env}`TANGLED_POSTHOG_API_KEY`, {env}`TANGLED_APP_PASSWORD`,
+
and {env}`TANGLED_ALT_APP_PASSWORD` may be passed to the service
+
without making them world readable in the nix store.
'';
};
};
};
config = mkIf cfg.enable {
-
systemd.services.tangled-appview = {
+
services.redis.servers.appview = {
+
enable = true;
+
port = 6379;
+
};
+
+
systemd.services.appview = {
description = "tangled appview service";
wantedBy = ["multi-user.target"];
+
after = ["redis-appview.service" "network-online.target"];
+
requires = ["redis-appview.service"];
+
wants = ["network-online.target"];
serviceConfig = {
-
ListenStream = "0.0.0.0:${toString cfg.port}";
+
Type = "simple";
ExecStart = "${cfg.package}/bin/appview";
Restart = "always";
-
EnvironmentFile = optional (cfg.environmentFile != null) cfg.environmentFile;
+
RestartSec = "10s";
+
EnvironmentFile = mkIf (cfg.environmentFile != null) cfg.environmentFile;
+
+
# state directory
+
StateDirectory = "appview";
+
WorkingDirectory = "/var/lib/appview";
+
+
# security hardening
+
NoNewPrivileges = true;
+
PrivateTmp = true;
+
ProtectSystem = "strict";
+
ProtectHome = true;
+
ReadWritePaths = ["/var/lib/appview"];
};
-
environment = {
-
TANGLED_DB_PATH = "appview.db";
-
TANGLED_COOKIE_SECRET = cfg.cookie_secret;
-
};
+
environment =
+
{
+
TANGLED_DB_PATH = cfg.dbPath;
+
TANGLED_LISTEN_ADDR = cfg.listenAddr;
+
TANGLED_APPVIEW_HOST = cfg.appviewHost;
+
TANGLED_APPVIEW_NAME = cfg.appviewName;
+
TANGLED_DEV =
+
if cfg.dev
+
then "true"
+
else "false";
+
}
+
// optionalAttrs (cfg.disallowedNicknamesFile != null) {
+
TANGLED_DISALLOWED_NICKNAMES_FILE = cfg.disallowedNicknamesFile;
+
}
+
// {
+
TANGLED_REDIS_ADDR = cfg.redis.addr;
+
TANGLED_REDIS_DB = toString cfg.redis.db;
+
+
TANGLED_JETSTREAM_ENDPOINT = cfg.jetstream.endpoint;
+
+
TANGLED_KNOTSTREAM_RETRY_INTERVAL = cfg.knotstream.retryInterval;
+
TANGLED_KNOTSTREAM_MAX_RETRY_INTERVAL = cfg.knotstream.maxRetryInterval;
+
TANGLED_KNOTSTREAM_CONNECTION_TIMEOUT = cfg.knotstream.connectionTimeout;
+
TANGLED_KNOTSTREAM_WORKER_COUNT = toString cfg.knotstream.workerCount;
+
TANGLED_KNOTSTREAM_QUEUE_SIZE = toString cfg.knotstream.queueSize;
+
+
TANGLED_SPINDLESTREAM_RETRY_INTERVAL = cfg.spindlestream.retryInterval;
+
TANGLED_SPINDLESTREAM_MAX_RETRY_INTERVAL = cfg.spindlestream.maxRetryInterval;
+
TANGLED_SPINDLESTREAM_CONNECTION_TIMEOUT = cfg.spindlestream.connectionTimeout;
+
TANGLED_SPINDLESTREAM_WORKER_COUNT = toString cfg.spindlestream.workerCount;
+
TANGLED_SPINDLESTREAM_QUEUE_SIZE = toString cfg.spindlestream.queueSize;
+
+
TANGLED_RESEND_SENT_FROM = cfg.resend.sentFrom;
+
+
TANGLED_POSTHOG_ENDPOINT = cfg.posthog.endpoint;
+
+
TANGLED_CAMO_HOST = cfg.camo.host;
+
+
TANGLED_AVATAR_HOST = cfg.avatar.host;
+
+
TANGLED_PLC_URL = cfg.plc.url;
+
+
TANGLED_PDS_HOST = cfg.pds.host;
+
+
TANGLED_LABEL_DEFAULTS = concatStringsSep "," cfg.label.defaults;
+
TANGLED_LABEL_GFI = cfg.label.goodFirstIssue;
+
};
};
};
}
+76 -4
nix/modules/knot.nix
···
lib,
...
}: let
-
cfg = config.services.tangled-knot;
+
cfg = config.services.tangled.knot;
in
with lib; {
options = {
-
services.tangled-knot = {
+
services.tangled.knot = {
enable = mkOption {
type = types.bool;
default = false;
···
description = "Path where repositories are scanned from";
};
+
readme = mkOption {
+
type = types.listOf types.str;
+
default = [
+
"README.md"
+
"readme.md"
+
"README"
+
"readme"
+
"README.markdown"
+
"readme.markdown"
+
"README.txt"
+
"readme.txt"
+
"README.rst"
+
"readme.rst"
+
"README.org"
+
"readme.org"
+
"README.asciidoc"
+
"readme.asciidoc"
+
];
+
description = "List of README filenames to look for (in priority order)";
+
};
+
mainBranch = mkOption {
type = types.str;
default = "main";
description = "Default branch name for repositories";
+
};
+
};
+
+
git = {
+
userName = mkOption {
+
type = types.str;
+
default = "Tangled";
+
description = "Git user name used as committer";
+
};
+
+
userEmail = mkOption {
+
type = types.str;
+
default = "noreply@tangled.org";
+
description = "Git user email used as committer";
};
};
···
description = "Hostname for the server (required)";
};
+
plcUrl = mkOption {
+
type = types.str;
+
default = "https://plc.directory";
+
description = "atproto PLC directory";
+
};
+
+
jetstreamEndpoint = mkOption {
+
type = types.str;
+
default = "wss://jetstream1.us-west.bsky.network/subscribe";
+
description = "Jetstream endpoint to subscribe to";
+
};
+
+
logDids = mkOption {
+
type = types.bool;
+
default = true;
+
description = "Enable logging of DIDs";
+
};
+
dev = mkOption {
type = types.bool;
default = false;
···
Match User ${cfg.gitUser}
AuthorizedKeysCommand /etc/ssh/keyfetch_wrapper
AuthorizedKeysCommandUser nobody
+
ChallengeResponseAuthentication no
+
PasswordAuthentication no
'';
};
···
mkdir -p "${cfg.stateDir}/.config/git"
cat > "${cfg.stateDir}/.config/git/config" << EOF
[user]
-
name = Git User
-
email = git@example.com
+
name = ${cfg.git.userName}
+
email = ${cfg.git.userEmail}
[receive]
advertisePushOptions = true
+
[uploadpack]
+
allowFilter = true
EOF
${setMotd}
chown -R ${cfg.gitUser}:${cfg.gitUser} "${cfg.stateDir}"
···
WorkingDirectory = cfg.stateDir;
Environment = [
"KNOT_REPO_SCAN_PATH=${cfg.repo.scanPath}"
+
"KNOT_REPO_README=${concatStringsSep "," cfg.repo.readme}"
"KNOT_REPO_MAIN_BRANCH=${cfg.repo.mainBranch}"
+
"KNOT_GIT_USER_NAME=${cfg.git.userName}"
+
"KNOT_GIT_USER_EMAIL=${cfg.git.userEmail}"
"APPVIEW_ENDPOINT=${cfg.appviewEndpoint}"
"KNOT_SERVER_INTERNAL_LISTEN_ADDR=${cfg.server.internalListenAddr}"
"KNOT_SERVER_LISTEN_ADDR=${cfg.server.listenAddr}"
"KNOT_SERVER_DB_PATH=${cfg.server.dbPath}"
"KNOT_SERVER_HOSTNAME=${cfg.server.hostname}"
+
"KNOT_SERVER_PLC_URL=${cfg.server.plcUrl}"
+
"KNOT_SERVER_JETSTREAM_ENDPOINT=${cfg.server.jetstreamEndpoint}"
"KNOT_SERVER_OWNER=${cfg.server.owner}"
+
"KNOT_SERVER_LOG_DIDS=${
+
if cfg.server.logDids
+
then "true"
+
else "false"
+
}"
+
"KNOT_SERVER_DEV=${
+
if cfg.server.dev
+
then "true"
+
else "false"
+
}"
];
ExecStart = "${cfg.package}/bin/knot server";
Restart = "always";
+10 -3
nix/modules/spindle.nix
···
lib,
...
}: let
-
cfg = config.services.tangled-spindle;
+
cfg = config.services.tangled.spindle;
in
with lib; {
options = {
-
services.tangled-spindle = {
+
services.tangled.spindle = {
enable = mkOption {
type = types.bool;
default = false;
···
type = types.str;
example = "my.spindle.com";
description = "Hostname for the server (required)";
+
};
+
+
plcUrl = mkOption {
+
type = types.str;
+
default = "https://plc.directory";
+
description = "atproto PLC directory";
};
jetstreamEndpoint = mkOption {
···
"SPINDLE_SERVER_LISTEN_ADDR=${cfg.server.listenAddr}"
"SPINDLE_SERVER_DB_PATH=${cfg.server.dbPath}"
"SPINDLE_SERVER_HOSTNAME=${cfg.server.hostname}"
-
"SPINDLE_SERVER_JETSTREAM=${cfg.server.jetstreamEndpoint}"
+
"SPINDLE_SERVER_PLC_URL=${cfg.server.plcUrl}"
+
"SPINDLE_SERVER_JETSTREAM_ENDPOINT=${cfg.server.jetstreamEndpoint}"
"SPINDLE_SERVER_DEV=${lib.boolToString cfg.server.dev}"
"SPINDLE_SERVER_OWNER=${cfg.server.owner}"
"SPINDLE_SERVER_MAX_JOB_COUNT=${toString cfg.server.maxJobCount}"
+2
nix/pkgs/appview-static-files.nix
···
lucide-src,
inter-fonts-src,
ibm-plex-mono-src,
+
actor-typeahead-src,
sqlite-lib,
tailwindcss,
src,
···
cp -f ${inter-fonts-src}/web/InterDisplay*.woff2 fonts/
cp -f ${inter-fonts-src}/InterVariable*.ttf fonts/
cp -f ${ibm-plex-mono-src}/fonts/complete/woff2/IBMPlexMono*.woff2 fonts/
+
cp -f ${actor-typeahead-src}/actor-typeahead.js .
# tailwindcss -c $src/tailwind.config.js -i $src/input.css -o tw.css won't work
# for whatever reason (produces broken css), so we are doing this instead
cd ${src} && ${tailwindcss}/bin/tailwindcss -i input.css -o $out/tw.css
-18
nix/pkgs/genjwks.nix
···
-
{
-
buildGoApplication,
-
modules,
-
}:
-
buildGoApplication {
-
pname = "genjwks";
-
version = "0.1.0";
-
src = ../../cmd/genjwks;
-
postPatch = ''
-
ln -s ${../../go.mod} ./go.mod
-
'';
-
postInstall = ''
-
mv $out/bin/core $out/bin/genjwks
-
'';
-
inherit modules;
-
doCheck = false;
-
CGO_ENABLED = 0;
-
}
+12
nix/pkgs/goat.nix
···
+
{
+
buildGoModule,
+
indigo,
+
}:
+
buildGoModule {
+
pname = "goat";
+
version = "0.1.0";
+
src = indigo;
+
subPackages = ["cmd/goat"];
+
vendorHash = "sha256-VbDrcN4r5b7utRFQzVsKgDsVgdQLSXl7oZ5kdPA/huw=";
+
doCheck = false;
+
}
+1 -1
nix/pkgs/knot-unwrapped.nix
···
sqlite-lib,
src,
}: let
-
version = "1.9.1-alpha";
+
version = "1.11.0-alpha";
in
buildGoApplication {
pname = "knot";
+7 -5
nix/pkgs/sqlite-lib.nix
···
{
-
gcc,
stdenv,
sqlite-lib-src,
}:
stdenv.mkDerivation {
name = "sqlite-lib";
src = sqlite-lib-src;
-
nativeBuildInputs = [gcc];
+
buildPhase = ''
-
gcc -c sqlite3.c
-
ar rcs libsqlite3.a sqlite3.o
-
ranlib libsqlite3.a
+
$CC -c sqlite3.c
+
$AR rcs libsqlite3.a sqlite3.o
+
$RANLIB libsqlite3.a
+
'';
+
+
installPhase = ''
mkdir -p $out/include $out/lib
cp *.h $out/include
cp libsqlite3.a $out/lib
+24 -11
nix/vm.nix
···
if var == ""
then throw "\$${name} must be defined, see docs/hacking.md for more details"
else var;
+
envVarOr = name: default: let
+
var = builtins.getEnv name;
+
in
+
if var != ""
+
then var
+
else default;
+
+
plcUrl = envVarOr "TANGLED_VM_PLC_URL" "https://plc.directory";
+
jetstream = envVarOr "TANGLED_VM_JETSTREAM_ENDPOINT" "wss://jetstream1.us-west.bsky.network/subscribe";
in
nixpkgs.lib.nixosSystem {
inherit system;
···
# knot
{
from = "host";
-
host.port = 6000;
-
guest.port = 6000;
+
host.port = 6444;
+
guest.port = 6444;
}
# spindle
{
···
time.timeZone = "Europe/London";
services.getty.autologinUser = "root";
environment.systemPackages = with pkgs; [curl vim git sqlite litecli];
-
services.tangled-knot = {
+
services.tangled.knot = {
enable = true;
motd = "Welcome to the development knot!\n";
server = {
owner = envVar "TANGLED_VM_KNOT_OWNER";
-
hostname = "localhost:6000";
-
listenAddr = "0.0.0.0:6000";
+
hostname = envVarOr "TANGLED_VM_KNOT_HOST" "localhost:6444";
+
plcUrl = plcUrl;
+
jetstreamEndpoint = jetstream;
+
listenAddr = "0.0.0.0:6444";
};
};
-
services.tangled-spindle = {
+
services.tangled.spindle = {
enable = true;
server = {
owner = envVar "TANGLED_VM_SPINDLE_OWNER";
-
hostname = "localhost:6555";
+
hostname = envVarOr "TANGLED_VM_SPINDLE_HOST" "localhost:6555";
+
plcUrl = plcUrl;
+
jetstreamEndpoint = jetstream;
listenAddr = "0.0.0.0:6555";
dev = true;
queueSize = 100;
···
users = {
# So we don't have to deal with permission clashing between
# blank disk VMs and existing state
-
users.${config.services.tangled-knot.gitUser}.uid = 666;
-
groups.${config.services.tangled-knot.gitUser}.gid = 666;
+
users.${config.services.tangled.knot.gitUser}.uid = 666;
+
groups.${config.services.tangled.knot.gitUser}.gid = 666;
# TODO: separate spindle user
};
···
serviceConfig.PermissionsStartOnly = true;
};
in {
-
knot = mkDataSyncScripts "/mnt/knot-data" config.services.tangled-knot.stateDir;
-
spindle = mkDataSyncScripts "/mnt/spindle-data" (builtins.dirOf config.services.tangled-spindle.server.dbPath);
+
knot = mkDataSyncScripts "/mnt/knot-data" config.services.tangled.knot.stateDir;
+
spindle = mkDataSyncScripts "/mnt/spindle-data" (builtins.dirOf config.services.tangled.spindle.server.dbPath);
};
})
];
+122
orm/orm.go
···
+
package orm
+
+
import (
+
"context"
+
"database/sql"
+
"fmt"
+
"log/slog"
+
"reflect"
+
"strings"
+
)
+
+
type migrationFn = func(*sql.Tx) error
+
+
func RunMigration(c *sql.Conn, logger *slog.Logger, name string, migrationFn migrationFn) error {
+
logger = logger.With("migration", name)
+
+
tx, err := c.BeginTx(context.Background(), nil)
+
if err != nil {
+
return err
+
}
+
defer tx.Rollback()
+
+
var exists bool
+
err = tx.QueryRow("select exists (select 1 from migrations where name = ?)", name).Scan(&exists)
+
if err != nil {
+
return err
+
}
+
+
if !exists {
+
// run migration
+
err = migrationFn(tx)
+
if err != nil {
+
logger.Error("failed to run migration", "err", err)
+
return err
+
}
+
+
// mark migration as complete
+
_, err = tx.Exec("insert into migrations (name) values (?)", name)
+
if err != nil {
+
logger.Error("failed to mark migration as complete", "err", err)
+
return err
+
}
+
+
// commit the transaction
+
if err := tx.Commit(); err != nil {
+
return err
+
}
+
+
logger.Info("migration applied successfully")
+
} else {
+
logger.Warn("skipped migration, already applied")
+
}
+
+
return nil
+
}
+
+
type Filter struct {
+
Key string
+
arg any
+
Cmp string
+
}
+
+
func newFilter(key, cmp string, arg any) Filter {
+
return Filter{
+
Key: key,
+
arg: arg,
+
Cmp: cmp,
+
}
+
}
+
+
func FilterEq(key string, arg any) Filter { return newFilter(key, "=", arg) }
+
func FilterNotEq(key string, arg any) Filter { return newFilter(key, "<>", arg) }
+
func FilterGte(key string, arg any) Filter { return newFilter(key, ">=", arg) }
+
func FilterLte(key string, arg any) Filter { return newFilter(key, "<=", arg) }
+
func FilterIs(key string, arg any) Filter { return newFilter(key, "is", arg) }
+
func FilterIsNot(key string, arg any) Filter { return newFilter(key, "is not", arg) }
+
func FilterIn(key string, arg any) Filter { return newFilter(key, "in", arg) }
+
func FilterLike(key string, arg any) Filter { return newFilter(key, "like", arg) }
+
func FilterNotLike(key string, arg any) Filter { return newFilter(key, "not like", arg) }
+
func FilterContains(key string, arg any) Filter {
+
return newFilter(key, "like", fmt.Sprintf("%%%v%%", arg))
+
}
+
+
func (f Filter) Condition() string {
+
rv := reflect.ValueOf(f.arg)
+
kind := rv.Kind()
+
+
// if we have `FilterIn(k, [1, 2, 3])`, compile it down to `k in (?, ?, ?)`
+
if (kind == reflect.Slice && rv.Type().Elem().Kind() != reflect.Uint8) || kind == reflect.Array {
+
if rv.Len() == 0 {
+
// always false
+
return "1 = 0"
+
}
+
+
placeholders := make([]string, rv.Len())
+
for i := range placeholders {
+
placeholders[i] = "?"
+
}
+
+
return fmt.Sprintf("%s %s (%s)", f.Key, f.Cmp, strings.Join(placeholders, ", "))
+
}
+
+
return fmt.Sprintf("%s %s ?", f.Key, f.Cmp)
+
}
+
+
func (f Filter) Arg() []any {
+
rv := reflect.ValueOf(f.arg)
+
kind := rv.Kind()
+
if (kind == reflect.Slice && rv.Type().Elem().Kind() != reflect.Uint8) || kind == reflect.Array {
+
if rv.Len() == 0 {
+
return nil
+
}
+
+
out := make([]any, rv.Len())
+
for i := range rv.Len() {
+
out[i] = rv.Index(i).Interface()
+
}
+
return out
+
}
+
+
return []any{f.arg}
+
}
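
A minimal sketch of how these filters might compose into a WHERE clause; the repos table and its column names are hypothetical, and only Filter, FilterEq, FilterIn, Condition, and Arg come from the package above:

package main

import (
	"fmt"
	"strings"

	"tangled.org/core/orm"
)

func main() {
	// hypothetical filters over a hypothetical "repos" table
	filters := []orm.Filter{
		orm.FilterEq("owner_did", "did:plc:example"),
		orm.FilterIn("id", []int{1, 2, 3}),
	}

	conds := make([]string, 0, len(filters))
	var args []any
	for _, f := range filters {
		conds = append(conds, f.Condition()) // "owner_did = ?", "id in (?, ?, ?)"
		args = append(args, f.Arg()...)
	}

	query := "select * from repos where " + strings.Join(conds, " and ")
	fmt.Println(query) // select * from repos where owner_did = ? and id in (?, ?, ?)
	fmt.Println(args)  // [did:plc:example 1 2 3]
}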
-1
patchutil/patchutil.go
···
}
nd := types.NiceDiff{}
-
nd.Commit.Parent = targetBranch
for _, d := range diffs {
ndiff := types.Diff{}
+8
rbac/rbac.go
···
return e.E.Enforce(user, domain, repo, "repo:delete")
}
+
func (e *Enforcer) IsRepoOwner(user, domain, repo string) (bool, error) {
+
return e.E.Enforce(user, domain, repo, "repo:owner")
+
}
+
+
func (e *Enforcer) IsRepoCollaborator(user, domain, repo string) (bool, error) {
+
return e.E.Enforce(user, domain, repo, "repo:collaborator")
+
}
+
func (e *Enforcer) IsPushAllowed(user, domain, repo string) (bool, error) {
return e.E.Enforce(user, domain, repo, "repo:push")
}
-26
scripts/appview.sh
···
-
#!/bin/bash
-
-
# Variables
-
BINARY_NAME="appview"
-
BINARY_PATH=".bin/app"
-
SERVER="95.111.206.63"
-
USER="appview"
-
-
# SCP the binary to root's home directory
-
scp "$BINARY_PATH" root@$SERVER:/root/"$BINARY_NAME"
-
-
# SSH into the server and perform the necessary operations
-
ssh root@$SERVER <<EOF
-
set -e # Exit on error
-
-
# Move binary to /usr/local/bin and set executable permissions
-
mv /root/$BINARY_NAME /usr/local/bin/$BINARY_NAME
-
chmod +x /usr/local/bin/$BINARY_NAME
-
-
su appview
-
cd ~
-
./reset.sh
-
EOF
-
-
echo "Deployment complete."
-
-5
scripts/generate-jwks.sh
···
-
#! /usr/bin/env bash
-
-
set -e
-
-
go run ./cmd/genjwks/
+31
sets/gen.go
···
+
package sets
+
+
import (
+
"math/rand"
+
"reflect"
+
"testing/quick"
+
)
+
+
func (_ Set[T]) Generate(rand *rand.Rand, size int) reflect.Value {
+
s := New[T]()
+
+
var zero T
+
itemType := reflect.TypeOf(zero)
+
+
for {
+
if s.Len() >= size {
+
break
+
}
+
+
item, ok := quick.Value(itemType, rand)
+
if !ok {
+
continue
+
}
+
+
if val, ok := item.Interface().(T); ok {
+
s.Insert(val)
+
}
+
}
+
+
return reflect.ValueOf(s)
+
}
+35
sets/readme.txt
···
+
sets
+
----
+
set data structure for go with generics and iterators. the
+
api is supposed to mimic rust's std::collections::HashSet api.
+
+
s1 := sets.Collect(slices.Values([]int{1, 2, 3, 4}))
+
s2 := sets.Collect(slices.Values([]int{1, 2, 3, 4, 5, 6}))
+
+
union := sets.Collect(s1.Union(s2))
+
intersect := sets.Collect(s1.Intersection(s2))
+
diff := sets.Collect(s1.Difference(s2))
+
symdiff := sets.Collect(s1.SymmetricDifference(s2))
+
+
s1.Len() // 4
+
s1.Contains(1) // true
+
s1.IsEmpty() // false
+
s1.IsSubset(s2) // true
+
s1.IsSuperset(s2) // false
+
s1.IsDisjoint(s2) // false
+
+
if exists := s1.Insert(1); exists {
+
// already existed in set
+
}
+
+
if existed := s1.Remove(1); existed {
+
// existed in set, now removed
+
}
+
+
+
testing
+
-------
+
includes property-based tests using the wonderful
+
testing/quick module!
+
+
go test -v
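
A minimal sketch of such a property (illustrative, not part of the test suite below): because Set's Generate method in gen.go satisfies testing/quick's Generator interface, quick.Check can synthesize random sets as arguments to property functions:

// in package sets, with "testing" and "testing/quick" imported
func TestPropertyInsertThenContains(t *testing.T) {
	f := func(s Set[int], item int) bool {
		clone := s.Clone()
		clone.Insert(item)
		return clone.Contains(item)
	}
	if err := quick.Check(f, nil); err != nil {
		t.Error(err)
	}
}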
+174
sets/set.go
···
+
package sets
+
+
import (
+
"iter"
+
"maps"
+
)
+
+
type Set[T comparable] struct {
+
data map[T]struct{}
+
}
+
+
func New[T comparable]() Set[T] {
+
return Set[T]{
+
data: make(map[T]struct{}),
+
}
+
}
+
+
func (s *Set[T]) Insert(item T) bool {
+
_, exists := s.data[item]
+
s.data[item] = struct{}{}
+
return !exists
+
}
+
+
func Singleton[T comparable](item T) Set[T] {
+
n := New[T]()
+
_ = n.Insert(item)
+
return n
+
}
+
+
func (s *Set[T]) Remove(item T) bool {
+
_, exists := s.data[item]
+
if exists {
+
delete(s.data, item)
+
}
+
return exists
+
}
+
+
func (s Set[T]) Contains(item T) bool {
+
_, exists := s.data[item]
+
return exists
+
}
+
+
func (s Set[T]) Len() int {
+
return len(s.data)
+
}
+
+
func (s Set[T]) IsEmpty() bool {
+
return len(s.data) == 0
+
}
+
+
func (s *Set[T]) Clear() {
+
s.data = make(map[T]struct{})
+
}
+
+
func (s Set[T]) All() iter.Seq[T] {
+
return func(yield func(T) bool) {
+
for item := range s.data {
+
if !yield(item) {
+
return
+
}
+
}
+
}
+
}
+
+
func (s Set[T]) Clone() Set[T] {
+
return Set[T]{
+
data: maps.Clone(s.data),
+
}
+
}
+
+
func (s Set[T]) Union(other Set[T]) iter.Seq[T] {
+
if s.Len() >= other.Len() {
+
return chain(s.All(), other.Difference(s))
+
} else {
+
return chain(other.All(), s.Difference(other))
+
}
+
}
+
+
func chain[T any](seqs ...iter.Seq[T]) iter.Seq[T] {
+
return func(yield func(T) bool) {
+
for _, seq := range seqs {
+
for item := range seq {
+
if !yield(item) {
+
return
+
}
+
}
+
}
+
}
+
}
+
+
func (s Set[T]) Intersection(other Set[T]) iter.Seq[T] {
+
return func(yield func(T) bool) {
+
for item := range s.data {
+
if other.Contains(item) {
+
if !yield(item) {
+
return
+
}
+
}
+
}
+
}
+
}
+
+
func (s Set[T]) Difference(other Set[T]) iter.Seq[T] {
+
return func(yield func(T) bool) {
+
for item := range s.data {
+
if !other.Contains(item) {
+
if !yield(item) {
+
return
+
}
+
}
+
}
+
}
+
}
+
+
func (s Set[T]) SymmetricDifference(other Set[T]) iter.Seq[T] {
+
return func(yield func(T) bool) {
+
for item := range s.data {
+
if !other.Contains(item) {
+
if !yield(item) {
+
return
+
}
+
}
+
}
+
for item := range other.data {
+
if !s.Contains(item) {
+
if !yield(item) {
+
return
+
}
+
}
+
}
+
}
+
}
+
+
func (s Set[T]) IsSubset(other Set[T]) bool {
+
for item := range s.data {
+
if !other.Contains(item) {
+
return false
+
}
+
}
+
return true
+
}
+
+
func (s Set[T]) IsSuperset(other Set[T]) bool {
+
return other.IsSubset(s)
+
}
+
+
func (s Set[T]) IsDisjoint(other Set[T]) bool {
+
for item := range s.data {
+
if other.Contains(item) {
+
return false
+
}
+
}
+
return true
+
}
+
+
func (s Set[T]) Equal(other Set[T]) bool {
+
if s.Len() != other.Len() {
+
return false
+
}
+
for item := range s.data {
+
if !other.Contains(item) {
+
return false
+
}
+
}
+
return true
+
}
+
+
func Collect[T comparable](seq iter.Seq[T]) Set[T] {
+
result := New[T]()
+
for item := range seq {
+
result.Insert(item)
+
}
+
return result
+
}
+411
sets/set_test.go
···
+
package sets
+
+
import (
+
"slices"
+
"testing"
+
"testing/quick"
+
)
+
+
func TestNew(t *testing.T) {
+
s := New[int]()
+
if s.Len() != 0 {
+
t.Errorf("New set should be empty, got length %d", s.Len())
+
}
+
if !s.IsEmpty() {
+
t.Error("New set should be empty")
+
}
+
}
+
+
func TestFromSlice(t *testing.T) {
+
s := Collect(slices.Values([]int{1, 2, 3, 2, 1}))
+
if s.Len() != 3 {
+
t.Errorf("Expected length 3, got %d", s.Len())
+
}
+
if !s.Contains(1) || !s.Contains(2) || !s.Contains(3) {
+
t.Error("Set should contain all unique elements from slice")
+
}
+
}
+
+
func TestInsert(t *testing.T) {
+
s := New[string]()
+
+
if !s.Insert("hello") {
+
t.Error("First insert should return true")
+
}
+
if s.Insert("hello") {
+
t.Error("Duplicate insert should return false")
+
}
+
if s.Len() != 1 {
+
t.Errorf("Expected length 1, got %d", s.Len())
+
}
+
}
+
+
func TestRemove(t *testing.T) {
+
s := Collect(slices.Values([]int{1, 2, 3}))
+
+
if !s.Remove(2) {
+
t.Error("Remove existing element should return true")
+
}
+
if s.Remove(2) {
+
t.Error("Remove non-existing element should return false")
+
}
+
if s.Contains(2) {
+
t.Error("Element should be removed")
+
}
+
if s.Len() != 2 {
+
t.Errorf("Expected length 2, got %d", s.Len())
+
}
+
}
+
+
func TestContains(t *testing.T) {
+
s := Collect(slices.Values([]int{1, 2, 3}))
+
+
if !s.Contains(1) {
+
t.Error("Should contain 1")
+
}
+
if s.Contains(4) {
+
t.Error("Should not contain 4")
+
}
+
}
+
+
func TestClear(t *testing.T) {
+
s := Collect(slices.Values([]int{1, 2, 3}))
+
s.Clear()
+
+
if !s.IsEmpty() {
+
t.Error("Set should be empty after clear")
+
}
+
if s.Len() != 0 {
+
t.Errorf("Expected length 0, got %d", s.Len())
+
}
+
}
+
+
func TestIterator(t *testing.T) {
+
s := Collect(slices.Values([]int{1, 2, 3}))
+
var items []int
+
+
for item := range s.All() {
+
items = append(items, item)
+
}
+
+
slices.Sort(items)
+
expected := []int{1, 2, 3}
+
if !slices.Equal(items, expected) {
+
t.Errorf("Expected %v, got %v", expected, items)
+
}
+
}
+
+
func TestClone(t *testing.T) {
+
s1 := Collect(slices.Values([]int{1, 2, 3}))
+
s2 := s1.Clone()
+
+
if !s1.Equal(s2) {
+
t.Error("Cloned set should be equal to original")
+
}
+
+
s2.Insert(4)
+
if s1.Contains(4) {
+
t.Error("Modifying clone should not affect original")
+
}
+
}
+
+
func TestUnion(t *testing.T) {
+
s1 := Collect(slices.Values([]int{1, 2}))
+
s2 := Collect(slices.Values([]int{2, 3}))
+
+
result := Collect(s1.Union(s2))
+
expected := Collect(slices.Values([]int{1, 2, 3}))
+
+
if !result.Equal(expected) {
+
t.Errorf("Expected %v, got %v", expected, result)
+
}
+
}
+
+
func TestIntersection(t *testing.T) {
+
s1 := Collect(slices.Values([]int{1, 2, 3}))
+
s2 := Collect(slices.Values([]int{2, 3, 4}))
+
+
expected := Collect(slices.Values([]int{2, 3}))
+
result := Collect(s1.Intersection(s2))
+
+
if !result.Equal(expected) {
+
t.Errorf("Expected %v, got %v", expected, result)
+
}
+
}
+
+
func TestDifference(t *testing.T) {
+
s1 := Collect(slices.Values([]int{1, 2, 3}))
+
s2 := Collect(slices.Values([]int{2, 3, 4}))
+
+
expected := Collect(slices.Values([]int{1}))
+
result := Collect(s1.Difference(s2))
+
+
if !result.Equal(expected) {
+
t.Errorf("Expected %v, got %v", expected, result)
+
}
+
}
+
+
func TestSymmetricDifference(t *testing.T) {
+
s1 := Collect(slices.Values([]int{1, 2, 3}))
+
s2 := Collect(slices.Values([]int{2, 3, 4}))
+
+
expected := Collect(slices.Values([]int{1, 4}))
+
result := Collect(s1.SymmetricDifference(s2))
+
+
if !result.Equal(expected) {
+
t.Errorf("Expected %v, got %v", expected, result)
+
}
+
}
+
+
func TestSymmetricDifferenceCommutativeProperty(t *testing.T) {
+
s1 := Collect(slices.Values([]int{1, 2, 3}))
+
s2 := Collect(slices.Values([]int{2, 3, 4}))
+
+
result1 := Collect(s1.SymmetricDifference(s2))
+
result2 := Collect(s2.SymmetricDifference(s1))
+
+
if !result1.Equal(result2) {
+
t.Errorf("Expected %v, got %v", result1, result2)
+
}
+
}
+
+
func TestIsSubset(t *testing.T) {
+
s1 := Collect(slices.Values([]int{1, 2}))
+
s2 := Collect(slices.Values([]int{1, 2, 3}))
+
+
if !s1.IsSubset(s2) {
+
t.Error("s1 should be subset of s2")
+
}
+
if s2.IsSubset(s1) {
+
t.Error("s2 should not be subset of s1")
+
}
+
}
+
+
func TestIsSuperset(t *testing.T) {
+
s1 := Collect(slices.Values([]int{1, 2, 3}))
+
s2 := Collect(slices.Values([]int{1, 2}))
+
+
if !s1.IsSuperset(s2) {
+
t.Error("s1 should be superset of s2")
+
}
+
if s2.IsSuperset(s1) {
+
t.Error("s2 should not be superset of s1")
+
}
+
}
+
+
func TestIsDisjoint(t *testing.T) {
+
s1 := Collect(slices.Values([]int{1, 2}))
+
s2 := Collect(slices.Values([]int{3, 4}))
+
s3 := Collect(slices.Values([]int{2, 3}))
+
+
if !s1.IsDisjoint(s2) {
+
t.Error("s1 and s2 should be disjoint")
+
}
+
if s1.IsDisjoint(s3) {
+
t.Error("s1 and s3 should not be disjoint")
+
}
+
}
+
+
func TestEqual(t *testing.T) {
+
s1 := Collect(slices.Values([]int{1, 2, 3}))
+
s2 := Collect(slices.Values([]int{3, 2, 1}))
+
s3 := Collect(slices.Values([]int{1, 2}))
+
+
if !s1.Equal(s2) {
+
t.Error("s1 and s2 should be equal")
+
}
+
if s1.Equal(s3) {
+
t.Error("s1 and s3 should not be equal")
+
}
+
}
+
+
func TestCollect(t *testing.T) {
+
s1 := Collect(slices.Values([]int{1, 2}))
+
s2 := Collect(slices.Values([]int{2, 3}))
+
+
unionSet := Collect(s1.Union(s2))
+
if unionSet.Len() != 3 {
+
t.Errorf("Expected union set length 3, got %d", unionSet.Len())
+
}
+
if !unionSet.Contains(1) || !unionSet.Contains(2) || !unionSet.Contains(3) {
+
t.Error("Union set should contain 1, 2, and 3")
+
}
+
+
diffSet := Collect(s1.Difference(s2))
+
if diffSet.Len() != 1 {
+
t.Errorf("Expected difference set length 1, got %d", diffSet.Len())
+
}
+
if !diffSet.Contains(1) {
+
t.Error("Difference set should contain 1")
+
}
+
}
+
+
func TestPropertySingletonLen(t *testing.T) {
+
f := func(item int) bool {
+
single := Singleton(item)
+
return single.Len() == 1
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+
+
func TestPropertyInsertIdempotent(t *testing.T) {
+
f := func(s Set[int], item int) bool {
+
clone := s.Clone()
+
+
clone.Insert(item)
+
firstLen := clone.Len()
+
+
clone.Insert(item)
+
secondLen := clone.Len()
+
+
return firstLen == secondLen
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+
+
func TestPropertyUnionCommutative(t *testing.T) {
+
f := func(s1 Set[int], s2 Set[int]) bool {
+
union1 := Collect(s1.Union(s2))
+
union2 := Collect(s2.Union(s1))
+
return union1.Equal(union2)
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+
+
func TestPropertyIntersectionCommutative(t *testing.T) {
+
f := func(s1 Set[int], s2 Set[int]) bool {
+
inter1 := Collect(s1.Intersection(s2))
+
inter2 := Collect(s2.Intersection(s1))
+
return inter1.Equal(inter2)
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+
+
func TestPropertyCloneEquals(t *testing.T) {
+
f := func(s Set[int]) bool {
+
clone := s.Clone()
+
return s.Equal(clone)
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+
+
func TestPropertyIntersectionIsSubset(t *testing.T) {
+
f := func(s1 Set[int], s2 Set[int]) bool {
+
inter := Collect(s1.Intersection(s2))
+
return inter.IsSubset(s1) && inter.IsSubset(s2)
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+
+
func TestPropertyUnionIsSuperset(t *testing.T) {
+
f := func(s1 Set[int], s2 Set[int]) bool {
+
union := Collect(s1.Union(s2))
+
return union.IsSuperset(s1) && union.IsSuperset(s2)
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+
+
func TestPropertyDifferenceDisjoint(t *testing.T) {
+
f := func(s1 Set[int], s2 Set[int]) bool {
+
diff := Collect(s1.Difference(s2))
+
return diff.IsDisjoint(s2)
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+
+
func TestPropertySymmetricDifferenceCommutative(t *testing.T) {
+
f := func(s1 Set[int], s2 Set[int]) bool {
+
symDiff1 := Collect(s1.SymmetricDifference(s2))
+
symDiff2 := Collect(s2.SymmetricDifference(s1))
+
return symDiff1.Equal(symDiff2)
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+
+
func TestPropertyRemoveWorks(t *testing.T) {
+
f := func(s Set[int], item int) bool {
+
clone := s.Clone()
+
clone.Insert(item)
+
clone.Remove(item)
+
return !clone.Contains(item)
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+
+
func TestPropertyClearEmpty(t *testing.T) {
+
f := func(s Set[int]) bool {
+
s.Clear()
+
return s.IsEmpty() && s.Len() == 0
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+
+
func TestPropertyIsSubsetReflexive(t *testing.T) {
+
f := func(s Set[int]) bool {
+
return s.IsSubset(s)
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+
+
func TestPropertyDeMorganUnion(t *testing.T) {
+
f := func(s1 Set[int], s2 Set[int], universe Set[int]) bool {
+
// create a universe that contains both sets
+
u := universe.Clone()
+
for item := range s1.All() {
+
u.Insert(item)
+
}
+
for item := range s2.All() {
+
u.Insert(item)
+
}
+
+
// (A u B)' = A' n B'
+
union := Collect(s1.Union(s2))
+
complementUnion := Collect(u.Difference(union))
+
+
complementS1 := Collect(u.Difference(s1))
+
complementS2 := Collect(u.Difference(s2))
+
intersectionComplements := Collect(complementS1.Intersection(complementS2))
+
+
return complementUnion.Equal(intersectionComplements)
+
}
+
+
if err := quick.Check(f, nil); err != nil {
+
t.Error(err)
+
}
+
}
+1
spindle/config/config.go
···
DBPath string `env:"DB_PATH, default=spindle.db"`
Hostname string `env:"HOSTNAME, required"`
JetstreamEndpoint string `env:"JETSTREAM_ENDPOINT, default=wss://jetstream1.us-west.bsky.network/subscribe"`
+
PlcUrl string `env:"PLC_URL, default=https://plc.directory"`
Dev bool `env:"DEV, default=false"`
Owner string `env:"OWNER, required"`
Secrets Secrets `env:",prefix=SECRETS_"`
+13 -3
spindle/engine/engine.go
···
defer cancel()
for stepIdx, step := range w.Steps {
+
// log start of step
if wfLogger != nil {
-
ctl := wfLogger.ControlWriter(stepIdx, step)
-
ctl.Write([]byte(step.Name()))
+
wfLogger.
+
ControlWriter(stepIdx, step, models.StepStatusStart).
+
Write([]byte{0})
}
err = eng.RunStep(ctx, wid, &w, stepIdx, allSecrets, wfLogger)
+
+
// log end of step
+
if wfLogger != nil {
+
wfLogger.
+
ControlWriter(stepIdx, step, models.StepStatusEnd).
+
Write([]byte{0})
+
}
+
if err != nil {
if errors.Is(err, ErrTimedOut) {
dbErr := db.StatusTimeout(wid, n)
···
if err := eg.Wait(); err != nil {
l.Error("failed to run one or more workflows", "err", err)
} else {
-
l.Error("successfully ran full pipeline")
+
l.Info("successfully ran full pipeline")
}
}
+13 -12
spindle/engines/nixery/engine.go
···
type addlFields struct {
image string
container string
-
env map[string]string
}
func (e *Engine) InitWorkflow(twf tangled.Pipeline_Workflow, tpl tangled.Pipeline) (*models.Workflow, error) {
···
swf.Steps = append(swf.Steps, sstep)
}
swf.Name = twf.Name
-
addl.env = dwf.Environment
+
swf.Environment = dwf.Environment
addl.image = workflowImage(dwf.Dependencies, e.cfg.NixeryPipelines.Nixery)
setup := &setupSteps{}
setup.addStep(nixConfStep())
-
setup.addStep(cloneStep(twf, *tpl.TriggerMetadata, e.cfg.Server.Dev))
+
setup.addStep(models.BuildCloneStep(twf, *tpl.TriggerMetadata, e.cfg.Server.Dev))
// this step could be empty
if s := dependencyStep(dwf.Dependencies); s != nil {
setup.addStep(*s)
···
},
ReadonlyRootfs: false,
CapDrop: []string{"ALL"},
-
CapAdd: []string{"CAP_DAC_OVERRIDE"},
+
CapAdd: []string{"CAP_DAC_OVERRIDE", "CAP_CHOWN", "CAP_FOWNER", "CAP_SETUID", "CAP_SETGID"},
SecurityOpt: []string{"no-new-privileges"},
ExtraHosts: []string{"host.docker.internal:host-gateway"},
}, nil, nil, "")
···
func (e *Engine) RunStep(ctx context.Context, wid models.WorkflowId, w *models.Workflow, idx int, secrets []secrets.UnlockedSecret, wfLogger *models.WorkflowLogger) error {
addl := w.Data.(addlFields)
-
workflowEnvs := ConstructEnvs(addl.env)
+
workflowEnvs := ConstructEnvs(w.Environment)
// TODO(winter): should SetupWorkflow also have secret access?
// IMO yes, but probably worth thinking on.
for _, s := range secrets {
workflowEnvs.AddEnv(s.Key, s.Value)
}
-
step := w.Steps[idx].(Step)
+
step := w.Steps[idx]
select {
case <-ctx.Done():
···
}
envs := append(EnvVars(nil), workflowEnvs...)
-
for k, v := range step.environment {
-
envs.AddEnv(k, v)
+
if nixStep, ok := step.(Step); ok {
+
for k, v := range nixStep.environment {
+
envs.AddEnv(k, v)
+
}
}
envs.AddEnv("HOME", homeDir)
mkExecResp, err := e.docker.ContainerExecCreate(ctx, addl.container, container.ExecOptions{
-
Cmd: []string{"bash", "-c", step.command},
+
Cmd: []string{"bash", "-c", step.Command()},
AttachStdout: true,
AttachStderr: true,
Env: envs,
···
// Docker doesn't provide an API to kill an exec run
// (sure, we could grab the PID and kill it ourselves,
// but that's wasted effort)
-
e.l.Warn("step timed out", "step", step.Name)
+
e.l.Warn("step timed out", "step", step.Name())
<-tailDone
···
defer logs.Close()
_, err = stdcopy.StdCopy(
-
wfLogger.DataWriter("stdout"),
-
wfLogger.DataWriter("stderr"),
+
wfLogger.DataWriter(stepIdx, "stdout"),
+
wfLogger.DataWriter(stepIdx, "stderr"),
logs.Reader,
)
if err != nil && err != io.EOF && !errors.Is(err, context.DeadlineExceeded) {
-73
spindle/engines/nixery/setup_steps.go
···
import (
"fmt"
-
"path"
"strings"
-
-
"tangled.org/core/api/tangled"
-
"tangled.org/core/workflow"
)
func nixConfStep() Step {
···
command: setupCmd,
name: "Configure Nix",
}
-
}
-
-
// cloneOptsAsSteps processes clone options and adds corresponding steps
-
// to the beginning of the workflow's step list if cloning is not skipped.
-
//
-
// the steps to do here are:
-
// - git init
-
// - git remote add origin <url>
-
// - git fetch --depth=<d> --recurse-submodules=<yes|no> <sha>
-
// - git checkout FETCH_HEAD
-
func cloneStep(twf tangled.Pipeline_Workflow, tr tangled.Pipeline_TriggerMetadata, dev bool) Step {
-
if twf.Clone.Skip {
-
return Step{}
-
}
-
-
var commands []string
-
-
// initialize git repo in workspace
-
commands = append(commands, "git init")
-
-
// add repo as git remote
-
scheme := "https://"
-
if dev {
-
scheme = "http://"
-
tr.Repo.Knot = strings.ReplaceAll(tr.Repo.Knot, "localhost", "host.docker.internal")
-
}
-
url := scheme + path.Join(tr.Repo.Knot, tr.Repo.Did, tr.Repo.Repo)
-
commands = append(commands, fmt.Sprintf("git remote add origin %s", url))
-
-
// run git fetch
-
{
-
var fetchArgs []string
-
-
// default clone depth is 1
-
depth := 1
-
if twf.Clone.Depth > 1 {
-
depth = int(twf.Clone.Depth)
-
}
-
fetchArgs = append(fetchArgs, fmt.Sprintf("--depth=%d", depth))
-
-
// optionally recurse submodules
-
if twf.Clone.Submodules {
-
fetchArgs = append(fetchArgs, "--recurse-submodules=yes")
-
}
-
-
// set remote to fetch from
-
fetchArgs = append(fetchArgs, "origin")
-
-
// set revision to checkout
-
switch workflow.TriggerKind(tr.Kind) {
-
case workflow.TriggerKindManual:
-
// TODO: unimplemented
-
case workflow.TriggerKindPush:
-
fetchArgs = append(fetchArgs, tr.Push.NewSha)
-
case workflow.TriggerKindPullRequest:
-
fetchArgs = append(fetchArgs, tr.PullRequest.SourceSha)
-
}
-
-
commands = append(commands, fmt.Sprintf("git fetch %s", strings.Join(fetchArgs, " ")))
-
}
-
-
// run git checkout
-
commands = append(commands, "git checkout FETCH_HEAD")
-
-
cloneStep := Step{
-
command: strings.Join(commands, "\n"),
-
name: "Clone repository into workspace",
-
}
-
return cloneStep
}
// dependencyStep processes dependencies defined in the workflow.
+3 -7
spindle/ingester.go
···
"tangled.org/core/api/tangled"
"tangled.org/core/eventconsumer"
-
"tangled.org/core/idresolver"
"tangled.org/core/rbac"
"tangled.org/core/spindle/db"
···
func (s *Spindle) ingestRepo(ctx context.Context, e *models.Event) error {
var err error
did := e.Did
-
resolver := idresolver.DefaultResolver()
l := s.l.With("component", "ingester", "record", tangled.RepoNSID)
···
}
// add collaborators to rbac
-
owner, err := resolver.ResolveIdent(ctx, did)
+
owner, err := s.res.ResolveIdent(ctx, did)
if err != nil || owner.Handle.IsInvalidHandle() {
return err
}
···
return err
}
-
resolver := idresolver.DefaultResolver()
-
-
subjectId, err := resolver.ResolveIdent(ctx, record.Subject)
+
subjectId, err := s.res.ResolveIdent(ctx, record.Subject)
if err != nil || subjectId.Handle.IsInvalidHandle() {
return err
}
···
// TODO: get rid of this entirely
// resolve this aturi to extract the repo record
-
owner, err := resolver.ResolveIdent(ctx, repoAt.Authority().String())
+
owner, err := s.res.ResolveIdent(ctx, repoAt.Authority().String())
if err != nil || owner.Handle.IsInvalidHandle() {
return fmt.Errorf("failed to resolve handle: %w", err)
}
+150
spindle/models/clone.go
···
+
package models
+
+
import (
+
"fmt"
+
"strings"
+
+
"tangled.org/core/api/tangled"
+
"tangled.org/core/workflow"
+
)
+
+
type CloneStep struct {
+
name string
+
kind StepKind
+
commands []string
+
}
+
+
func (s CloneStep) Name() string {
+
return s.name
+
}
+
+
func (s CloneStep) Commands() []string {
+
return s.commands
+
}
+
+
func (s CloneStep) Command() string {
+
return strings.Join(s.commands, "\n")
+
}
+
+
func (s CloneStep) Kind() StepKind {
+
return s.kind
+
}
+
+
// BuildCloneStep generates git clone commands.
+
// The caller must ensure the current working directory is set to the desired
+
// workspace directory before executing these commands.
+
//
+
// The generated commands are:
+
// - git init
+
// - git remote add origin <url>
+
// - git fetch --depth=<d> --recurse-submodules=<yes|no> <sha>
+
// - git checkout FETCH_HEAD
+
//
+
// Supports all trigger types (push, PR, manual) and clone options.
+
func BuildCloneStep(twf tangled.Pipeline_Workflow, tr tangled.Pipeline_TriggerMetadata, dev bool) CloneStep {
+
if twf.Clone != nil && twf.Clone.Skip {
+
return CloneStep{}
+
}
+
+
commitSHA, err := extractCommitSHA(tr)
+
if err != nil {
+
return CloneStep{
+
kind: StepKindSystem,
+
name: "Clone repository into workspace (error)",
+
commands: []string{fmt.Sprintf("echo 'Failed to get clone info: %s' && exit 1", err.Error())},
+
}
+
}
+
+
repoURL := BuildRepoURL(tr.Repo, dev)
+
+
var cloneOpts tangled.Pipeline_CloneOpts
+
if twf.Clone != nil {
+
cloneOpts = *twf.Clone
+
}
+
fetchArgs := buildFetchArgs(cloneOpts, commitSHA)
+
+
return CloneStep{
+
kind: StepKindSystem,
+
name: "Clone repository into workspace",
+
commands: []string{
+
"git init",
+
fmt.Sprintf("git remote add origin %s", repoURL),
+
fmt.Sprintf("git fetch %s", strings.Join(fetchArgs, " ")),
+
"git checkout FETCH_HEAD",
+
},
+
}
+
}
+
+
// extractCommitSHA extracts the commit SHA from trigger metadata based on trigger type
+
func extractCommitSHA(tr tangled.Pipeline_TriggerMetadata) (string, error) {
+
switch workflow.TriggerKind(tr.Kind) {
+
case workflow.TriggerKindPush:
+
if tr.Push == nil {
+
return "", fmt.Errorf("push trigger metadata is nil")
+
}
+
return tr.Push.NewSha, nil
+
+
case workflow.TriggerKindPullRequest:
+
if tr.PullRequest == nil {
+
return "", fmt.Errorf("pull request trigger metadata is nil")
+
}
+
return tr.PullRequest.SourceSha, nil
+
+
case workflow.TriggerKindManual:
+
// Manual triggers don't have an explicit SHA in the metadata
+
// For now, return empty string - could be enhanced to fetch from default branch
+
// TODO: Implement manual trigger SHA resolution (fetch default branch HEAD)
+
return "", nil
+
+
default:
+
return "", fmt.Errorf("unknown trigger kind: %s", tr.Kind)
+
}
+
}
+
+
// BuildRepoURL constructs the repository URL from repo metadata.
+
func BuildRepoURL(repo *tangled.Pipeline_TriggerRepo, devMode bool) string {
+
if repo == nil {
+
return ""
+
}
+
+
scheme := "https://"
+
if devMode {
+
scheme = "http://"
+
}
+
+
// Get host from knot
+
host := repo.Knot
+
+
// In dev mode, replace localhost with host.docker.internal for Docker networking
+
if devMode && strings.Contains(host, "localhost") {
+
host = strings.ReplaceAll(host, "localhost", "host.docker.internal")
+
}
+
+
// Build URL: {scheme}{knot}/{did}/{repo}
+
return fmt.Sprintf("%s%s/%s/%s", scheme, host, repo.Did, repo.Repo)
+
}
+
+
// buildFetchArgs constructs the arguments for git fetch based on clone options
+
func buildFetchArgs(clone tangled.Pipeline_CloneOpts, sha string) []string {
+
args := []string{}
+
+
// Set fetch depth (default to 1 for shallow clone)
+
depth := clone.Depth
+
if depth == 0 {
+
depth = 1
+
}
+
args = append(args, fmt.Sprintf("--depth=%d", depth))
+
+
// Add submodules if requested
+
if clone.Submodules {
+
args = append(args, "--recurse-submodules=yes")
+
}
+
+
// Add remote and SHA
+
args = append(args, "origin")
+
if sha != "" {
+
args = append(args, sha)
+
}
+
+
return args
+
}
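As a usage sketch (not part of this change): the commands BuildCloneStep emits for a push trigger, using made-up repo/SHA values and import paths assumed from the repo layout. Engines are expected to run the whole step through Command(), e.g. as a single `bash -c` invocation, as the nixery engine does above.

package main

import (
	"fmt"

	"tangled.org/core/api/tangled"
	"tangled.org/core/spindle/models"
	"tangled.org/core/workflow"
)

func main() {
	twf := tangled.Pipeline_Workflow{
		Clone: &tangled.Pipeline_CloneOpts{Depth: 1},
	}
	tr := tangled.Pipeline_TriggerMetadata{
		Kind: string(workflow.TriggerKindPush),
		Push: &tangled.Pipeline_PushTriggerData{NewSha: "abc123", Ref: "refs/heads/main"},
		Repo: &tangled.Pipeline_TriggerRepo{Knot: "example.com", Did: "did:plc:user123", Repo: "my-repo"},
	}

	step := models.BuildCloneStep(twf, tr, false)

	// Prints, in order:
	//   git init
	//   git remote add origin https://example.com/did:plc:user123/my-repo
	//   git fetch --depth=1 origin abc123
	//   git checkout FETCH_HEAD
	for _, cmd := range step.Commands() {
		fmt.Println(cmd)
	}
}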
+371
spindle/models/clone_test.go
···
+
package models
+
+
import (
+
"strings"
+
"testing"
+
+
"tangled.org/core/api/tangled"
+
"tangled.org/core/workflow"
+
)
+
+
func TestBuildCloneStep_PushTrigger(t *testing.T) {
+
twf := tangled.Pipeline_Workflow{
+
Clone: &tangled.Pipeline_CloneOpts{
+
Depth: 1,
+
Submodules: false,
+
Skip: false,
+
},
+
}
+
tr := tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
NewSha: "abc123",
+
OldSha: "def456",
+
Ref: "refs/heads/main",
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
+
step := BuildCloneStep(twf, tr, false)
+
+
if step.Kind() != StepKindSystem {
+
t.Errorf("Expected StepKindSystem, got %v", step.Kind())
+
}
+
+
if step.Name() != "Clone repository into workspace" {
+
t.Errorf("Expected 'Clone repository into workspace', got '%s'", step.Name())
+
}
+
+
commands := step.Commands()
+
if len(commands) != 4 {
+
t.Errorf("Expected 4 commands, got %d", len(commands))
+
}
+
+
// Verify commands contain expected git operations
+
allCmds := strings.Join(commands, " ")
+
if !strings.Contains(allCmds, "git init") {
+
t.Error("Commands should contain 'git init'")
+
}
+
if !strings.Contains(allCmds, "git remote add origin") {
+
t.Error("Commands should contain 'git remote add origin'")
+
}
+
if !strings.Contains(allCmds, "git fetch") {
+
t.Error("Commands should contain 'git fetch'")
+
}
+
if !strings.Contains(allCmds, "abc123") {
+
t.Error("Commands should contain commit SHA")
+
}
+
if !strings.Contains(allCmds, "git checkout FETCH_HEAD") {
+
t.Error("Commands should contain 'git checkout FETCH_HEAD'")
+
}
+
if !strings.Contains(allCmds, "https://example.com/did:plc:user123/my-repo") {
+
t.Error("Commands should contain expected repo URL")
+
}
+
}
+
+
func TestBuildCloneStep_PullRequestTrigger(t *testing.T) {
+
twf := tangled.Pipeline_Workflow{
+
Clone: &tangled.Pipeline_CloneOpts{
+
Depth: 1,
+
Skip: false,
+
},
+
}
+
tr := tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPullRequest),
+
PullRequest: &tangled.Pipeline_PullRequestTriggerData{
+
SourceSha: "pr-sha-789",
+
SourceBranch: "feature-branch",
+
TargetBranch: "main",
+
Action: "opened",
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
+
step := BuildCloneStep(twf, tr, false)
+
+
allCmds := strings.Join(step.Commands(), " ")
+
if !strings.Contains(allCmds, "pr-sha-789") {
+
t.Error("Commands should contain PR commit SHA")
+
}
+
}
+
+
func TestBuildCloneStep_ManualTrigger(t *testing.T) {
+
twf := tangled.Pipeline_Workflow{
+
Clone: &tangled.Pipeline_CloneOpts{
+
Depth: 1,
+
Skip: false,
+
},
+
}
+
tr := tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindManual),
+
Manual: &tangled.Pipeline_ManualTriggerData{
+
Inputs: nil,
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
+
step := BuildCloneStep(twf, tr, false)
+
+
// Manual triggers don't have a SHA yet (TODO), so git fetch won't include a SHA
+
allCmds := strings.Join(step.Commands(), " ")
+
// Should still have basic git commands
+
if !strings.Contains(allCmds, "git init") {
+
t.Error("Commands should contain 'git init'")
+
}
+
if !strings.Contains(allCmds, "git fetch") {
+
t.Error("Commands should contain 'git fetch'")
+
}
+
}
+
+
func TestBuildCloneStep_SkipFlag(t *testing.T) {
+
twf := tangled.Pipeline_Workflow{
+
Clone: &tangled.Pipeline_CloneOpts{
+
Skip: true,
+
},
+
}
+
tr := tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
NewSha: "abc123",
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
+
step := BuildCloneStep(twf, tr, false)
+
+
// Empty step when skip is true
+
if step.Name() != "" {
+
t.Error("Expected empty step name when Skip is true")
+
}
+
if len(step.Commands()) != 0 {
+
t.Errorf("Expected no commands when Skip is true, got %d commands", len(step.Commands()))
+
}
+
}
+
+
func TestBuildCloneStep_DevMode(t *testing.T) {
+
twf := tangled.Pipeline_Workflow{
+
Clone: &tangled.Pipeline_CloneOpts{
+
Depth: 1,
+
Skip: false,
+
},
+
}
+
tr := tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
NewSha: "abc123",
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "localhost:3000",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
+
step := BuildCloneStep(twf, tr, true)
+
+
// In dev mode, should use http:// and replace localhost with host.docker.internal
+
allCmds := strings.Join(step.Commands(), " ")
+
expectedURL := "http://host.docker.internal:3000/did:plc:user123/my-repo"
+
if !strings.Contains(allCmds, expectedURL) {
+
t.Errorf("Expected dev mode URL '%s' in commands", expectedURL)
+
}
+
}
+
+
func TestBuildCloneStep_DepthAndSubmodules(t *testing.T) {
+
twf := tangled.Pipeline_Workflow{
+
Clone: &tangled.Pipeline_CloneOpts{
+
Depth: 10,
+
Submodules: true,
+
Skip: false,
+
},
+
}
+
tr := tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
NewSha: "abc123",
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
+
step := BuildCloneStep(twf, tr, false)
+
+
allCmds := strings.Join(step.Commands(), " ")
+
if !strings.Contains(allCmds, "--depth=10") {
+
t.Error("Commands should contain '--depth=10'")
+
}
+
+
if !strings.Contains(allCmds, "--recurse-submodules=yes") {
+
t.Error("Commands should contain '--recurse-submodules=yes'")
+
}
+
}
+
+
func TestBuildCloneStep_DefaultDepth(t *testing.T) {
+
twf := tangled.Pipeline_Workflow{
+
Clone: &tangled.Pipeline_CloneOpts{
+
Depth: 0, // Default should be 1
+
Skip: false,
+
},
+
}
+
tr := tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
NewSha: "abc123",
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
+
step := BuildCloneStep(twf, tr, false)
+
+
allCmds := strings.Join(step.Commands(), " ")
+
if !strings.Contains(allCmds, "--depth=1") {
+
t.Error("Commands should default to '--depth=1'")
+
}
+
}
+
+
func TestBuildCloneStep_NilPushData(t *testing.T) {
+
twf := tangled.Pipeline_Workflow{
+
Clone: &tangled.Pipeline_CloneOpts{
+
Depth: 1,
+
Skip: false,
+
},
+
}
+
tr := tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPush),
+
Push: nil, // Nil push data should create error step
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
+
step := BuildCloneStep(twf, tr, false)
+
+
// Should return an error step
+
if !strings.Contains(step.Name(), "error") {
+
t.Error("Expected error in step name when push data is nil")
+
}
+
+
allCmds := strings.Join(step.Commands(), " ")
+
if !strings.Contains(allCmds, "Failed to get clone info") {
+
t.Error("Commands should contain error message")
+
}
+
if !strings.Contains(allCmds, "exit 1") {
+
t.Error("Commands should exit with error")
+
}
+
}
+
+
func TestBuildCloneStep_NilPRData(t *testing.T) {
+
twf := tangled.Pipeline_Workflow{
+
Clone: &tangled.Pipeline_CloneOpts{
+
Depth: 1,
+
Skip: false,
+
},
+
}
+
tr := tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPullRequest),
+
PullRequest: nil, // Nil PR data should create error step
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
+
step := BuildCloneStep(twf, tr, false)
+
+
// Should return an error step
+
if !strings.Contains(step.Name(), "error") {
+
t.Error("Expected error in step name when pull request data is nil")
+
}
+
+
allCmds := strings.Join(step.Commands(), " ")
+
if !strings.Contains(allCmds, "Failed to get clone info") {
+
t.Error("Commands should contain error message")
+
}
+
}
+
+
func TestBuildCloneStep_UnknownTriggerKind(t *testing.T) {
+
twf := tangled.Pipeline_Workflow{
+
Clone: &tangled.Pipeline_CloneOpts{
+
Depth: 1,
+
Skip: false,
+
},
+
}
+
tr := tangled.Pipeline_TriggerMetadata{
+
Kind: "unknown_trigger",
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
+
step := BuildCloneStep(twf, tr, false)
+
+
// Should return an error step
+
if !strings.Contains(step.Name(), "error") {
+
t.Error("Expected error in step name for unknown trigger kind")
+
}
+
+
allCmds := strings.Join(step.Commands(), " ")
+
if !strings.Contains(allCmds, "unknown trigger kind") {
+
t.Error("Commands should contain error message about unknown trigger kind")
+
}
+
}
+
+
func TestBuildCloneStep_NilCloneOpts(t *testing.T) {
+
twf := tangled.Pipeline_Workflow{
+
Clone: nil, // Nil clone options should use defaults
+
}
+
tr := tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
NewSha: "abc123",
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
+
step := BuildCloneStep(twf, tr, false)
+
+
// Should still work with default options
+
if step.Kind() != StepKindSystem {
+
t.Errorf("Expected StepKindSystem, got %v", step.Kind())
+
}
+
+
allCmds := strings.Join(step.Commands(), " ")
+
if !strings.Contains(allCmds, "--depth=1") {
+
t.Error("Commands should default to '--depth=1' when Clone is nil")
+
}
+
if !strings.Contains(allCmds, "git init") {
+
t.Error("Commands should contain 'git init'")
+
}
+
}
+14 -11
spindle/models/logger.go
···
return l.file.Close()
}
-
func (l *WorkflowLogger) DataWriter(stream string) io.Writer {
-
// TODO: emit stream
+
func (l *WorkflowLogger) DataWriter(idx int, stream string) io.Writer {
return &dataWriter{
logger: l,
+
idx: idx,
stream: stream,
}
}
-
func (l *WorkflowLogger) ControlWriter(idx int, step Step) io.Writer {
+
func (l *WorkflowLogger) ControlWriter(idx int, step Step, stepStatus StepStatus) io.Writer {
return &controlWriter{
-
logger: l,
-
idx: idx,
-
step: step,
+
logger: l,
+
idx: idx,
+
step: step,
+
stepStatus: stepStatus,
}
}
type dataWriter struct {
logger *WorkflowLogger
+
idx int
stream string
}
func (w *dataWriter) Write(p []byte) (int, error) {
line := strings.TrimRight(string(p), "\r\n")
-
entry := NewDataLogLine(line, w.stream)
+
entry := NewDataLogLine(w.idx, line, w.stream)
if err := w.logger.encoder.Encode(entry); err != nil {
return 0, err
}
···
}
type controlWriter struct {
-
logger *WorkflowLogger
-
idx int
-
step Step
+
logger *WorkflowLogger
+
idx int
+
step Step
+
stepStatus StepStatus
}
func (w *controlWriter) Write(_ []byte) (int, error) {
-
entry := NewControlLogLine(w.idx, w.step)
+
entry := NewControlLogLine(w.idx, w.step, w.stepStatus)
if err := w.logger.encoder.Encode(entry); err != nil {
return 0, err
}
+23 -8
spindle/models/models.go
···
"fmt"
"regexp"
"slices"
+
"time"
"tangled.org/core/api/tangled"
···
var (
// step log data
LogKindData LogKind = "data"
-
// indicates start/end of a step
+
// indicates status of a step
LogKindControl LogKind = "control"
)
+
// step status indicator in control log lines
+
type StepStatus string
+
+
var (
+
StepStatusStart StepStatus = "start"
+
StepStatusEnd StepStatus = "end"
+
)
+
type LogLine struct {
-
Kind LogKind `json:"kind"`
-
Content string `json:"content"`
+
Kind LogKind `json:"kind"`
+
Content string `json:"content"`
+
Time time.Time `json:"time"`
+
StepId int `json:"step_id"`
// fields if kind is "data"
Stream string `json:"stream,omitempty"`
// fields if kind is "control"
-
StepId int `json:"step_id,omitempty"`
-
StepKind StepKind `json:"step_kind,omitempty"`
-
StepCommand string `json:"step_command,omitempty"`
+
StepStatus StepStatus `json:"step_status,omitempty"`
+
StepKind StepKind `json:"step_kind,omitempty"`
+
StepCommand string `json:"step_command,omitempty"`
}
-
func NewDataLogLine(content, stream string) LogLine {
+
func NewDataLogLine(idx int, content, stream string) LogLine {
return LogLine{
Kind: LogKindData,
+
Time: time.Now(),
Content: content,
+
StepId: idx,
Stream: stream,
}
}
-
func NewControlLogLine(idx int, step Step) LogLine {
+
func NewControlLogLine(idx int, step Step, status StepStatus) LogLine {
return LogLine{
Kind: LogKindControl,
+
Time: time.Now(),
Content: step.Name(),
StepId: idx,
+
StepStatus: status,
StepKind: step.Kind(),
StepCommand: step.Command(),
}
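For illustration, the shape of the JSONL entries the logger now emits; field names come from the struct tags above, while the values and the import path are made up. In practice the entries are built via NewDataLogLine/NewControlLogLine and the writers in logger.go.

package main

import (
	"encoding/json"
	"os"
	"time"

	"tangled.org/core/spindle/models"
)

func main() {
	enc := json.NewEncoder(os.Stdout)

	// control line marking the start of step 0
	enc.Encode(models.LogLine{
		Kind:       models.LogKindControl,
		Time:       time.Now(),
		Content:    "Configure Nix",
		StepId:     0,
		StepStatus: models.StepStatusStart,
	})

	// data line carrying one line of stdout from step 0
	enc.Encode(models.LogLine{
		Kind:    models.LogKindData,
		Time:    time.Now(),
		Content: "hello from the workflow",
		StepId:  0,
		Stream:  "stdout",
	})

	// Output, one object per line (values illustrative):
	//   {"kind":"control","content":"Configure Nix","time":"...","step_id":0,"step_status":"start"}
	//   {"kind":"data","content":"hello from the workflow","time":"...","step_id":0,"stream":"stdout"}
}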
+4 -3
spindle/models/pipeline.go
···
)
type Workflow struct {
-
Steps []Step
-
Name string
-
Data any
+
Steps []Step
+
Name string
+
Data any
+
Environment map[string]string
}
+77
spindle/models/pipeline_env.go
···
+
package models
+
+
import (
+
"strings"
+
+
"github.com/go-git/go-git/v5/plumbing"
+
"tangled.org/core/api/tangled"
+
"tangled.org/core/workflow"
+
)
+
+
// PipelineEnvVars extracts environment variables from pipeline trigger metadata.
+
// These are framework-provided variables that are injected into workflow steps.
+
func PipelineEnvVars(tr *tangled.Pipeline_TriggerMetadata, pipelineId PipelineId, devMode bool) map[string]string {
+
if tr == nil {
+
return nil
+
}
+
+
env := make(map[string]string)
+
+
// Standard CI environment variable
+
env["CI"] = "true"
+
+
env["TANGLED_PIPELINE_ID"] = pipelineId.Rkey
+
+
// Repo info
+
if tr.Repo != nil {
+
env["TANGLED_REPO_KNOT"] = tr.Repo.Knot
+
env["TANGLED_REPO_DID"] = tr.Repo.Did
+
env["TANGLED_REPO_NAME"] = tr.Repo.Repo
+
env["TANGLED_REPO_DEFAULT_BRANCH"] = tr.Repo.DefaultBranch
+
env["TANGLED_REPO_URL"] = BuildRepoURL(tr.Repo, devMode)
+
}
+
+
switch workflow.TriggerKind(tr.Kind) {
+
case workflow.TriggerKindPush:
+
if tr.Push != nil {
+
refName := plumbing.ReferenceName(tr.Push.Ref)
+
refType := "branch"
+
if refName.IsTag() {
+
refType = "tag"
+
}
+
+
env["TANGLED_REF"] = tr.Push.Ref
+
env["TANGLED_REF_NAME"] = refName.Short()
+
env["TANGLED_REF_TYPE"] = refType
+
env["TANGLED_SHA"] = tr.Push.NewSha
+
env["TANGLED_COMMIT_SHA"] = tr.Push.NewSha
+
}
+
+
case workflow.TriggerKindPullRequest:
+
if tr.PullRequest != nil {
+
// For PRs, the "ref" is the source branch
+
env["TANGLED_REF"] = "refs/heads/" + tr.PullRequest.SourceBranch
+
env["TANGLED_REF_NAME"] = tr.PullRequest.SourceBranch
+
env["TANGLED_REF_TYPE"] = "branch"
+
env["TANGLED_SHA"] = tr.PullRequest.SourceSha
+
env["TANGLED_COMMIT_SHA"] = tr.PullRequest.SourceSha
+
+
// PR-specific variables
+
env["TANGLED_PR_SOURCE_BRANCH"] = tr.PullRequest.SourceBranch
+
env["TANGLED_PR_TARGET_BRANCH"] = tr.PullRequest.TargetBranch
+
env["TANGLED_PR_SOURCE_SHA"] = tr.PullRequest.SourceSha
+
env["TANGLED_PR_ACTION"] = tr.PullRequest.Action
+
}
+
+
case workflow.TriggerKindManual:
+
// Manual triggers may not have ref/sha info
+
// Include any manual inputs if present
+
if tr.Manual != nil {
+
for _, pair := range tr.Manual.Inputs {
+
env["TANGLED_INPUT_"+strings.ToUpper(pair.Key)] = pair.Value
+
}
+
}
+
}
+
+
return env
+
}
+260
spindle/models/pipeline_env_test.go
···
+
package models
+
+
import (
+
"testing"
+
+
"tangled.org/core/api/tangled"
+
"tangled.org/core/workflow"
+
)
+
+
func TestPipelineEnvVars_PushBranch(t *testing.T) {
+
tr := &tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
NewSha: "abc123def456",
+
OldSha: "000000000000",
+
Ref: "refs/heads/main",
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
DefaultBranch: "main",
+
},
+
}
+
id := PipelineId{
+
Knot: "example.com",
+
Rkey: "123123",
+
}
+
env := PipelineEnvVars(tr, id, false)
+
+
// Check standard CI variable
+
if env["CI"] != "true" {
+
t.Errorf("Expected CI='true', got '%s'", env["CI"])
+
}
+
+
// Check ref variables
+
if env["TANGLED_REF"] != "refs/heads/main" {
+
t.Errorf("Expected TANGLED_REF='refs/heads/main', got '%s'", env["TANGLED_REF"])
+
}
+
if env["TANGLED_REF_NAME"] != "main" {
+
t.Errorf("Expected TANGLED_REF_NAME='main', got '%s'", env["TANGLED_REF_NAME"])
+
}
+
if env["TANGLED_REF_TYPE"] != "branch" {
+
t.Errorf("Expected TANGLED_REF_TYPE='branch', got '%s'", env["TANGLED_REF_TYPE"])
+
}
+
+
// Check SHA variables
+
if env["TANGLED_SHA"] != "abc123def456" {
+
t.Errorf("Expected TANGLED_SHA='abc123def456', got '%s'", env["TANGLED_SHA"])
+
}
+
if env["TANGLED_COMMIT_SHA"] != "abc123def456" {
+
t.Errorf("Expected TANGLED_COMMIT_SHA='abc123def456', got '%s'", env["TANGLED_COMMIT_SHA"])
+
}
+
+
// Check repo variables
+
if env["TANGLED_REPO_KNOT"] != "example.com" {
+
t.Errorf("Expected TANGLED_REPO_KNOT='example.com', got '%s'", env["TANGLED_REPO_KNOT"])
+
}
+
if env["TANGLED_REPO_DID"] != "did:plc:user123" {
+
t.Errorf("Expected TANGLED_REPO_DID='did:plc:user123', got '%s'", env["TANGLED_REPO_DID"])
+
}
+
if env["TANGLED_REPO_NAME"] != "my-repo" {
+
t.Errorf("Expected TANGLED_REPO_NAME='my-repo', got '%s'", env["TANGLED_REPO_NAME"])
+
}
+
if env["TANGLED_REPO_DEFAULT_BRANCH"] != "main" {
+
t.Errorf("Expected TANGLED_REPO_DEFAULT_BRANCH='main', got '%s'", env["TANGLED_REPO_DEFAULT_BRANCH"])
+
}
+
if env["TANGLED_REPO_URL"] != "https://example.com/did:plc:user123/my-repo" {
+
t.Errorf("Expected TANGLED_REPO_URL='https://example.com/did:plc:user123/my-repo', got '%s'", env["TANGLED_REPO_URL"])
+
}
+
}
+
+
func TestPipelineEnvVars_PushTag(t *testing.T) {
+
tr := &tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
NewSha: "abc123def456",
+
OldSha: "000000000000",
+
Ref: "refs/tags/v1.2.3",
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
id := PipelineId{
+
Knot: "example.com",
+
Rkey: "123123",
+
}
+
env := PipelineEnvVars(tr, id, false)
+
+
if env["TANGLED_REF"] != "refs/tags/v1.2.3" {
+
t.Errorf("Expected TANGLED_REF='refs/tags/v1.2.3', got '%s'", env["TANGLED_REF"])
+
}
+
if env["TANGLED_REF_NAME"] != "v1.2.3" {
+
t.Errorf("Expected TANGLED_REF_NAME='v1.2.3', got '%s'", env["TANGLED_REF_NAME"])
+
}
+
if env["TANGLED_REF_TYPE"] != "tag" {
+
t.Errorf("Expected TANGLED_REF_TYPE='tag', got '%s'", env["TANGLED_REF_TYPE"])
+
}
+
}
+
+
func TestPipelineEnvVars_PullRequest(t *testing.T) {
+
tr := &tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPullRequest),
+
PullRequest: &tangled.Pipeline_PullRequestTriggerData{
+
SourceBranch: "feature-branch",
+
TargetBranch: "main",
+
SourceSha: "pr-sha-789",
+
Action: "opened",
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
id := PipelineId{
+
Knot: "example.com",
+
Rkey: "123123",
+
}
+
env := PipelineEnvVars(tr, id, false)
+
+
// Check ref variables for PR
+
if env["TANGLED_REF"] != "refs/heads/feature-branch" {
+
t.Errorf("Expected TANGLED_REF='refs/heads/feature-branch', got '%s'", env["TANGLED_REF"])
+
}
+
if env["TANGLED_REF_NAME"] != "feature-branch" {
+
t.Errorf("Expected TANGLED_REF_NAME='feature-branch', got '%s'", env["TANGLED_REF_NAME"])
+
}
+
if env["TANGLED_REF_TYPE"] != "branch" {
+
t.Errorf("Expected TANGLED_REF_TYPE='branch', got '%s'", env["TANGLED_REF_TYPE"])
+
}
+
+
// Check SHA variables
+
if env["TANGLED_SHA"] != "pr-sha-789" {
+
t.Errorf("Expected TANGLED_SHA='pr-sha-789', got '%s'", env["TANGLED_SHA"])
+
}
+
if env["TANGLED_COMMIT_SHA"] != "pr-sha-789" {
+
t.Errorf("Expected TANGLED_COMMIT_SHA='pr-sha-789', got '%s'", env["TANGLED_COMMIT_SHA"])
+
}
+
+
// Check PR-specific variables
+
if env["TANGLED_PR_SOURCE_BRANCH"] != "feature-branch" {
+
t.Errorf("Expected TANGLED_PR_SOURCE_BRANCH='feature-branch', got '%s'", env["TANGLED_PR_SOURCE_BRANCH"])
+
}
+
if env["TANGLED_PR_TARGET_BRANCH"] != "main" {
+
t.Errorf("Expected TANGLED_PR_TARGET_BRANCH='main', got '%s'", env["TANGLED_PR_TARGET_BRANCH"])
+
}
+
if env["TANGLED_PR_SOURCE_SHA"] != "pr-sha-789" {
+
t.Errorf("Expected TANGLED_PR_SOURCE_SHA='pr-sha-789', got '%s'", env["TANGLED_PR_SOURCE_SHA"])
+
}
+
if env["TANGLED_PR_ACTION"] != "opened" {
+
t.Errorf("Expected TANGLED_PR_ACTION='opened', got '%s'", env["TANGLED_PR_ACTION"])
+
}
+
}
+
+
func TestPipelineEnvVars_ManualWithInputs(t *testing.T) {
+
tr := &tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindManual),
+
Manual: &tangled.Pipeline_ManualTriggerData{
+
Inputs: []*tangled.Pipeline_Pair{
+
{Key: "version", Value: "1.0.0"},
+
{Key: "environment", Value: "production"},
+
},
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
id := PipelineId{
+
Knot: "example.com",
+
Rkey: "123123",
+
}
+
env := PipelineEnvVars(tr, id, false)
+
+
// Check manual input variables
+
if env["TANGLED_INPUT_VERSION"] != "1.0.0" {
+
t.Errorf("Expected TANGLED_INPUT_VERSION='1.0.0', got '%s'", env["TANGLED_INPUT_VERSION"])
+
}
+
if env["TANGLED_INPUT_ENVIRONMENT"] != "production" {
+
t.Errorf("Expected TANGLED_INPUT_ENVIRONMENT='production', got '%s'", env["TANGLED_INPUT_ENVIRONMENT"])
+
}
+
+
// Manual triggers shouldn't have ref/sha variables
+
if _, ok := env["TANGLED_REF"]; ok {
+
t.Error("Manual trigger should not have TANGLED_REF")
+
}
+
if _, ok := env["TANGLED_SHA"]; ok {
+
t.Error("Manual trigger should not have TANGLED_SHA")
+
}
+
}
+
+
func TestPipelineEnvVars_DevMode(t *testing.T) {
+
tr := &tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
NewSha: "abc123",
+
Ref: "refs/heads/main",
+
},
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "localhost:3000",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
id := PipelineId{
+
Knot: "example.com",
+
Rkey: "123123",
+
}
+
env := PipelineEnvVars(tr, id, true)
+
+
// Dev mode should use http:// and replace localhost with host.docker.internal
+
expectedURL := "http://host.docker.internal:3000/did:plc:user123/my-repo"
+
if env["TANGLED_REPO_URL"] != expectedURL {
+
t.Errorf("Expected TANGLED_REPO_URL='%s', got '%s'", expectedURL, env["TANGLED_REPO_URL"])
+
}
+
}
+
+
func TestPipelineEnvVars_NilTrigger(t *testing.T) {
+
id := PipelineId{
+
Knot: "example.com",
+
Rkey: "123123",
+
}
+
env := PipelineEnvVars(nil, id, false)
+
+
if env != nil {
+
t.Error("Expected nil env for nil trigger")
+
}
+
}
+
+
func TestPipelineEnvVars_NilPushData(t *testing.T) {
+
tr := &tangled.Pipeline_TriggerMetadata{
+
Kind: string(workflow.TriggerKindPush),
+
Push: nil,
+
Repo: &tangled.Pipeline_TriggerRepo{
+
Knot: "example.com",
+
Did: "did:plc:user123",
+
Repo: "my-repo",
+
},
+
}
+
id := PipelineId{
+
Knot: "example.com",
+
Rkey: "123123",
+
}
+
env := PipelineEnvVars(tr, id, false)
+
+
// Should still have repo variables
+
if env["TANGLED_REPO_KNOT"] != "example.com" {
+
t.Errorf("Expected TANGLED_REPO_KNOT='example.com', got '%s'", env["TANGLED_REPO_KNOT"])
+
}
+
+
// Should not have ref/sha variables
+
if _, ok := env["TANGLED_REF"]; ok {
+
t.Error("Should not have TANGLED_REF when push data is nil")
+
}
+
}
+15 -7
spindle/secrets/openbao.go
···
)
type OpenBaoManager struct {
-
client *vault.Client
-
mountPath string
-
logger *slog.Logger
+
client *vault.Client
+
mountPath string
+
logger *slog.Logger
+
connectionTimeout time.Duration
}
type OpenBaoManagerOpt func(*OpenBaoManager)
···
}
}
+
func WithConnectionTimeout(timeout time.Duration) OpenBaoManagerOpt {
+
return func(v *OpenBaoManager) {
+
v.connectionTimeout = timeout
+
}
+
}
+
// NewOpenBaoManager creates a new OpenBao manager that connects to a Bao Proxy
// The proxyAddress should point to the local Bao Proxy (e.g., "http://127.0.0.1:8200")
// The proxy handles all authentication automatically via Auto-Auth
···
}
manager := &OpenBaoManager{
-
client: client,
-
mountPath: "spindle", // default KV v2 mount path
-
logger: logger,
+
client: client,
+
mountPath: "spindle", // default KV v2 mount path
+
logger: logger,
+
connectionTimeout: 10 * time.Second, // default connection timeout
}
for _, opt := range opts {
···
// testConnection verifies that we can connect to the proxy
func (v *OpenBaoManager) testConnection() error {
-
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+
ctx, cancel := context.WithTimeout(context.Background(), v.connectionTimeout)
defer cancel()
// try token self-lookup as a quick way to verify proxy works
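A minimal construction sketch showing the new option; the proxy address, mount and timeout values are placeholders, and the import path is assumed from the repo layout.

package main

import (
	"log/slog"
	"os"
	"time"

	"tangled.org/core/spindle/secrets"
)

func main() {
	logger := slog.New(slog.NewTextHandler(os.Stderr, nil))

	// fail fast if the local Bao Proxy is unreachable instead of waiting the default 10s
	mgr, err := secrets.NewOpenBaoManager(
		"http://127.0.0.1:8200",
		logger,
		secrets.WithMountPath("spindle"),
		secrets.WithConnectionTimeout(2*time.Second),
	)
	if err != nil {
		logger.Error("openbao proxy unreachable", "err", err)
		os.Exit(1)
	}
	_ = mgr
}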
+5 -2
spindle/secrets/openbao_test.go
···
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
logger := slog.New(slog.NewTextHandler(os.Stderr, nil))
-
manager, err := NewOpenBaoManager(tt.proxyAddr, logger, tt.opts...)
+
// Use shorter timeout for tests to avoid long waits
+
opts := append(tt.opts, WithConnectionTimeout(1*time.Second))
+
manager, err := NewOpenBaoManager(tt.proxyAddr, logger, opts...)
if tt.expectError {
assert.Error(t, err)
···
// All these will fail because no real proxy is running
// but we can test that the configuration is properly accepted
-
manager, err := NewOpenBaoManager(tt.proxyAddr, logger)
+
// Use shorter timeout for tests to avoid long waits
+
manager, err := NewOpenBaoManager(tt.proxyAddr, logger, WithConnectionTimeout(1*time.Second))
assert.Error(t, err) // Expected because no real proxy
assert.Nil(t, manager)
assert.Contains(t, err.Error(), "failed to connect to bao proxy")
+97 -41
spindle/server.go
···
"encoding/json"
"fmt"
"log/slog"
+
"maps"
"net/http"
"github.com/go-chi/chi/v5"
···
vault secrets.Manager
}
-
func Run(ctx context.Context) error {
+
// New creates a new Spindle server with the provided configuration and engines.
+
func New(ctx context.Context, cfg *config.Config, engines map[string]models.Engine) (*Spindle, error) {
logger := log.FromContext(ctx)
-
cfg, err := config.Load(ctx)
-
if err != nil {
-
return fmt.Errorf("failed to load config: %w", err)
-
}
-
d, err := db.Make(cfg.Server.DBPath)
if err != nil {
-
return fmt.Errorf("failed to setup db: %w", err)
+
return nil, fmt.Errorf("failed to setup db: %w", err)
}
e, err := rbac.NewEnforcer(cfg.Server.DBPath)
if err != nil {
-
return fmt.Errorf("failed to setup rbac enforcer: %w", err)
+
return nil, fmt.Errorf("failed to setup rbac enforcer: %w", err)
}
e.E.EnableAutoSave(true)
···
switch cfg.Server.Secrets.Provider {
case "openbao":
if cfg.Server.Secrets.OpenBao.ProxyAddr == "" {
-
return fmt.Errorf("openbao proxy address is required when using openbao secrets provider")
+
return nil, fmt.Errorf("openbao proxy address is required when using openbao secrets provider")
}
vault, err = secrets.NewOpenBaoManager(
cfg.Server.Secrets.OpenBao.ProxyAddr,
···
secrets.WithMountPath(cfg.Server.Secrets.OpenBao.Mount),
)
if err != nil {
-
return fmt.Errorf("failed to setup openbao secrets provider: %w", err)
+
return nil, fmt.Errorf("failed to setup openbao secrets provider: %w", err)
}
logger.Info("using openbao secrets provider", "proxy_address", cfg.Server.Secrets.OpenBao.ProxyAddr, "mount", cfg.Server.Secrets.OpenBao.Mount)
case "sqlite", "":
vault, err = secrets.NewSQLiteManager(cfg.Server.DBPath, secrets.WithTableName("secrets"))
if err != nil {
-
return fmt.Errorf("failed to setup sqlite secrets provider: %w", err)
+
return nil, fmt.Errorf("failed to setup sqlite secrets provider: %w", err)
}
logger.Info("using sqlite secrets provider", "path", cfg.Server.DBPath)
default:
-
return fmt.Errorf("unknown secrets provider: %s", cfg.Server.Secrets.Provider)
-
}
-
-
nixeryEng, err := nixery.New(ctx, cfg)
-
if err != nil {
-
return err
+
return nil, fmt.Errorf("unknown secrets provider: %s", cfg.Server.Secrets.Provider)
}
jq := queue.NewQueue(cfg.Server.QueueSize, cfg.Server.MaxJobCount)
···
}
jc, err := jetstream.NewJetstreamClient(cfg.Server.JetstreamEndpoint, "spindle", collections, nil, log.SubLogger(logger, "jetstream"), d, true, true)
if err != nil {
-
return fmt.Errorf("failed to setup jetstream client: %w", err)
+
return nil, fmt.Errorf("failed to setup jetstream client: %w", err)
}
jc.AddDid(cfg.Server.Owner)
// Check if the spindle knows about any Dids;
dids, err := d.GetAllDids()
if err != nil {
-
return fmt.Errorf("failed to get all dids: %w", err)
+
return nil, fmt.Errorf("failed to get all dids: %w", err)
}
for _, d := range dids {
jc.AddDid(d)
}
-
resolver := idresolver.DefaultResolver()
+
resolver := idresolver.DefaultResolver(cfg.Server.PlcUrl)
-
spindle := Spindle{
+
spindle := &Spindle{
jc: jc,
e: e,
db: d,
l: logger,
n: &n,
-
engs: map[string]models.Engine{"nixery": nixeryEng},
+
engs: engines,
jq: jq,
cfg: cfg,
res: resolver,
···
err = e.AddSpindle(rbacDomain)
if err != nil {
-
return fmt.Errorf("failed to set rbac domain: %w", err)
+
return nil, fmt.Errorf("failed to set rbac domain: %w", err)
}
err = spindle.configureOwner()
if err != nil {
-
return err
+
return nil, err
}
logger.Info("owner set", "did", cfg.Server.Owner)
-
// starts a job queue runner in the background
-
jq.Start()
-
defer jq.Stop()
-
-
// Stop vault token renewal if it implements Stopper
-
if stopper, ok := vault.(secrets.Stopper); ok {
-
defer stopper.Stop()
-
}
-
cursorStore, err := cursor.NewSQLiteStore(cfg.Server.DBPath)
if err != nil {
-
return fmt.Errorf("failed to setup sqlite3 cursor store: %w", err)
+
return nil, fmt.Errorf("failed to setup sqlite3 cursor store: %w", err)
}
err = jc.StartJetstream(ctx, spindle.ingest())
if err != nil {
-
return fmt.Errorf("failed to start jetstream consumer: %w", err)
+
return nil, fmt.Errorf("failed to start jetstream consumer: %w", err)
}
// for each incoming sh.tangled.pipeline, we execute
···
ccfg.CursorStore = cursorStore
knownKnots, err := d.Knots()
if err != nil {
-
return err
+
return nil, err
}
for _, knot := range knownKnots {
logger.Info("adding source start", "knot", knot)
···
}
spindle.ks = eventconsumer.NewConsumer(*ccfg)
+
return spindle, nil
+
}
+
+
// DB returns the database instance.
+
func (s *Spindle) DB() *db.DB {
+
return s.db
+
}
+
+
// Queue returns the job queue instance.
+
func (s *Spindle) Queue() *queue.Queue {
+
return s.jq
+
}
+
+
// Engines returns the map of available engines.
+
func (s *Spindle) Engines() map[string]models.Engine {
+
return s.engs
+
}
+
+
// Vault returns the secrets manager instance.
+
func (s *Spindle) Vault() secrets.Manager {
+
return s.vault
+
}
+
+
// Notifier returns the notifier instance.
+
func (s *Spindle) Notifier() *notifier.Notifier {
+
return s.n
+
}
+
+
// Enforcer returns the RBAC enforcer instance.
+
func (s *Spindle) Enforcer() *rbac.Enforcer {
+
return s.e
+
}
+
+
// Start starts the Spindle server (blocking).
+
func (s *Spindle) Start(ctx context.Context) error {
+
// starts a job queue runner in the background
+
s.jq.Start()
+
defer s.jq.Stop()
+
+
// Stop vault token renewal if it implements Stopper
+
if stopper, ok := s.vault.(secrets.Stopper); ok {
+
defer stopper.Stop()
+
}
+
go func() {
-
logger.Info("starting knot event consumer")
-
spindle.ks.Start(ctx)
+
s.l.Info("starting knot event consumer")
+
s.ks.Start(ctx)
}()
-
logger.Info("starting spindle server", "address", cfg.Server.ListenAddr)
-
logger.Error("server error", "error", http.ListenAndServe(cfg.Server.ListenAddr, spindle.Router()))
+
s.l.Info("starting spindle server", "address", s.cfg.Server.ListenAddr)
+
return http.ListenAndServe(s.cfg.Server.ListenAddr, s.Router())
+
}
-
return nil
+
func Run(ctx context.Context) error {
+
cfg, err := config.Load(ctx)
+
if err != nil {
+
return fmt.Errorf("failed to load config: %w", err)
+
}
+
+
nixeryEng, err := nixery.New(ctx, cfg)
+
if err != nil {
+
return err
+
}
+
+
s, err := New(ctx, cfg, map[string]models.Engine{
+
"nixery": nixeryEng,
+
})
+
if err != nil {
+
return err
+
}
+
+
return s.Start(ctx)
}
func (s *Spindle) Router() http.Handler {
···
workflows := make(map[models.Engine][]models.Workflow)
+
// Build pipeline environment variables once for all workflows
+
pipelineEnv := models.PipelineEnvVars(tpl.TriggerMetadata, pipelineId, s.cfg.Server.Dev)
+
for _, w := range tpl.Workflows {
if w != nil {
if _, ok := s.engs[w.Engine]; !ok {
···
if err != nil {
return err
}
+
+
// inject TANGLED_* env vars after InitWorkflow
+
// This prevents user-defined env vars from overriding them
+
if ewf.Environment == nil {
+
ewf.Environment = make(map[string]string)
+
}
+
maps.Copy(ewf.Environment, pipelineEnv)
workflows[eng] = append(workflows[eng], *ewf)
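The New/Start split makes the server embeddable; a sketch of what a caller other than Run might do, with a hypothetical custom engine and import paths assumed from the repo layout.

package example

import (
	"context"

	"tangled.org/core/spindle"
	"tangled.org/core/spindle/config"
	"tangled.org/core/spindle/models"
)

func runWith(ctx context.Context, custom models.Engine) error {
	cfg, err := config.Load(ctx)
	if err != nil {
		return err
	}

	// engines are keyed by the workflow's `engine` field
	s, err := spindle.New(ctx, cfg, map[string]models.Engine{
		"custom": custom,
	})
	if err != nil {
		return err
	}

	// Start blocks: it runs the job queue, the knot event consumer and the HTTP server.
	return s.Start(ctx)
}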
+5
spindle/stream.go
···
if err := conn.WriteMessage(websocket.TextMessage, []byte(line.Text)); err != nil {
return fmt.Errorf("failed to write to websocket: %w", err)
}
+
case <-time.After(30 * time.Second):
+
// send a keep-alive
+
if err := conn.WriteControl(websocket.PingMessage, []byte{}, time.Now().Add(time.Second)); err != nil {
+
return fmt.Errorf("failed to write control: %w", err)
+
}
}
}
}
+199
types/commit.go
···
+
package types
+
+
import (
+
"bytes"
+
"encoding/json"
+
"fmt"
+
"maps"
+
"regexp"
+
"strings"
+
+
"github.com/go-git/go-git/v5/plumbing"
+
"github.com/go-git/go-git/v5/plumbing/object"
+
)
+
+
type Commit struct {
+
// hash of the commit object.
+
Hash plumbing.Hash `json:"hash,omitempty"`
+
+
// author is the original author of the commit.
+
Author object.Signature `json:"author"`
+
+
// committer is the one performing the commit, might be different from author.
+
Committer object.Signature `json:"committer"`
+
+
// message is the commit message, contains arbitrary text.
+
Message string `json:"message"`
+
+
// treehash is the hash of the root tree of the commit.
+
Tree string `json:"tree"`
+
+
// parents are the hashes of the parent commits of the commit.
+
ParentHashes []plumbing.Hash `json:"parent_hashes,omitempty"`
+
+
// pgpsignature is the pgp signature of the commit.
+
PGPSignature string `json:"pgp_signature,omitempty"`
+
+
// mergetag is the embedded tag object when a merge commit is created by
+
// merging a signed tag.
+
MergeTag string `json:"merge_tag,omitempty"`
+
+
// changeid is a unique identifier for the change (e.g., gerrit change-id).
+
ChangeId string `json:"change_id,omitempty"`
+
+
// extraheaders contains additional headers not captured by other fields.
+
ExtraHeaders map[string][]byte `json:"extra_headers,omitempty"`
+
+
// deprecated: kept for backwards compatibility with old json format.
+
This string `json:"this,omitempty"`
+
+
// deprecated: kept for backwards compatibility with old json format.
+
Parent string `json:"parent,omitempty"`
+
}
+
+
// types.Commit unifies two commit structs:
+
// - object.Commit from go-git
+
// - types.NiceDiff.Commit
+
//
+
// to do this in a backwards-compatible fashion, we define the base struct
+
// to use the same fields as NiceDiff.Commit, and we also unmarshal the
+
// struct fields from the go-git struct; this custom unmarshal makes sense
+
// of both representations and unifies them to carry maximal data in either
+
// form.
+
func (c *Commit) UnmarshalJSON(data []byte) error {
+
type Alias Commit
+
+
aux := &struct {
+
*object.Commit
+
*Alias
+
}{
+
Alias: (*Alias)(c),
+
}
+
+
if err := json.Unmarshal(data, aux); err != nil {
+
return err
+
}
+
+
c.FromGoGitCommit(aux.Commit)
+
+
return nil
+
}
+
+
// fill in as much of Commit as possible from the given go-git commit
+
func (c *Commit) FromGoGitCommit(gc *object.Commit) {
+
if gc == nil {
+
return
+
}
+
+
if c.Hash.IsZero() {
+
c.Hash = gc.Hash
+
}
+
if c.This == "" {
+
c.This = gc.Hash.String()
+
}
+
if isEmptySignature(c.Author) {
+
c.Author = gc.Author
+
}
+
if isEmptySignature(c.Committer) {
+
c.Committer = gc.Committer
+
}
+
if c.Message == "" {
+
c.Message = gc.Message
+
}
+
if c.Tree == "" {
+
c.Tree = gc.TreeHash.String()
+
}
+
if c.PGPSignature == "" {
+
c.PGPSignature = gc.PGPSignature
+
}
+
if c.MergeTag == "" {
+
c.MergeTag = gc.MergeTag
+
}
+
+
if len(c.ParentHashes) == 0 {
+
c.ParentHashes = gc.ParentHashes
+
}
+
if c.Parent == "" && len(gc.ParentHashes) > 0 {
+
c.Parent = gc.ParentHashes[0].String()
+
}
+
+
if len(c.ExtraHeaders) == 0 {
+
c.ExtraHeaders = make(map[string][]byte)
+
maps.Copy(c.ExtraHeaders, gc.ExtraHeaders)
+
}
+
+
if c.ChangeId == "" {
+
if v, ok := gc.ExtraHeaders["change-id"]; ok {
+
c.ChangeId = string(v)
+
}
+
}
+
}
+
+
func isEmptySignature(s object.Signature) bool {
+
return s.Email == "" && s.Name == "" && s.When.IsZero()
+
}
+
+
// produce a verifiable payload from this commit's metadata
+
func (c *Commit) Payload() string {
+
author := bytes.NewBuffer([]byte{})
+
c.Author.Encode(author)
+
+
committer := bytes.NewBuffer([]byte{})
+
c.Committer.Encode(committer)
+
+
payload := strings.Builder{}
+
+
fmt.Fprintf(&payload, "tree %s\n", c.Tree)
+
+
if len(c.ParentHashes) > 0 {
+
for _, p := range c.ParentHashes {
+
fmt.Fprintf(&payload, "parent %s\n", p.String())
+
}
+
} else {
+
// present for backwards compatibility
+
fmt.Fprintf(&payload, "parent %s\n", c.Parent)
+
}
+
+
fmt.Fprintf(&payload, "author %s\n", author.String())
+
fmt.Fprintf(&payload, "committer %s\n", committer.String())
+
+
if c.ChangeId != "" {
+
fmt.Fprintf(&payload, "change-id %s\n", c.ChangeId)
+
} else if v, ok := c.ExtraHeaders["change-id"]; ok {
+
fmt.Fprintf(&payload, "change-id %s\n", string(v))
+
}
+
+
fmt.Fprintf(&payload, "\n%s", c.Message)
+
+
return payload.String()
+
}
+
+
var (
+
coAuthorRegex = regexp.MustCompile(`(?im)^Co-authored-by:\s*(.+?)\s*<([^>]+)>`)
+
)
+
+
func (commit Commit) CoAuthors() []object.Signature {
+
var coAuthors []object.Signature
+
seen := make(map[string]bool)
+
matches := coAuthorRegex.FindAllStringSubmatch(commit.Message, -1)
+
+
for _, match := range matches {
+
if len(match) >= 3 {
+
name := strings.TrimSpace(match[1])
+
email := strings.TrimSpace(match[2])
+
+
if seen[email] {
+
continue
+
}
+
seen[email] = true
+
+
coAuthors = append(coAuthors, object.Signature{
+
Name: name,
+
Email: email,
+
When: commit.Committer.When,
+
})
+
}
+
}
+
+
return coAuthors
+
}
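A quick sketch of CoAuthors in use; the commit message and import path are made up. Duplicate trailers are folded by email, and each co-author inherits the committer's timestamp.

package main

import (
	"fmt"

	"tangled.org/core/types"
)

func main() {
	c := types.Commit{
		Message: "fix parser\n\n" +
			"Co-authored-by: Jane Doe <jane@example.com>\n" +
			"co-authored-by: Jane Doe <jane@example.com>\n",
	}

	for _, sig := range c.CoAuthors() {
		fmt.Printf("%s <%s>\n", sig.Name, sig.Email)
	}
	// Output: Jane Doe <jane@example.com>
}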
+2 -12
types/diff.go
···
import (
"github.com/bluekeyes/go-gitdiff/gitdiff"
-
"github.com/go-git/go-git/v5/plumbing/object"
)
type DiffOpts struct {
···
// A nicer git diff representation.
type NiceDiff struct {
-
Commit struct {
-
Message string `json:"message"`
-
Author object.Signature `json:"author"`
-
This string `json:"this"`
-
Parent string `json:"parent"`
-
PGPSignature string `json:"pgp_signature"`
-
Committer object.Signature `json:"committer"`
-
Tree string `json:"tree"`
-
ChangedId string `json:"change_id"`
-
} `json:"commit"`
-
Stat struct {
+
Commit Commit `json:"commit"`
+
Stat struct {
FilesChanged int `json:"files_changed"`
Insertions int `json:"insertions"`
Deletions int `json:"deletions"`
+39 -18
types/repo.go
···
package types
import (
+
"encoding/json"
+
"github.com/bluekeyes/go-gitdiff/gitdiff"
"github.com/go-git/go-git/v5/plumbing/object"
)
type RepoIndexResponse struct {
-
IsEmpty bool `json:"is_empty"`
-
Ref string `json:"ref,omitempty"`
-
Readme string `json:"readme,omitempty"`
-
ReadmeFileName string `json:"readme_file_name,omitempty"`
-
Commits []*object.Commit `json:"commits,omitempty"`
-
Description string `json:"description,omitempty"`
-
Files []NiceTree `json:"files,omitempty"`
-
Branches []Branch `json:"branches,omitempty"`
-
Tags []*TagReference `json:"tags,omitempty"`
-
TotalCommits int `json:"total_commits,omitempty"`
+
IsEmpty bool `json:"is_empty"`
+
Ref string `json:"ref,omitempty"`
+
Readme string `json:"readme,omitempty"`
+
ReadmeFileName string `json:"readme_file_name,omitempty"`
+
Commits []Commit `json:"commits,omitempty"`
+
Description string `json:"description,omitempty"`
+
Files []NiceTree `json:"files,omitempty"`
+
Branches []Branch `json:"branches,omitempty"`
+
Tags []*TagReference `json:"tags,omitempty"`
+
TotalCommits int `json:"total_commits,omitempty"`
}
type RepoLogResponse struct {
-
Commits []*object.Commit `json:"commits,omitempty"`
-
Ref string `json:"ref,omitempty"`
-
Description string `json:"description,omitempty"`
-
Log bool `json:"log,omitempty"`
-
Total int `json:"total,omitempty"`
-
Page int `json:"page,omitempty"`
-
PerPage int `json:"per_page,omitempty"`
+
Commits []Commit `json:"commits,omitempty"`
+
Ref string `json:"ref,omitempty"`
+
Description string `json:"description,omitempty"`
+
Log bool `json:"log,omitempty"`
+
Total int `json:"total,omitempty"`
+
Page int `json:"page,omitempty"`
+
PerPage int `json:"per_page,omitempty"`
}
type RepoCommitResponse struct {
···
type Branch struct {
Reference `json:"reference"`
Commit *object.Commit `json:"commit,omitempty"`
-
IsDefault bool `json:"is_deafult,omitempty"`
+
IsDefault bool `json:"is_default,omitempty"`
+
}
+
+
func (b *Branch) UnmarshalJSON(data []byte) error {
+
aux := &struct {
+
Reference `json:"reference"`
+
Commit *object.Commit `json:"commit,omitempty"`
+
IsDefault bool `json:"is_default,omitempty"`
+
MispelledIsDefault bool `json:"is_deafult,omitempty"` // mispelled name
+
}{}
+
+
if err := json.Unmarshal(data, aux); err != nil {
+
return err
+
}
+
+
b.Reference = aux.Reference
+
b.Commit = aux.Commit
+
b.IsDefault = aux.IsDefault || aux.MispelledIsDefault // whichever was set
+
+
return nil
}
type RepoTagsResponse struct {
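A small sketch of the tolerant decoding above: both the old misspelled key and the corrected one set IsDefault. The JSON literals and import path are illustrative.

package main

import (
	"encoding/json"
	"fmt"

	"tangled.org/core/types"
)

func main() {
	oldPayload := []byte(`{"reference": {}, "is_deafult": true}`) // what older knots emitted
	newPayload := []byte(`{"reference": {}, "is_default": true}`)

	var a, b types.Branch
	_ = json.Unmarshal(oldPayload, &a)
	_ = json.Unmarshal(newPayload, &b)

	fmt.Println(a.IsDefault, b.IsDefault) // true true
}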
+88 -5
types/tree.go
···
package types
import (
+
"fmt"
+
"os"
"time"
"github.com/go-git/go-git/v5/plumbing"
+
"github.com/go-git/go-git/v5/plumbing/filemode"
)
// A nicer git tree representation.
type NiceTree struct {
// Relative path
-
Name string `json:"name"`
-
Mode string `json:"mode"`
-
Size int64 `json:"size"`
-
IsFile bool `json:"is_file"`
-
IsSubtree bool `json:"is_subtree"`
+
Name string `json:"name"`
+
Mode string `json:"mode"`
+
Size int64 `json:"size"`
LastCommit *LastCommitInfo `json:"last_commit,omitempty"`
+
}
+
+
func (t *NiceTree) FileMode() (filemode.FileMode, error) {
+
if numericMode, err := filemode.New(t.Mode); err == nil {
+
return numericMode, nil
+
}
+
+
// TODO: this is here for backwards compat, can be removed in future versions
+
osMode, err := parseModeString(t.Mode)
+
if err != nil {
+
return filemode.Empty, err
+
}
+
+
conv, err := filemode.NewFromOSFileMode(osMode)
+
if err != nil {
+
return filemode.Empty, err
+
}
+
+
return conv, nil
+
}
+
+
// parseModeString parses a file mode string like "-rw-r--r--"
+
// and returns an os.FileMode
+
func parseModeString(modeStr string) (os.FileMode, error) {
+
if len(modeStr) != 10 {
+
return 0, fmt.Errorf("invalid mode string length: expected 10, got %d", len(modeStr))
+
}
+
+
var mode os.FileMode
+
+
// Parse file type (first character)
+
switch modeStr[0] {
+
case 'd':
+
mode |= os.ModeDir
+
case 'l':
+
mode |= os.ModeSymlink
+
case '-':
+
// regular file
+
default:
+
return 0, fmt.Errorf("unknown file type: %c", modeStr[0])
+
}
+
+
// parse permissions for owner, group, and other
+
perms := modeStr[1:]
+
shifts := []int{6, 3, 0} // bit shifts for owner, group, other
+
+
for i := range 3 {
+
offset := i * 3
+
shift := shifts[i]
+
+
if perms[offset] == 'r' {
+
mode |= os.FileMode(4 << shift)
+
}
+
if perms[offset+1] == 'w' {
+
mode |= os.FileMode(2 << shift)
+
}
+
if perms[offset+2] == 'x' {
+
mode |= os.FileMode(1 << shift)
+
}
+
}
+
+
return mode, nil
+
}
+
+
func (t *NiceTree) IsFile() bool {
+
m, err := t.FileMode()
+
+
if err != nil {
+
return false
+
}
+
+
return m.IsFile()
+
}
+
+
func (t *NiceTree) IsSubmodule() bool {
+
m, err := t.FileMode()
+
+
if err != nil {
+
return false
+
}
+
+
return m == filemode.Submodule
}
type LastCommitInfo struct {
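NiceTree.FileMode above first tries go-git's octal parser and only falls back to the legacy symbolic form. A minimal sketch of both paths, with file names and modes chosen for illustration and the tangled.org/core/types import path assumed:

package main

import (
	"fmt"

	"tangled.org/core/types"
)

func main() {
	// current knots send the raw octal mode that filemode.New understands
	octal := types.NiceTree{Name: "main.go", Mode: "100644"}
	// older knots sent the symbolic form, handled by the parseModeString fallback
	symbolic := types.NiceTree{Name: "main.go", Mode: "-rw-r--r--"}

	for _, entry := range []types.NiceTree{octal, symbolic} {
		mode, _ := entry.FileMode()
		fmt.Println(entry.Mode, "->", mode, "file:", entry.IsFile())
	}
}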
+9 -1
workflow/compile.go
···
func (compiler *Compiler) compileWorkflow(w Workflow) *tangled.Pipeline_Workflow {
cw := &tangled.Pipeline_Workflow{}
-
if !w.Match(compiler.Trigger) {
+
matched, err := w.Match(compiler.Trigger)
+
if err != nil {
+
compiler.Diagnostics.AddError(
+
w.Name,
+
fmt.Errorf("failed to execute workflow: %w", err),
+
)
+
return nil
+
}
+
if !matched {
compiler.Diagnostics.AddWarning(
w.Name,
WorkflowSkipped,
+125
workflow/compile_test.go
···
assert.Len(t, c.Diagnostics.Errors, 1)
assert.Equal(t, MissingEngine, c.Diagnostics.Errors[0].Error)
}
+
+
func TestCompileWorkflow_MultipleBranchAndTag(t *testing.T) {
+
wf := Workflow{
+
Name: ".tangled/workflows/branch_and_tag.yml",
+
When: []Constraint{
+
{
+
Event: []string{"push"},
+
Branch: []string{"main", "develop"},
+
Tag: []string{"v*"},
+
},
+
},
+
Engine: "nixery",
+
}
+
+
tests := []struct {
+
name string
+
trigger tangled.Pipeline_TriggerMetadata
+
shouldMatch bool
+
expectedCount int
+
}{
+
{
+
name: "matches main branch",
+
trigger: tangled.Pipeline_TriggerMetadata{
+
Kind: string(TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
Ref: "refs/heads/main",
+
OldSha: strings.Repeat("0", 40),
+
NewSha: strings.Repeat("f", 40),
+
},
+
},
+
shouldMatch: true,
+
expectedCount: 1,
+
},
+
{
+
name: "matches develop branch",
+
trigger: tangled.Pipeline_TriggerMetadata{
+
Kind: string(TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
Ref: "refs/heads/develop",
+
OldSha: strings.Repeat("0", 40),
+
NewSha: strings.Repeat("f", 40),
+
},
+
},
+
shouldMatch: true,
+
expectedCount: 1,
+
},
+
{
+
name: "matches v* tag pattern",
+
trigger: tangled.Pipeline_TriggerMetadata{
+
Kind: string(TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
Ref: "refs/tags/v1.0.0",
+
OldSha: strings.Repeat("0", 40),
+
NewSha: strings.Repeat("f", 40),
+
},
+
},
+
shouldMatch: true,
+
expectedCount: 1,
+
},
+
{
+
name: "matches v* tag pattern with different version",
+
trigger: tangled.Pipeline_TriggerMetadata{
+
Kind: string(TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
Ref: "refs/tags/v2.5.3",
+
OldSha: strings.Repeat("0", 40),
+
NewSha: strings.Repeat("f", 40),
+
},
+
},
+
shouldMatch: true,
+
expectedCount: 1,
+
},
+
{
+
name: "does not match master branch",
+
trigger: tangled.Pipeline_TriggerMetadata{
+
Kind: string(TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
Ref: "refs/heads/master",
+
OldSha: strings.Repeat("0", 40),
+
NewSha: strings.Repeat("f", 40),
+
},
+
},
+
shouldMatch: false,
+
expectedCount: 0,
+
},
+
{
+
name: "does not match non-v tag",
+
trigger: tangled.Pipeline_TriggerMetadata{
+
Kind: string(TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
Ref: "refs/tags/release-1.0",
+
OldSha: strings.Repeat("0", 40),
+
NewSha: strings.Repeat("f", 40),
+
},
+
},
+
shouldMatch: false,
+
expectedCount: 0,
+
},
+
{
+
name: "does not match feature branch",
+
trigger: tangled.Pipeline_TriggerMetadata{
+
Kind: string(TriggerKindPush),
+
Push: &tangled.Pipeline_PushTriggerData{
+
Ref: "refs/heads/feature/new-feature",
+
OldSha: strings.Repeat("0", 40),
+
NewSha: strings.Repeat("f", 40),
+
},
+
},
+
shouldMatch: false,
+
expectedCount: 0,
+
},
+
}
+
+
for _, tt := range tests {
+
t.Run(tt.name, func(t *testing.T) {
+
c := Compiler{Trigger: tt.trigger}
+
cp := c.Compile([]Workflow{wf})
+
+
assert.Len(t, cp.Workflows, tt.expectedCount)
+
if tt.shouldMatch {
+
assert.Equal(t, wf.Name, cp.Workflows[0].Name)
+
}
+
})
+
}
+
}
+61 -19
workflow/def.go
···
"tangled.org/core/api/tangled"
+
"github.com/bmatcuk/doublestar/v4"
"github.com/go-git/go-git/v5/plumbing"
"gopkg.in/yaml.v3"
)
···
Constraint struct {
Event StringList `yaml:"event"`
-
Branch StringList `yaml:"branch"` // this is optional, and only applied on "push" events
+
Branch StringList `yaml:"branch"` // required for pull_request; for push, either branch or tag must be specified
+
Tag StringList `yaml:"tag"` // optional; only applies to push events
}
CloneOpts struct {
···
return strings.ReplaceAll(string(t), "_", " ")
}
+
// matchesPattern checks if a name matches any of the given patterns.
+
// Patterns can be exact matches or glob patterns using * and **.
+
// * matches any sequence of non-separator characters
+
// ** matches any sequence of characters including separators
+
func matchesPattern(name string, patterns []string) (bool, error) {
+
for _, pattern := range patterns {
+
matched, err := doublestar.Match(pattern, name)
+
if err != nil {
+
return false, err
+
}
+
if matched {
+
return true, nil
+
}
+
}
+
return false, nil
+
}
+
func FromFile(name string, contents []byte) (Workflow, error) {
var wf Workflow
···
}
// if any of the constraints on a workflow is true, return true
-
func (w *Workflow) Match(trigger tangled.Pipeline_TriggerMetadata) bool {
+
func (w *Workflow) Match(trigger tangled.Pipeline_TriggerMetadata) (bool, error) {
// manual triggers always run the workflow
if trigger.Manual != nil {
-
return true
+
return true, nil
}
// if not manual, run through the constraint list and see if any one matches
for _, c := range w.When {
-
if c.Match(trigger) {
-
return true
+
matched, err := c.Match(trigger)
+
if err != nil {
+
return false, err
+
}
+
if matched {
+
return true, nil
}
}
// no constraints, always run this workflow
if len(w.When) == 0 {
-
return true
+
return true, nil
}
-
return false
+
return false, nil
}
-
func (c *Constraint) Match(trigger tangled.Pipeline_TriggerMetadata) bool {
+
func (c *Constraint) Match(trigger tangled.Pipeline_TriggerMetadata) (bool, error) {
match := true
// manual triggers always pass this constraint
if trigger.Manual != nil {
-
return true
+
return true, nil
}
// apply event constraints
···
// apply branch constraints for PRs
if trigger.PullRequest != nil {
-
match = match && c.MatchBranch(trigger.PullRequest.TargetBranch)
+
matched, err := c.MatchBranch(trigger.PullRequest.TargetBranch)
+
if err != nil {
+
return false, err
+
}
+
match = match && matched
}
// apply ref constraints for pushes
if trigger.Push != nil {
-
match = match && c.MatchRef(trigger.Push.Ref)
+
matched, err := c.MatchRef(trigger.Push.Ref)
+
if err != nil {
+
return false, err
+
}
+
match = match && matched
}
-
return match
-
}
-
-
func (c *Constraint) MatchBranch(branch string) bool {
-
return slices.Contains(c.Branch, branch)
+
return match, nil
}
-
func (c *Constraint) MatchRef(ref string) bool {
+
func (c *Constraint) MatchRef(ref string) (bool, error) {
refName := plumbing.ReferenceName(ref)
+
shortName := refName.Short()
+
if refName.IsBranch() {
-
return slices.Contains(c.Branch, refName.Short())
+
return c.MatchBranch(shortName)
}
-
return false
+
+
if refName.IsTag() {
+
return c.MatchTag(shortName)
+
}
+
+
return false, nil
+
}
+
+
func (c *Constraint) MatchBranch(branch string) (bool, error) {
+
return matchesPattern(branch, c.Branch)
+
}
+
+
func (c *Constraint) MatchTag(tag string) (bool, error) {
+
return matchesPattern(tag, c.Tag)
}
func (c *Constraint) MatchEvent(event string) bool {
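Branch and tag constraints above now go through doublestar, so '*' stops at path separators while '**' crosses them. A small sketch of those semantics, mirroring the cases exercised in the tests below:

package main

import (
	"fmt"

	"github.com/bmatcuk/doublestar/v4"
)

func main() {
	fmt.Println(doublestar.Match("v*", "v1.2.3"))                      // true <nil>
	fmt.Println(doublestar.Match("release/*", "release/1.0"))         // true <nil>
	fmt.Println(doublestar.Match("release/*", "release/1.0/hotfix"))  // false <nil>: '*' stops at '/'
	fmt.Println(doublestar.Match("release/**", "release/1.0/hotfix")) // true <nil>: '**' crosses '/'
}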
+284 -1
workflow/def_test.go
···
"github.com/stretchr/testify/assert"
)
-
func TestUnmarshalWorkflow(t *testing.T) {
+
func TestUnmarshalWorkflowWithBranch(t *testing.T) {
yamlData := `
when:
- event: ["push", "pull_request"]
···
assert.True(t, wf.CloneOpts.Skip, "Skip should be true")
}
+
+
func TestUnmarshalWorkflowWithTags(t *testing.T) {
+
yamlData := `
+
when:
+
- event: ["push"]
+
tag: ["v*", "release-*"]`
+
+
wf, err := FromFile("test.yml", []byte(yamlData))
+
assert.NoError(t, err, "YAML should unmarshal without error")
+
+
assert.Len(t, wf.When, 1, "Should have one constraint")
+
assert.ElementsMatch(t, []string{"v*", "release-*"}, wf.When[0].Tag)
+
assert.ElementsMatch(t, []string{"push"}, wf.When[0].Event)
+
}
+
+
func TestUnmarshalWorkflowWithBranchAndTag(t *testing.T) {
+
yamlData := `
+
when:
+
- event: ["push"]
+
branch: ["main", "develop"]
+
tag: ["v*"]`
+
+
wf, err := FromFile("test.yml", []byte(yamlData))
+
assert.NoError(t, err, "YAML should unmarshal without error")
+
+
assert.Len(t, wf.When, 1, "Should have one constraint")
+
assert.ElementsMatch(t, []string{"main", "develop"}, wf.When[0].Branch)
+
assert.ElementsMatch(t, []string{"v*"}, wf.When[0].Tag)
+
}
+
+
func TestMatchesPattern(t *testing.T) {
+
tests := []struct {
+
name string
+
input string
+
patterns []string
+
expected bool
+
}{
+
{"exact match", "main", []string{"main"}, true},
+
{"exact match in list", "develop", []string{"main", "develop"}, true},
+
{"no match", "feature", []string{"main", "develop"}, false},
+
{"wildcard prefix", "v1.0.0", []string{"v*"}, true},
+
{"wildcard suffix", "release-1.0", []string{"*-1.0"}, true},
+
{"wildcard middle", "feature-123-test", []string{"feature-*-test"}, true},
+
{"double star prefix", "release-1.0.0", []string{"release-**"}, true},
+
{"double star with slashes", "release/1.0/hotfix", []string{"release/**"}, true},
+
{"double star matches multiple levels", "foo/bar/baz/qux", []string{"foo/**"}, true},
+
{"double star no match", "feature/test", []string{"release/**"}, false},
+
{"no patterns matches nothing", "anything", []string{}, false},
+
{"pattern doesn't match", "v1.0.0", []string{"release-*"}, false},
+
{"complex pattern", "release/v1.2.3", []string{"release/*"}, true},
+
{"single star stops at slash", "release/1.0/hotfix", []string{"release/*"}, false},
+
}
+
+
for _, tt := range tests {
+
t.Run(tt.name, func(t *testing.T) {
+
result, _ := matchesPattern(tt.input, tt.patterns)
+
assert.Equal(t, tt.expected, result, "matchesPattern(%q, %v) should be %v", tt.input, tt.patterns, tt.expected)
+
})
+
}
+
}
+
+
func TestConstraintMatchRef_Branches(t *testing.T) {
+
tests := []struct {
+
name string
+
constraint Constraint
+
ref string
+
expected bool
+
}{
+
{
+
name: "exact branch match",
+
constraint: Constraint{Branch: []string{"main"}},
+
ref: "refs/heads/main",
+
expected: true,
+
},
+
{
+
name: "branch glob match",
+
constraint: Constraint{Branch: []string{"feature-*"}},
+
ref: "refs/heads/feature-123",
+
expected: true,
+
},
+
{
+
name: "branch no match",
+
constraint: Constraint{Branch: []string{"main"}},
+
ref: "refs/heads/develop",
+
expected: false,
+
},
+
{
+
name: "no constraints matches nothing",
+
constraint: Constraint{},
+
ref: "refs/heads/anything",
+
expected: false,
+
},
+
}
+
+
for _, tt := range tests {
+
t.Run(tt.name, func(t *testing.T) {
+
result, _ := tt.constraint.MatchRef(tt.ref)
+
assert.Equal(t, tt.expected, result, "MatchRef should return %v for ref %q", tt.expected, tt.ref)
+
})
+
}
+
}
+
+
func TestConstraintMatchRef_Tags(t *testing.T) {
+
tests := []struct {
+
name string
+
constraint Constraint
+
ref string
+
expected bool
+
}{
+
{
+
name: "exact tag match",
+
constraint: Constraint{Tag: []string{"v1.0.0"}},
+
ref: "refs/tags/v1.0.0",
+
expected: true,
+
},
+
{
+
name: "tag glob match",
+
constraint: Constraint{Tag: []string{"v*"}},
+
ref: "refs/tags/v1.2.3",
+
expected: true,
+
},
+
{
+
name: "tag glob with pattern",
+
constraint: Constraint{Tag: []string{"release-*"}},
+
ref: "refs/tags/release-2024",
+
expected: true,
+
},
+
{
+
name: "tag no match",
+
constraint: Constraint{Tag: []string{"v*"}},
+
ref: "refs/tags/release-1.0",
+
expected: false,
+
},
+
{
+
name: "tag not matched when only branch constraint",
+
constraint: Constraint{Branch: []string{"main"}},
+
ref: "refs/tags/v1.0.0",
+
expected: false,
+
},
+
}
+
+
for _, tt := range tests {
+
t.Run(tt.name, func(t *testing.T) {
+
result, _ := tt.constraint.MatchRef(tt.ref)
+
assert.Equal(t, tt.expected, result, "MatchRef should return %v for ref %q", tt.expected, tt.ref)
+
})
+
}
+
}
+
+
func TestConstraintMatchRef_Combined(t *testing.T) {
+
tests := []struct {
+
name string
+
constraint Constraint
+
ref string
+
expected bool
+
}{
+
{
+
name: "matches branch in combined constraint",
+
constraint: Constraint{Branch: []string{"main"}, Tag: []string{"v*"}},
+
ref: "refs/heads/main",
+
expected: true,
+
},
+
{
+
name: "matches tag in combined constraint",
+
constraint: Constraint{Branch: []string{"main"}, Tag: []string{"v*"}},
+
ref: "refs/tags/v1.0.0",
+
expected: true,
+
},
+
{
+
name: "no match in combined constraint",
+
constraint: Constraint{Branch: []string{"main"}, Tag: []string{"v*"}},
+
ref: "refs/heads/develop",
+
expected: false,
+
},
+
{
+
name: "glob patterns in combined constraint - branch",
+
constraint: Constraint{Branch: []string{"release-*"}, Tag: []string{"v*"}},
+
ref: "refs/heads/release-2024",
+
expected: true,
+
},
+
{
+
name: "glob patterns in combined constraint - tag",
+
constraint: Constraint{Branch: []string{"release-*"}, Tag: []string{"v*"}},
+
ref: "refs/tags/v2.0.0",
+
expected: true,
+
},
+
}
+
+
for _, tt := range tests {
+
t.Run(tt.name, func(t *testing.T) {
+
result, _ := tt.constraint.MatchRef(tt.ref)
+
assert.Equal(t, tt.expected, result, "MatchRef should return %v for ref %q", tt.expected, tt.ref)
+
})
+
}
+
}
+
+
func TestConstraintMatchBranch_GlobPatterns(t *testing.T) {
+
tests := []struct {
+
name string
+
constraint Constraint
+
branch string
+
expected bool
+
}{
+
{
+
name: "exact match",
+
constraint: Constraint{Branch: []string{"main"}},
+
branch: "main",
+
expected: true,
+
},
+
{
+
name: "glob match",
+
constraint: Constraint{Branch: []string{"feature-*"}},
+
branch: "feature-123",
+
expected: true,
+
},
+
{
+
name: "no match",
+
constraint: Constraint{Branch: []string{"main"}},
+
branch: "develop",
+
expected: false,
+
},
+
{
+
name: "multiple patterns with match",
+
constraint: Constraint{Branch: []string{"main", "release-*"}},
+
branch: "release-1.0",
+
expected: true,
+
},
+
}
+
+
for _, tt := range tests {
+
t.Run(tt.name, func(t *testing.T) {
+
result, _ := tt.constraint.MatchBranch(tt.branch)
+
assert.Equal(t, tt.expected, result, "MatchBranch should return %v for branch %q", tt.expected, tt.branch)
+
})
+
}
+
}
+
+
func TestConstraintMatchTag_GlobPatterns(t *testing.T) {
+
tests := []struct {
+
name string
+
constraint Constraint
+
tag string
+
expected bool
+
}{
+
{
+
name: "exact match",
+
constraint: Constraint{Tag: []string{"v1.0.0"}},
+
tag: "v1.0.0",
+
expected: true,
+
},
+
{
+
name: "glob match",
+
constraint: Constraint{Tag: []string{"v*"}},
+
tag: "v2.3.4",
+
expected: true,
+
},
+
{
+
name: "no match",
+
constraint: Constraint{Tag: []string{"v*"}},
+
tag: "release-1.0",
+
expected: false,
+
},
+
{
+
name: "multiple patterns with match",
+
constraint: Constraint{Tag: []string{"v*", "release-*"}},
+
tag: "release-2024",
+
expected: true,
+
},
+
{
+
name: "empty tag list matches nothing",
+
constraint: Constraint{Tag: []string{}},
+
tag: "v1.0.0",
+
expected: false,
+
},
+
}
+
+
for _, tt := range tests {
+
t.Run(tt.name, func(t *testing.T) {
+
result, _ := tt.constraint.MatchTag(tt.tag)
+
assert.Equal(t, tt.expected, result, "MatchTag should return %v for tag %q", tt.expected, tt.tag)
+
})
+
}
+
}