appview,lexicons: atprotate the mentions & references #761

merged
opened by boltless.me targeting master from feat/mentions

Storing references parsed from the markdown body in the atproto record and the DB. There can be many reference types considering the from/to type combinations, so both are stored as AT-URIs.

Using sql.Tx more to combine multiple DB queries into a single recoverable operation.

Note: Pulls don't have mentions/references yet

Signed-off-by: Seongmin Lee git@boltless.me

+667 -26
api/tangled/cbor_gen.go
···
cw := cbg.NewCborWriter(w)
-
fieldCount := 5
+
fieldCount := 7
if t.Body == nil {
fieldCount--
+
if t.Mentions == nil {
+
fieldCount--
+
}
+
+
if t.References == nil {
+
fieldCount--
+
}
+
if _, err := cw.Write(cbg.CborEncodeMajorType(cbg.MajMap, uint64(fieldCount))); err != nil {
return err
···
return err
+
// t.Mentions ([]string) (slice)
+
if t.Mentions != nil {
+
+
if len("mentions") > 1000000 {
+
return xerrors.Errorf("Value in field \"mentions\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("mentions"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("mentions")); err != nil {
+
return err
+
}
+
+
if len(t.Mentions) > 8192 {
+
return xerrors.Errorf("Slice value in field t.Mentions was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.Mentions))); err != nil {
+
return err
+
}
+
for _, v := range t.Mentions {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
+
// t.CreatedAt (string) (string)
if len("createdAt") > 1000000 {
return xerrors.Errorf("Value in field \"createdAt\" was too long")
···
if _, err := cw.WriteString(string(t.CreatedAt)); err != nil {
return err
+
+
// t.References ([]string) (slice)
+
if t.References != nil {
+
+
if len("references") > 1000000 {
+
return xerrors.Errorf("Value in field \"references\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("references"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("references")); err != nil {
+
return err
+
}
+
+
if len(t.References) > 8192 {
+
return xerrors.Errorf("Slice value in field t.References was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.References))); err != nil {
+
return err
+
}
+
for _, v := range t.References {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
return nil
···
n := extra
-
nameBuf := make([]byte, 9)
+
nameBuf := make([]byte, 10)
for i := uint64(0); i < n; i++ {
nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000)
if err != nil {
···
t.Title = string(sval)
+
// t.Mentions ([]string) (slice)
+
case "mentions":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.Mentions: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.Mentions = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.Mentions[i] = string(sval)
+
}
+
+
}
+
}
// t.CreatedAt (string) (string)
case "createdAt":
···
t.CreatedAt = string(sval)
+
// t.References ([]string) (slice)
+
case "references":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.References: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.References = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.References[i] = string(sval)
+
}
+
+
}
+
}
default:
// Field doesn't exist on this type, so ignore it
···
cw := cbg.NewCborWriter(w)
-
fieldCount := 5
+
fieldCount := 7
+
+
if t.Mentions == nil {
+
fieldCount--
+
}
+
+
if t.References == nil {
+
fieldCount--
+
}
if t.ReplyTo == nil {
fieldCount--
···
+
// t.Mentions ([]string) (slice)
+
if t.Mentions != nil {
+
+
if len("mentions") > 1000000 {
+
return xerrors.Errorf("Value in field \"mentions\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("mentions"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("mentions")); err != nil {
+
return err
+
}
+
+
if len(t.Mentions) > 8192 {
+
return xerrors.Errorf("Slice value in field t.Mentions was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.Mentions))); err != nil {
+
return err
+
}
+
for _, v := range t.Mentions {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
+
// t.CreatedAt (string) (string)
if len("createdAt") > 1000000 {
return xerrors.Errorf("Value in field \"createdAt\" was too long")
···
if _, err := cw.WriteString(string(t.CreatedAt)); err != nil {
return err
+
+
// t.References ([]string) (slice)
+
if t.References != nil {
+
+
if len("references") > 1000000 {
+
return xerrors.Errorf("Value in field \"references\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("references"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("references")); err != nil {
+
return err
+
}
+
+
if len(t.References) > 8192 {
+
return xerrors.Errorf("Slice value in field t.References was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.References))); err != nil {
+
return err
+
}
+
for _, v := range t.References {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
return nil
···
n := extra
-
nameBuf := make([]byte, 9)
+
nameBuf := make([]byte, 10)
for i := uint64(0); i < n; i++ {
nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000)
if err != nil {
···
t.ReplyTo = (*string)(&sval)
+
// t.Mentions ([]string) (slice)
+
case "mentions":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.Mentions: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.Mentions = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.Mentions[i] = string(sval)
+
}
+
+
}
+
}
// t.CreatedAt (string) (string)
case "createdAt":
···
t.CreatedAt = string(sval)
+
// t.References ([]string) (slice)
+
case "references":
-
default:
-
// Field doesn't exist on this type, so ignore it
-
if err := cbg.ScanForLinks(r, func(cid.Cid) {}); err != nil {
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
return err
-
}
-
}
-
return nil
-
}
-
func (t *RepoIssueState) MarshalCBOR(w io.Writer) error {
-
if t == nil {
-
_, err := w.Write(cbg.CborNull)
-
return err
-
}
+
if extra > 8192 {
+
return fmt.Errorf("t.References: array too large (%d)", extra)
+
}
-
cw := cbg.NewCborWriter(w)
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
-
if _, err := cw.Write([]byte{163}); err != nil {
-
return err
-
}
+
if extra > 0 {
+
t.References = make([]string, extra)
+
}
-
// t.LexiconTypeID (string) (string)
-
if len("$type") > 1000000 {
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.References[i] = string(sval)
+
}
+
+
}
+
}
+
+
default:
+
// Field doesn't exist on this type, so ignore it
+
if err := cbg.ScanForLinks(r, func(cid.Cid) {}); err != nil {
+
return err
+
}
+
}
+
}
+
+
return nil
+
}
+
func (t *RepoIssueState) MarshalCBOR(w io.Writer) error {
+
if t == nil {
+
_, err := w.Write(cbg.CborNull)
+
return err
+
}
+
+
cw := cbg.NewCborWriter(w)
+
+
if _, err := cw.Write([]byte{163}); err != nil {
+
return err
+
}
+
+
// t.LexiconTypeID (string) (string)
+
if len("$type") > 1000000 {
return xerrors.Errorf("Value in field \"$type\" was too long")
···
cw := cbg.NewCborWriter(w)
-
fieldCount := 7
+
fieldCount := 9
if t.Body == nil {
fieldCount--
+
if t.Mentions == nil {
+
fieldCount--
+
}
+
+
if t.References == nil {
+
fieldCount--
+
}
+
if t.Source == nil {
fieldCount--
···
return err
+
// t.Mentions ([]string) (slice)
+
if t.Mentions != nil {
+
+
if len("mentions") > 1000000 {
+
return xerrors.Errorf("Value in field \"mentions\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("mentions"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("mentions")); err != nil {
+
return err
+
}
+
+
if len(t.Mentions) > 8192 {
+
return xerrors.Errorf("Slice value in field t.Mentions was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.Mentions))); err != nil {
+
return err
+
}
+
for _, v := range t.Mentions {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
+
// t.CreatedAt (string) (string)
if len("createdAt") > 1000000 {
return xerrors.Errorf("Value in field \"createdAt\" was too long")
···
if _, err := cw.WriteString(string(t.CreatedAt)); err != nil {
return err
+
+
// t.References ([]string) (slice)
+
if t.References != nil {
+
+
if len("references") > 1000000 {
+
return xerrors.Errorf("Value in field \"references\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("references"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("references")); err != nil {
+
return err
+
}
+
+
if len(t.References) > 8192 {
+
return xerrors.Errorf("Slice value in field t.References was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.References))); err != nil {
+
return err
+
}
+
for _, v := range t.References {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
return nil
···
n := extra
-
nameBuf := make([]byte, 9)
+
nameBuf := make([]byte, 10)
for i := uint64(0); i < n; i++ {
nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000)
if err != nil {
···
+
// t.Mentions ([]string) (slice)
+
case "mentions":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.Mentions: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.Mentions = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.Mentions[i] = string(sval)
+
}
+
+
}
+
}
// t.CreatedAt (string) (string)
case "createdAt":
···
t.CreatedAt = string(sval)
+
// t.References ([]string) (slice)
+
case "references":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.References: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.References = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.References[i] = string(sval)
+
}
+
+
}
+
}
default:
// Field doesn't exist on this type, so ignore it
···
cw := cbg.NewCborWriter(w)
+
fieldCount := 6
-
if _, err := cw.Write([]byte{164}); err != nil {
+
if t.Mentions == nil {
+
fieldCount--
+
}
+
+
if t.References == nil {
+
fieldCount--
+
}
+
+
if _, err := cw.Write(cbg.CborEncodeMajorType(cbg.MajMap, uint64(fieldCount))); err != nil {
return err
···
return err
+
// t.Mentions ([]string) (slice)
+
if t.Mentions != nil {
+
+
if len("mentions") > 1000000 {
+
return xerrors.Errorf("Value in field \"mentions\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("mentions"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("mentions")); err != nil {
+
return err
+
}
+
+
if len(t.Mentions) > 8192 {
+
return xerrors.Errorf("Slice value in field t.Mentions was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.Mentions))); err != nil {
+
return err
+
}
+
for _, v := range t.Mentions {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
+
// t.CreatedAt (string) (string)
if len("createdAt") > 1000000 {
return xerrors.Errorf("Value in field \"createdAt\" was too long")
···
if _, err := cw.WriteString(string(t.CreatedAt)); err != nil {
return err
+
+
// t.References ([]string) (slice)
+
if t.References != nil {
+
+
if len("references") > 1000000 {
+
return xerrors.Errorf("Value in field \"references\" was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("references"))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string("references")); err != nil {
+
return err
+
}
+
+
if len(t.References) > 8192 {
+
return xerrors.Errorf("Slice value in field t.References was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajArray, uint64(len(t.References))); err != nil {
+
return err
+
}
+
for _, v := range t.References {
+
if len(v) > 1000000 {
+
return xerrors.Errorf("Value in field v was too long")
+
}
+
+
if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(v))); err != nil {
+
return err
+
}
+
if _, err := cw.WriteString(string(v)); err != nil {
+
return err
+
}
+
+
}
+
}
return nil
···
n := extra
-
nameBuf := make([]byte, 9)
+
nameBuf := make([]byte, 10)
for i := uint64(0); i < n; i++ {
nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000)
if err != nil {
···
t.LexiconTypeID = string(sval)
+
// t.Mentions ([]string) (slice)
+
case "mentions":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.Mentions: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.Mentions = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.Mentions[i] = string(sval)
+
}
+
+
}
+
}
// t.CreatedAt (string) (string)
case "createdAt":
···
t.CreatedAt = string(sval)
+
// t.References ([]string) (slice)
+
case "references":
+
+
maj, extra, err = cr.ReadHeader()
+
if err != nil {
+
return err
+
}
+
+
if extra > 8192 {
+
return fmt.Errorf("t.References: array too large (%d)", extra)
+
}
+
+
if maj != cbg.MajArray {
+
return fmt.Errorf("expected cbor array")
+
}
+
+
if extra > 0 {
+
t.References = make([]string, extra)
+
}
+
+
for i := 0; i < int(extra); i++ {
+
{
+
var maj byte
+
var extra uint64
+
var err error
+
_ = maj
+
_ = extra
+
_ = err
+
+
{
+
sval, err := cbg.ReadStringWithMax(cr, 1000000)
+
if err != nil {
+
return err
+
}
+
+
t.References[i] = string(sval)
+
}
+
+
}
+
}
default:
// Field doesn't exist on this type, so ignore it
+7 -5
api/tangled/issuecomment.go
···
} //
// RECORDTYPE: RepoIssueComment
type RepoIssueComment struct {
-
LexiconTypeID string `json:"$type,const=sh.tangled.repo.issue.comment" cborgen:"$type,const=sh.tangled.repo.issue.comment"`
-
Body string `json:"body" cborgen:"body"`
-
CreatedAt string `json:"createdAt" cborgen:"createdAt"`
-
Issue string `json:"issue" cborgen:"issue"`
-
ReplyTo *string `json:"replyTo,omitempty" cborgen:"replyTo,omitempty"`
+
LexiconTypeID string `json:"$type,const=sh.tangled.repo.issue.comment" cborgen:"$type,const=sh.tangled.repo.issue.comment"`
+
Body string `json:"body" cborgen:"body"`
+
CreatedAt string `json:"createdAt" cborgen:"createdAt"`
+
Issue string `json:"issue" cborgen:"issue"`
+
Mentions []string `json:"mentions,omitempty" cborgen:"mentions,omitempty"`
+
References []string `json:"references,omitempty" cborgen:"references,omitempty"`
+
ReplyTo *string `json:"replyTo,omitempty" cborgen:"replyTo,omitempty"`
}
+6 -4
api/tangled/pullcomment.go
···
} //
// RECORDTYPE: RepoPullComment
type RepoPullComment struct {
-
LexiconTypeID string `json:"$type,const=sh.tangled.repo.pull.comment" cborgen:"$type,const=sh.tangled.repo.pull.comment"`
-
Body string `json:"body" cborgen:"body"`
-
CreatedAt string `json:"createdAt" cborgen:"createdAt"`
-
Pull string `json:"pull" cborgen:"pull"`
+
LexiconTypeID string `json:"$type,const=sh.tangled.repo.pull.comment" cborgen:"$type,const=sh.tangled.repo.pull.comment"`
+
Body string `json:"body" cborgen:"body"`
+
CreatedAt string `json:"createdAt" cborgen:"createdAt"`
+
Mentions []string `json:"mentions,omitempty" cborgen:"mentions,omitempty"`
+
Pull string `json:"pull" cborgen:"pull"`
+
References []string `json:"references,omitempty" cborgen:"references,omitempty"`
}
+7 -5
api/tangled/repoissue.go
···
} //
// RECORDTYPE: RepoIssue
type RepoIssue struct {
-
LexiconTypeID string `json:"$type,const=sh.tangled.repo.issue" cborgen:"$type,const=sh.tangled.repo.issue"`
-
Body *string `json:"body,omitempty" cborgen:"body,omitempty"`
-
CreatedAt string `json:"createdAt" cborgen:"createdAt"`
-
Repo string `json:"repo" cborgen:"repo"`
-
Title string `json:"title" cborgen:"title"`
+
LexiconTypeID string `json:"$type,const=sh.tangled.repo.issue" cborgen:"$type,const=sh.tangled.repo.issue"`
+
Body *string `json:"body,omitempty" cborgen:"body,omitempty"`
+
CreatedAt string `json:"createdAt" cborgen:"createdAt"`
+
Mentions []string `json:"mentions,omitempty" cborgen:"mentions,omitempty"`
+
References []string `json:"references,omitempty" cborgen:"references,omitempty"`
+
Repo string `json:"repo" cborgen:"repo"`
+
Title string `json:"title" cborgen:"title"`
}
+2
api/tangled/repopull.go
···
LexiconTypeID string `json:"$type,const=sh.tangled.repo.pull" cborgen:"$type,const=sh.tangled.repo.pull"`
Body *string `json:"body,omitempty" cborgen:"body,omitempty"`
CreatedAt string `json:"createdAt" cborgen:"createdAt"`
+
Mentions []string `json:"mentions,omitempty" cborgen:"mentions,omitempty"`
Patch string `json:"patch" cborgen:"patch"`
+
References []string `json:"references,omitempty" cborgen:"references,omitempty"`
Source *RepoPull_Source `json:"source,omitempty" cborgen:"source,omitempty"`
Target *RepoPull_Target `json:"target" cborgen:"target"`
Title string `json:"title" cborgen:"title"`
+9
appview/db/db.go
···
email_notifications integer not null default 0
);
+
create table if not exists reference_links (
+
id integer primary key autoincrement,
+
from_at text not null,
+
to_at text not null,
+
unique (from_at, to_at)
+
);
+
create table if not exists migrations (
id integer primary key autoincrement,
name text unique
···
-- indexes for better performance
create index if not exists idx_notifications_recipient_created on notifications(recipient_did, created desc);
create index if not exists idx_notifications_recipient_read on notifications(recipient_did, read);
+
create index if not exists idx_references_from_at on reference_links(from_at);
+
create index if not exists idx_references_to_at on reference_links(to_at);
`)
if err != nil {
return nil, err
+73 -18
appview/db/issues.go
···
"time"
"github.com/bluesky-social/indigo/atproto/syntax"
+
"tangled.org/core/api/tangled"
"tangled.org/core/appview/models"
"tangled.org/core/appview/pagination"
)
···
returning rowid, issue_id
`, issue.RepoAt, issue.Did, issue.Rkey, newIssueId, issue.Title, issue.Body)
-
return row.Scan(&issue.Id, &issue.IssueId)
+
err = row.Scan(&issue.Id, &issue.IssueId)
+
if err != nil {
+
return fmt.Errorf("scan row: %w", err)
+
}
+
+
if err := putReferences(tx, issue.AtUri(), issue.References); err != nil {
+
return fmt.Errorf("put reference_links: %w", err)
+
}
+
return nil
}
func updateIssue(tx *sql.Tx, issue *models.Issue) error {
···
set title = ?, body = ?, edited = ?
where did = ? and rkey = ?
`, issue.Title, issue.Body, time.Now().Format(time.RFC3339), issue.Did, issue.Rkey)
-
return err
+
if err != nil {
+
return err
+
}
+
+
if err := putReferences(tx, issue.AtUri(), issue.References); err != nil {
+
return fmt.Errorf("put reference_links: %w", err)
+
}
+
return nil
}
func GetIssuesPaginated(e Execer, page pagination.Page, filters ...filter) ([]models.Issue, error) {
···
}
}
+
// collect references for each issue
+
allReferencs, err := GetReferencesAll(e, FilterIn("from_at", issueAts))
+
if err != nil {
+
return nil, fmt.Errorf("failed to query reference_links: %w", err)
+
}
+
for issueAt, references := range allReferencs {
+
if issue, ok := issueMap[issueAt.String()]; ok {
+
issue.References = references
+
}
+
}
+
var issues []models.Issue
for _, i := range issueMap {
issues = append(issues, *i)
···
return ids, nil
}
-
func AddIssueComment(e Execer, c models.IssueComment) (int64, error) {
-
result, err := e.Exec(
+
func AddIssueComment(tx *sql.Tx, c models.IssueComment) (int64, error) {
+
result, err := tx.Exec(
`insert into issue_comments (
did,
rkey,
···
return 0, err
}
+
if err := putReferences(tx, c.AtUri(), c.References); err != nil {
+
return 0, fmt.Errorf("put reference_links: %w", err)
+
}
+
return id, nil
}
···
}
func GetIssueComments(e Execer, filters ...filter) ([]models.IssueComment, error) {
-
var comments []models.IssueComment
+
commentMap := make(map[string]*models.IssueComment)
var conditions []string
var args []any
···
comment.ReplyTo = &replyTo.V
}
-
comments = append(comments, comment)
+
atUri := comment.AtUri().String()
+
commentMap[atUri] = &comment
}
if err = rows.Err(); err != nil {
return nil, err
}
+
// collect references for each comments
+
commentAts := slices.Collect(maps.Keys(commentMap))
+
allReferencs, err := GetReferencesAll(e, FilterIn("from_at", commentAts))
+
if err != nil {
+
return nil, fmt.Errorf("failed to query reference_links: %w", err)
+
}
+
for commentAt, references := range allReferencs {
+
if comment, ok := commentMap[commentAt.String()]; ok {
+
comment.References = references
+
}
+
}
+
+
var comments []models.IssueComment
+
for _, c := range commentMap {
+
comments = append(comments, *c)
+
}
+
+
sort.Slice(comments, func(i, j int) bool {
+
return comments[i].Created.After(comments[j].Created)
+
})
+
return comments, nil
}
-
func DeleteIssues(e Execer, filters ...filter) error {
-
var conditions []string
-
var args []any
-
for _, filter := range filters {
-
conditions = append(conditions, filter.Condition())
-
args = append(args, filter.Arg()...)
+
func DeleteIssues(tx *sql.Tx, did, rkey string) error {
+
_, err := tx.Exec(
+
`delete from issues
+
where did = ? and rkey = ?`,
+
did,
+
rkey,
+
)
+
if err != nil {
+
return fmt.Errorf("delete issue: %w", err)
}
-
whereClause := ""
-
if conditions != nil {
-
whereClause = " where " + strings.Join(conditions, " and ")
+
uri := syntax.ATURI(fmt.Sprintf("at://%s/%s/%s", did, tangled.RepoIssueNSID, rkey))
+
err = deleteReferences(tx, uri)
+
if err != nil {
+
return fmt.Errorf("delete reference_links: %w", err)
}
-
query := fmt.Sprintf(`delete from issues %s`, whereClause)
-
_, err := e.Exec(query, args...)
-
return err
+
return nil
}
func CloseIssues(e Execer, filters ...filter) error {
+31 -5
appview/db/pulls.go
···
submissionIds := slices.Collect(maps.Keys(submissionMap))
comments, err := GetPullComments(e, FilterIn("submission_id", submissionIds))
if err != nil {
-
return nil, err
+
return nil, fmt.Errorf("failed to get pull comments: %w", err)
}
for _, comment := range comments {
if submission, ok := submissionMap[comment.SubmissionId]; ok {
···
}
defer rows.Close()
-
var comments []models.PullComment
+
commentMap := make(map[string]*models.PullComment)
for rows.Next() {
var comment models.PullComment
var createdAt string
···
comment.Created = t
}
-
comments = append(comments, comment)
+
atUri := comment.AtUri().String()
+
commentMap[atUri] = &comment
}
if err := rows.Err(); err != nil {
return nil, err
}
+
// collect references for each comments
+
commentAts := slices.Collect(maps.Keys(commentMap))
+
allReferencs, err := GetReferencesAll(e, FilterIn("from_at", commentAts))
+
if err != nil {
+
return nil, fmt.Errorf("failed to query reference_links: %w", err)
+
}
+
for commentAt, references := range allReferencs {
+
if comment, ok := commentMap[commentAt.String()]; ok {
+
comment.References = references
+
}
+
}
+
+
var comments []models.PullComment
+
for _, c := range commentMap {
+
comments = append(comments, *c)
+
}
+
+
sort.Slice(comments, func(i, j int) bool {
+
return comments[i].Created.Before(comments[j].Created)
+
})
+
return comments, nil
}
···
return pulls, nil
}
-
func NewPullComment(e Execer, comment *models.PullComment) (int64, error) {
+
func NewPullComment(tx *sql.Tx, comment *models.PullComment) (int64, error) {
query := `insert into pull_comments (owner_did, repo_at, submission_id, comment_at, pull_id, body) values (?, ?, ?, ?, ?, ?)`
-
res, err := e.Exec(
+
res, err := tx.Exec(
query,
comment.OwnerDid,
comment.RepoAt,
···
return 0, err
}
+
if err := putReferences(tx, comment.AtUri(), comment.References); err != nil {
+
return 0, fmt.Errorf("put reference_links: %w", err)
+
}
+
return i, nil
}
+92 -14
appview/db/reference.go
···
"tangled.org/core/appview/models"
)
-
// FindReferences resolves refLinks to Issue/PR/IssueComment/PullComment ATURIs.
+
// ValidateReferenceLinks resolves refLinks to Issue/PR/IssueComment/PullComment ATURIs.
// It will ignore missing refLinks.
-
func FindReferences(e Execer, refLinks []models.ReferenceLink) ([]syntax.ATURI, error) {
+
func ValidateReferenceLinks(e Execer, refLinks []models.ReferenceLink) ([]syntax.ATURI, error) {
var (
issueRefs []models.ReferenceLink
pullRefs []models.ReferenceLink
···
}
issueUris, err := findIssueReferences(e, issueRefs)
if err != nil {
-
return nil, err
+
return nil, fmt.Errorf("find issue references: %w", err)
}
pullUris, err := findPullReferences(e, pullRefs)
if err != nil {
-
return nil, err
+
return nil, fmt.Errorf("find pull references: %w", err)
}
return append(issueUris, pullUris...), nil
···
}
uris = append(uris, uri)
}
+
if err := rows.Err(); err != nil {
+
return nil, fmt.Errorf("iterate rows: %w", err)
+
}
+
return uris, nil
}
···
)
select
p.owner_did, p.rkey,
-
c.owner_did, c.rkey
+
c.comment_at
from input inp
join repos r
on r.did = inp.owner_did
···
for rows.Next() {
// Scan rows
var pullOwner, pullRkey string
-
var commentOwner, commentRkey sql.NullString
+
var commentUri sql.NullString
var uri syntax.ATURI
-
if err := rows.Scan(&pullOwner, &pullRkey, &commentOwner, &commentRkey); err != nil {
+
if err := rows.Scan(&pullOwner, &pullRkey, &commentUri); err != nil {
return nil, err
}
-
if commentOwner.Valid && commentRkey.Valid {
-
uri = syntax.ATURI(fmt.Sprintf(
-
"at://%s/%s/%s",
-
commentOwner.String,
-
tangled.RepoPullCommentNSID,
-
commentRkey.String,
-
))
+
if commentUri.Valid {
+
// no-op
+
uri = syntax.ATURI(commentUri.String)
} else {
uri = syntax.ATURI(fmt.Sprintf(
"at://%s/%s/%s",
···
}
return uris, nil
}
+
+
func putReferences(tx *sql.Tx, fromAt syntax.ATURI, references []syntax.ATURI) error {
+
err := deleteReferences(tx, fromAt)
+
if err != nil {
+
return fmt.Errorf("delete old reference_links: %w", err)
+
}
+
if len(references) == 0 {
+
return nil
+
}
+
+
values := make([]string, 0, len(references))
+
args := make([]any, 0, len(references)*2)
+
for _, ref := range references {
+
values = append(values, "(?, ?)")
+
args = append(args, fromAt, ref)
+
}
+
_, err = tx.Exec(
+
fmt.Sprintf(
+
`insert into reference_links (from_at, to_at)
+
values %s`,
+
strings.Join(values, ","),
+
),
+
args...,
+
)
+
if err != nil {
+
return fmt.Errorf("insert new reference_links: %w", err)
+
}
+
return nil
+
}
+
+
func deleteReferences(tx *sql.Tx, fromAt syntax.ATURI) error {
+
_, err := tx.Exec(`delete from reference_links where from_at = ?`, fromAt)
+
return err
+
}
+
+
func GetReferencesAll(e Execer, filters ...filter) (map[syntax.ATURI][]syntax.ATURI, error) {
+
var (
+
conditions []string
+
args []any
+
)
+
for _, filter := range filters {
+
conditions = append(conditions, filter.Condition())
+
args = append(args, filter.Arg()...)
+
}
+
+
whereClause := ""
+
if conditions != nil {
+
whereClause = " where " + strings.Join(conditions, " and ")
+
}
+
+
rows, err := e.Query(
+
fmt.Sprintf(
+
`select from_at, to_at from reference_links %s`,
+
whereClause,
+
),
+
args...,
+
)
+
if err != nil {
+
return nil, fmt.Errorf("query reference_links: %w", err)
+
}
+
defer rows.Close()
+
+
result := make(map[syntax.ATURI][]syntax.ATURI)
+
+
for rows.Next() {
+
var from, to syntax.ATURI
+
if err := rows.Scan(&from, &to); err != nil {
+
return nil, fmt.Errorf("scan row: %w", err)
+
}
+
+
result[from] = append(result[from], to)
+
}
+
if err := rows.Err(); err != nil {
+
return nil, fmt.Errorf("iterate rows: %w", err)
+
}
+
+
return result, nil
+
}
+22 -5
appview/ingester.go
···
return nil
case jmodels.CommitOperationDelete:
+
tx, err := ddb.BeginTx(ctx, nil)
+
if err != nil {
+
l.Error("failed to begin transaction", "err", err)
+
return err
+
}
+
defer tx.Rollback()
+
if err := db.DeleteIssues(
-
ddb,
-
db.FilterEq("did", did),
-
db.FilterEq("rkey", rkey),
+
tx,
+
did,
+
rkey,
); err != nil {
l.Error("failed to delete", "err", err)
return fmt.Errorf("failed to delete issue record: %w", err)
}
+
if err := tx.Commit(); err != nil {
+
l.Error("failed to commit txn", "err", err)
+
return err
+
}
return nil
}
···
return fmt.Errorf("failed to validate comment: %w", err)
}
-
_, err = db.AddIssueComment(ddb, *comment)
+
tx, err := ddb.Begin()
+
if err != nil {
+
return fmt.Errorf("failed to start transaction: %w", err)
+
}
+
defer tx.Rollback()
+
+
_, err = db.AddIssueComment(tx, *comment)
if err != nil {
return fmt.Errorf("failed to create issue comment: %w", err)
}
-
return nil
+
return tx.Commit()
case jmodels.CommitOperationDelete:
if err := db.DeleteIssueComments(
+55 -19
appview/issues/issues.go
···
}
l = l.With("did", issue.Did, "rkey", issue.Rkey)
+
tx, err := rp.db.Begin()
+
if err != nil {
+
l.Error("failed to start transaction", "err", err)
+
rp.pages.Notice(w, "issue-comment", "Failed to create comment, try again later.")
+
return
+
}
+
defer tx.Rollback()
+
// delete from PDS
client, err := rp.oauth.AuthorizedClient(r)
if err != nil {
···
}
// delete from db
-
if err := db.DeleteIssues(rp.db, db.FilterEq("id", issue.Id)); err != nil {
+
if err := db.DeleteIssues(tx, issue.Did, issue.Rkey); err != nil {
l.Error("failed to delete issue", "err", err)
rp.pages.Notice(w, noticeId, "Failed to delete issue.")
return
}
+
if err := tx.Commit(); err != nil {
+
l.Error("failed to commit txn", "err", err)
+
rp.pages.Notice(w, noticeId, "Failed to delete issue.")
+
return
+
}
rp.notifier.DeleteIssue(r.Context(), issue)
···
replyTo = &replyToUri
}
-
mentions, _ := rp.refResolver.Resolve(r.Context(), body)
+
mentions, references := rp.refResolver.Resolve(r.Context(), body)
comment := models.IssueComment{
-
Did: user.Did,
-
Rkey: tid.TID(),
-
IssueAt: issue.AtUri().String(),
-
ReplyTo: replyTo,
-
Body: body,
-
Created: time.Now(),
+
Did: user.Did,
+
Rkey: tid.TID(),
+
IssueAt: issue.AtUri().String(),
+
ReplyTo: replyTo,
+
Body: body,
+
Created: time.Now(),
+
Mentions: mentions,
+
References: references,
}
if err = rp.validator.ValidateIssueComment(&comment); err != nil {
l.Error("failed to validate comment", "err", err)
···
}
}()
-
commentId, err := db.AddIssueComment(rp.db, comment)
+
tx, err := rp.db.Begin()
+
if err != nil {
+
l.Error("failed to start transaction", "err", err)
+
rp.pages.Notice(w, "issue-comment", "Failed to create comment, try again later.")
+
return
+
}
+
defer tx.Rollback()
+
+
commentId, err := db.AddIssueComment(tx, comment)
if err != nil {
l.Error("failed to create comment", "err", err)
rp.pages.Notice(w, "issue-comment", "Failed to create comment.")
return
}
+
err = tx.Commit()
+
if err != nil {
+
l.Error("failed to commit transaction", "err", err)
+
rp.pages.Notice(w, "issue-comment", "Failed to create comment, try again later.")
+
return
+
}
// reset atUri to make rollback a no-op
atUri = ""
···
newComment.Edited = &now
record := newComment.AsRecord()
-
_, err = db.AddIssueComment(rp.db, newComment)
+
tx, err := rp.db.Begin()
+
if err != nil {
+
l.Error("failed to start transaction", "err", err)
+
rp.pages.Notice(w, "repo-notice", "Failed to update description, try again later.")
+
return
+
}
+
defer tx.Rollback()
+
+
_, err = db.AddIssueComment(tx, newComment)
if err != nil {
l.Error("failed to perferom update-description query", "err", err)
rp.pages.Notice(w, "repo-notice", "Failed to update description, try again later.")
return
}
+
if err := tx.Commit(); err != nil {
+
l.Error("failed to commit txn", "err", err)
+
rp.pages.Notice(w, "repo-notice", "Failed to update description, try again later.")
+
return
+
}
// rkey is optional, it was introduced later
if newComment.Rkey != "" {
···
})
case http.MethodPost:
body := r.FormValue("body")
-
mentions, _ := rp.refResolver.Resolve(r.Context(), body)
+
mentions, references := rp.refResolver.Resolve(r.Context(), body)
issue := &models.Issue{
-
RepoAt: f.RepoAt(),
-
Rkey: tid.TID(),
-
Title: r.FormValue("title"),
-
Body: body,
-
Open: true,
-
Did: user.Did,
-
Created: time.Now(),
-
Repo: f,
+
RepoAt: f.RepoAt(),
+
Rkey: tid.TID(),
+
Title: r.FormValue("title"),
+
Body: body,
+
Open: true,
+
Did: user.Did,
+
Created: time.Now(),
+
Mentions: mentions,
+
References: references,
+
Repo: f,
}
if err := rp.validator.ValidateIssue(issue); err != nil {
+70 -34
appview/models/issue.go
···
)
type Issue struct {
-
Id int64
-
Did string
-
Rkey string
-
RepoAt syntax.ATURI
-
IssueId int
-
Created time.Time
-
Edited *time.Time
-
Deleted *time.Time
-
Title string
-
Body string
-
Open bool
+
Id int64
+
Did string
+
Rkey string
+
RepoAt syntax.ATURI
+
IssueId int
+
Created time.Time
+
Edited *time.Time
+
Deleted *time.Time
+
Title string
+
Body string
+
Open bool
+
Mentions []syntax.DID
+
References []syntax.ATURI
// optionally, populate this when querying for reverse mappings
// like comment counts, parent repo etc.
···
}
func (i *Issue) AsRecord() tangled.RepoIssue {
+
mentions := make([]string, len(i.Mentions))
+
for j, did := range i.Mentions {
+
mentions[j] = string(did)
+
}
+
references := make([]string, len(i.References))
+
for j, uri := range i.References {
+
references[j] = string(uri)
+
}
return tangled.RepoIssue{
-
Repo: i.RepoAt.String(),
-
Title: i.Title,
-
Body: &i.Body,
-
CreatedAt: i.Created.Format(time.RFC3339),
+
Repo: i.RepoAt.String(),
+
Title: i.Title,
+
Body: &i.Body,
+
Mentions: mentions,
+
References: references,
+
CreatedAt: i.Created.Format(time.RFC3339),
}
}
···
}
type IssueComment struct {
-
Id int64
-
Did string
-
Rkey string
-
IssueAt string
-
ReplyTo *string
-
Body string
-
Created time.Time
-
Edited *time.Time
-
Deleted *time.Time
+
Id int64
+
Did string
+
Rkey string
+
IssueAt string
+
ReplyTo *string
+
Body string
+
Created time.Time
+
Edited *time.Time
+
Deleted *time.Time
+
Mentions []syntax.DID
+
References []syntax.ATURI
}
func (i *IssueComment) AtUri() syntax.ATURI {
···
}
func (i *IssueComment) AsRecord() tangled.RepoIssueComment {
+
mentions := make([]string, len(i.Mentions))
+
for j, did := range i.Mentions {
+
mentions[j] = string(did)
+
}
+
references := make([]string, len(i.References))
+
for j, uri := range i.References {
+
references[j] = string(uri)
+
}
return tangled.RepoIssueComment{
-
Body: i.Body,
-
Issue: i.IssueAt,
-
CreatedAt: i.Created.Format(time.RFC3339),
-
ReplyTo: i.ReplyTo,
+
Body: i.Body,
+
Issue: i.IssueAt,
+
CreatedAt: i.Created.Format(time.RFC3339),
+
ReplyTo: i.ReplyTo,
+
Mentions: mentions,
+
References: references,
}
}
···
return nil, err
}
+
mentions := make([]syntax.DID, len(record.Mentions))
+
for i, did := range record.Mentions {
+
mentions[i] = syntax.DID(did)
+
}
+
references := make([]syntax.ATURI, len(record.References))
+
for i, uri := range record.References {
+
references[i] = syntax.ATURI(uri)
+
}
+
comment := IssueComment{
-
Did: ownerDid,
-
Rkey: rkey,
-
Body: record.Body,
-
IssueAt: record.Issue,
-
ReplyTo: record.ReplyTo,
-
Created: created,
+
Did: ownerDid,
+
Rkey: rkey,
+
Body: record.Body,
+
IssueAt: record.Issue,
+
ReplyTo: record.ReplyTo,
+
Created: created,
+
Mentions: mentions,
+
References: references,
}
return &comment, nil
+26
appview/models/pull.go
···
// content
Body string
+
// meta
+
Mentions []syntax.DID
+
References []syntax.ATURI
+
// meta
Created time.Time
}
+
func (p *PullComment) AtUri() syntax.ATURI {
+
return syntax.ATURI(p.CommentAt)
+
}
+
+
// func (p *PullComment) AsRecord() tangled.RepoPullComment {
+
// mentions := make([]string, len(p.Mentions))
+
// for i, did := range p.Mentions {
+
// mentions[i] = string(did)
+
// }
+
// references := make([]string, len(p.References))
+
// for i, uri := range p.References {
+
// references[i] = string(uri)
+
// }
+
// return tangled.RepoPullComment{
+
// Pull: p.PullAt,
+
// Body: p.Body,
+
// Mentions: mentions,
+
// References: references,
+
// CreatedAt: p.Created.Format(time.RFC3339),
+
// }
+
// }
+
func (p *Pull) LastRoundNumber() int {
return len(p.Submissions) - 1
}
+3 -1
appview/pulls/pulls.go
···
return
}
-
mentions, _ := s.refResolver.Resolve(r.Context(), body)
+
mentions, references := s.refResolver.Resolve(r.Context(), body)
// Start a transaction
tx, err := s.db.BeginTx(r.Context(), nil)
···
Body: body,
CommentAt: atResp.Uri,
SubmissionId: pull.Submissions[roundNumber].ID,
+
Mentions: mentions,
+
References: references,
}
// Create the pull comment in the database with the commentAt field
+2 -2
appview/refresolver/resolver.go
···
}
func (r *Resolver) Resolve(ctx context.Context, source string) ([]syntax.DID, []syntax.ATURI) {
-
l := r.logger.With("method", "find_references")
+
l := r.logger.With("method", "Resolve")
rawMentions, rawRefs := markup.FindReferences(r.config.Core.AppviewHost, source)
l.Debug("found possible references", "mentions", rawMentions, "refs", rawRefs)
idents := r.idResolver.ResolveIdents(ctx, rawMentions)
···
rawRef.Handle = string(ident.DID)
resolvedRefs = append(resolvedRefs, rawRef)
}
-
aturiRefs, err := db.FindReferences(r.execer, resolvedRefs)
+
aturiRefs, err := db.ValidateReferenceLinks(r.execer, resolvedRefs)
if err != nil {
l.Error("failed running query", "err", err)
}
+14
lexicons/issue/comment.json
···
"replyTo": {
"type": "string",
"format": "at-uri"
+
},
+
"mentions": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "did"
+
}
+
},
+
"references": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "at-uri"
+
}
}
}
}
+14
lexicons/issue/issue.json
···
"createdAt": {
"type": "string",
"format": "datetime"
+
},
+
"mentions": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "did"
+
}
+
},
+
"references": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "at-uri"
+
}
}
}
}
+14
lexicons/pulls/comment.json
···
"createdAt": {
"type": "string",
"format": "datetime"
+
},
+
"mentions": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "did"
+
}
+
},
+
"references": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "at-uri"
+
}
}
}
}
+14
lexicons/pulls/pull.json
···
"createdAt": {
"type": "string",
"format": "datetime"
+
},
+
"mentions": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "did"
+
}
+
},
+
"references": {
+
"type": "array",
+
"items": {
+
"type": "string",
+
"format": "at-uri"
+
}
}
}
}