lexicons: pulls: add stack information to pull records #405

open
opened by nel.pet targeting master from nel.pet/core: push-kyupnpkvqmsy
Changed files (+306 -20)

api/tangled/cbor_gen.go
api/tangled/repopull.go
appview/db/db.go
appview/db/pulls.go
appview/pulls/pulls.go
cmd/gen.go
lexicons/pulls/pull.json
+202 -1
api/tangled/cbor_gen.go
···
    return nil
+func (t *RepoPull_StackInfo) MarshalCBOR(w io.Writer) error {
+    if t == nil {
+        _, err := w.Write(cbg.CborNull)
+        return err
+    }
+
+    cw := cbg.NewCborWriter(w)
+    fieldCount := 2
+
+    if t.Parent == nil {
+        fieldCount--
+    }
+
+    if _, err := cw.Write(cbg.CborEncodeMajorType(cbg.MajMap, uint64(fieldCount))); err != nil {
+        return err
+    }
+
+    // t.Parent (string) (string)
+    if t.Parent != nil {
+
+        if len("parent") > 1000000 {
+            return xerrors.Errorf("Value in field \"parent\" was too long")
+        }
+
+        if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("parent"))); err != nil {
+            return err
+        }
+        if _, err := cw.WriteString(string("parent")); err != nil {
+            return err
+        }
+
+        if t.Parent == nil {
+            if _, err := cw.Write(cbg.CborNull); err != nil {
+                return err
+            }
+        } else {
+            if len(*t.Parent) > 1000000 {
+                return xerrors.Errorf("Value in field t.Parent was too long")
+            }
+
+            if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(*t.Parent))); err != nil {
+                return err
+            }
+            if _, err := cw.WriteString(string(*t.Parent)); err != nil {
+                return err
+            }
+        }
+    }
+
+    // t.ChangeId (string) (string)
+    if len("changeId") > 1000000 {
+        return xerrors.Errorf("Value in field \"changeId\" was too long")
+    }
+
+    if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("changeId"))); err != nil {
+        return err
+    }
+    if _, err := cw.WriteString(string("changeId")); err != nil {
+        return err
+    }
+
+    if len(t.ChangeId) > 1000000 {
+        return xerrors.Errorf("Value in field t.ChangeId was too long")
+    }
+
+    if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(t.ChangeId))); err != nil {
+        return err
+    }
+    if _, err := cw.WriteString(string(t.ChangeId)); err != nil {
+        return err
+    }
+    return nil
+}
+
+func (t *RepoPull_StackInfo) UnmarshalCBOR(r io.Reader) (err error) {
+    *t = RepoPull_StackInfo{}
+
+    cr := cbg.NewCborReader(r)
+
+    maj, extra, err := cr.ReadHeader()
+    if err != nil {
+        return err
+    }
+    defer func() {
+        if err == io.EOF {
+            err = io.ErrUnexpectedEOF
+        }
+    }()
+
+    if maj != cbg.MajMap {
+        return fmt.Errorf("cbor input should be of type map")
+    }
+
+    if extra > cbg.MaxLength {
+        return fmt.Errorf("RepoPull_StackInfo: map struct too large (%d)", extra)
+    }
+
+    n := extra
+
+    nameBuf := make([]byte, 8)
+    for i := uint64(0); i < n; i++ {
+        nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000)
+        if err != nil {
+            return err
+        }
+
+        if !ok {
+            // Field doesn't exist on this type, so ignore it
+            if err := cbg.ScanForLinks(cr, func(cid.Cid) {}); err != nil {
+                return err
+            }
+            continue
+        }
+
+        switch string(nameBuf[:nameLen]) {
+        // t.Parent (string) (string)
+        case "parent":
+
+            {
+                b, err := cr.ReadByte()
+                if err != nil {
+                    return err
+                }
+                if b != cbg.CborNull[0] {
+                    if err := cr.UnreadByte(); err != nil {
+                        return err
+                    }
+
+                    sval, err := cbg.ReadStringWithMax(cr, 1000000)
+                    if err != nil {
+                        return err
+                    }
+
+                    t.Parent = (*string)(&sval)
+                }
+            }
+        // t.ChangeId (string) (string)
+        case "changeId":
+
+            {
+                sval, err := cbg.ReadStringWithMax(cr, 1000000)
+                if err != nil {
+                    return err
+                }
+
+                t.ChangeId = string(sval)
+            }
+
+        default:
+            // Field doesn't exist on this type, so ignore it
+            if err := cbg.ScanForLinks(r, func(cid.Cid) {}); err != nil {
+                return err
+            }
+        }
+    }
+
+    return nil
+}
func (t *RepoPull_Target) MarshalCBOR(w io.Writer) error {
    if t == nil {
        _, err := w.Write(cbg.CborNull)
···
    cw := cbg.NewCborWriter(w)
-    fieldCount := 7
+    fieldCount := 8
    if t.Body == nil {
        fieldCount--
···
        fieldCount--
+    if t.StackInfo == nil {
+        fieldCount--
+    }
+
    if _, err := cw.Write(cbg.CborEncodeMajorType(cbg.MajMap, uint64(fieldCount))); err != nil {
        return err
···
if _, err := cw.WriteString(string(t.CreatedAt)); err != nil {
return err
+
+    // t.StackInfo (tangled.RepoPull_StackInfo) (struct)
+    if t.StackInfo != nil {
+
+        if len("stackInfo") > 1000000 {
+            return xerrors.Errorf("Value in field \"stackInfo\" was too long")
+        }
+
+        if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("stackInfo"))); err != nil {
+            return err
+        }
+        if _, err := cw.WriteString(string("stackInfo")); err != nil {
+            return err
+        }
+
+        if err := t.StackInfo.MarshalCBOR(cw); err != nil {
+            return err
+        }
+    }
    return nil
···
t.CreatedAt = string(sval)
+        // t.StackInfo (tangled.RepoPull_StackInfo) (struct)
+        case "stackInfo":
+
+            {
+
+                b, err := cr.ReadByte()
+                if err != nil {
+                    return err
+                }
+                if b != cbg.CborNull[0] {
+                    if err := cr.UnreadByte(); err != nil {
+                        return err
+                    }
+                    t.StackInfo = new(RepoPull_StackInfo)
+                    if err := t.StackInfo.UnmarshalCBOR(cr); err != nil {
+                        return xerrors.Errorf("unmarshaling t.StackInfo pointer: %w", err)
+                    }
+                }
+
+            }
        default:
            // Field doesn't exist on this type, so ignore it
+16 -7
api/tangled/repopull.go
···
}
//
// RECORDTYPE: RepoPull
type RepoPull struct {
-    LexiconTypeID string           `json:"$type,const=sh.tangled.repo.pull" cborgen:"$type,const=sh.tangled.repo.pull"`
-    Body          *string          `json:"body,omitempty" cborgen:"body,omitempty"`
-    CreatedAt     string           `json:"createdAt" cborgen:"createdAt"`
-    Patch         string           `json:"patch" cborgen:"patch"`
-    Source        *RepoPull_Source `json:"source,omitempty" cborgen:"source,omitempty"`
-    Target        *RepoPull_Target `json:"target" cborgen:"target"`
-    Title         string           `json:"title" cborgen:"title"`
+    LexiconTypeID string              `json:"$type,const=sh.tangled.repo.pull" cborgen:"$type,const=sh.tangled.repo.pull"`
+    Body          *string             `json:"body,omitempty" cborgen:"body,omitempty"`
+    CreatedAt     string              `json:"createdAt" cborgen:"createdAt"`
+    Patch         string              `json:"patch" cborgen:"patch"`
+    Source        *RepoPull_Source    `json:"source,omitempty" cborgen:"source,omitempty"`
+    StackInfo     *RepoPull_StackInfo `json:"stackInfo,omitempty" cborgen:"stackInfo,omitempty"`
+    Target        *RepoPull_Target    `json:"target" cborgen:"target"`
+    Title         string              `json:"title" cborgen:"title"`
}
// RepoPull_Source is a "source" in the sh.tangled.repo.pull schema.
···
    Sha string `json:"sha" cborgen:"sha"`
}
+// RepoPull_StackInfo is a "stackInfo" in the sh.tangled.repo.pull schema.
+type RepoPull_StackInfo struct {
+    // changeId: Change ID of this commit/change. Principally also available in the patch itself as a line in the commit footer.
+    ChangeId string  `json:"changeId" cborgen:"changeId"`
+    // parent: AT-URI of the PR for the parent commit/change in the change stack.
+    Parent   *string `json:"parent,omitempty" cborgen:"parent,omitempty"`
+}
+
// RepoPull_Target is a "target" in the sh.tangled.repo.pull schema.
type RepoPull_Target struct {
    Branch string `json:"branch" cborgen:"branch"`
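
To make the shape of the new field concrete, here is a minimal standalone sketch of what a stackInfo object serializes to. The DID, rkey, and change-id values are made up, and the local type only mirrors the generated RepoPull_StackInfo above so the sketch compiles without importing the api/tangled package.

package main

import (
    "encoding/json"
    "fmt"
)

// stackInfo mirrors the generated RepoPull_StackInfo type above; it is defined
// locally only so this sketch is self-contained.
type stackInfo struct {
    ChangeId string  `json:"changeId"`
    Parent   *string `json:"parent,omitempty"`
}

func main() {
    // Hypothetical parent: the at-uri of the pull record for the parent change
    // in the stack, per the lexicon's "parent" description.
    parent := "at://did:plc:example/sh.tangled.repo.pull/3kparent111"

    si := stackInfo{
        ChangeId: "kxlznwqmpsrt", // made-up jj change-id taken from the patch footer
        Parent:   &parent,
    }

    out, _ := json.MarshalIndent(si, "", "  ")
    fmt.Println(string(out))
    // A root pull (first change in the stack) would simply omit "parent".
}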
+8
appview/db/db.go
···
return err
})
+
+    runMigration(db, "add-parent-at-for-stacks-to-pulls", func(tx *sql.Tx) error {
+        _, err := tx.Exec(`
+            alter table pulls add column parent_at text;
+        `)
+        return err
+    })
+
    return &DB{db}, nil
}
+37 -10
appview/db/pulls.go
···
// stacking
StackId string // nullable string
ChangeId string // nullable string
+    ParentAt *syntax.ATURI
ParentChangeId string // nullable string
// meta
···
}
record := tangled.RepoPull{
-        Title:     p.Title,
-        Body:      &p.Body,
-        CreatedAt: p.Created.Format(time.RFC3339),
+        Title:     p.Title,
+        Body:      &p.Body,
+        CreatedAt: p.Created.Format(time.RFC3339),
        Target: &tangled.RepoPull_Target{
            Repo:   p.RepoAt.String(),
            Branch: p.TargetBranch,
        },
-        Patch:  p.LatestPatch(),
-        Source: source,
+        Patch:     p.LatestPatch(),
+        Source:    source,
+        StackInfo: &tangled.RepoPull_StackInfo{
+            ChangeId: p.ChangeId,
+            Parent:   (*string)(p.ParentAt),
+        },
}
return record
}
···
}
}
-    var stackId, changeId, parentChangeId *string
+    var stackId, changeId, parentAt, parentChangeId *string
    if pull.StackId != "" {
        stackId = &pull.StackId
    }
    if pull.ChangeId != "" {
        changeId = &pull.ChangeId
    }
+    if pull.ParentAt != nil {
+        parentAt = (*string)(pull.ParentAt)
+    }
    if pull.ParentChangeId != "" {
        parentChangeId = &pull.ParentChangeId
    }
···
_, err = tx.Exec(
`
insert into pulls (
-            repo_at, owner_did, pull_id, title, target_branch, body, rkey, state, source_branch, source_repo_at, stack_id, change_id, parent_change_id
+            repo_at, owner_did, pull_id, title, target_branch, body, rkey, state, source_branch, source_repo_at, stack_id, change_id, parent_at, parent_change_id
        )
-        values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
+        values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
pull.RepoAt,
pull.OwnerDid,
pull.PullId,
···
sourceRepoAt,
stackId,
changeId,
+        parentAt,
parentChangeId,
)
if err != nil {
···
source_repo_at,
stack_id,
change_id,
+        parent_at,
parent_change_id
from
pulls
···
for rows.Next() {
var pull Pull
var createdAt string
-        var sourceBranch, sourceRepoAt, stackId, changeId, parentChangeId sql.NullString
+        var sourceBranch, sourceRepoAt, stackId, changeId, parentAt, parentChangeId sql.NullString
err := rows.Scan(
&pull.OwnerDid,
&pull.RepoAt,
···
&sourceRepoAt,
&stackId,
&changeId,
+            &parentAt,
&parentChangeId,
)
if err != nil {
···
if changeId.Valid {
pull.ChangeId = changeId.String
}
+        if parentAt.Valid {
+            parentAtParsed, err := syntax.ParseATURI(parentAt.String)
+            if err != nil {
+                return nil, err
+            }
+            pull.ParentAt = &parentAtParsed
+        }
        if parentChangeId.Valid {
            pull.ParentChangeId = parentChangeId.String
        }
···
source_repo_at,
stack_id,
change_id,
+        parent_at,
parent_change_id
from
pulls
···
var pull Pull
var createdAt string
-    var sourceBranch, sourceRepoAt, stackId, changeId, parentChangeId sql.NullString
+    var sourceBranch, sourceRepoAt, stackId, changeId, parentAt, parentChangeId sql.NullString
err := row.Scan(
&pull.OwnerDid,
&pull.PullId,
···
&sourceRepoAt,
&stackId,
&changeId,
+        &parentAt,
&parentChangeId,
)
if err != nil {
···
if changeId.Valid {
pull.ChangeId = changeId.String
}
+    if parentAt.Valid {
+        parsedParentAt, err := syntax.ParseATURI(parentAt.String)
+        if err != nil {
+            return nil, err
+        }
+        pull.ParentAt = &parsedParentAt
+    }
    if parentChangeId.Valid {
        pull.ParentChangeId = parentChangeId.String
    }
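
As a side note on the queries above: parent_at is a nullable TEXT column, so it is scanned into a sql.NullString and only parsed into a *syntax.ATURI when set. A minimal sketch of that pattern in isolation; the column values below are hypothetical.

package main

import (
    "database/sql"
    "fmt"

    "github.com/bluesky-social/indigo/atproto/syntax"
)

// parentAtFromColumn mirrors the nullable-column handling in the scans above:
// a NULL parent_at means the pull is the root of its stack.
func parentAtFromColumn(col sql.NullString) (*syntax.ATURI, error) {
    if !col.Valid {
        return nil, nil
    }
    parsed, err := syntax.ParseATURI(col.String)
    if err != nil {
        return nil, err
    }
    return &parsed, nil
}

func main() {
    // Hypothetical column values.
    withParent := sql.NullString{String: "at://did:plc:example/sh.tangled.repo.pull/3kparent111", Valid: true}
    root := sql.NullString{}

    if at, err := parentAtFromColumn(withParent); err == nil && at != nil {
        fmt.Println("parent:", at.String())
    }
    if at, _ := parentAtFromColumn(root); at == nil {
        fmt.Println("no parent: root of the stack")
    }
}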
+21 -2
appview/pulls/pulls.go
···
newStack, err := newStack(f, user, targetBranch, patch, pull.PullSource, stackId)
if err != nil {
log.Println("failed to create resubmitted stack", err)
-        s.pages.Notice(w, "pull-merge-error", "Failed to merge pull request. Try again later.")
+        s.pages.Notice(w, "pull-resubmit-error", "Failed to merge pull request. Try again later.")
return
// find the diff between the stacks, first, map them by changeId
origById := make(map[string]*db.Pull)
newById := make(map[string]*db.Pull)
+    chIdToAtUri := make(map[string]*syntax.ATURI)
    for _, p := range origStack {
        origById[p.ChangeId] = p
+
+        // build map from change id to existing at-uris (ignore error as it shouldn't be possible here)
+        pAtUri, _ := syntax.ParseATURI(fmt.Sprintf("at://%s/%s/%s", user.Did, tangled.RepoPullNSID, p.Rkey))
+        chIdToAtUri[p.ChangeId] = &pAtUri
    for _, p := range newStack {
+        // if a change id has already been given a PR, use its at-uri instead of the newly created
+        // (and thus incorrect) one made by newStack
+        if ppAt, ok := chIdToAtUri[p.ParentChangeId]; ok {
+            p.ParentAt = ppAt
+        }
+        newById[p.ChangeId] = p
···
// we still need to update the hash in submission.Patch and submission.SourceRev
if patchutil.Equal(newFiles, origFiles) &&
origHeader.Title == newHeader.Title &&
-            origHeader.Body == newHeader.Body {
+            origHeader.Body == newHeader.Body &&
+            op.ParentChangeId == np.ParentChangeId {
unchanged[op.ChangeId] = struct{}{}
} else {
updated[op.ChangeId] = struct{}{}
···
record := op.AsRecord()
record.Patch = submission.Patch
+            record.StackInfo.Parent = (*string)(np.ParentAt)
writes = append(writes, &comatproto.RepoApplyWrites_Input_Writes_Elem{
RepoApplyWrites_Update: &comatproto.RepoApplyWrites_Update{
···
// the stack is identified by a UUID
var stack db.Stack
parentChangeId := ""
+    var parentAt *syntax.ATURI = nil
for _, fp := range formatPatches {
// all patches must have a jj change-id
changeId, err := fp.ChangeId()
···
StackId: stackId,
ChangeId: changeId,
+        ParentAt: parentAt,
ParentChangeId: parentChangeId,
stack = append(stack, &pull)
parentChangeId = changeId
+        // this is a bit of an ugly way to create the ATURI but it's the best we can do with the data flow here
+        // (ignore error as it shouldn't be possible here)
+        parsedParentAt, _ := syntax.ParseATURI(fmt.Sprintf("at://%s/%s/%s", user.Did, tangled.RepoPullNSID, pull.Rkey))
+        parentAt = &parsedParentAt
return stack, nil
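
For reference, the parent at-uri built above has the shape at://<did>/<collection>/<rkey>. A standalone sketch of that construction with made-up identifiers, hardcoding the collection string in place of tangled.RepoPullNSID:

package main

import (
    "fmt"

    "github.com/bluesky-social/indigo/atproto/syntax"
)

func main() {
    // Hypothetical identifiers; in the handlers above these come from the
    // logged-in user's DID and the rkey of the pull record being written.
    did := "did:plc:example"
    rkey := "3kparent111"

    // Same fmt.Sprintf shape as in newStack/resubmit above; the literal
    // collection stands in for tangled.RepoPullNSID.
    raw := fmt.Sprintf("at://%s/%s/%s", did, "sh.tangled.repo.pull", rkey)

    // The handlers ignore the error because their inputs are already-validated
    // identifiers; here we check it anyway.
    uri, err := syntax.ParseATURI(raw)
    if err != nil {
        panic(err)
    }
    fmt.Println(uri.String()) // at://did:plc:example/sh.tangled.repo.pull/3kparent111
}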
+1
cmd/gen.go
···
tangled.RepoIssueState{},
tangled.RepoPull{},
tangled.RepoPullComment{},
+        tangled.RepoPull_StackInfo{},
tangled.RepoPull_Source{},
tangled.RepoPull_Target{},
tangled.RepoPullStatus{},
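
cmd/gen.go passes this type list to whyrusleeping/cbor-gen, which is what produces the MarshalCBOR/UnmarshalCBOR methods shown in api/tangled/cbor_gen.go above, so the new RepoPull_StackInfo type has to be registered here. A minimal standalone sketch of that generator pattern, with an illustrative stand-in type and output path rather than the repo's actual ones:

package main

import (
    cbg "github.com/whyrusleeping/cbor-gen"
)

// StackInfo stands in for tangled.RepoPull_StackInfo; the real generator is
// handed the generated lexicon types directly.
type StackInfo struct {
    ChangeId string
    Parent   *string
}

func main() {
    // WriteMapEncodersToFile emits map-keyed CBOR encoders/decoders, the
    // fieldCount/WriteMajorTypeHeader style visible in cbor_gen.go above.
    if err := cbg.WriteMapEncodersToFile("cbor_gen.go", "main", StackInfo{}); err != nil {
        panic(err)
    }
}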
+21
lexicons/pulls/pull.json
···
"patch": {
"type": "string"
},
+
"stackInfo": {
+
"type": "ref",
+
"ref": "#stackInfo"
+
},
"source": {
"type": "ref",
"ref": "#source"
···
"format": "at-uri"
}
}
+
},
+
"stackInfo": {
+
"type": "object",
+
"required": [
+
"changeId"
+
],
+
"properties": {
+
"changeId": {
+
"type": "string",
+
"description": "Change ID of this commit/change."
+
},
+
"parent": {
+
"type": "string",
+
"description": "AT-URI of the PR for the parent commit/change in the change stack.",
+
"format": "at-uri"
+
}
+
}
}
}
}
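
Taken together, the parent field links each pull record in a stack to the pull record of its parent change, so a stack can be walked from its tip to its root by following at-uris. A small sketch of that idea under stated assumptions: the pull type and the lookup map here are hypothetical and not part of this PR.

package main

import "fmt"

// pull is a hypothetical, pared-down view of a pull record: only the fields
// needed to follow the stack, keyed by the record's own at-uri.
type pull struct {
    AtUri    string
    ChangeId string
    Parent   *string // at-uri of the parent change's pull record; nil at the root
}

// walkToRoot follows parent links until it reaches a pull with no parent or a
// reference it cannot resolve.
func walkToRoot(byUri map[string]pull, tip string) []pull {
    var chain []pull
    uri := tip
    for {
        p, ok := byUri[uri]
        if !ok {
            break // dangling parent reference
        }
        chain = append(chain, p)
        if p.Parent == nil {
            break
        }
        uri = *p.Parent
    }
    return chain
}

func main() {
    // Made-up three-change stack.
    root := pull{AtUri: "at://did:plc:example/sh.tangled.repo.pull/aaa", ChangeId: "c1"}
    mid := pull{AtUri: "at://did:plc:example/sh.tangled.repo.pull/bbb", ChangeId: "c2", Parent: &root.AtUri}
    tip := pull{AtUri: "at://did:plc:example/sh.tangled.repo.pull/ccc", ChangeId: "c3", Parent: &mid.AtUri}

    byUri := map[string]pull{root.AtUri: root, mid.AtUri: mid, tip.AtUri: tip}
    for _, p := range walkToRoot(byUri, tip.AtUri) {
        fmt.Println(p.ChangeId) // prints c3, then c2, then c1
    }
}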