workflow,spindle: regenerate api with new lexicon #284

merged
opened by oppi.li targeting master from push-myqmppunmplu
Changed files (+64 -330): api, cmd, spindle/models, workflow
+35 -295
api/tangled/cbor_gen.go
···
return nil
}
-func (t *Pipeline_Dependencies_Elem) MarshalCBOR(w io.Writer) error {
if t == nil {
_, err := w.Write(cbg.CborNull)
return err
···
return nil
}
-func (t *Pipeline_Dependencies_Elem) UnmarshalCBOR(r io.Reader) (err error) {
-*t = Pipeline_Dependencies_Elem{}
cr := cbg.NewCborReader(r)
···
}
if extra > cbg.MaxLength {
-return fmt.Errorf("Pipeline_Dependencies_Elem: map struct too large (%d)", extra)
}
n := extra
···
return err
}
-// t.Inputs ([]*tangled.Pipeline_ManualTriggerData_Inputs_Elem) (slice)
if t.Inputs != nil {
if len("inputs") > 1000000 {
···
}
switch string(nameBuf[:nameLen]) {
-// t.Inputs ([]*tangled.Pipeline_ManualTriggerData_Inputs_Elem) (slice)
case "inputs":
maj, extra, err = cr.ReadHeader()
···
}
if extra > 0 {
-t.Inputs = make([]*Pipeline_ManualTriggerData_Inputs_Elem, extra)
}
for i := 0; i < int(extra); i++ {
···
if err := cr.UnreadByte(); err != nil {
return err
}
-t.Inputs[i] = new(Pipeline_ManualTriggerData_Inputs_Elem)
if err := t.Inputs[i].UnmarshalCBOR(cr); err != nil {
return xerrors.Errorf("unmarshaling t.Inputs[i] pointer: %w", err)
}
···
return nil
}
-func (t *Pipeline_ManualTriggerData_Inputs_Elem) MarshalCBOR(w io.Writer) error {
if t == nil {
_, err := w.Write(cbg.CborNull)
return err
···
return nil
}
-func (t *Pipeline_ManualTriggerData_Inputs_Elem) UnmarshalCBOR(r io.Reader) (err error) {
-*t = Pipeline_ManualTriggerData_Inputs_Elem{}
cr := cbg.NewCborReader(r)
···
}
if extra > cbg.MaxLength {
-return fmt.Errorf("Pipeline_ManualTriggerData_Inputs_Elem: map struct too large (%d)", extra)
}
n := extra
···
return nil
}
-
-func (t *Pipeline_Step_Environment_Elem) MarshalCBOR(w io.Writer) error {
-if t == nil {
-_, err := w.Write(cbg.CborNull)
-return err
-}
-
-cw := cbg.NewCborWriter(w)
-
-if _, err := cw.Write([]byte{162}); err != nil {
-return err
-}
-
-// t.Key (string) (string)
-if len("key") > 1000000 {
-return xerrors.Errorf("Value in field \"key\" was too long")
-}
-
-if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("key"))); err != nil {
-return err
-}
-if _, err := cw.WriteString(string("key")); err != nil {
-return err
-}
-
-if len(t.Key) > 1000000 {
-return xerrors.Errorf("Value in field t.Key was too long")
-}
-
-if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(t.Key))); err != nil {
-return err
-}
-if _, err := cw.WriteString(string(t.Key)); err != nil {
-return err
-}
-
-// t.Value (string) (string)
-if len("value") > 1000000 {
-return xerrors.Errorf("Value in field \"value\" was too long")
-}
-
-if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("value"))); err != nil {
-return err
-}
-if _, err := cw.WriteString(string("value")); err != nil {
-return err
-}
-
-if len(t.Value) > 1000000 {
-return xerrors.Errorf("Value in field t.Value was too long")
-}
-
-if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(t.Value))); err != nil {
-return err
-}
-if _, err := cw.WriteString(string(t.Value)); err != nil {
-return err
-}
-return nil
-}
-
-func (t *Pipeline_Step_Environment_Elem) UnmarshalCBOR(r io.Reader) (err error) {
-*t = Pipeline_Step_Environment_Elem{}
-
-cr := cbg.NewCborReader(r)
-
-maj, extra, err := cr.ReadHeader()
-if err != nil {
-return err
-}
-defer func() {
-if err == io.EOF {
-err = io.ErrUnexpectedEOF
-}
-}()
-
-if maj != cbg.MajMap {
-return fmt.Errorf("cbor input should be of type map")
-}
-
-if extra > cbg.MaxLength {
-return fmt.Errorf("Pipeline_Step_Environment_Elem: map struct too large (%d)", extra)
-}
-
-n := extra
-
-nameBuf := make([]byte, 5)
-for i := uint64(0); i < n; i++ {
-nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000)
-if err != nil {
-return err
-}
-
-if !ok {
-// Field doesn't exist on this type, so ignore it
-if err := cbg.ScanForLinks(cr, func(cid.Cid) {}); err != nil {
-return err
-}
-continue
-}
-
-switch string(nameBuf[:nameLen]) {
-// t.Key (string) (string)
-case "key":
-
-{
-sval, err := cbg.ReadStringWithMax(cr, 1000000)
-if err != nil {
-return err
-}
-
-t.Key = string(sval)
-}
-// t.Value (string) (string)
-case "value":
-
-{
-sval, err := cbg.ReadStringWithMax(cr, 1000000)
-if err != nil {
-return err
-}
-
-t.Value = string(sval)
-}
-
-default:
-// Field doesn't exist on this type, so ignore it
-if err := cbg.ScanForLinks(r, func(cid.Cid) {}); err != nil {
-return err
-}
-}
-}
-
-return nil
-}
func (t *PipelineStatus) MarshalCBOR(w io.Writer) error {
if t == nil {
_, err := w.Write(cbg.CborNull)
···
return nil
}
-
func (t *Pipeline_Step) MarshalCBOR(w io.Writer) error {
if t == nil {
_, err := w.Write(cbg.CborNull)
···
return err
}
-// t.Environment ([]*tangled.Pipeline_Step_Environment_Elem) (slice)
if t.Environment != nil {
if len("environment") > 1000000 {
···
t.Command = string(sval)
}
-// t.Environment ([]*tangled.Pipeline_Step_Environment_Elem) (slice)
case "environment":
maj, extra, err = cr.ReadHeader()
···
}
if extra > 0 {
-t.Environment = make([]*Pipeline_Step_Environment_Elem, extra)
}
for i := 0; i < int(extra); i++ {
···
if err := cr.UnreadByte(); err != nil {
return err
}
-t.Environment[i] = new(Pipeline_Step_Environment_Elem)
if err := t.Environment[i].UnmarshalCBOR(cr); err != nil {
return xerrors.Errorf("unmarshaling t.Environment[i] pointer: %w", err)
}
···
}
-// t.Environment ([]*tangled.Pipeline_Workflow_Environment_Elem) (slice)
if len("environment") > 1000000 {
return xerrors.Errorf("Value in field \"environment\" was too long")
}
···
}
-// t.Dependencies ([]tangled.Pipeline_Dependencies_Elem) (slice)
if len("dependencies") > 1000000 {
return xerrors.Errorf("Value in field \"dependencies\" was too long")
}
···
}
}
-// t.Environment ([]*tangled.Pipeline_Workflow_Environment_Elem) (slice)
case "environment":
maj, extra, err = cr.ReadHeader()
···
}
if extra > 0 {
-t.Environment = make([]*Pipeline_Workflow_Environment_Elem, extra)
}
for i := 0; i < int(extra); i++ {
···
if err := cr.UnreadByte(); err != nil {
return err
}
-t.Environment[i] = new(Pipeline_Workflow_Environment_Elem)
if err := t.Environment[i].UnmarshalCBOR(cr); err != nil {
return xerrors.Errorf("unmarshaling t.Environment[i] pointer: %w", err)
}
···
}
}
-// t.Dependencies ([]tangled.Pipeline_Dependencies_Elem) (slice)
case "dependencies":
maj, extra, err = cr.ReadHeader()
···
}
if extra > 0 {
-t.Dependencies = make([]Pipeline_Dependencies_Elem, extra)
}
for i := 0; i < int(extra); i++ {
···
{
-if err := t.Dependencies[i].UnmarshalCBOR(cr); err != nil {
-return xerrors.Errorf("unmarshaling t.Dependencies[i]: %w", err)
}
}
···
return nil
}
-func (t *Pipeline_Workflow_Environment_Elem) MarshalCBOR(w io.Writer) error {
-if t == nil {
-_, err := w.Write(cbg.CborNull)
-return err
-}
-
-cw := cbg.NewCborWriter(w)
-
-if _, err := cw.Write([]byte{162}); err != nil {
-return err
-}
-
-// t.Key (string) (string)
-if len("key") > 1000000 {
-return xerrors.Errorf("Value in field \"key\" was too long")
-}
-
-if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("key"))); err != nil {
-return err
-}
-if _, err := cw.WriteString(string("key")); err != nil {
-return err
-}
-
-if len(t.Key) > 1000000 {
-return xerrors.Errorf("Value in field t.Key was too long")
-}
-
-if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(t.Key))); err != nil {
-return err
-}
-if _, err := cw.WriteString(string(t.Key)); err != nil {
-return err
-}
-
-// t.Value (string) (string)
-if len("value") > 1000000 {
-return xerrors.Errorf("Value in field \"value\" was too long")
-}
-
-if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len("value"))); err != nil {
-return err
-}
-if _, err := cw.WriteString(string("value")); err != nil {
-return err
-}
-
-if len(t.Value) > 1000000 {
-return xerrors.Errorf("Value in field t.Value was too long")
-}
-
-if err := cw.WriteMajorTypeHeader(cbg.MajTextString, uint64(len(t.Value))); err != nil {
-return err
-}
-if _, err := cw.WriteString(string(t.Value)); err != nil {
-return err
-}
-return nil
-}
-
-func (t *Pipeline_Workflow_Environment_Elem) UnmarshalCBOR(r io.Reader) (err error) {
-*t = Pipeline_Workflow_Environment_Elem{}
-
-cr := cbg.NewCborReader(r)
-
-maj, extra, err := cr.ReadHeader()
-if err != nil {
-return err
-}
-defer func() {
-if err == io.EOF {
-err = io.ErrUnexpectedEOF
-}
-}()
-
-if maj != cbg.MajMap {
-return fmt.Errorf("cbor input should be of type map")
-}
-
-if extra > cbg.MaxLength {
-return fmt.Errorf("Pipeline_Workflow_Environment_Elem: map struct too large (%d)", extra)
-}
-
-n := extra
-
-nameBuf := make([]byte, 5)
-for i := uint64(0); i < n; i++ {
-nameLen, ok, err := cbg.ReadFullStringIntoBuf(cr, nameBuf, 1000000)
-if err != nil {
-return err
-}
-
-if !ok {
-// Field doesn't exist on this type, so ignore it
-if err := cbg.ScanForLinks(cr, func(cid.Cid) {}); err != nil {
-return err
-}
-continue
-}
-
-switch string(nameBuf[:nameLen]) {
-// t.Key (string) (string)
-case "key":
-
-{
-sval, err := cbg.ReadStringWithMax(cr, 1000000)
-if err != nil {
-return err
-}
-
-t.Key = string(sval)
-}
-// t.Value (string) (string)
-case "value":
-
-{
-sval, err := cbg.ReadStringWithMax(cr, 1000000)
-if err != nil {
-return err
-}
-
-t.Value = string(sval)
-}
-
-default:
-// Field doesn't exist on this type, so ignore it
-if err := cbg.ScanForLinks(r, func(cid.Cid) {}); err != nil {
-return err
-}
-}
-}
-
-return nil
-}
func (t *PublicKey) MarshalCBOR(w io.Writer) error {
if t == nil {
_, err := w.Write(cbg.CborNull)
···
return nil
}
+func (t *Pipeline_Dependency) MarshalCBOR(w io.Writer) error {
if t == nil {
_, err := w.Write(cbg.CborNull)
return err
···
return nil
}
+func (t *Pipeline_Dependency) UnmarshalCBOR(r io.Reader) (err error) {
+*t = Pipeline_Dependency{}
cr := cbg.NewCborReader(r)
···
}
if extra > cbg.MaxLength {
+return fmt.Errorf("Pipeline_Dependency: map struct too large (%d)", extra)
}
n := extra
···
return err
}
+// t.Inputs ([]*tangled.Pipeline_Pair) (slice)
if t.Inputs != nil {
if len("inputs") > 1000000 {
···
}
switch string(nameBuf[:nameLen]) {
+// t.Inputs ([]*tangled.Pipeline_Pair) (slice)
case "inputs":
maj, extra, err = cr.ReadHeader()
···
}
if extra > 0 {
+t.Inputs = make([]*Pipeline_Pair, extra)
}
for i := 0; i < int(extra); i++ {
···
if err := cr.UnreadByte(); err != nil {
return err
}
+t.Inputs[i] = new(Pipeline_Pair)
if err := t.Inputs[i].UnmarshalCBOR(cr); err != nil {
return xerrors.Errorf("unmarshaling t.Inputs[i] pointer: %w", err)
}
···
return nil
}
+func (t *Pipeline_Pair) MarshalCBOR(w io.Writer) error {
if t == nil {
_, err := w.Write(cbg.CborNull)
return err
···
return nil
}
+func (t *Pipeline_Pair) UnmarshalCBOR(r io.Reader) (err error) {
+*t = Pipeline_Pair{}
cr := cbg.NewCborReader(r)
···
}
if extra > cbg.MaxLength {
+return fmt.Errorf("Pipeline_Pair: map struct too large (%d)", extra)
}
n := extra
···
return nil
}
func (t *PipelineStatus) MarshalCBOR(w io.Writer) error {
if t == nil {
_, err := w.Write(cbg.CborNull)
···
return nil
}
func (t *Pipeline_Step) MarshalCBOR(w io.Writer) error {
if t == nil {
_, err := w.Write(cbg.CborNull)
···
return err
}
+// t.Environment ([]*tangled.Pipeline_Pair) (slice)
if t.Environment != nil {
if len("environment") > 1000000 {
···
t.Command = string(sval)
}
+// t.Environment ([]*tangled.Pipeline_Pair) (slice)
case "environment":
maj, extra, err = cr.ReadHeader()
···
}
if extra > 0 {
+t.Environment = make([]*Pipeline_Pair, extra)
}
for i := 0; i < int(extra); i++ {
···
if err := cr.UnreadByte(); err != nil {
return err
}
+t.Environment[i] = new(Pipeline_Pair)
if err := t.Environment[i].UnmarshalCBOR(cr); err != nil {
return xerrors.Errorf("unmarshaling t.Environment[i] pointer: %w", err)
}
···
}
+// t.Environment ([]*tangled.Pipeline_Pair) (slice)
if len("environment") > 1000000 {
return xerrors.Errorf("Value in field \"environment\" was too long")
}
···
}
+// t.Dependencies ([]*tangled.Pipeline_Dependency) (slice)
if len("dependencies") > 1000000 {
return xerrors.Errorf("Value in field \"dependencies\" was too long")
}
···
}
}
+// t.Environment ([]*tangled.Pipeline_Pair) (slice)
case "environment":
maj, extra, err = cr.ReadHeader()
···
}
if extra > 0 {
+t.Environment = make([]*Pipeline_Pair, extra)
}
for i := 0; i < int(extra); i++ {
···
if err := cr.UnreadByte(); err != nil {
return err
}
+t.Environment[i] = new(Pipeline_Pair)
if err := t.Environment[i].UnmarshalCBOR(cr); err != nil {
return xerrors.Errorf("unmarshaling t.Environment[i] pointer: %w", err)
}
···
}
}
+// t.Dependencies ([]*tangled.Pipeline_Dependency) (slice)
case "dependencies":
maj, extra, err = cr.ReadHeader()
···
}
if extra > 0 {
+t.Dependencies = make([]*Pipeline_Dependency, extra)
}
for i := 0; i < int(extra); i++ {
···
{
+b, err := cr.ReadByte()
+if err != nil {
+return err
+}
+if b != cbg.CborNull[0] {
+if err := cr.UnreadByte(); err != nil {
+return err
+}
+t.Dependencies[i] = new(Pipeline_Dependency)
+if err := t.Dependencies[i].UnmarshalCBOR(cr); err != nil {
+return xerrors.Errorf("unmarshaling t.Dependencies[i] pointer: %w", err)
+}
}
}
···
return nil
}
func (t *PublicKey) MarshalCBOR(w io.Writer) error {
if t == nil {
_, err := w.Write(cbg.CborNull)
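The regenerated marshallers keep cbor-gen's map encoding: a pair is still a two-entry CBOR map with "key"/"value" fields, as the generated code above shows. A minimal round-trip sketch with the new types (the import path is an assumption; adjust it to this module's real path):

```go
package main

import (
	"bytes"
	"fmt"

	"tangled.sh/tangled.sh/core/api/tangled" // assumed import path for this repo's api package
)

func main() {
	in := tangled.Pipeline_Pair{Key: "CGO_ENABLED", Value: "0"}

	// MarshalCBOR/UnmarshalCBOR are the cbor-gen generated methods shown in this diff.
	var buf bytes.Buffer
	if err := in.MarshalCBOR(&buf); err != nil {
		panic(err)
	}

	var out tangled.Pipeline_Pair
	if err := out.UnmarshalCBOR(&buf); err != nil {
		panic(err)
	}
	fmt.Println(out.Key, out.Value) // CGO_ENABLED 0
}
```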
+13 -21
api/tangled/tangledpipeline.go
···
Submodules bool `json:"submodules" cborgen:"submodules"`
}
-type Pipeline_Dependencies_Elem struct {
Packages []string `json:"packages" cborgen:"packages"`
Registry string `json:"registry" cborgen:"registry"`
}
// Pipeline_ManualTriggerData is a "manualTriggerData" in the sh.tangled.pipeline schema.
type Pipeline_ManualTriggerData struct {
-Inputs []*Pipeline_ManualTriggerData_Inputs_Elem `json:"inputs,omitempty" cborgen:"inputs,omitempty"`
}
-type Pipeline_ManualTriggerData_Inputs_Elem struct {
Key string `json:"key" cborgen:"key"`
Value string `json:"value" cborgen:"value"`
}
···
// Pipeline_Step is a "step" in the sh.tangled.pipeline schema.
type Pipeline_Step struct {
-Command string `json:"command" cborgen:"command"`
-Environment []*Pipeline_Step_Environment_Elem `json:"environment,omitempty" cborgen:"environment,omitempty"`
-Name string `json:"name" cborgen:"name"`
-}
-
-type Pipeline_Step_Environment_Elem struct {
-Key string `json:"key" cborgen:"key"`
-Value string `json:"value" cborgen:"value"`
}
// Pipeline_TriggerMetadata is a "triggerMetadata" in the sh.tangled.pipeline schema.
···
// Pipeline_Workflow is a "workflow" in the sh.tangled.pipeline schema.
type Pipeline_Workflow struct {
-Clone *Pipeline_CloneOpts `json:"clone" cborgen:"clone"`
-Dependencies []Pipeline_Dependencies_Elem `json:"dependencies" cborgen:"dependencies"`
-Environment []*Pipeline_Workflow_Environment_Elem `json:"environment" cborgen:"environment"`
-Name string `json:"name" cborgen:"name"`
-Steps []*Pipeline_Step `json:"steps" cborgen:"steps"`
-}
-
-type Pipeline_Workflow_Environment_Elem struct {
-Key string `json:"key" cborgen:"key"`
-Value string `json:"value" cborgen:"value"`
}
···
Submodules bool `json:"submodules" cborgen:"submodules"`
}
+// Pipeline_Dependency is a "dependency" in the sh.tangled.pipeline schema.
+type Pipeline_Dependency struct {
Packages []string `json:"packages" cborgen:"packages"`
Registry string `json:"registry" cborgen:"registry"`
}
// Pipeline_ManualTriggerData is a "manualTriggerData" in the sh.tangled.pipeline schema.
type Pipeline_ManualTriggerData struct {
+Inputs []*Pipeline_Pair `json:"inputs,omitempty" cborgen:"inputs,omitempty"`
}
+// Pipeline_Pair is a "pair" in the sh.tangled.pipeline schema.
+type Pipeline_Pair struct {
Key string `json:"key" cborgen:"key"`
Value string `json:"value" cborgen:"value"`
}
···
// Pipeline_Step is a "step" in the sh.tangled.pipeline schema.
type Pipeline_Step struct {
+Command string `json:"command" cborgen:"command"`
+Environment []*Pipeline_Pair `json:"environment,omitempty" cborgen:"environment,omitempty"`
+Name string `json:"name" cborgen:"name"`
}
// Pipeline_TriggerMetadata is a "triggerMetadata" in the sh.tangled.pipeline schema.
···
// Pipeline_Workflow is a "workflow" in the sh.tangled.pipeline schema.
type Pipeline_Workflow struct {
+Clone *Pipeline_CloneOpts `json:"clone" cborgen:"clone"`
+Dependencies []*Pipeline_Dependency `json:"dependencies" cborgen:"dependencies"`
+Environment []*Pipeline_Pair `json:"environment" cborgen:"environment"`
+Name string `json:"name" cborgen:"name"`
+Steps []*Pipeline_Step `json:"steps" cborgen:"steps"`
}
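For reference, the regenerated structs now compose like this: the shared Pipeline_Pair replaces the per-struct *_Environment_Elem and *_Inputs_Elem types, and workflow dependencies are pointers to the named Pipeline_Dependency record. A sketch using only fields visible in this diff (import path assumed, values illustrative):

```go
package example

import "tangled.sh/tangled.sh/core/api/tangled" // assumed import path

// exampleWorkflow builds a Pipeline_Workflow with the flattened lexicon types.
func exampleWorkflow() tangled.Pipeline_Workflow {
	return tangled.Pipeline_Workflow{
		Name:  "build",
		Clone: &tangled.Pipeline_CloneOpts{Submodules: true},
		Dependencies: []*tangled.Pipeline_Dependency{
			{Registry: "nixpkgs", Packages: []string{"go", "git"}},
		},
		Environment: []*tangled.Pipeline_Pair{
			{Key: "CGO_ENABLED", Value: "0"},
		},
		Steps: []*tangled.Pipeline_Step{
			{Name: "test", Command: "go test ./..."},
		},
	}
}
```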
+2 -4
cmd/gen.go
···
tangled.KnotMember{},
tangled.Pipeline{},
tangled.Pipeline_CloneOpts{},
-tangled.Pipeline_Dependencies_Elem{},
tangled.Pipeline_ManualTriggerData{},
-tangled.Pipeline_ManualTriggerData_Inputs_Elem{},
tangled.Pipeline_PullRequestTriggerData{},
tangled.Pipeline_PushTriggerData{},
-tangled.Pipeline_Step_Environment_Elem{},
tangled.PipelineStatus{},
tangled.Pipeline_Step{},
tangled.Pipeline_TriggerMetadata{},
tangled.Pipeline_TriggerRepo{},
tangled.Pipeline_Workflow{},
-tangled.Pipeline_Workflow_Environment_Elem{},
tangled.PublicKey{},
tangled.Repo{},
tangled.RepoArtifact{},
···
tangled.KnotMember{},
tangled.Pipeline{},
tangled.Pipeline_CloneOpts{},
+tangled.Pipeline_Dependency{},
tangled.Pipeline_ManualTriggerData{},
+tangled.Pipeline_Pair{},
tangled.Pipeline_PullRequestTriggerData{},
tangled.Pipeline_PushTriggerData{},
tangled.PipelineStatus{},
tangled.Pipeline_Step{},
tangled.Pipeline_TriggerMetadata{},
tangled.Pipeline_TriggerRepo{},
tangled.Pipeline_Workflow{},
tangled.PublicKey{},
tangled.Repo{},
tangled.RepoArtifact{},
+9 -5
spindle/models/pipeline.go
···
return &Pipeline{Workflows: workflows}
}
-func workflowEnvToMap(envs []*tangled.Pipeline_Workflow_Environment_Elem) map[string]string {
envMap := map[string]string{}
for _, env := range envs {
-envMap[env.Key] = env.Value
}
return envMap
}
-func stepEnvToMap(envs []*tangled.Pipeline_Step_Environment_Elem) map[string]string {
envMap := map[string]string{}
for _, env := range envs {
-envMap[env.Key] = env.Value
}
return envMap
}
-func workflowImage(deps []tangled.Pipeline_Dependencies_Elem, nixery string) string {
var dependencies string
for _, d := range deps {
if d.Registry == "nixpkgs" {
···
return &Pipeline{Workflows: workflows}
}
+func workflowEnvToMap(envs []*tangled.Pipeline_Pair) map[string]string {
envMap := map[string]string{}
for _, env := range envs {
+if env != nil {
+envMap[env.Key] = env.Value
+}
}
return envMap
}
+func stepEnvToMap(envs []*tangled.Pipeline_Pair) map[string]string {
envMap := map[string]string{}
for _, env := range envs {
+if env != nil {
+envMap[env.Key] = env.Value
+}
}
return envMap
}
+func workflowImage(deps []*tangled.Pipeline_Dependency, nixery string) string {
var dependencies string
for _, d := range deps {
if d.Registry == "nixpkgs" {
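Since the environment slices now hold pointers, the helpers guard against nil entries before dereferencing. A sketch of the equivalent logic (pairsToMap is a hypothetical stand-in for the unexported workflowEnvToMap/stepEnvToMap helpers above; import path assumed):

```go
package example

import "tangled.sh/tangled.sh/core/api/tangled" // assumed import path

// pairsToMap mirrors the nil-skipping loop above: nil entries in a
// []*tangled.Pipeline_Pair slice are ignored rather than dereferenced.
func pairsToMap(pairs []*tangled.Pipeline_Pair) map[string]string {
	m := map[string]string{}
	for _, p := range pairs {
		if p != nil {
			m[p.Key] = p.Value
		}
	}
	return m
}
```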
+2 -2
workflow/compile.go
···
Name: s.Name,
}
for k, v := range s.Environment {
-e := &tangled.Pipeline_Step_Environment_Elem{
Key: k,
Value: v,
}
···
cw.Steps = append(cw.Steps, &step)
}
for k, v := range w.Environment {
-e := &tangled.Pipeline_Workflow_Environment_Elem{
Key: k,
Value: v,
}
···
Name: s.Name,
}
for k, v := range s.Environment {
+e := &tangled.Pipeline_Pair{
Key: k,
Value: v,
}
···
cw.Steps = append(cw.Steps, &step)
}
for k, v := range w.Environment {
+e := &tangled.Pipeline_Pair{
Key: k,
Value: v,
}
+3 -3
workflow/def.go
···
}
// conversion utilities to atproto records
-func (d Dependencies) AsRecord() []tangled.Pipeline_Dependencies_Elem {
-var deps []tangled.Pipeline_Dependencies_Elem
for registry, packages := range d {
-deps = append(deps, tangled.Pipeline_Dependencies_Elem{
Registry: registry,
Packages: packages,
})
···
}
// conversion utilities to atproto records
+func (d Dependencies) AsRecord() []*tangled.Pipeline_Dependency {
+var deps []*tangled.Pipeline_Dependency
for registry, packages := range d {
+deps = append(deps, &tangled.Pipeline_Dependency{
Registry: registry,
Packages: packages,
})