Frequently Asked Questions (FAQ)

Questions

How to create an entity from a struct T?
How to create a struct (or a mutation) level validator?
How to write an audit-log extension?
How to write custom predicates?
How to add custom predicates to the codegen assets?
How to define a network address field in PostgreSQL?
How to customize time fields to type DATETIME in MySQL?
How to use a custom generator of IDs?
How to use a custom XID globally unique ID?
How to define a spatial data type field in MySQL?
How to extend the generated models?
How to extend the generated builders?
How to store Protobuf objects in a BLOB column?
How to add CHECK constraints to a table?
How to define a custom precision numeric field?
How to configure two or more databases to separate reads and writes?
How to change the character set and/or collation of a MySQL table?
How to configure json.Marshal to inline the edges keys in the top-level object?

Answers

How to create an entity from a struct T?

The different builders don’t support setting the entity fields (or edges) from a given struct T. The reason is that there’s no way to distinguish between zero and real values when updating the database (for example, &ent.T{Age: 0, Name: ""}). Setting these values may write incorrect values to the database or update unnecessary columns.

However, the external template option lets you extend the default code-generation assets by adding custom logic. For example, the following template generates a method on each create-builder that accepts a struct as input and configures the builder accordingly:

    {{ range $n := $.Nodes }}
        {{ $builder := $n.CreateName }}
        {{ $receiver := receiver $builder }}
        func ({{ $receiver }} *{{ $builder }}) Set{{ $n.Name }}(input *{{ $n.Name }}) *{{ $builder }} {
            {{- range $f := $n.Fields }}
                {{- $setter := print "Set" $f.StructField }}
                {{ $receiver }}.{{ $setter }}(input.{{ $f.StructField }})
            {{- end }}
            return {{ $receiver }}
        }
    {{ end }}
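
To hook such a template into code generation, one option is to load it from an external file through entc; a minimal sketch, assuming the template above is saved as templates/setstruct.tmpl (the path and file name are illustrative):

    //go:build ignore

    package main

    import (
        "log"

        "entgo.io/ent/entc"
        "entgo.io/ent/entc/gen"
    )

    func main() {
        // Load the external template in addition to the default codegen assets.
        opts := []entc.Option{
            entc.TemplateFiles("templates/setstruct.tmpl"),
        }
        if err := entc.Generate("./schema", &gen.Config{}, opts...); err != nil {
            log.Fatalf("running ent codegen: %v", err)
        }
    }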

How to create a struct (or a mutation) level validator?

In order to implement a mutation-level validator, you can either use schema hooks for validating changes applied to a single entity type, or use transaction hooks for validating mutations that are applied to multiple entity types (e.g. a GraphQL mutation). For example:

    // VersionHook is a dummy example of a hook that validates that the "version" field
    // is incremented by 1 on each update. Note that this is just a dummy example, and
    // it doesn't promise consistency in the database.
    func VersionHook() ent.Hook {
        type OldSetVersion interface {
            SetVersion(int)
            Version() (int, bool)
            OldVersion(context.Context) (int, error)
        }
        return func(next ent.Mutator) ent.Mutator {
            return ent.MutateFunc(func(ctx context.Context, m ent.Mutation) (ent.Value, error) {
                ver, ok := m.(OldSetVersion)
                if !ok {
                    return next.Mutate(ctx, m)
                }
                oldV, err := ver.OldVersion(ctx)
                if err != nil {
                    return nil, err
                }
                curV, exists := ver.Version()
                if !exists {
                    return nil, fmt.Errorf("version field is required in update mutation")
                }
                if curV != oldV+1 {
                    return nil, fmt.Errorf("version field must be incremented by 1")
                }
                // Add an SQL predicate that validates the "version" column is equal
                // to "oldV" (ensure it wasn't changed during the mutation by others).
                return next.Mutate(ctx, m)
            })
        }
    }
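
To activate the validator, register it in the schema's Hooks method and scope it to update operations; a minimal sketch (the Card schema name is illustrative):

    // Hooks of the Card.
    func (Card) Hooks() []ent.Hook {
        return []ent.Hook{
            // Run the version validator on update operations only.
            hook.On(VersionHook(), ent.OpUpdate|ent.OpUpdateOne),
        }
    }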

How to write an audit-log extension?

The preferred way to write such an extension is to use an ent.Mixin. Use the Fields option to define the fields that are shared between all schemas that import the mixin, and use the Hooks option to attach a mutation hook to all mutations that are applied on these schemas. Here’s an example, based on a discussion in the repository issue-tracker:

    // AuditMixin implements the ent.Mixin for sharing
    // audit-log capabilities with package schemas.
    type AuditMixin struct {
        mixin.Schema
    }

    // Fields of the AuditMixin.
    func (AuditMixin) Fields() []ent.Field {
        return []ent.Field{
            field.Time("created_at").
                Immutable().
                Default(time.Now),
            field.Int("created_by").
                Optional(),
            field.Time("updated_at").
                Default(time.Now).
                UpdateDefault(time.Now),
            field.Int("updated_by").
                Optional(),
        }
    }

    // Hooks of the AuditMixin.
    func (AuditMixin) Hooks() []ent.Hook {
        return []ent.Hook{
            hooks.AuditHook,
        }
    }

    // AuditHook is an example of an audit-log hook.
    func AuditHook(next ent.Mutator) ent.Mutator {
        // AuditLogger wraps the methods that are shared between all mutations of
        // schemas that embed the AuditLog mixin. The variable "exists" is true, if
        // the field already exists in the mutation (e.g. was set by a different hook).
        type AuditLogger interface {
            SetCreatedAt(time.Time)
            CreatedAt() (value time.Time, exists bool)
            SetCreatedBy(int)
            CreatedBy() (id int, exists bool)
            SetUpdatedAt(time.Time)
            UpdatedAt() (value time.Time, exists bool)
            SetUpdatedBy(int)
            UpdatedBy() (id int, exists bool)
        }
        return ent.MutateFunc(func(ctx context.Context, m ent.Mutation) (ent.Value, error) {
            ml, ok := m.(AuditLogger)
            if !ok {
                return nil, fmt.Errorf("unexpected audit-log call from mutation type %T", m)
            }
            usr, err := viewer.UserFromContext(ctx)
            if err != nil {
                return nil, err
            }
            switch op := m.Op(); {
            case op.Is(ent.OpCreate):
                ml.SetCreatedAt(time.Now())
                if _, exists := ml.CreatedBy(); !exists {
                    ml.SetCreatedBy(usr.ID)
                }
            case op.Is(ent.OpUpdateOne | ent.OpUpdate):
                ml.SetUpdatedAt(time.Now())
                if _, exists := ml.UpdatedBy(); !exists {
                    ml.SetUpdatedBy(usr.ID)
                }
            }
            return next.Mutate(ctx, m)
        })
    }
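
With the mixin in place, any schema can opt in to the audit-log fields and hook by embedding it; a minimal sketch (the User schema is illustrative):

    // User holds the schema definition for the User entity.
    type User struct {
        ent.Schema
    }

    // Mixin of the User.
    func (User) Mixin() []ent.Mixin {
        return []ent.Mixin{
            // Embed the AuditMixin to get the created_at/by and updated_at/by
            // fields, and the audit hook, in this schema.
            AuditMixin{},
        }
    }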

How to write custom predicates?

Users can provide custom predicates to apply on the query before it’s executed. For example:

    pets := client.Pet.
        Query().
        Where(predicate.Pet(func(s *sql.Selector) {
            s.Where(sql.InInts(pet.OwnerColumn, 1, 2, 3))
        })).
        AllX(ctx)

    users := client.User.
        Query().
        Where(predicate.User(func(s *sql.Selector) {
            s.Where(sqljson.ValueContains(user.FieldTags, "tag"))
        })).
        AllX(ctx)

For more examples, go to the predicates page, or search the repository issue-tracker for more advanced examples, like issue-842.

How to add custom predicates to the codegen assets?

The template option enables extending or overriding the default codegen assets. In order to generate a type-safe predicate for the example above, use the template option as follows:

    {{/* A template that adds the "<F>Glob" predicate for all string fields. */}}
    {{ define "where/additional/strings" }}
        {{ range $f := $.Fields }}
            {{ if $f.IsString }}
                {{ $func := print $f.StructField "Glob" }}
                // {{ $func }} applies the Glob predicate on the {{ quote $f.Name }} field.
                func {{ $func }}(pattern string) predicate.{{ $.Name }} {
                    return predicate.{{ $.Name }}(func(s *sql.Selector) {
                        s.Where(sql.P(func(b *sql.Builder) {
                            b.Ident(s.C({{ $f.Constant }})).WriteString(" glob ").Arg(pattern)
                        }))
                    })
                }
            {{ end }}
        {{ end }}
    {{ end }}
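
After regenerating the code, the new predicate can be used like any other generated one; a usage sketch assuming a User entity with a string "name" field:

    // NameGlob is produced by the template above for every string field.
    users, err := client.User.
        Query().
        Where(user.NameGlob("a*")).
        All(ctx)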

How to define a network address field in PostgreSQL?

The GoType and the SchemaType options allow users to define database-specific fields. For example, in order to define a macaddr field, use the following configuration:

    func (T) Fields() []ent.Field {
        return []ent.Field{
            field.String("mac").
                GoType(&MAC{}).
                SchemaType(map[string]string{
                    dialect.Postgres: "macaddr",
                }).
                Validate(func(s string) error {
                    _, err := net.ParseMAC(s)
                    return err
                }),
        }
    }

    // MAC represents a physical hardware address.
    type MAC struct {
        net.HardwareAddr
    }

    // Scan implements the Scanner interface.
    func (m *MAC) Scan(value any) (err error) {
        switch v := value.(type) {
        case nil:
        case []byte:
            m.HardwareAddr, err = net.ParseMAC(string(v))
        case string:
            m.HardwareAddr, err = net.ParseMAC(v)
        default:
            err = fmt.Errorf("unexpected type %T", v)
        }
        return
    }

    // Value implements the driver Valuer interface.
    func (m MAC) Value() (driver.Value, error) {
        return m.HardwareAddr.String(), nil
    }

Note that if the database doesn’t support the macaddr type (e.g. SQLite during testing), the field falls back to its native type (i.e. string).

inet example:

    func (T) Fields() []ent.Field {
        return []ent.Field{
            field.String("ip").
                GoType(&Inet{}).
                SchemaType(map[string]string{
                    dialect.Postgres: "inet",
                }).
                Validate(func(s string) error {
                    if net.ParseIP(s) == nil {
                        return fmt.Errorf("invalid value for ip %q", s)
                    }
                    return nil
                }),
        }
    }

    // Inet represents a single IP address.
    type Inet struct {
        net.IP
    }

    // Scan implements the Scanner interface.
    func (i *Inet) Scan(value any) (err error) {
        switch v := value.(type) {
        case nil:
        case []byte:
            if i.IP = net.ParseIP(string(v)); i.IP == nil {
                err = fmt.Errorf("invalid value for ip %q", v)
            }
        case string:
            if i.IP = net.ParseIP(v); i.IP == nil {
                err = fmt.Errorf("invalid value for ip %q", v)
            }
        default:
            err = fmt.Errorf("unexpected type %T", v)
        }
        return
    }

    // Value implements the driver Valuer interface.
    func (i Inet) Value() (driver.Value, error) {
        return i.IP.String(), nil
    }
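
For illustration, the custom Go type is passed directly to the generated setter; a sketch assuming the macaddr schema above is named T and that the setter for the "mac" field is generated as SetMac (the exact name depends on Ent's field-name mapping):

    hw, err := net.ParseMAC("00:1b:44:11:3a:b7")
    if err != nil {
        log.Fatal(err)
    }
    // The setter accepts the custom GoType (*MAC) declared on the field.
    t, err := client.T.
        Create().
        SetMac(&MAC{HardwareAddr: hw}).
        Save(ctx)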

How to customize time fields to type DATETIME in MySQL?

Time fields use the MySQL TIMESTAMP type in schema creation by default, and this type is limited to the range ‘1970-01-01 00:00:01’ UTC to ‘2038-01-19 03:14:07’ UTC (see the MySQL docs).

In order to customize time fields for a wider range, use the MySQL DATETIME as follows:

    field.Time("birth_date").
        Optional().
        SchemaType(map[string]string{
            dialect.MySQL: "datetime",
        }),

How to use a custom generator of IDs?

If you’re using a custom ID generator instead of auto-incrementing IDs in your database (e.g. Twitter’s Snowflake), you will need to write a custom ID field that automatically calls the generator on resource creation.

To achieve this, you can either make use of DefaultFunc or of schema hooks, depending on your use case. If the generator does not return an error, DefaultFunc is more concise, whereas setting a hook on resource creation also allows you to capture errors. An example of how to use DefaultFunc can be seen in the section regarding the ID field.
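
For instance, a minimal DefaultFunc sketch, assuming your Ent version exposes DefaultFunc on numeric field builders; idgen.Next is a placeholder for any error-free generator of your choice:

    // Fields of the User.
    func (User) Fields() []ent.Field {
        return []ent.Field{
            field.Uint64("id").
                // idgen.Next is a placeholder for an error-free ID generator.
                DefaultFunc(func() uint64 { return idgen.Next() }),
        }
    }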

Here is an example of how to use a custom generator with hooks, using sonyflake.

    // BaseMixin to be shared with all different schemas.
    type BaseMixin struct {
        mixin.Schema
    }

    // Fields of the Mixin.
    func (BaseMixin) Fields() []ent.Field {
        return []ent.Field{
            field.Uint64("id"),
        }
    }

    // Hooks of the Mixin.
    func (BaseMixin) Hooks() []ent.Hook {
        return []ent.Hook{
            hook.On(IDHook(), ent.OpCreate),
        }
    }

    func IDHook() ent.Hook {
        sf := sonyflake.NewSonyflake(sonyflake.Settings{})
        type IDSetter interface {
            SetID(uint64)
        }
        return func(next ent.Mutator) ent.Mutator {
            return ent.MutateFunc(func(ctx context.Context, m ent.Mutation) (ent.Value, error) {
                is, ok := m.(IDSetter)
                if !ok {
                    return nil, fmt.Errorf("unexpected mutation %T", m)
                }
                id, err := sf.NextID()
                if err != nil {
                    return nil, err
                }
                is.SetID(id)
                return next.Mutate(ctx, m)
            })
        }
    }

    // User holds the schema definition for the User entity.
    type User struct {
        ent.Schema
    }

    // Mixin of the User.
    func (User) Mixin() []ent.Mixin {
        return []ent.Mixin{
            // Embed the BaseMixin in the user schema.
            BaseMixin{},
        }
    }
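
Since the hook is registered on ent.OpCreate, every create call receives a generated ID transparently; a minimal usage sketch:

    // IDHook assigns a sonyflake ID before the mutation is executed,
    // so no explicit SetID call is needed here.
    u, err := client.User.Create().Save(ctx)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(u.ID)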

How to use a custom XID globally unique ID?

Package xid is a globally unique ID generator library that uses the Mongo Object ID algorithm to generate a 12-byte, 20-character ID with no configuration. The xid package implements the database/sql sql.Scanner and driver.Valuer interfaces required by Ent for serialization.

To store an XID in any string field, use the GoType schema configuration:

    // Fields of type T.
    func (T) Fields() []ent.Field {
        return []ent.Field{
            field.String("id").
                GoType(xid.ID{}).
                DefaultFunc(xid.New),
        }
    }

Or as a reusable Mixin across multiple schemas:

    package schema

    import (
        "entgo.io/ent"
        "entgo.io/ent/schema/field"
        "entgo.io/ent/schema/mixin"
        "github.com/rs/xid"
    )

    // BaseMixin to be shared with all different schemas.
    type BaseMixin struct {
        mixin.Schema
    }

    // Fields of the BaseMixin.
    func (BaseMixin) Fields() []ent.Field {
        return []ent.Field{
            field.String("id").
                GoType(xid.ID{}).
                DefaultFunc(xid.New),
        }
    }

    // User holds the schema definition for the User entity.
    type User struct {
        ent.Schema
    }

    // Mixin of the User.
    func (User) Mixin() []ent.Mixin {
        return []ent.Mixin{
            // Embed the BaseMixin in the user schema.
            BaseMixin{},
        }
    }

In order to use extended identifiers (XIDs) with gqlgen, follow the configuration mentioned in the issue tracker.

How to define a spatial data type field in MySQL?

The GoType and the SchemaType options allow users to define database-specific fields. For example, in order to define a POINT field, use the following configuration:

    // Fields of the Location.
    func (Location) Fields() []ent.Field {
        return []ent.Field{
            field.String("name"),
            field.Other("coords", &Point{}).
                SchemaType(Point{}.SchemaType()),
        }
    }

    package schema

    import (
        "database/sql/driver"
        "fmt"

        "entgo.io/ent/dialect"
        "entgo.io/ent/dialect/sql"
        "github.com/paulmach/orb"
        "github.com/paulmach/orb/encoding/wkb"
    )

    // A Point consists of (X,Y) or (Lat, Lon) coordinates
    // and it is stored in MySQL using the POINT spatial data type.
    type Point [2]float64

    // Scan implements the Scanner interface.
    func (p *Point) Scan(value any) error {
        bin, ok := value.([]byte)
        if !ok {
            return fmt.Errorf("invalid binary value for point")
        }
        var op orb.Point
        if err := wkb.Scanner(&op).Scan(bin[4:]); err != nil {
            return err
        }
        p[0], p[1] = op.X(), op.Y()
        return nil
    }

    // Value implements the driver Valuer interface.
    func (p Point) Value() (driver.Value, error) {
        op := orb.Point{p[0], p[1]}
        return wkb.Value(op).Value()
    }

    // FormatParam implements the sql.ParamFormatter interface to tell the SQL
    // builder that the placeholder for a Point parameter needs to be formatted.
    func (p Point) FormatParam(placeholder string, info *sql.StmtInfo) string {
        if info.Dialect == dialect.MySQL {
            return "ST_GeomFromWKB(" + placeholder + ")"
        }
        return placeholder
    }

    // SchemaType defines the schema-type of the Point object.
    func (Point) SchemaType() map[string]string {
        return map[string]string{
            dialect.MySQL: "POINT",
        }
    }

A full example exists in the example repository.
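
For illustration, creating and reading back a location goes through the Point type's Value and Scan methods transparently; a sketch assuming the generated setter for the "coords" field is named SetCoords:

    // The coordinates below are an arbitrary example value.
    loc, err := client.Location.
        Create().
        SetName("Tel Aviv").
        SetCoords(&Point{34.7818, 32.0853}).
        Save(ctx)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(loc.Coords)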

How to extend the generated models?

Ent supports extending generated types (both global types and models) using custom templates. For example, in order to add additional struct fields or methods to the generated model, we can override the model/fields/additional template like in this example.
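
A minimal sketch of such an override, adding an extra struct field to one specific model (the Card model and StaticField names are illustrative):

    {{ define "model/fields/additional" }}
        {{- /* Add a static field to the generated "Card" model only. */}}
        {{- if eq $.Name "Card" }}
            // StaticField defined by template.
            StaticField string `json:"static_field,omitempty"`
        {{- end }}
    {{ end }}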

If your custom fields/methods require additional imports, you can add those imports using custom templates as well:

    {{- define "import/additional/field_types" -}}
        "github.com/path/to/your/custom/type"
    {{- end -}}

    {{- define "import/additional/client_dependencies" -}}
        "github.com/path/to/your/custom/type"
    {{- end -}}

How to extend the generated builders?

See the Injecting External Dependencies section, or follow the example on GitHub.

How to store Protobuf objects in a BLOB column?

Assuming we have a Protobuf message defined:

    syntax = "proto3";

    package pb;

    option go_package = "project/pb";

    message Hi {
        string Greeting = 1;
    }

We add receiver methods to the generated protobuf struct so that it implements the ValueScanner interface:

    func (x *Hi) Value() (driver.Value, error) {
        return proto.Marshal(x)
    }

    func (x *Hi) Scan(src any) error {
        if src == nil {
            return nil
        }
        if b, ok := src.([]byte); ok {
            if err := proto.Unmarshal(b, x); err != nil {
                return err
            }
            return nil
        }
        return fmt.Errorf("unexpected type %T", src)
    }

We add a new field.Bytes to our schema, setting the generated protobuf struct as its underlying GoType:

    // Fields of the Message.
    func (Message) Fields() []ent.Field {
        return []ent.Field{
            field.Bytes("hi").
                GoType(&pb.Hi{}),
        }
    }

Test that it works:

    package main

    import (
        "context"
        "testing"

        "project/ent/enttest"
        "project/pb"

        _ "github.com/mattn/go-sqlite3"
        "github.com/stretchr/testify/require"
    )

    func TestMain(t *testing.T) {
        client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1")
        defer client.Close()
        msg := client.Message.Create().
            SetHi(&pb.Hi{
                Greeting: "hello",
            }).
            SaveX(context.TODO())
        ret := client.Message.GetX(context.TODO(), msg.ID)
        require.Equal(t, "hello", ret.Hi.Greeting)
    }

How to add CHECK constraints to a table?

The entsql.Annotation option allows adding custom CHECK constraints to the CREATE TABLE statement. In order to add CHECK constraints to your schema, use the following example:

    func (User) Annotations() []schema.Annotation {
        return []schema.Annotation{
            &entsql.Annotation{
                // The `Check` option allows adding an
                // unnamed CHECK constraint to table DDL.
                Check: "website <> 'entgo.io'",
                // The `Checks` option allows adding multiple CHECK constraints
                // to table creation. The keys are used as the constraint names.
                Checks: map[string]string{
                    "valid_nickname":  "nickname <> firstname",
                    "valid_firstname": "length(first_name) > 1",
                },
            },
        }
    }

How to define a custom precision numeric field?

Using GoType and SchemaType, it is possible to define custom precision numeric fields. For example, defining a field that uses big.Int:

    func (T) Fields() []ent.Field {
        return []ent.Field{
            field.Int("precise").
                GoType(new(BigInt)).
                SchemaType(map[string]string{
                    dialect.SQLite:   "numeric(78, 0)",
                    dialect.Postgres: "numeric(78, 0)",
                }),
        }
    }

    type BigInt struct {
        big.Int
    }

    func (b *BigInt) Scan(src any) error {
        var i sql.NullString
        if err := i.Scan(src); err != nil {
            return err
        }
        if !i.Valid {
            return nil
        }
        if _, ok := b.Int.SetString(i.String, 10); ok {
            return nil
        }
        return fmt.Errorf("could not scan type %T with value %v into BigInt", src, src)
    }

    func (b *BigInt) Value() (driver.Value, error) {
        return b.String(), nil
    }
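
A quick usage sketch, assuming the schema above is named T; the generated setter accepts the *BigInt GoType:

    v := new(BigInt)
    // 2^100, well beyond the range of int64.
    v.SetString("1267650600228229401496703205376", 10)
    row := client.T.Create().SetPrecise(v).SaveX(ctx)
    fmt.Println(row.Precise)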

How to configure two or more databases to separate reads and writes?

You can wrap the dialect.Driver with your own driver that implements this logic, as in the example below. You can extend it further, for example by adding support for multiple read replicas and some load-balancing logic.

    func main() {
        // ...
        wd, err := sql.Open(dialect.MySQL, "root:pass@tcp(<addr>)/<database>?parseTime=True")
        if err != nil {
            log.Fatal(err)
        }
        rd, err := sql.Open(dialect.MySQL, "readonly:pass@tcp(<addr>)/<database>?parseTime=True")
        if err != nil {
            log.Fatal(err)
        }
        client := ent.NewClient(ent.Driver(&multiDriver{w: wd, r: rd}))
        defer client.Close()
        // Use the client here.
    }

    type multiDriver struct {
        r, w dialect.Driver
    }

    var _ dialect.Driver = (*multiDriver)(nil)

    func (d *multiDriver) Query(ctx context.Context, query string, args, v any) error {
        e := d.r
        // Mutation statements that use the RETURNING clause are also executed
        // via Query; route them to the write driver.
        if ent.QueryFromContext(ctx) == nil {
            e = d.w
        }
        return e.Query(ctx, query, args, v)
    }

    func (d *multiDriver) Exec(ctx context.Context, query string, args, v any) error {
        return d.w.Exec(ctx, query, args, v)
    }

    func (d *multiDriver) Tx(ctx context.Context) (dialect.Tx, error) {
        return d.w.Tx(ctx)
    }

    func (d *multiDriver) BeginTx(ctx context.Context, opts *sql.TxOptions) (dialect.Tx, error) {
        return d.w.(interface {
            BeginTx(context.Context, *sql.TxOptions) (dialect.Tx, error)
        }).BeginTx(ctx, opts)
    }

    func (d *multiDriver) Close() error {
        rerr := d.r.Close()
        werr := d.w.Close()
        if rerr != nil {
            return rerr
        }
        if werr != nil {
            return werr
        }
        return nil
    }

    func (d *multiDriver) Dialect() string {
        return d.r.Dialect()
    }

How to change the character set and/or collation of a MySQL table?

By default, MySQL tables are created with the utf8mb4 character set and the utf8mb4_bin collation. However, if you’d like to change the schema’s character set and/or collation, you need to use an annotation.

Here’s an example where we set the character set to ascii and the collation to ascii_general_ci:

    // Annotations of the Entity.
    func (Entity) Annotations() []schema.Annotation {
        return []schema.Annotation{
            entsql.Annotation{
                Charset:   "ascii",
                Collation: "ascii_general_ci",
            },
        }
    }

How to configure json.Marshal to inline the edges keys in the top-level object?

To encode entities without the edges attribute, users can follow these two steps:

  1. Omit the default edges tag generated by Ent.
  2. Extend the generated models with a custom MarshalJSON method.

These two steps can be automated using codegen extensions, and a full working example is available under the examples/jsonencode directory.

ent/entc.go

    //go:build ignore
    // +build ignore

    package main

    import (
        "log"

        "entgo.io/ent/entc"
        "entgo.io/ent/entc/gen"
        "entgo.io/ent/schema/edge"
    )

    func main() {
        opts := []entc.Option{
            entc.Extensions(
                &EncodeExtension{},
            ),
        }
        err := entc.Generate("./schema", &gen.Config{}, opts...)
        if err != nil {
            log.Fatalf("running ent codegen: %v", err)
        }
    }

    // EncodeExtension is an implementation of entc.Extension that adds a MarshalJSON
    // method to each generated type <T> and inlines the Edges field to the top level JSON.
    type EncodeExtension struct {
        entc.DefaultExtension
    }

    // Templates of the extension.
    func (e *EncodeExtension) Templates() []*gen.Template {
        return []*gen.Template{
            gen.MustParse(gen.NewTemplate("model/additional/jsonencode").
                Parse(`
    {{ if $.Edges }}
        // MarshalJSON implements the json.Marshaler interface.
        func ({{ $.Receiver }} *{{ $.Name }}) MarshalJSON() ([]byte, error) {
            type Alias {{ $.Name }}
            return json.Marshal(&struct {
                *Alias
                {{ $.Name }}Edges
            }{
                Alias:             (*Alias)({{ $.Receiver }}),
                {{ $.Name }}Edges: {{ $.Receiver }}.Edges,
            })
        }
    {{ end }}
    `)),
        }
    }

    // Hooks of the extension.
    func (e *EncodeExtension) Hooks() []gen.Hook {
        return []gen.Hook{
            func(next gen.Generator) gen.Generator {
                return gen.GenerateFunc(func(g *gen.Graph) error {
                    tag := edge.Annotation{StructTag: `json:"-"`}
                    for _, n := range g.Nodes {
                        n.Annotations.Set(tag.Name(), tag)
                    }
                    return next.Generate(g)
                })
            },
        }
    }