diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..f3c0421 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,21 @@ +# EditorConfig is awesome: https://EditorConfig.org + +# top-most EditorConfig file +root = true + +[*] +indent_style = space +indent_size = 2 +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.go|*.go.tpl] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + + +[*.py] +indent_size = 4 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..379460e --- /dev/null +++ b/.gitignore @@ -0,0 +1,37 @@ +*.iml +*.o +*.a +*.so +_obj +_test + +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof + +.DS_Store +.idea/ +coverage.out + +/dist +/config.yaml +logs/*.log +logs/*.gz +__debug_bin +.idea +.venv +go.sum + +!.gitkeep +!.gitignore diff --git a/CONTRIBUTORS b/CONTRIBUTORS new file mode 100644 index 0000000..93722aa --- /dev/null +++ b/CONTRIBUTORS @@ -0,0 +1 @@ +yinheli diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..3bd152c --- /dev/null +++ b/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2018-2019 xinpianchang.com +Copyright (c) 2019 Tang Ye + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..7599ceb --- /dev/null +++ b/README.md @@ -0,0 +1,38 @@ +# xservice [WIP] + +Another excellent micro service framework + +## Features + +- RESTFull api (base echo/v4) +- gRPC & gRPC gateway service & swagger document generation +- Service discovery (with ETCD/v3) +- Embed toolset for code generation + +## Quick start + +Install toolset. + +```bash +go install github.com/xinpianchang/xservice/tools/xservice@latest +``` + +Create new project via toolset. + +```bash +mkdir hello +cd hello +xservice new --module github.com/example/hello +``` + +Open the generated `README.md` file, following the initialize steps, and happing coding. 
🎉 + +## Resource + +- go-zero https://github.com/tal-tech/go-zero (special thanks) +- micro https://github.com/asim/go-micro +- gRPC generate tool/buf https://buf.build/ +- gRPC validate https://github.com/envoyproxy/protoc-gen-validate +- RESTful validate https://github.com/go-playground/validator +- gRPC-Gateway https://grpc-ecosystem.github.io/grpc-gateway/ +- jaeger https://www.jaegertracing.io/ diff --git a/core/const.go b/core/const.go new file mode 100644 index 0000000..cdba4a9 --- /dev/null +++ b/core/const.go @@ -0,0 +1,24 @@ +package core + +type ContextKey string + +const ( + // context + ContextHeaderXRequestID ContextKey = "X-Request-ID" // requestId key + + DefaultServiceName = "xservice-default" + + // env key + EnvServiceName = "XSERVICE_NAME" + EnvServiceVersion = "XSERVICE_VERSION" + EnvEtcd = "XSERVICE_ETCD" + EnvEtcdUser = "XSERVICE_ETCD_USER" + EnvEtcdPassword = "XSERVICE_ETCD_PASSWORD" + + // config key + ConfigServiceAddr = "http.address" + ConfigServiceAdviceAddr = "http.advice_address" + + ServiceConfigKeyPrefix = "xservice/config" + ServiceRegisterKeyPrefix = "xservice/register" +) diff --git a/core/middleware/pprof.go b/core/middleware/pprof.go new file mode 100644 index 0000000..360ab30 --- /dev/null +++ b/core/middleware/pprof.go @@ -0,0 +1,14 @@ +package middleware + +import ( + "net/http" + _ "net/http/pprof" + + "github.com/labstack/echo/v4" +) + +func Pprof() echo.MiddlewareFunc { + return echo.WrapMiddleware(func(handler http.Handler) http.Handler { + return http.DefaultServeMux + }) +} diff --git a/core/middleware/prometheus.go b/core/middleware/prometheus.go new file mode 100644 index 0000000..4e0615b --- /dev/null +++ b/core/middleware/prometheus.go @@ -0,0 +1,39 @@ +package middleware + +import ( + "fmt" + "time" + + "github.com/labstack/echo/v4" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" +) + +func Prometheus(namespace, subsystem string) echo.MiddlewareFunc { + requests := promauto.NewCounterVec(prometheus.CounterOpts{ + Namespace: namespace, + Subsystem: subsystem, + Name: "requests_total", + Help: "Number of requests", + }, []string{"status", "method", "handler"}) + + durations := promauto.NewHistogramVec(prometheus.HistogramOpts{ + Namespace: namespace, + Subsystem: subsystem, + Name: "request_duration_millisecond", + Help: "Request duration", + Buckets: []float64{50, 100, 200, 300, 500, 1000, 2000, 3000, 5000}, + }, []string{"method", "handler"}) + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + method := c.Request().Method + path := c.Request().URL.Path + start := time.Now() + err := next(c) + durations.WithLabelValues(method, path).Observe(float64(time.Since(start).Milliseconds())) + requests.WithLabelValues(fmt.Sprint(c.Response().Status), method, path).Inc() + return err + } + } +} diff --git a/core/middleware/trace.go b/core/middleware/trace.go new file mode 100644 index 0000000..f7ea132 --- /dev/null +++ b/core/middleware/trace.go @@ -0,0 +1,16 @@ +package middleware + +import ( + "github.com/labstack/echo-contrib/jaegertracing" + "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" + "github.com/opentracing/opentracing-go" +) + +func Trace(bodyDump bool, skipper middleware.Skipper) echo.MiddlewareFunc { + return jaegertracing.TraceWithConfig(jaegertracing.TraceConfig{ + Tracer: opentracing.GlobalTracer(), + Skipper: skipper, + IsBodyDump: bodyDump, + }) +} diff --git a/core/xservice/client.go 
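For reference, the middleware defined above can also be mounted on a standalone echo instance, the same way the framework's server wires it up later in this diff. A minimal sketch; the `"myapp"`/`"api"` labels and the `:8080` address are placeholders:

```go
package main

import (
	"github.com/labstack/echo/v4"
	"github.com/prometheus/client_golang/prometheus/promhttp"

	"github.com/xinpianchang/xservice/core/middleware"
)

func main() {
	e := echo.New()

	// Request counters and latency histograms labeled by status/method/handler;
	// "myapp"/"api" are placeholder namespace and subsystem values.
	e.Use(middleware.Prometheus("myapp", "api"))

	// Jaeger tracing via the global opentracing tracer; body dump off,
	// nil skipper traces every request.
	e.Use(middleware.Trace(false, nil))

	// Expose the collected metrics and the net/http/pprof handlers.
	e.GET("/metrics", echo.WrapHandler(promhttp.Handler()))
	e.Group("/debug/*", middleware.Pprof())

	_ = e.Start(":8080")
}
```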
b/core/xservice/client.go new file mode 100644 index 0000000..d089294 --- /dev/null +++ b/core/xservice/client.go @@ -0,0 +1,56 @@ +package xservice + +import ( + "context" + "fmt" + "os" + + resolver "go.etcd.io/etcd/client/v3/naming/resolver" + "go.uber.org/zap" + "google.golang.org/grpc" + gresolver "google.golang.org/grpc/resolver" + + "github.com/xinpianchang/xservice/core" + "github.com/xinpianchang/xservice/pkg/log" +) + +type Client interface { + GrpcClientConn(ctx context.Context, service string, desc *grpc.ServiceDesc, endpoint ...string) (*grpc.ClientConn, error) +} + +type clientImpl struct { + options *Options + resolver gresolver.Builder +} + +func newClient(opts *Options) Client { + client := &clientImpl{ + options: opts, + } + + if os.Getenv(core.EnvEtcd) != "" { + cli, err := serviceEtcdClient() + if err != nil { + log.Fatal("etcd client", zap.Error(err)) + } + client.resolver, err = resolver.NewBuilder(cli) + if err != nil { + log.Fatal("endpoints manager", zap.Error(err)) + } + } + + return client +} + +func (t *clientImpl) GrpcClientConn(ctx context.Context, service string, desc *grpc.ServiceDesc, endpoint ...string) (*grpc.ClientConn, error) { + if len(endpoint) > 0 { + return grpc.DialContext(ctx, endpoint[0], grpc.WithInsecure()) + } + + if os.Getenv(core.EnvEtcd) == "" { + log.Fatal("etcd not configured") + } + target := fmt.Sprint("etcd:///", serviceKeyPrefix(service, desc)) + log.For(ctx).Debug("client conn", zap.String("target", target)) + return grpc.DialContext(ctx, target, grpc.WithInsecure(), grpc.WithBlock(), grpc.WithResolvers(t.resolver)) +} diff --git a/core/xservice/option.go b/core/xservice/option.go new file mode 100644 index 0000000..fa23575 --- /dev/null +++ b/core/xservice/option.go @@ -0,0 +1,146 @@ +package xservice + +import ( + "fmt" + "log" + "net" + "os" + "regexp" + "time" + + "github.com/getsentry/sentry-go" + "github.com/labstack/echo/v4/middleware" + "github.com/spf13/viper" + "go.uber.org/zap" + "google.golang.org/grpc" + + "github.com/xinpianchang/xservice/core" + "github.com/xinpianchang/xservice/pkg/config" + "github.com/xinpianchang/xservice/pkg/netx" +) + +type Options struct { + Name string + Version string + Build string + Description string + Config *viper.Viper + GrpcOptions []grpc.ServerOption + SentryOptions sentry.ClientOptions + EchoTracingSkipper middleware.Skipper +} + +type Option func(*Options) + +func Name(name string) Option { + return func(o *Options) { + o.Name = name + } +} + +func Version(version string) Option { + return func(o *Options) { + o.Version = version + } +} + +func Build(build string) Option { + return func(o *Options) { + o.Build = build + } +} + +func Description(description string) Option { + return func(o *Options) { + o.Description = description + } +} + +func Config(config *viper.Viper) Option { + return func(o *Options) { + o.Config = config + } +} + +func WithGrpcOptions(options ...grpc.ServerOption) Option { + return func(o *Options) { + o.GrpcOptions = options + } +} + +func WithSentry(options sentry.ClientOptions) Option { + return func(o *Options) { + o.SentryOptions = options + } +} + +func WithEchoTracingSkipper(skipper middleware.Skipper) Option { + return func(o *Options) { + o.EchoTracingSkipper = skipper + } +} + +func loadOptions(options ...Option) *Options { + opts := new(Options) + loadEnvOptions(opts) + + for _, option := range options { + option(opts) + } + + if opts.Name == "" { + opts.Name = core.DefaultServiceName + } + + nameexp := `^[a-zA-Z0-9\-\_\.]+$` + if ok, _ := 
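A sketch of dialing a generated service through `Client.GrpcClientConn`, assuming a valid `config.yaml` is present, `XSERVICE_ETCD` points at an etcd cluster, and the example calculator stubs are importable as `v1` (import path assumed):

```go
package main

import (
	"context"
	"log"

	"github.com/xinpianchang/xservice/core/xservice"

	// Generated calculator stubs from example/grpc-service/buf/v1
	// (import path assumed for this sketch).
	v1 "github.com/xinpianchang/xservice/example/grpc-service/buf/v1"
)

func main() {
	svc := xservice.New(xservice.Name("calculator-caller"))

	// Resolve the target service through etcd (requires XSERVICE_ETCD);
	// passing an explicit endpoint such as "127.0.0.1:5000" as the last
	// argument bypasses discovery.
	conn, err := svc.Client().GrpcClientConn(context.Background(), "calculator", &v1.CalculatorService_ServiceDesc)
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	resp, err := v1.NewCalculatorServiceClient(conn).AddInt(context.Background(), &v1.AddIntRequest{A: 1, B: 2})
	if err != nil {
		log.Fatal(err)
	}
	log.Println("result:", resp.GetResult())
}
```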
regexp.MatchString(nameexp, opts.Name); !ok { + log.Fatal("invalid service name", zap.String("name", opts.Name), zap.String("suggest", nameexp)) + } + os.Setenv(core.EnvServiceName, opts.Name) + + if opts.Version == "" { + opts.Version = "v0.0.1" + } + os.Setenv(core.EnvServiceVersion, opts.Version) + + if opts.Build == "" { + opts.Build = fmt.Sprint("dev-", time.Now().UnixNano()) + } + + if opts.Config == nil { + opts.loadConfig() + } + + if opts.Config.IsSet(core.ConfigServiceAddr) { + addviceAddr := opts.Config.GetString(core.ConfigServiceAdviceAddr) + if addviceAddr == "" { + address := opts.Config.GetString(core.ConfigServiceAddr) + _, port, err := net.SplitHostPort(address) + if err != nil { + log.Fatal("invalid address", zap.Error(err)) + } + addviceAddr = net.JoinHostPort(netx.InternalIp(), port) + + opts.Config.SetDefault(core.ConfigServiceAdviceAddr, addviceAddr) + } + } + + return opts +} + +func loadEnvOptions(opts *Options) { + if opts.Name == "" { + opts.Name = os.Getenv(core.EnvServiceName) + } + + if opts.Version == "" { + opts.Version = os.Getenv(core.EnvServiceVersion) + } +} + +func (t *Options) loadConfig() { + if err := config.LoadGlobal(); err != nil { + log.Fatal("load config", zap.Error(err)) + } + t.Config = viper.GetViper() +} diff --git a/core/xservice/server.go b/core/xservice/server.go new file mode 100644 index 0000000..4b7d614 --- /dev/null +++ b/core/xservice/server.go @@ -0,0 +1,456 @@ +package xservice + +import ( + "context" + "fmt" + "net" + "net/http" + "os" + "os/signal" + "runtime" + "syscall" + "time" + + "github.com/cloudflare/tableflip" + "github.com/getsentry/sentry-go" + sentryecho "github.com/getsentry/sentry-go/echo" + grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" + grpc_recovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery" + grpc_opentracing "github.com/grpc-ecosystem/go-grpc-middleware/tracing/opentracing" + grpc_prometheus "github.com/grpc-ecosystem/go-grpc-prometheus" + gwrt "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/labstack/echo/v4" + echomd "github.com/labstack/echo/v4/middleware" + "github.com/opentracing/opentracing-go" + "github.com/pkg/errors" + "github.com/prometheus/client_golang/prometheus/promhttp" + "github.com/soheilhy/cmux" + clientv3 "go.etcd.io/etcd/client/v3" + "go.etcd.io/etcd/client/v3/naming/endpoints" + "go.uber.org/zap" + "google.golang.org/grpc" + + "github.com/xinpianchang/xservice/core" + "github.com/xinpianchang/xservice/core/middleware" + "github.com/xinpianchang/xservice/pkg/echox" + "github.com/xinpianchang/xservice/pkg/grpcx" + "github.com/xinpianchang/xservice/pkg/log" + "github.com/xinpianchang/xservice/pkg/signalx" + "github.com/xinpianchang/xservice/pkg/tracingx" +) + +type Server interface { + Echo() *echo.Echo + Serve() error + GrpcRegister(desc *grpc.ServiceDesc, impl interface{}, handler ...GrpcRegisterHandler) +} + +type grpcService struct { + Desc *grpc.ServiceDesc + Impl interface{} + Handler GrpcRegisterHandler +} + +type GrpcRegisterHandler func(ctx context.Context, mux *gwrt.ServeMux, conn *grpc.ClientConn) error + +type serverImpl struct { + options *Options + echo *echo.Echo + grpc *grpc.Server + grpcServices []*grpcService +} + +func newServer(opts *Options) Server { + server := &serverImpl{ + grpcServices: make([]*grpcService, 0, 128), + } + server.options = opts + + server.initGrpc() + server.initEcho() + + return server +} + +func (t *serverImpl) Echo() *echo.Echo { + return t.echo +} + +func (t *serverImpl) Serve() error { + address := 
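A sketch of the functional options above in use, assuming a `config.yaml` is on disk; the values are placeholders and the skipper is illustrative:

```go
package main

import (
	"log"
	"strings"

	"github.com/labstack/echo/v4"

	"github.com/xinpianchang/xservice/core/xservice"
)

func main() {
	svc := xservice.New(
		xservice.Name("hello"),              // must match ^[a-zA-Z0-9\-\_\.]+$
		xservice.Version("v1.2.3"),          // defaults to v0.0.1 when omitted
		xservice.Build("2021-07-01-abcdef"), // defaults to dev-<timestamp>
		xservice.Description("hello service"),
		// Skip tracing for metrics scrapes and debug endpoints.
		xservice.WithEchoTracingSkipper(func(c echo.Context) bool {
			return strings.HasPrefix(c.Path(), "/metrics") || strings.HasPrefix(c.Path(), "/debug")
		}),
	)

	log.Println(svc.String())
}
```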
t.getHttpAddress() + + log.Info("server start", + zap.String("name", t.options.Name), + zap.Int("pid", os.Getpid()), + zap.String("version", t.options.Version), + zap.String("build", t.options.Build), + zap.String("address", address), + zap.String("runtime", runtime.Version()), + ) + + upg, err := tableflip.New(tableflip.Options{ + UpgradeTimeout: time.Minute, + }) + if err != nil { + log.Fatal("tableflip init", zap.Error(err)) + } + defer upg.Stop() + + t.waitSignalForTableflip(upg) + + ln, err := upg.Fds.Listen("tcp", address) + if err != nil { + log.Fatal("listen", zap.Error(err)) + } + defer ln.Close() + + mux := cmux.New(ln) + defer mux.Close() + + grpcL := mux.Match(cmux.HTTP2()) + defer grpcL.Close() + + httpL := mux.Match(cmux.HTTP1Fast()) + defer httpL.Close() + + if len(t.grpcServices) > 0 { + go t.serveGrpc(grpcL) + } + + server := http.Server{ + Handler: t.echo, + ReadHeaderTimeout: time.Second * 30, + IdleTimeout: time.Minute * 1, + } + + go func() { + if err := server.Serve(httpL); err != nil { + if err != http.ErrServerClosed { + log.Fatal("start http server", zap.Error(err)) + } + } + }() + + go func() { + _ = mux.Serve() + }() + + if err = upg.Ready(); err != nil { + log.Fatal("ready", zap.Error(err)) + } + + // all ready + t.registerGrpcServiceEtcd() + + signalx.AddShutdownHook(func(os.Signal) { + _ = server.Shutdown(context.Background()) + sentry.Flush(time.Second * 2) + log.Info("shutdown", zap.Int("pid", os.Getpid())) + }) + + <-upg.Exit() + + signalx.Shutdown() + + return nil +} + +func (t *serverImpl) GrpcRegister(desc *grpc.ServiceDesc, impl interface{}, hs ...GrpcRegisterHandler) { + var handler GrpcRegisterHandler + if len(hs) > 0 { + handler = hs[0] + } + t.grpcServices = append(t.grpcServices, &grpcService{desc, impl, handler}) +} + +func (t *serverImpl) getHttpAddress() string { + return t.options.Config.GetString(core.ConfigServiceAddr) +} + +func (t *serverImpl) waitSignalForTableflip(upg *tableflip.Upgrader) { + go func() { + sig := make(chan os.Signal, 1) + signal.Notify(sig, syscall.SIGUSR2, syscall.SIGHUP, os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT) + for s := range sig { + switch s { + case syscall.SIGUSR2, syscall.SIGHUP: + err := upg.Upgrade() + if err != nil { + log.Error("upgrade failed", zap.Error(err)) + continue + } + log.Info("upgrade succeeded", zap.Int("pid", os.Getpid())) + return + default: + upg.Stop() + } + } + }() +} + +func (t *serverImpl) initEcho() { + e := echo.New() + t.echo = e + + e.Logger = log.NewEchoLogger() + e.IPExtractor = echo.ExtractIPFromXFFHeader(echo.TrustPrivateNet(true)) + echox.ConfigValidator(e) + e.HTTPErrorHandler = echox.HTTPErrorHandler + + // recover + e.Use(func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + defer func() { + if x := recover(); x != nil { + log.For(c.Request().Context()).Error("server panic error", zap.Any("error", x)) + _ = c.String(http.StatusInternalServerError, fmt.Sprint("internal server error, ", x)) + } + }() + return next(&echoContext{c}) + } + }) + + e.Use(echomd.RequestID()) + e.Use(middleware.Trace(t.options.Config.GetBool("jaeger.body_dump"), t.options.EchoTracingSkipper)) + e.Use(sentryecho.New(sentryecho.Options{Repanic: true})) + + // logger id & traceId & server-info + e.Use(func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + c.Response().Header().Set("X-Service", fmt.Sprint(t.options.Name, "/", t.options.Version, "/", t.options.Build)) + id := c.Request().Header.Get(echo.HeaderXRequestID) + if id == "" { + 
id = c.Response().Header().Get(echo.HeaderXRequestID) + } + c.Set(echo.HeaderXRequestID, id) + ctx := context.WithValue(c.Request().Context(), core.ContextHeaderXRequestID, id) + c.SetRequest(c.Request().WithContext(ctx)) + + traceId := tracingx.GetTraceID(c.Request().Context()) + if traceId != "" { + c.Response().Header().Set("X-Trace-Id", traceId) + } + + if span := opentracing.SpanFromContext(c.Request().Context()); span != nil { + span.SetTag("requestId", id) + span.SetTag("ip", c.RealIP()) + } + + if hub := sentryecho.GetHubFromContext(c); hub != nil { + scope := hub.Scope() + scope.SetTag("ip", c.RealIP()) + scope.SetTag("X-Forwarded-For", c.Request().Header.Get("X-Forwarded-For")) + if traceId != "" { + scope.SetTag("traceId", traceId) + } + } + + return next(c) + } + }) + + e.GET("/metrics", echo.WrapHandler(promhttp.Handler())) + e.Group("/debug/*", middleware.Pprof()) +} + +// init grpc +// add middleware https://github.com/grpc-ecosystem/go-grpc-middleware +func (t *serverImpl) initGrpc() { + options := make([]grpc.ServerOption, 0, 8) + options = append(options, + grpc.StreamInterceptor(grpc_middleware.ChainStreamServer( + grpc_recovery.StreamServerInterceptor(), + grpc_opentracing.StreamServerInterceptor(), + grpc_prometheus.StreamServerInterceptor, + grpcx.EnvoyproxyValidatorStreamServerInterceptor(), + )), + grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer( + grpc_recovery.UnaryServerInterceptor(), + grpc_opentracing.UnaryServerInterceptor(), + grpc_prometheus.UnaryServerInterceptor, + grpcx.EnvoyproxyValidatorUnaryServerInterceptor(), + )), + ) + options = append(options, t.options.GrpcOptions...) + g := grpc.NewServer(options...) + t.grpc = g +} + +func (t *serverImpl) serveGrpc(ln net.Listener) { + for _, service := range t.grpcServices { + t.grpc.RegisterService(service.Desc, service.Impl) + // log.Debug("register grpc service", zap.String("impl", reflect.TypeOf(service.Impl).String())) + } + + go func() { + _ = t.grpc.Serve(ln) + }() + + address := t.getHttpAddress() + grpcClientConn, err := grpc.DialContext( + context.Background(), + address, + grpc.WithInsecure(), + grpc.WithUnaryInterceptor( + grpc_opentracing.UnaryClientInterceptor( + grpc_opentracing.WithTracer(opentracing.GlobalTracer()), + ), + ), + ) + + if err != nil { + log.Fatal("grpc gateway client conn", zap.Error(err)) + } + + grpcGateway := gwrt.NewServeMux() + + for _, service := range t.grpcServices { + if service.Handler == nil { + continue + } + err := service.Handler(context.Background(), grpcGateway, grpcClientConn) + if err != nil { + log.Fatal("grpc register handler", zap.Error(err)) + } + // log.Debug("register grpc gateway", zap.String("handler", runtime.FuncForPC(reflect.ValueOf(service.Handler).Pointer()).Name())) + } + + t.echo.Group("/rpc/*", echo.WrapMiddleware(func(handler http.Handler) http.Handler { + return grpcGateway + })) +} + +// registerGrpcServiceEtcd +// refer: https://etcd.io/docs/v3.5/dev-guide/grpc_naming/ +func (t *serverImpl) registerGrpcServiceEtcd() { + if len(t.grpcServices) == 0 { + return + } + + if os.Getenv(core.EnvEtcd) == "" { + log.Warn("etcd not configured, service register ignored") + return + } + + ctx, cancel := context.WithCancel(context.Background()) + + go t.doRegisterGrpcServiceEtcd(ctx) + + signalx.AddShutdownHook(func(s os.Signal) { + cancel() + // deregister + log.Debug("deregister service") + client, err := serviceEtcdClient() + if err != nil { + return + } + + em, _ := endpoints.NewManager(client, core.ServiceRegisterKeyPrefix) + + for _, service 
:= range t.grpcServices { + _ = em.DeleteEndpoint(context.Background(), serviceKey(os.Getenv(core.EnvServiceName), service.Desc)) + } + }) +} + +func (t *serverImpl) doRegisterGrpcServiceEtcd(ctx context.Context) { + l := log.Named("registerGrpcServiceEtcd") + defer func() { + if x := recover(); x != nil { + l.Error("recover", zap.Any("err", x)) + sentry.CaptureException(errors.WithStack(errors.New(fmt.Sprint(x)))) + + time.Sleep(time.Second * 10) + go t.doRegisterGrpcServiceEtcd(ctx) + } + }() + + client, err := serviceEtcdClient() + if err != nil { + l.Error("get client", zap.Error(err)) + return + } + + ticker := time.NewTicker(time.Second * 5) + defer ticker.Stop() + + ttl := int64(10) // seconds + + type serviceLease struct { + id clientv3.LeaseID + lease clientv3.Lease + key string + endpoint endpoints.Endpoint + } + + leaseMap := make(map[string]*serviceLease, len(t.grpcServices)) + getLease := func(desc *grpc.ServiceDesc) *serviceLease { + if sl, ok := leaseMap[desc.ServiceName]; ok { + return sl + } + addr := t.options.Config.GetString(core.ConfigServiceAdviceAddr) + sl := &serviceLease{ + id: 0, + lease: clientv3.NewLease(client), + key: serviceKey(os.Getenv(core.EnvServiceName), desc), + endpoint: endpoints.Endpoint{ + Addr: addr, + Metadata: desc.Metadata, + }, + } + leaseMap[desc.ServiceName] = sl + return sl + } + + em, _ := endpoints.NewManager(client, core.ServiceRegisterKeyPrefix) + + for { + select { + case <-ctx.Done(): + return + default: + for _, service := range t.grpcServices { + sl := getLease(service.Desc) + ll := l.With(zap.String("service", sl.key)) + // ll.Debug("register") + if sl.id == 0 { + leaseRsp, err := sl.lease.Grant(context.Background(), ttl) + if err != nil { + ll.Error("lease.Grant", zap.Error(err)) + continue + } + + err = em.AddEndpoint(context.Background(), sl.key, sl.endpoint, clientv3.WithLease(leaseRsp.ID)) + if err != nil { + ll.Error("kv.Put", zap.Error(err)) + } + sl.id = leaseRsp.ID + } else { + _, err = sl.lease.KeepAliveOnce(context.Background(), sl.id) + if err != nil { + sl.id = 0 + } + } + } + } + + // wait next loop + <-ticker.C + } +} + +type echoContext struct { + echo.Context +} + +func (t *echoContext) Logger() echo.Logger { + logger := t.Context.Logger() + if l, ok := logger.(*log.EchoLogger); ok { + return l.For(t.Request().Context()) + } + return logger +} + +func (t *echoContext) Path() string { + return t.Request().URL.Path +} diff --git a/core/xservice/service.go b/core/xservice/service.go new file mode 100644 index 0000000..8f7ea1e --- /dev/null +++ b/core/xservice/service.go @@ -0,0 +1,48 @@ +package xservice + +import ( + "fmt" + "os" + "strings" + "time" + + clientv3 "go.etcd.io/etcd/client/v3" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" + "google.golang.org/grpc" + + "github.com/xinpianchang/xservice/core" + "github.com/xinpianchang/xservice/pkg/log" +) + +func serviceKey(serviceName string, desc *grpc.ServiceDesc) string { + host, _ := os.Hostname() + if host == "" { + host = fmt.Sprint("unknown-host-pid-", os.Getpid()) + } + return fmt.Sprint(serviceKeyPrefix(serviceName, desc), "/", host) +} + +func serviceKeyPrefix(serviceName string, desc *grpc.ServiceDesc) string { + return fmt.Sprint(core.ServiceRegisterKeyPrefix, "/", serviceName, "/", desc.ServiceName) +} + +func serviceEtcdClient() (*clientv3.Client, error) { + endpoints := strings.Split(os.Getenv(core.EnvEtcd), ",") + + cfg := clientv3.Config{ + Endpoints: endpoints, + DialTimeout: time.Second * 5, + DialKeepAliveTime: time.Second * 5, + AutoSyncInterval: 
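A sketch of how a generated gRPC service could be registered and served with this server, assuming `config.yaml` sets `http.address` and the calculator stubs are importable as `v1`; the `calculatorServer` implementation is illustrative:

```go
package main

import (
	"context"
	"log"

	"github.com/xinpianchang/xservice/core/xservice"

	// Generated calculator stubs (import path assumed for this sketch).
	v1 "github.com/xinpianchang/xservice/example/grpc-service/buf/v1"
)

// calculatorServer is an illustrative implementation of the generated
// CalculatorServiceServer interface.
type calculatorServer struct {
	v1.UnimplementedCalculatorServiceServer
}

func (s *calculatorServer) AddInt(ctx context.Context, req *v1.AddIntRequest) (*v1.AddIntResponse, error) {
	return &v1.AddIntResponse{Result: req.GetA() + req.GetB()}, nil
}

func main() {
	svc := xservice.New(xservice.Name("calculator"))
	server := svc.Server() // non-nil only when config.yaml sets http.address

	// The generated gateway registration function matches GrpcRegisterHandler,
	// so one call registers the gRPC service and mounts POST /rpc/v1/calculator.
	server.GrpcRegister(&v1.CalculatorService_ServiceDesc, &calculatorServer{}, v1.RegisterCalculatorServiceHandler)

	// Serves HTTP and gRPC on one port (cmux), registers endpoints in etcd when
	// XSERVICE_ETCD is set, and supports zero-downtime upgrades via SIGHUP/SIGUSR2.
	if err := server.Serve(); err != nil {
		log.Fatal(err)
	}
}
```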
time.Second * 10, + Logger: log.Get().WithOptions(zap.IncreaseLevel(zapcore.ErrorLevel)), + } + if username := os.Getenv(core.EnvEtcdUser); username != "" { + cfg.Username = username + } + if password := os.Getenv(core.EnvEtcdPassword); password != "" { + cfg.Password = password + } + client, err := clientv3.New(cfg) + return client, err +} diff --git a/core/xservice/xservice.go b/core/xservice/xservice.go new file mode 100644 index 0000000..be12b81 --- /dev/null +++ b/core/xservice/xservice.go @@ -0,0 +1,98 @@ +package xservice + +import ( + "fmt" + + "github.com/getsentry/sentry-go" + "github.com/google/gops/agent" + "go.uber.org/zap" + + "github.com/xinpianchang/xservice/core" + "github.com/xinpianchang/xservice/pkg/gormx" + "github.com/xinpianchang/xservice/pkg/kafkax" + "github.com/xinpianchang/xservice/pkg/log" + "github.com/xinpianchang/xservice/pkg/redisx" + "github.com/xinpianchang/xservice/pkg/tracingx" +) + +type Service interface { + Name() string + Options() *Options + Client() Client + Server() Server + String() string +} + +type serviceImpl struct { + options *Options + client Client + server Server +} + +func New(options ...Option) Service { + opts := loadOptions(options...) + + service := new(serviceImpl) + service.options = opts + + service.init() + + return service +} + +func (t *serviceImpl) Name() string { + return t.options.Name +} + +func (t *serviceImpl) Options() *Options { + return t.options +} + +func (t *serviceImpl) Client() Client { + return t.client +} + +func (t *serviceImpl) Server() Server { + return t.server +} + +func (t *serviceImpl) String() string { + return fmt.Sprint(t.Name(), "/", t.options.Version, " - ", t.options.Description) +} + +func (t *serviceImpl) init() { + if err := agent.Listen(agent.Options{}); err != nil { + log.Fatal("agent", zap.Error(err)) + } + + if t.options.Config.IsSet("log") { + log.Config(t.options.Config) + } + + tracingx.Config(t.options.Config) + + if t.options.SentryOptions.Dsn != "" { + err := sentry.Init(t.options.SentryOptions) + if err != nil { + log.Fatal("init sentry", zap.Error(err)) + } + } + + if t.options.Config.IsSet("redis") { + redisx.Config(t.options.Config) + } + + if t.options.Config.IsSet("database") { + gormx.Config(t.options.Config) + } + + if t.options.Config.IsSet("mq") { + kafkax.Config(t.options.Config) + } + + if t.options.Config.IsSet(core.ConfigServiceAddr) { + t.server = newServer(t.options) + } + + t.client = newClient(t.options) +} diff --git a/example/grpc-service/buf.gen.yaml b/example/grpc-service/buf.gen.yaml new file mode 100644 index 0000000..ee10f73 --- /dev/null +++ b/example/grpc-service/buf.gen.yaml @@ -0,0 +1,26 @@ +version: v1beta1 +plugins: + - name: go + out: . + opt: paths=source_relative + - name: go-grpc + out: . + opt: + - paths=source_relative + - require_unimplemented_servers=false + - name: grpc-gateway + out: . + opt: + - paths=source_relative + - allow_repeated_fields_in_body=true + - name: openapiv2 + out: . + opt: + - allow_repeated_fields_in_body=true + - logtostderr=true + - use_go_templates=true + - name: validate + out: . + opt: + - paths=source_relative + - lang=go diff --git a/example/grpc-service/buf.lock b/example/grpc-service/buf.lock new file mode 100644 index 0000000..74ebc0e --- /dev/null +++ b/example/grpc-service/buf.lock @@ -0,0 +1,16 @@ +# Generated by buf. DO NOT EDIT. 
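A sketch of the environment variables that drive the etcd integration above; the endpoints and credentials are placeholders and would normally be exported in the process environment rather than set in code:

```go
package main

import (
	"os"

	"github.com/xinpianchang/xservice/core/xservice"
)

func main() {
	// Etcd integration is driven entirely by environment variables; multiple
	// endpoints are comma separated, credentials are optional. The values
	// below are placeholders.
	os.Setenv("XSERVICE_ETCD", "10.0.0.1:2379,10.0.0.2:2379")
	os.Setenv("XSERVICE_ETCD_USER", "xservice")
	os.Setenv("XSERVICE_ETCD_PASSWORD", "secret")

	// With XSERVICE_ETCD set, the server registers each gRPC service under
	// xservice/register/<XSERVICE_NAME>/<grpc-service>/<hostname> with a
	// 10-second lease, and the client resolves targets via the etcd resolver.
	svc := xservice.New(xservice.Name("hello"))
	_ = svc
}
```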
+deps: + - remote: buf.build + owner: beta + repository: googleapis + branch: main + commit: 1c473ad9220a49bca9320f4cc690eba5 + digest: b1-unlhrcI3tnJd0JEGuOb692LZ_tY_gCGq6mK1bgCn1Pg= + create_time: 2021-06-23T20:16:47.788079Z + - remote: buf.build + owner: beta + repository: protoc-gen-validate + branch: main + commit: 82388a0a0cb04e98a203f95dfed5e84b + digest: b1-lYgUMN58PxyCwvfQoopp40AJ-oHHjWXAzksF7v9U-U4= + create_time: 2021-06-21T22:00:30.152545Z diff --git a/example/grpc-service/buf.yaml b/example/grpc-service/buf.yaml new file mode 100644 index 0000000..1bf1a0e --- /dev/null +++ b/example/grpc-service/buf.yaml @@ -0,0 +1,14 @@ +version: v1beta1 +name: buf.build/xinpianchang/grpc-service +deps: + - buf.build/beta/googleapis + - buf.build/beta/protoc-gen-validate +build: + roots: + - . +lint: + use: + - DEFAULT +breaking: + use: + - FILE diff --git a/example/grpc-service/buf/v1/calculator.pb.go b/example/grpc-service/buf/v1/calculator.pb.go new file mode 100644 index 0000000..adc3e5e --- /dev/null +++ b/example/grpc-service/buf/v1/calculator.pb.go @@ -0,0 +1,243 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.26.0 +// protoc v3.15.2 +// source: buf/v1/calculator.proto + +package v1 + +import ( + reflect "reflect" + sync "sync" + + _ "github.com/envoyproxy/protoc-gen-validate/validate" + _ "google.golang.org/genproto/googleapis/api/annotations" + _ "google.golang.org/genproto/googleapis/api/httpbody" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + _ "google.golang.org/protobuf/types/known/anypb" + _ "google.golang.org/protobuf/types/known/emptypb" + _ "google.golang.org/protobuf/types/known/structpb" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type AddIntRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // param a + A int32 `protobuf:"varint,1,opt,name=a,proto3" json:"a,omitempty"` + // param b + B int32 `protobuf:"varint,2,opt,name=b,proto3" json:"b,omitempty"` +} + +func (x *AddIntRequest) Reset() { + *x = AddIntRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_v1_calculator_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AddIntRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddIntRequest) ProtoMessage() {} + +func (x *AddIntRequest) ProtoReflect() protoreflect.Message { + mi := &file_buf_v1_calculator_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddIntRequest.ProtoReflect.Descriptor instead. 
+func (*AddIntRequest) Descriptor() ([]byte, []int) { + return file_buf_v1_calculator_proto_rawDescGZIP(), []int{0} +} + +func (x *AddIntRequest) GetA() int32 { + if x != nil { + return x.A + } + return 0 +} + +func (x *AddIntRequest) GetB() int32 { + if x != nil { + return x.B + } + return 0 +} + +type AddIntResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Result int32 `protobuf:"varint,1,opt,name=result,proto3" json:"result,omitempty"` +} + +func (x *AddIntResponse) Reset() { + *x = AddIntResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_v1_calculator_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AddIntResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddIntResponse) ProtoMessage() {} + +func (x *AddIntResponse) ProtoReflect() protoreflect.Message { + mi := &file_buf_v1_calculator_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddIntResponse.ProtoReflect.Descriptor instead. +func (*AddIntResponse) Descriptor() ([]byte, []int) { + return file_buf_v1_calculator_proto_rawDescGZIP(), []int{1} +} + +func (x *AddIntResponse) GetResult() int32 { + if x != nil { + return x.Result + } + return 0 +} + +var File_buf_v1_calculator_proto protoreflect.FileDescriptor + +var file_buf_v1_calculator_proto_rawDesc = []byte{ + 0x0a, 0x17, 0x62, 0x75, 0x66, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, + 0x74, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x62, 0x75, 0x66, 0x2e, 0x76, + 0x31, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x68, 0x74, + 0x74, 0x70, 0x62, 0x6f, 0x64, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, + 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x1a, 0x17, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x36, 0x0a, 0x0d, 0x41, 0x64, 0x64, + 0x49, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x01, 0x61, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x05, 0x42, 0x09, 0xfa, 0x42, 0x06, 0x1a, 0x04, 0x10, 0x64, 0x28, 0x00, + 0x52, 0x01, 0x61, 0x12, 0x0c, 0x0a, 0x01, 0x62, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x01, + 0x62, 0x22, 0x28, 0x0a, 0x0e, 0x41, 0x64, 0x64, 0x49, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x05, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x32, 0x6b, 0x0a, 0x11, 0x43, + 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x6f, 0x72, 0x53, 0x65, 
0x72, 0x76, 0x69, 0x63, 0x65, + 0x12, 0x56, 0x0a, 0x06, 0x41, 0x64, 0x64, 0x49, 0x6e, 0x74, 0x12, 0x15, 0x2e, 0x62, 0x75, 0x66, + 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x64, 0x64, 0x49, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x16, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x64, 0x64, 0x49, 0x6e, + 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1d, 0x82, 0xd3, 0xe4, 0x93, 0x02, + 0x17, 0x22, 0x12, 0x2f, 0x72, 0x70, 0x63, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x61, 0x6c, 0x63, 0x75, + 0x6c, 0x61, 0x74, 0x6f, 0x72, 0x3a, 0x01, 0x2a, 0x42, 0x11, 0x5a, 0x0f, 0x67, 0x72, 0x70, 0x63, + 0x2d, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2f, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, +} + +var ( + file_buf_v1_calculator_proto_rawDescOnce sync.Once + file_buf_v1_calculator_proto_rawDescData = file_buf_v1_calculator_proto_rawDesc +) + +func file_buf_v1_calculator_proto_rawDescGZIP() []byte { + file_buf_v1_calculator_proto_rawDescOnce.Do(func() { + file_buf_v1_calculator_proto_rawDescData = protoimpl.X.CompressGZIP(file_buf_v1_calculator_proto_rawDescData) + }) + return file_buf_v1_calculator_proto_rawDescData +} + +var file_buf_v1_calculator_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_buf_v1_calculator_proto_goTypes = []interface{}{ + (*AddIntRequest)(nil), // 0: buf.v1.AddIntRequest + (*AddIntResponse)(nil), // 1: buf.v1.AddIntResponse +} +var file_buf_v1_calculator_proto_depIdxs = []int32{ + 0, // 0: buf.v1.CalculatorService.AddInt:input_type -> buf.v1.AddIntRequest + 1, // 1: buf.v1.CalculatorService.AddInt:output_type -> buf.v1.AddIntResponse + 1, // [1:2] is the sub-list for method output_type + 0, // [0:1] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_buf_v1_calculator_proto_init() } +func file_buf_v1_calculator_proto_init() { + if File_buf_v1_calculator_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_buf_v1_calculator_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AddIntRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_v1_calculator_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AddIntResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_buf_v1_calculator_proto_rawDesc, + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_buf_v1_calculator_proto_goTypes, + DependencyIndexes: file_buf_v1_calculator_proto_depIdxs, + MessageInfos: file_buf_v1_calculator_proto_msgTypes, + }.Build() + File_buf_v1_calculator_proto = out.File + file_buf_v1_calculator_proto_rawDesc = nil + file_buf_v1_calculator_proto_goTypes = nil + file_buf_v1_calculator_proto_depIdxs = nil +} diff --git a/example/grpc-service/buf/v1/calculator.pb.gw.go b/example/grpc-service/buf/v1/calculator.pb.gw.go new file mode 100644 index 0000000..6092673 --- /dev/null +++ b/example/grpc-service/buf/v1/calculator.pb.gw.go @@ -0,0 +1,167 @@ +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. 
+// source: buf/v1/calculator.proto + +/* +Package v1 is a reverse proxy. + +It translates gRPC into RESTful JSON APIs. +*/ +package v1 + +import ( + "context" + "io" + "net/http" + + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" +) + +// Suppress "imported and not used" errors +var _ codes.Code +var _ io.Reader +var _ status.Status +var _ = runtime.String +var _ = utilities.NewDoubleArray +var _ = metadata.Join + +func request_CalculatorService_AddInt_0(ctx context.Context, marshaler runtime.Marshaler, client CalculatorServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq AddIntRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.AddInt(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_CalculatorService_AddInt_0(ctx context.Context, marshaler runtime.Marshaler, server CalculatorServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq AddIntRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.AddInt(ctx, &protoReq) + return msg, metadata, err + +} + +// RegisterCalculatorServiceHandlerServer registers the http handlers for service CalculatorService to "mux". +// UnaryRPC :call CalculatorServiceServer directly. +// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. +// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterCalculatorServiceHandlerFromEndpoint instead. 
+func RegisterCalculatorServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server CalculatorServiceServer) error { + + mux.Handle("POST", pattern_CalculatorService_AddInt_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/buf.v1.CalculatorService/AddInt") + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_CalculatorService_AddInt_0(rctx, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_CalculatorService_AddInt_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + return nil +} + +// RegisterCalculatorServiceHandlerFromEndpoint is same as RegisterCalculatorServiceHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. +func RegisterCalculatorServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.Dial(endpoint, opts...) + if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + + return RegisterCalculatorServiceHandler(ctx, mux, conn) +} + +// RegisterCalculatorServiceHandler registers the http handlers for service CalculatorService to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterCalculatorServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterCalculatorServiceHandlerClient(ctx, mux, NewCalculatorServiceClient(conn)) +} + +// RegisterCalculatorServiceHandlerClient registers the http handlers for service CalculatorService +// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "CalculatorServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "CalculatorServiceClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "CalculatorServiceClient" to call the correct interceptors. 
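A sketch of the standalone grpc-gateway form using the generated `RegisterCalculatorServiceHandlerFromEndpoint`; the addresses are placeholders, and the xservice server mounts the same handlers automatically through `GrpcRegister`:

```go
package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
	"google.golang.org/grpc"

	// Generated calculator stubs (import path assumed for this sketch).
	v1 "github.com/xinpianchang/xservice/example/grpc-service/buf/v1"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	mux := runtime.NewServeMux()

	// Proxy REST calls to a gRPC server listening on localhost:5000
	// (placeholder address).
	err := v1.RegisterCalculatorServiceHandlerFromEndpoint(ctx, mux, "localhost:5000", []grpc.DialOption{grpc.WithInsecure()})
	if err != nil {
		log.Fatal(err)
	}

	log.Fatal(http.ListenAndServe(":8081", mux))
}
```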
+func RegisterCalculatorServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client CalculatorServiceClient) error { + + mux.Handle("POST", pattern_CalculatorService_AddInt_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req, "/buf.v1.CalculatorService/AddInt") + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_CalculatorService_AddInt_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_CalculatorService_AddInt_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + return nil +} + +var ( + pattern_CalculatorService_AddInt_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"rpc", "v1", "calculator"}, "")) +) + +var ( + forward_CalculatorService_AddInt_0 = runtime.ForwardResponseMessage +) diff --git a/example/grpc-service/buf/v1/calculator.pb.validate.go b/example/grpc-service/buf/v1/calculator.pb.validate.go new file mode 100644 index 0000000..3baa89b --- /dev/null +++ b/example/grpc-service/buf/v1/calculator.pb.validate.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: buf/v1/calculator.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} +) + +// Validate checks the field values on AddIntRequest with the rules defined in +// the proto definition for this message. If any rules are violated, an error +// is returned. +func (m *AddIntRequest) Validate() error { + if m == nil { + return nil + } + + if val := m.GetA(); val < 0 || val >= 100 { + return AddIntRequestValidationError{ + field: "A", + reason: "value must be inside range [0, 100)", + } + } + + // no validation rules for B + + return nil +} + +// AddIntRequestValidationError is the validation error returned by +// AddIntRequest.Validate if the designated constraints aren't met. +type AddIntRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e AddIntRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e AddIntRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e AddIntRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e AddIntRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
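A sketch of the generated `Validate` method in action; the expected error message follows from the `[0, 100)` rule on field `a`, and the stub import path is assumed:

```go
package main

import (
	"fmt"

	// Generated calculator stubs (import path assumed for this sketch).
	v1 "github.com/xinpianchang/xservice/example/grpc-service/buf/v1"
)

func main() {
	// A must be inside [0, 100), so this request fails validation.
	bad := &v1.AddIntRequest{A: 100, B: 2}
	if err := bad.Validate(); err != nil {
		fmt.Println(err) // invalid AddIntRequest.A: value must be inside range [0, 100)
	}

	// The framework's gRPC interceptors (grpcx.EnvoyproxyValidator...) are wired
	// to reject invalid requests before they reach the service implementation.
	ok := &v1.AddIntRequest{A: 1, B: 2}
	fmt.Println(ok.Validate() == nil) // true
}
```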
+func (e AddIntRequestValidationError) ErrorName() string { return "AddIntRequestValidationError" } + +// Error satisfies the builtin error interface +func (e AddIntRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sAddIntRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = AddIntRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = AddIntRequestValidationError{} + +// Validate checks the field values on AddIntResponse with the rules defined in +// the proto definition for this message. If any rules are violated, an error +// is returned. +func (m *AddIntResponse) Validate() error { + if m == nil { + return nil + } + + // no validation rules for Result + + return nil +} + +// AddIntResponseValidationError is the validation error returned by +// AddIntResponse.Validate if the designated constraints aren't met. +type AddIntResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e AddIntResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e AddIntResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e AddIntResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e AddIntResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e AddIntResponseValidationError) ErrorName() string { return "AddIntResponseValidationError" } + +// Error satisfies the builtin error interface +func (e AddIntResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sAddIntResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = AddIntResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = AddIntResponseValidationError{} diff --git a/example/grpc-service/buf/v1/calculator.proto b/example/grpc-service/buf/v1/calculator.proto new file mode 100644 index 0000000..cb6a378 --- /dev/null +++ b/example/grpc-service/buf/v1/calculator.proto @@ -0,0 +1,34 @@ +syntax = "proto3"; + +package buf.v1; +option go_package = "grpc-service/v1"; + +import "google/api/httpbody.proto"; +import "google/api/annotations.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/struct.proto"; +import "validate/validate.proto"; + +service CalculatorService { + // {{.MethodDescriptorProto.Name}} + // + // {{import "buf/v1/tables.md"}} + rpc AddInt(AddIntRequest) returns (AddIntResponse) { + option(google.api.http) = { + post: "/rpc/v1/calculator" + body: "*" + }; + } +} + +message AddIntRequest { + // param a + int32 a = 1 [(validate.rules).int32 = {gte:0, lt: 100}]; + // param b + int32 b = 2; +} + +message AddIntResponse { + int32 result = 1; +} diff --git a/example/grpc-service/buf/v1/calculator.swagger.json b/example/grpc-service/buf/v1/calculator.swagger.json new file mode 100644 index 0000000..60c4a57 --- /dev/null +++ b/example/grpc-service/buf/v1/calculator.swagger.json @@ -0,0 
+1,113 @@ +{ + "swagger": "2.0", + "info": { + "title": "buf/v1/calculator.proto", + "version": "version not set" + }, + "tags": [ + { + "name": "CalculatorService" + } + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/rpc/v1/calculator": { + "post": { + "summary": "AddInt", + "description": "## AddIntRequest\n\n| Field ID | Name | Type | Description |\n| ----------- | --------- | --------------------------------------------------------- | ---------------------------- | \n| 1 | a | TYPE_INT32 | param a | \n| 2 | b | TYPE_INT32 | param b | \n\n## AddIntResponse\n\n| Field ID | Name | Type | Description |\n| ----------- | --------- | ---------------------------------------------------------- | ---------------------------- | \n| 1 | result | TYPE_INT32 | |", + "operationId": "CalculatorService_AddInt", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1AddIntResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v1AddIntRequest" + } + } + ], + "tags": [ + "CalculatorService" + ] + } + } + }, + "definitions": { + "protobufAny": { + "type": "object", + "properties": { + "typeUrl": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "rpcStatus": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "$ref": "#/definitions/protobufAny" + } + } + } + }, + "v1AddIntRequest": { + "type": "object", + "properties": { + "a": { + "type": "integer", + "format": "int32", + "title": "param a" + }, + "b": { + "type": "integer", + "format": "int32", + "title": "param b" + } + } + }, + "v1AddIntResponse": { + "type": "object", + "properties": { + "result": { + "type": "integer", + "format": "int32" + } + } + } + } +} diff --git a/example/grpc-service/buf/v1/calculator_grpc.pb.go b/example/grpc-service/buf/v1/calculator_grpc.pb.go new file mode 100644 index 0000000..1e4f15e --- /dev/null +++ b/example/grpc-service/buf/v1/calculator_grpc.pb.go @@ -0,0 +1,106 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. + +package v1 + +import ( + context "context" + + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// CalculatorServiceClient is the client API for CalculatorService service. 
+// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type CalculatorServiceClient interface { + // {{.MethodDescriptorProto.Name}} + // + // {{import "buf/v1/tables.md"}} + AddInt(ctx context.Context, in *AddIntRequest, opts ...grpc.CallOption) (*AddIntResponse, error) +} + +type calculatorServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewCalculatorServiceClient(cc grpc.ClientConnInterface) CalculatorServiceClient { + return &calculatorServiceClient{cc} +} + +func (c *calculatorServiceClient) AddInt(ctx context.Context, in *AddIntRequest, opts ...grpc.CallOption) (*AddIntResponse, error) { + out := new(AddIntResponse) + err := c.cc.Invoke(ctx, "/buf.v1.CalculatorService/AddInt", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CalculatorServiceServer is the server API for CalculatorService service. +// All implementations should embed UnimplementedCalculatorServiceServer +// for forward compatibility +type CalculatorServiceServer interface { + // {{.MethodDescriptorProto.Name}} + // + // {{import "buf/v1/tables.md"}} + AddInt(context.Context, *AddIntRequest) (*AddIntResponse, error) +} + +// UnimplementedCalculatorServiceServer should be embedded to have forward compatible implementations. +type UnimplementedCalculatorServiceServer struct { +} + +func (UnimplementedCalculatorServiceServer) AddInt(context.Context, *AddIntRequest) (*AddIntResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method AddInt not implemented") +} + +// UnsafeCalculatorServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to CalculatorServiceServer will +// result in compilation errors. +type UnsafeCalculatorServiceServer interface { + mustEmbedUnimplementedCalculatorServiceServer() +} + +func RegisterCalculatorServiceServer(s grpc.ServiceRegistrar, srv CalculatorServiceServer) { + s.RegisterService(&CalculatorService_ServiceDesc, srv) +} + +func _CalculatorService_AddInt_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddIntRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CalculatorServiceServer).AddInt(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/buf.v1.CalculatorService/AddInt", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CalculatorServiceServer).AddInt(ctx, req.(*AddIntRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// CalculatorService_ServiceDesc is the grpc.ServiceDesc for CalculatorService service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var CalculatorService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "buf.v1.CalculatorService", + HandlerType: (*CalculatorServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AddInt", + Handler: _CalculatorService_AddInt_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "buf/v1/calculator.proto", +} diff --git a/example/grpc-service/buf/v1/hello.pb.go b/example/grpc-service/buf/v1/hello.pb.go new file mode 100644 index 0000000..f478c43 --- /dev/null +++ b/example/grpc-service/buf/v1/hello.pb.go @@ -0,0 +1,231 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.26.0 +// protoc v3.15.2 +// source: buf/v1/hello.proto + +package v1 + +import ( + reflect "reflect" + sync "sync" + + _ "google.golang.org/genproto/googleapis/api/annotations" + _ "google.golang.org/genproto/googleapis/api/httpbody" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + _ "google.golang.org/protobuf/types/known/anypb" + _ "google.golang.org/protobuf/types/known/emptypb" + _ "google.golang.org/protobuf/types/known/structpb" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// The request message containing the user's name. +type SayHelloRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *SayHelloRequest) Reset() { + *x = SayHelloRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_v1_hello_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SayHelloRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SayHelloRequest) ProtoMessage() {} + +func (x *SayHelloRequest) ProtoReflect() protoreflect.Message { + mi := &file_buf_v1_hello_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SayHelloRequest.ProtoReflect.Descriptor instead. 
+func (*SayHelloRequest) Descriptor() ([]byte, []int) { + return file_buf_v1_hello_proto_rawDescGZIP(), []int{0} +} + +func (x *SayHelloRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +// The response message containing the greetings +type SayHelloResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` +} + +func (x *SayHelloResponse) Reset() { + *x = SayHelloResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_v1_hello_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SayHelloResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SayHelloResponse) ProtoMessage() {} + +func (x *SayHelloResponse) ProtoReflect() protoreflect.Message { + mi := &file_buf_v1_hello_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SayHelloResponse.ProtoReflect.Descriptor instead. +func (*SayHelloResponse) Descriptor() ([]byte, []int) { + return file_buf_v1_hello_proto_rawDescGZIP(), []int{1} +} + +func (x *SayHelloResponse) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +var File_buf_v1_hello_proto protoreflect.FileDescriptor + +var file_buf_v1_hello_proto_rawDesc = []byte{ + 0x0a, 0x12, 0x62, 0x75, 0x66, 0x2f, 0x76, 0x31, 0x2f, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x62, 0x75, 0x66, 0x2e, 0x76, 0x31, 0x1a, 0x19, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x68, 0x74, 0x74, 0x70, 0x62, 0x6f, 0x64, + 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, + 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x73, + 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x25, 0x0a, 0x0f, 0x53, + 0x61, 0x79, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, + 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x22, 0x2c, 0x0a, 0x10, 0x53, 0x61, 0x79, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x32, 0x68, 0x0a, 0x0e, 0x47, 0x72, 0x65, 0x65, 0x74, 0x65, 0x72, 0x53, 0x65, 0x72, 0x76, 0x69, + 0x63, 0x65, 0x12, 0x56, 0x0a, 0x08, 0x53, 0x61, 0x79, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x12, 0x17, + 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x61, 0x79, 0x48, 0x65, 0x6c, 0x6c, 0x6f, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x76, 0x31, + 
0x2e, 0x53, 0x61, 0x79, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x17, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x11, 0x22, 0x0c, 0x2f, 0x72, 0x70, 0x63, 0x2f, + 0x76, 0x31, 0x2f, 0x65, 0x63, 0x68, 0x6f, 0x3a, 0x01, 0x2a, 0x42, 0x11, 0x5a, 0x0f, 0x67, 0x72, + 0x70, 0x63, 0x2d, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2f, 0x76, 0x31, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_buf_v1_hello_proto_rawDescOnce sync.Once + file_buf_v1_hello_proto_rawDescData = file_buf_v1_hello_proto_rawDesc +) + +func file_buf_v1_hello_proto_rawDescGZIP() []byte { + file_buf_v1_hello_proto_rawDescOnce.Do(func() { + file_buf_v1_hello_proto_rawDescData = protoimpl.X.CompressGZIP(file_buf_v1_hello_proto_rawDescData) + }) + return file_buf_v1_hello_proto_rawDescData +} + +var file_buf_v1_hello_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_buf_v1_hello_proto_goTypes = []interface{}{ + (*SayHelloRequest)(nil), // 0: buf.v1.SayHelloRequest + (*SayHelloResponse)(nil), // 1: buf.v1.SayHelloResponse +} +var file_buf_v1_hello_proto_depIdxs = []int32{ + 0, // 0: buf.v1.GreeterService.SayHello:input_type -> buf.v1.SayHelloRequest + 1, // 1: buf.v1.GreeterService.SayHello:output_type -> buf.v1.SayHelloResponse + 1, // [1:2] is the sub-list for method output_type + 0, // [0:1] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_buf_v1_hello_proto_init() } +func file_buf_v1_hello_proto_init() { + if File_buf_v1_hello_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_buf_v1_hello_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SayHelloRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_v1_hello_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SayHelloResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_buf_v1_hello_proto_rawDesc, + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_buf_v1_hello_proto_goTypes, + DependencyIndexes: file_buf_v1_hello_proto_depIdxs, + MessageInfos: file_buf_v1_hello_proto_msgTypes, + }.Build() + File_buf_v1_hello_proto = out.File + file_buf_v1_hello_proto_rawDesc = nil + file_buf_v1_hello_proto_goTypes = nil + file_buf_v1_hello_proto_depIdxs = nil +} diff --git a/example/grpc-service/buf/v1/hello.pb.gw.go b/example/grpc-service/buf/v1/hello.pb.gw.go new file mode 100644 index 0000000..350710b --- /dev/null +++ b/example/grpc-service/buf/v1/hello.pb.gw.go @@ -0,0 +1,167 @@ +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. +// source: buf/v1/hello.proto + +/* +Package v1 is a reverse proxy. + +It translates gRPC into RESTful JSON APIs. 
+*/ +package v1 + +import ( + "context" + "io" + "net/http" + + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" +) + +// Suppress "imported and not used" errors +var _ codes.Code +var _ io.Reader +var _ status.Status +var _ = runtime.String +var _ = utilities.NewDoubleArray +var _ = metadata.Join + +func request_GreeterService_SayHello_0(ctx context.Context, marshaler runtime.Marshaler, client GreeterServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq SayHelloRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.SayHello(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_GreeterService_SayHello_0(ctx context.Context, marshaler runtime.Marshaler, server GreeterServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq SayHelloRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.SayHello(ctx, &protoReq) + return msg, metadata, err + +} + +// RegisterGreeterServiceHandlerServer registers the http handlers for service GreeterService to "mux". +// UnaryRPC :call GreeterServiceServer directly. +// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. +// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterGreeterServiceHandlerFromEndpoint instead. 
+func RegisterGreeterServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server GreeterServiceServer) error { + + mux.Handle("POST", pattern_GreeterService_SayHello_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/buf.v1.GreeterService/SayHello") + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_GreeterService_SayHello_0(rctx, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_GreeterService_SayHello_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + return nil +} + +// RegisterGreeterServiceHandlerFromEndpoint is same as RegisterGreeterServiceHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. +func RegisterGreeterServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.Dial(endpoint, opts...) + if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + + return RegisterGreeterServiceHandler(ctx, mux, conn) +} + +// RegisterGreeterServiceHandler registers the http handlers for service GreeterService to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterGreeterServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterGreeterServiceHandlerClient(ctx, mux, NewGreeterServiceClient(conn)) +} + +// RegisterGreeterServiceHandlerClient registers the http handlers for service GreeterService +// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "GreeterServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "GreeterServiceClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "GreeterServiceClient" to call the correct interceptors. 
+func RegisterGreeterServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client GreeterServiceClient) error { + + mux.Handle("POST", pattern_GreeterService_SayHello_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req, "/buf.v1.GreeterService/SayHello") + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_GreeterService_SayHello_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_GreeterService_SayHello_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + return nil +} + +var ( + pattern_GreeterService_SayHello_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"rpc", "v1", "echo"}, "")) +) + +var ( + forward_GreeterService_SayHello_0 = runtime.ForwardResponseMessage +) diff --git a/example/grpc-service/buf/v1/hello.pb.validate.go b/example/grpc-service/buf/v1/hello.pb.validate.go new file mode 100644 index 0000000..8705859 --- /dev/null +++ b/example/grpc-service/buf/v1/hello.pb.validate.go @@ -0,0 +1,168 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: buf/v1/hello.proto + +package v1 + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "strings" + "time" + "unicode/utf8" + + "google.golang.org/protobuf/types/known/anypb" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = anypb.Any{} +) + +// Validate checks the field values on SayHelloRequest with the rules defined +// in the proto definition for this message. If any rules are violated, an +// error is returned. +func (m *SayHelloRequest) Validate() error { + if m == nil { + return nil + } + + // no validation rules for Name + + return nil +} + +// SayHelloRequestValidationError is the validation error returned by +// SayHelloRequest.Validate if the designated constraints aren't met. +type SayHelloRequestValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e SayHelloRequestValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e SayHelloRequestValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e SayHelloRequestValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e SayHelloRequestValidationError) Key() bool { return e.key } + +// ErrorName returns error name. 
+func (e SayHelloRequestValidationError) ErrorName() string { return "SayHelloRequestValidationError" } + +// Error satisfies the builtin error interface +func (e SayHelloRequestValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sSayHelloRequest.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = SayHelloRequestValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = SayHelloRequestValidationError{} + +// Validate checks the field values on SayHelloResponse with the rules defined +// in the proto definition for this message. If any rules are violated, an +// error is returned. +func (m *SayHelloResponse) Validate() error { + if m == nil { + return nil + } + + // no validation rules for Message + + return nil +} + +// SayHelloResponseValidationError is the validation error returned by +// SayHelloResponse.Validate if the designated constraints aren't met. +type SayHelloResponseValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e SayHelloResponseValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e SayHelloResponseValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e SayHelloResponseValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e SayHelloResponseValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e SayHelloResponseValidationError) ErrorName() string { return "SayHelloResponseValidationError" } + +// Error satisfies the builtin error interface +func (e SayHelloResponseValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sSayHelloResponse.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = SayHelloResponseValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = SayHelloResponseValidationError{} diff --git a/example/grpc-service/buf/v1/hello.proto b/example/grpc-service/buf/v1/hello.proto new file mode 100644 index 0000000..45e5822 --- /dev/null +++ b/example/grpc-service/buf/v1/hello.proto @@ -0,0 +1,31 @@ +syntax = "proto3"; + +package buf.v1; +option go_package = "grpc-service/v1"; + +import "google/api/httpbody.proto"; +import "google/api/annotations.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/struct.proto"; + +// The greeting service definition. +service GreeterService { + // Sends a greeting + rpc SayHello (SayHelloRequest) returns (SayHelloResponse) { + option(google.api.http) = { + post: "/rpc/v1/echo" + body: "*" + }; + } +} + +// The request message containing the user's name. 
+message SayHelloRequest { + string name = 1; +} + +// The response message containing the greetings +message SayHelloResponse { + string message = 1; +} diff --git a/example/grpc-service/buf/v1/hello.swagger.json b/example/grpc-service/buf/v1/hello.swagger.json new file mode 100644 index 0000000..f23ae0e --- /dev/null +++ b/example/grpc-service/buf/v1/hello.swagger.json @@ -0,0 +1,106 @@ +{ + "swagger": "2.0", + "info": { + "title": "buf/v1/hello.proto", + "version": "version not set" + }, + "tags": [ + { + "name": "GreeterService" + } + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/rpc/v1/echo": { + "post": { + "summary": "Sends a greeting", + "operationId": "GreeterService_SayHello", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v1SayHelloResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/rpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v1SayHelloRequest" + } + } + ], + "tags": [ + "GreeterService" + ] + } + } + }, + "definitions": { + "protobufAny": { + "type": "object", + "properties": { + "typeUrl": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "rpcStatus": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "$ref": "#/definitions/protobufAny" + } + } + } + }, + "v1SayHelloRequest": { + "type": "object", + "properties": { + "name": { + "type": "string" + } + }, + "description": "The request message containing the user's name." + }, + "v1SayHelloResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + } + }, + "title": "The response message containing the greetings" + } + } +} diff --git a/example/grpc-service/buf/v1/hello_grpc.pb.go b/example/grpc-service/buf/v1/hello_grpc.pb.go new file mode 100644 index 0000000..8a27c98 --- /dev/null +++ b/example/grpc-service/buf/v1/hello_grpc.pb.go @@ -0,0 +1,102 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. + +package v1 + +import ( + context "context" + + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// GreeterServiceClient is the client API for GreeterService service. 
+// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type GreeterServiceClient interface { + // Sends a greeting + SayHello(ctx context.Context, in *SayHelloRequest, opts ...grpc.CallOption) (*SayHelloResponse, error) +} + +type greeterServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewGreeterServiceClient(cc grpc.ClientConnInterface) GreeterServiceClient { + return &greeterServiceClient{cc} +} + +func (c *greeterServiceClient) SayHello(ctx context.Context, in *SayHelloRequest, opts ...grpc.CallOption) (*SayHelloResponse, error) { + out := new(SayHelloResponse) + err := c.cc.Invoke(ctx, "/buf.v1.GreeterService/SayHello", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// GreeterServiceServer is the server API for GreeterService service. +// All implementations should embed UnimplementedGreeterServiceServer +// for forward compatibility +type GreeterServiceServer interface { + // Sends a greeting + SayHello(context.Context, *SayHelloRequest) (*SayHelloResponse, error) +} + +// UnimplementedGreeterServiceServer should be embedded to have forward compatible implementations. +type UnimplementedGreeterServiceServer struct { +} + +func (UnimplementedGreeterServiceServer) SayHello(context.Context, *SayHelloRequest) (*SayHelloResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SayHello not implemented") +} + +// UnsafeGreeterServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to GreeterServiceServer will +// result in compilation errors. +type UnsafeGreeterServiceServer interface { + mustEmbedUnimplementedGreeterServiceServer() +} + +func RegisterGreeterServiceServer(s grpc.ServiceRegistrar, srv GreeterServiceServer) { + s.RegisterService(&GreeterService_ServiceDesc, srv) +} + +func _GreeterService_SayHello_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SayHelloRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GreeterServiceServer).SayHello(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/buf.v1.GreeterService/SayHello", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GreeterServiceServer).SayHello(ctx, req.(*SayHelloRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// GreeterService_ServiceDesc is the grpc.ServiceDesc for GreeterService service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var GreeterService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "buf.v1.GreeterService", + HandlerType: (*GreeterServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SayHello", + Handler: _GreeterService_SayHello_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "buf/v1/hello.proto", +} diff --git a/example/grpc-service/buf/v1/swagger.go b/example/grpc-service/buf/v1/swagger.go new file mode 100644 index 0000000..e71b97a --- /dev/null +++ b/example/grpc-service/buf/v1/swagger.go @@ -0,0 +1,8 @@ +package v1 + +import "embed" + +var ( + //go:embed *.swagger.json + SwaggerFS embed.FS +) diff --git a/example/grpc-service/buf/v1/tables.md b/example/grpc-service/buf/v1/tables.md new file mode 100644 index 0000000..3430b01 --- /dev/null +++ b/example/grpc-service/buf/v1/tables.md @@ -0,0 +1,11 @@ +## {{.RequestType.Name}} + +| Field ID | Name | Type | Description | +| ----------- | --------- | --------------------------------------------------------- | ---------------------------- | {{range .RequestType.Fields}} +| {{.Number}} | {{.Name}} | {{if eq .Label.String "LABEL_REPEATED"}}[]{{end}}{{.Type}} | {{fieldcomments .Message .}} | {{end}} + +## {{.ResponseType.Name}} + +| Field ID | Name | Type | Description | +| ----------- | --------- | ---------------------------------------------------------- | ---------------------------- | {{range .ResponseType.Fields}} +| {{.Number}} | {{.Name}} | {{if eq .Label.String "LABEL_REPEATED"}}[]{{end}}{{.Type}} | {{fieldcomments .Message .}} | {{end}} diff --git a/example/grpc-service/config-example.yaml b/example/grpc-service/config-example.yaml new file mode 100644 index 0000000..02fee0d --- /dev/null +++ b/example/grpc-service/config-example.yaml @@ -0,0 +1,10 @@ +http: + address: 0.0.0.0:5001 + +jaeger: + agent_host: "192.168.4.66" + agent_port: 6831 + +log: + level: debug + format: console diff --git a/example/grpc-service/go.mod b/example/grpc-service/go.mod new file mode 100644 index 0000000..80fc138 --- /dev/null +++ b/example/grpc-service/go.mod @@ -0,0 +1,17 @@ +module grpc-service + +go 1.16 + +require ( + github.com/envoyproxy/protoc-gen-validate v0.1.0 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.5.0 + github.com/labstack/echo/v4 v4.3.0 + github.com/stretchr/testify v1.7.0 + github.com/xinpianchang/xservice v1.0.0 + go.uber.org/zap v1.17.0 + google.golang.org/genproto v0.0.0-20210617175327-b9e0b3197ced + google.golang.org/grpc v1.38.0 + google.golang.org/protobuf v1.26.0 +) + +replace github.com/xinpianchang/xservice => ../../ diff --git a/example/grpc-service/server/main.go b/example/grpc-service/server/main.go new file mode 100644 index 0000000..05bd4d0 --- /dev/null +++ b/example/grpc-service/server/main.go @@ -0,0 +1,58 @@ +package main + +import ( + "context" + "fmt" + "log" + "net/http" + + "github.com/labstack/echo/v4" + "go.uber.org/zap" + + "github.com/xinpianchang/xservice/core/xservice" + "github.com/xinpianchang/xservice/pkg/echox" + "github.com/xinpianchang/xservice/pkg/swaggerui" + + pb "grpc-service/buf/v1" +) + +func main() { + srv := xservice.New( + xservice.Name("grpc-service"), + xservice.Version("v1.0.0"), + xservice.Description("example grpc service with enable grpc gateway"), + ) + + server := srv.Server() + + server.Echo().Use(echox.Dump()) + server.Echo().Group("/swagger/*", swaggerui.Serve("/swagger/", pb.SwaggerFS)) + + 
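// register each gRPC service implementation together with its grpc-gateway registrar; the gateway exposes the same RPCs over HTTP/JSON (see the curl examples below) +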
server.GrpcRegister(&pb.GreeterService_ServiceDesc, &GreeterServer{}, pb.RegisterGreeterServiceHandler) + server.GrpcRegister(&pb.CalculatorService_ServiceDesc, &CalculatorServer{}, pb.RegisterCalculatorServiceHandler) + + server.Echo().GET("/", func(c echo.Context) error { + return c.String(http.StatusOK, "ok") + }) + + if err := server.Serve(); err != nil { + log.Fatal("serve", zap.Error(err)) + } + + // curl -v -X POST -k http://127.0.0.1:5001/rpc/v1/echo -d '{"name": "world"}' + // curl -v -X POST -k http://127.0.0.1:5001/rpc/v1/calculator -d '{"a": 1, "b": 2}' +} + +// implementations + +type GreeterServer struct{} + +func (t *GreeterServer) SayHello(ctx context.Context, request *pb.SayHelloRequest) (*pb.SayHelloResponse, error) { + return &pb.SayHelloResponse{Message: fmt.Sprint("hello ", request.Name)}, nil +} + +type CalculatorServer struct{} + +func (t *CalculatorServer) AddInt(ctx context.Context, request *pb.AddIntRequest) (*pb.AddIntResponse, error) { + return &pb.AddIntResponse{Result: request.A + request.B}, nil +} diff --git a/example/grpc-service/tests/discover_test.go b/example/grpc-service/tests/discover_test.go new file mode 100644 index 0000000..35facbb --- /dev/null +++ b/example/grpc-service/tests/discover_test.go @@ -0,0 +1,30 @@ +package tests + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/xinpianchang/xservice/core/xservice" + + pb "grpc-service/buf/v1" +) + +func Test_service_discover(t *testing.T) { + srv := xservice.New( + xservice.Name("grpc-service"), + ) + + ctx := context.Background() + + conn, err := srv.Client().GrpcClientConn(ctx, "grpc-service", &pb.GreeterService_ServiceDesc) + require.NoError(t, err) + defer conn.Close() + require.NotNil(t, conn) + + client := pb.NewCalculatorServiceClient(conn) + response, err := client.AddInt(ctx, &pb.AddIntRequest{A: 1, B: 2}) + require.NoError(t, err) + require.Equal(t, int32(3), response.Result) +} diff --git a/example/rest-service/config-example.yaml b/example/rest-service/config-example.yaml new file mode 100644 index 0000000..ade5c09 --- /dev/null +++ b/example/rest-service/config-example.yaml @@ -0,0 +1,10 @@ +http: + address: 0.0.0.0:5000 + +jaeger: + agent_host: "192.168.4.66" + agent_port: 6831 + +log: + level: debug + format: console diff --git a/example/rest-service/go.mod b/example/rest-service/go.mod new file mode 100644 index 0000000..e010182 --- /dev/null +++ b/example/rest-service/go.mod @@ -0,0 +1,10 @@ +module rest-service + +go 1.16 + +require ( + github.com/labstack/echo/v4 v4.3.0 + github.com/xinpianchang/xservice v1.0.0 +) + +replace github.com/xinpianchang/xservice => ../../ diff --git a/example/rest-service/server/main.go b/example/rest-service/server/main.go new file mode 100644 index 0000000..57b3a4e --- /dev/null +++ b/example/rest-service/server/main.go @@ -0,0 +1,27 @@ +package main + +import ( + "net/http" + + "github.com/labstack/echo/v4" + + "github.com/xinpianchang/xservice/core/xservice" +) + +func main() { + srv := xservice.New( + xservice.Name("rest-service"), + xservice.Version("v1.0.0"), + xservice.Description("example RESTful service"), + ) + + routes(srv.Server().Echo()) + + srv.Server().Serve() +} + +func routes(e *echo.Echo) { + e.GET("/", func(c echo.Context) error { + return c.String(http.StatusOK, "ok") + }) +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..85df84e --- /dev/null +++ b/go.mod @@ -0,0 +1,44 @@ +module github.com/xinpianchang/xservice + +go 1.16 + +require ( + github.com/Shopify/sarama v1.29.1 + 
github.com/bsm/redislock v0.7.1 + github.com/cloudflare/tableflip v1.2.2 + github.com/dave/jennifer v1.4.1 + github.com/dgrijalva/jwt-go v3.2.0+incompatible + github.com/getsentry/sentry-go v0.11.0 + github.com/go-playground/universal-translator v0.17.0 // indirect + github.com/go-playground/validator v9.31.0+incompatible + github.com/go-redis/redis/v8 v8.10.0 + github.com/google/gops v0.3.18 + github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 + github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.5.0 + github.com/jinzhu/copier v0.3.2 + github.com/labstack/echo-contrib v0.11.0 + github.com/labstack/echo/v4 v4.3.0 + github.com/labstack/gommon v0.3.0 + github.com/leodido/go-urn v1.2.1 // indirect + github.com/opentracing/opentracing-go v1.2.0 + github.com/pkg/errors v0.9.1 + github.com/prometheus/client_golang v1.11.0 + github.com/robfig/cron/v3 v3.0.1 + github.com/soheilhy/cmux v0.1.5 + github.com/speps/go-hashids/v2 v2.0.1 + github.com/spf13/cobra v1.1.3 + github.com/spf13/viper v1.8.1 + github.com/stretchr/testify v1.7.0 + github.com/uber/jaeger-client-go v2.29.1+incompatible + github.com/uber/jaeger-lib v2.4.1+incompatible + go.etcd.io/etcd/client/v3 v3.5.0 + go.uber.org/zap v1.17.0 + google.golang.org/grpc v1.38.0 + gopkg.in/natefinch/lumberjack.v2 v2.0.0 + gopkg.in/yaml.v2 v2.4.0 + gorm.io/datatypes v1.0.1 + gorm.io/driver/mysql v1.1.1 + gorm.io/gorm v1.21.11 + gorm.io/plugin/opentracing v0.0.0-20210506132430-24a9caea7709 +) diff --git a/note.md b/note.md new file mode 100644 index 0000000..b5cb20a --- /dev/null +++ b/note.md @@ -0,0 +1,19 @@ +# xservice + +Service framework + +- Auto-generate framework code +- Auto-generate models & basic DAOs +- Integrate opentracing & prometheus +- Support gRPC and RESTful +- Support etcd-based configuration +- Support sentry + +TODO + +- Service discovery + +``` +Serve every service on a single port? +https://github.com/soheilhy/cmux +``` diff --git a/pkg/common/nocopy.go b/pkg/common/nocopy.go new file mode 100644 index 0000000..31f01b3 --- /dev/null +++ b/pkg/common/nocopy.go @@ -0,0 +1,12 @@ +package common + +// NoCopy may be embedded into structs which must not be copied +// after the first use. +// +// refer to https://github.com/golang/go/issues/8005#issuecomment-190753527 +// for details. +type NoCopy struct{} + +// Lock is a no-op used by -copylocks checker from `go vet`.
+func (*NoCopy) Lock() {} +func (*NoCopy) Unlock() {} diff --git a/pkg/common/value.go b/pkg/common/value.go new file mode 100644 index 0000000..ffbbc96 --- /dev/null +++ b/pkg/common/value.go @@ -0,0 +1,21 @@ +package common + +import "reflect" + +func IsEmptyValue(v reflect.Value) bool { + switch v.Kind() { + case reflect.Array, reflect.Map, reflect.Slice, reflect.String: + return v.Len() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + } + return false +} diff --git a/pkg/config/config.go b/pkg/config/config.go new file mode 100644 index 0000000..a96f34e --- /dev/null +++ b/pkg/config/config.go @@ -0,0 +1,65 @@ +package config + +import ( + "fmt" + "os" + "path/filepath" + "runtime" + + "github.com/spf13/viper" + + "github.com/xinpianchang/xservice/core" +) + +func Load() (*viper.Viper, error) { + v := viper.New() + err := load(v) + return v, err +} + +func LoadGlobal() error { + return load(viper.GetViper()) +} + +// Load auto load from local yaml config or etcd if local file not exists +func load(v *viper.Viper) error { + v.SetConfigName("config") + v.SetConfigType("yaml") + + executable, _ := os.Executable() + v.AddConfigPath(filepath.Dir(executable)) + + v.AddConfigPath(".") + v.AddConfigPath("../") + v.AddConfigPath("../../") + + if _, file, _, ok := runtime.Caller(0); ok { + v.AddConfigPath(filepath.Join(filepath.Dir(file), "../../")) + } + + if err := v.ReadInConfig(); err != nil { + if _, ok := err.(viper.ConfigFileNotFoundError); ok { + endpoint := os.Getenv(core.EnvEtcd) + if endpoint != "" { + name := os.Getenv(core.EnvServiceName) + if name == "" { + name = core.DefaultServiceName + } + viper.RemoteConfig = &remoteConfig{} + v.AddRemoteProvider("etcd", endpoint, fmt.Sprint(core.ServiceConfigKeyPrefix, "/", name, ".yaml")) + if err = v.ReadRemoteConfig(); err != nil { + return err + } + v.SetDefault("dir", filepath.Dir(executable)) + _ = v.WatchRemoteConfigOnChannel() + } + } else { + return err + } + } else { + v.SetDefault("dir", filepath.Dir(v.ConfigFileUsed())) + v.WatchConfig() + } + + return nil +} diff --git a/pkg/config/remote.go b/pkg/config/remote.go new file mode 100644 index 0000000..6c7ca9f --- /dev/null +++ b/pkg/config/remote.go @@ -0,0 +1,97 @@ +package config + +import ( + "bytes" + "context" + "io" + "os" + "strings" + "time" + + "github.com/spf13/viper" + clientv3 "go.etcd.io/etcd/client/v3" + "go.uber.org/zap" + + "github.com/xinpianchang/xservice/core" + "github.com/xinpianchang/xservice/pkg/log" +) + +type remoteConfig struct { + viper.RemoteProvider +} + +func (t *remoteConfig) Get(rp viper.RemoteProvider) (io.Reader, error) { + t.RemoteProvider = rp + return t.get() +} + +func (t *remoteConfig) Watch(rp viper.RemoteProvider) (io.Reader, error) { + t.RemoteProvider = rp + return t.get() +} + +func (t *remoteConfig) WatchChannel(rp viper.RemoteProvider) (<-chan *viper.RemoteResponse, chan bool) { + t.RemoteProvider = rp + + rr := make(chan *viper.RemoteResponse) + stop := make(chan bool) + + go func() { + client, err := t.client() + if err != nil { + log.Fatal("watch config channel", zap.Error(err)) + return + } + + defer client.Close() + + for { + ch := client.Watch(context.Background(), t.Path()) + + select { + 
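// exit when the stop channel fires; otherwise forward each etcd change event to viper as a RemoteResponse +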
case <-stop: + return + case res := <-ch: + for _, event := range res.Events { + rr <- &viper.RemoteResponse{ + Value: event.Kv.Value, + } + } + } + } + }() + + return rr, stop +} + +func (t *remoteConfig) client() (*clientv3.Client, error) { + cfg := clientv3.Config{ + Endpoints: []string{t.Endpoint()}, + DialTimeout: time.Second * 5, + } + if username := os.Getenv(core.EnvEtcdUser); username != "" { + cfg.Username = username + } + if password := os.Getenv(core.EnvEtcdPassword); password != "" { + cfg.Password = password + } + client, err := clientv3.New(cfg) + return client, err +} + +func (t *remoteConfig) get() (io.Reader, error) { + client, err := t.client() + if err != nil { + return nil, err + } + defer client.Close() + + resp, err := client.Get(context.Background(), t.Path()) + if err != nil { + return nil, err + } + if len(resp.Kvs) > 0 { + return bytes.NewReader(resp.Kvs[0].Value), nil + } + return strings.NewReader(""), nil +} diff --git a/pkg/cronx/cron.go b/pkg/cronx/cron.go new file mode 100644 index 0000000..d090fec --- /dev/null +++ b/pkg/cronx/cron.go @@ -0,0 +1,74 @@ +package cronx + +import ( + "os" + "sync" + "time" + + "github.com/robfig/cron/v3" + "go.uber.org/zap" + + "github.com/xinpianchang/xservice/pkg/log" + "github.com/xinpianchang/xservice/pkg/signalx" +) + +const ( + SpecManual = "@manual" + SpecMonthly = "@monthly" + SpecWeekly = "@weekly" + SpecHourly = "@hourly" + SpecDaily = "@daily" + SpecMidnight = "@midnight" + SpecEveryMinutes = "@every 1m" + SpecEveryTenMinutes = "@every 10m" + SpecEveryFifteenMinutes = "@every 15m" + SpecEveryThirtyMinutes = "@every 30m" + SpecEverySeconds = "@every 1s" + SpecEveryTenSeconds = "@every 10s" +) + +var ( + c = cron.New(cron.WithSeconds()) + once sync.Once +) + +func start() { + once.Do(func() { + signalx.AddShutdownHook(func(os.Signal) { + log.Info("shutdown cron") + c.Stop() + }) + go c.Start() + }) +} + +func Add(name string, spec string, fn func()) { + var ( + id cron.EntryID + err error + ) + + getEntry := func() cron.Entry { + return c.Entry(id) + } + + id, err = c.AddFunc(spec, func() { + l := log.Named(name) + start := time.Now() + defer func() { + if x := recover(); x != nil { + l.Error("panic", zap.Any("err", x)) + return + } + entry := getEntry() + l.Info("done", zap.Duration("escape", time.Since(start)), zap.Time("next", entry.Next)) + }() + fn() + }) + + if err != nil { + log.Error("cron add", zap.Error(err)) + } + + start() +} diff --git a/pkg/echox/cache.go b/pkg/echox/cache.go new file mode 100644 index 0000000..2d503a0 --- /dev/null +++ b/pkg/echox/cache.go @@ -0,0 +1,24 @@ +package echox + +import ( + "fmt" + "time" + + "github.com/labstack/echo/v4" +) + +func Cache(age time.Duration) echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + c.Response().Header().Set("Cache-Control", fmt.Sprintf("max-age=%d", int64(age.Seconds()))) + return next(c) + } + } +} + +func NoCache(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + c.Response().Header().Set("Cache-Control", "no-store, no-cache, must-revalidate, proxy-revalidate, max-age=0") + return next(c) + } +} diff --git a/pkg/echox/dump.go b/pkg/echox/dump.go new file mode 100644 index 0000000..c8434f6 --- /dev/null +++ b/pkg/echox/dump.go @@ -0,0 +1,68 @@ +package echox + +import ( + "bytes" + "io" + "net/http" + "net/http/httputil" + "time" + + "github.com/labstack/echo/v4" + "go.uber.org/zap" + + "github.com/xinpianchang/xservice/pkg/log" +) + +type 
bodyDumpResponseWriter struct { + io.Writer + http.ResponseWriter +} + +func (w *bodyDumpResponseWriter) Write(b []byte) (int, error) { + return w.Writer.Write(b) +} + +func Dump(filename ...string) echo.MiddlewareFunc { + var logger log.Logger + if len(filename) > 0 { + logger, _ = log.NewLogger(filename[0]) + } + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + var l log.Logger + if logger == nil { + l = log.For(c.Request().Context()) + } else { + l = logger.For(c.Request().Context()) + } + + req, _ := httputil.DumpRequest(c.Request(), true) + start := time.Now() + l.Debug( + c.Response().Header().Get(echo.HeaderXRequestID), + zap.Any("url", c.Request().URL), + zap.String("req", string(req)), + ) + + resBody := new(bytes.Buffer) + mw := io.MultiWriter(c.Response().Writer, resBody) + writer := &bodyDumpResponseWriter{Writer: mw, ResponseWriter: c.Response().Writer} + c.Response().Writer = writer + + err := next(c) + + if err != nil { + return err + } + + l.For(c.Request().Context()).Debug( + c.Response().Header().Get(echo.HeaderXRequestID), + zap.Int64("cost", time.Since(start).Milliseconds()), + zap.Int("status", c.Response().Status), + zap.Any("header", c.Response().Header()), + zap.String("rsp", resBody.String()), + ) + return nil + } + } +} diff --git a/pkg/echox/error_handler.go b/pkg/echox/error_handler.go new file mode 100644 index 0000000..c8f5b56 --- /dev/null +++ b/pkg/echox/error_handler.go @@ -0,0 +1,84 @@ +package echox + +import ( + "errors" + "fmt" + "net/http" + "reflect" + + sentryecho "github.com/getsentry/sentry-go/echo" + "github.com/go-playground/validator" + "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" + "go.uber.org/zap" + "gorm.io/gorm" + + "github.com/xinpianchang/xservice/pkg/log" + "github.com/xinpianchang/xservice/pkg/responsex" + "github.com/xinpianchang/xservice/pkg/tracingx" +) + +func HTTPErrorHandler(err error, c echo.Context) { + if c.Response().Committed { + return + } + + traceId := tracingx.GetTraceID(c.Request().Context()) + + if he, ok := err.(*echo.HTTPError); ok { + switch err { + case middleware.ErrJWTMissing: + _ = c.JSON(http.StatusUnauthorized, echo.Map{"message": "login required"}) + default: + status := he.Code + message := fmt.Sprintf("%v", he.Message) + if he.Internal != nil { + message = fmt.Sprintf("%v, cause: %v", he.Message, he.Internal) + } + _ = responsex.R(c, responsex.New(status, message, nil).SetHttpStatus(status)) + } + return + } else if ve, ok := err.(*validator.ValidationErrors); ok { + _ = responsex.R(c, responsex.New(http.StatusBadRequest, ve.Error(), nil).SetHttpStatus(http.StatusOK)) + return + } else if errors.Is(err, gorm.ErrRecordNotFound) { + _ = responsex.R(c, responsex.New(http.StatusNotFound, err.Error(), nil).SetHttpStatus(http.StatusNotFound)) + return + } else if ve, ok := err.(*responsex.Error); ok { + httpStatus := http.StatusOK + if ve.HttpStatus != http.StatusOK && ve.HttpStatus != 0 { + httpStatus = ve.HttpStatus + } + if httpStatus >= 500 { + sentryecho.GetHubFromContext(c).CaptureException(err) + } + v := responsex.New(ve.Status, ve.Message, nil) + if ve.Internal != nil { + v = v.SetData(map[string]interface{}{ + "internalErr": fmt.Sprint(ve.Internal), + "requestId": c.Get(echo.HeaderXRequestID), + "traceId": traceId, + }) + } + v = v.SetHttpStatus(httpStatus) + _ = responsex.R(c, v) + return + } else { + _ = c.JSON(http.StatusInternalServerError, map[string]interface{}{ + "error": err.Error(), + "requestId": 
c.Get(echo.HeaderXRequestID), + "traceId": traceId, + }) + } + + log.For(c.Request().Context()).Warn( + c.Response().Header().Get(echo.HeaderXRequestID), + zap.Any("method", c.Request().Method), + zap.Int("status", c.Response().Status), + zap.Any("url", c.Request().URL), + zap.Any("type", reflect.TypeOf(err)), + zap.String("error", err.Error()), + ) + + sentryecho.GetHubFromContext(c).CaptureException(err) +} diff --git a/pkg/echox/jwt.go b/pkg/echox/jwt.go new file mode 100644 index 0000000..56f1c74 --- /dev/null +++ b/pkg/echox/jwt.go @@ -0,0 +1,15 @@ +package echox + +import ( + "github.com/dgrijalva/jwt-go" + "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" +) + +func JWT(key []byte, method *jwt.SigningMethodHMAC, skipper func(echo.Context) bool) echo.MiddlewareFunc { + return middleware.JWTWithConfig(middleware.JWTConfig{ + SigningKey: key, + SigningMethod: method.Name, + Skipper: skipper, + }) +} diff --git a/pkg/echox/noindex.go b/pkg/echox/noindex.go new file mode 100644 index 0000000..bc7ebdc --- /dev/null +++ b/pkg/echox/noindex.go @@ -0,0 +1,12 @@ +package echox + +import ( + "github.com/labstack/echo/v4" +) + +func NoIndex(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + c.Response().Header().Set("X-Robots-Tag", "noindex, nofollow, noarchive, nosnippet") + return next(c) + } +} diff --git a/pkg/echox/timestamp.go b/pkg/echox/timestamp.go new file mode 100644 index 0000000..0542ad0 --- /dev/null +++ b/pkg/echox/timestamp.go @@ -0,0 +1,113 @@ +package echox + +import ( + "database/sql/driver" + "fmt" + "reflect" + "regexp" + "strconv" + "strings" + "time" +) + +type Timestamp time.Time + +// UnmarshalParam echo api @see https://echo.labstack.com/guide/request +func (t *Timestamp) UnmarshalParam(src string) error { + if src != "" { + m, err := strconv.ParseInt(src, 10, 64) + if err != nil { + return err + } + + ts := time.Unix(0, m*int64(time.Millisecond)).Local() + *t = Timestamp(ts) + } + return nil +} + +// MarshalJSON echo api json response +func (t *Timestamp) MarshalJSON() ([]byte, error) { + if t != nil { + ts := time.Time(*t) + return []byte(fmt.Sprintf(`%d`, ts.UnixNano()/int64(time.Millisecond))), nil + } + return nil, nil +} + +func (t *Timestamp) UnmarshalJSON(p []byte) error { + data := string(p) + if data == "null" { + return nil + } + + if p != nil { + i, err := strconv.ParseInt(strings.Replace(data, `"`, "", -1), 10, 64) + if err != nil { + return err + } + + *t = Timestamp(time.Unix(0, int64(time.Millisecond)*i)) + } + return nil +} + +// for sql log, print readable format +func (t Timestamp) String() string { + ts := time.Time(t) + return ts.Format("2006-01-02T15:04:05") +} + +// insert into database conversion +func (t Timestamp) Value() (driver.Value, error) { + return time.Time(t), nil +} + +// read from database conversion +func (t *Timestamp) Scan(src interface{}) error { + switch v := src.(type) { + case *Timestamp: + *t = *v + case Timestamp: + *t = v + case time.Time: + *t = Timestamp(v) + case *time.Time: + *t = Timestamp(*v) + case string: + v = strings.TrimSpace(v) + return t.parse(v) + case *string: + str := strings.TrimSpace(*v) + return t.parse(str) + case int, int32, int64, uint, uint32, uint64: + i := reflect.ValueOf(v).Int() + *t = Timestamp(time.Unix(0, int64(time.Millisecond)*int64(i))) + case *int, *int32, *int64, *uint, *uint32, *uint64: + i := reflect.ValueOf(v).Elem().Int() + *t = Timestamp(time.Unix(0, int64(time.Millisecond)*int64(i))) + } + return nil +} + +func (t *Timestamp) 
parse(v string) error { + switch { + case regexp.MustCompile(`^\d+$`).MatchString(v): + if i, err := strconv.ParseInt(v, 10, 64); err == nil { + *t = Timestamp(time.Unix(0, int64(time.Millisecond)*i)) + } + case regexp.MustCompile(`^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$`).MatchString(v): + if tt, err := time.Parse("2006-01-02 15:04:05", v); err == nil { + *t = Timestamp(tt) + } + case regexp.MustCompile(`^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}$`).MatchString(v): + if tt, err := time.Parse("2006-01-02T15:04:05", v); err == nil { + *t = Timestamp(tt) + } + case regexp.MustCompile(`^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[\+\-]\d{4}$`).MatchString(v): + if tt, err := time.Parse("2006-01-02T15:04:05-0700", v); err == nil { + *t = Timestamp(tt) + } + } + return nil +} diff --git a/pkg/echox/timestamp_test.go b/pkg/echox/timestamp_test.go new file mode 100644 index 0000000..1fd20af --- /dev/null +++ b/pkg/echox/timestamp_test.go @@ -0,0 +1,136 @@ +package echox + +import ( + "encoding/json" + "fmt" + "strings" + "testing" + "time" + + "github.com/jinzhu/copier" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_Timestamp_scan(t *testing.T) { + now := time.Now() + nowStr1 := now.Format("2006-01-02 15:04:05") + nowStr2 := now.Format("2006-01-02T15:04:05") + nowStr3 := now.Format("2006-01-02T15:04:05-0700") + nowStr4 := fmt.Sprint(now.UnixNano() / int64(time.Millisecond)) + + var target1 Timestamp + + tests := []struct { + name string + target Timestamp + src interface{} + equal interface{} + }{ + {"time", target1, &now, Timestamp(now)}, + {"time str1", target1, &nowStr1, Timestamp(now)}, + {"time str2", target1, &nowStr2, Timestamp(now)}, + {"time str3", target1, &nowStr3, Timestamp(now)}, + {"time str4", target1, &nowStr4, Timestamp(now)}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.target.Scan(tt.src) + assert.NoErrorf(t, err, "%v -> %v", tt.src, tt.target) + assert.Equal(t, fmt.Sprint(tt.equal), fmt.Sprint(tt.target), "%v -> %v", tt.src, tt.target) + }) + } +} + +func Test_Timestamp_copy(t *testing.T) { + now := time.Now() + + { + type ts struct { + Tp Timestamp + } + var a = &ts{Tp: Timestamp(now)} + b, err := json.Marshal(a) + require.NoError(t, err) + require.True(t, !strings.HasPrefix(string(b), `{"Tp":"`)) + + type ts2 struct { + Tp Timestamp + } + tp := &ts2{Tp: Timestamp(now)} + err = copier.Copy(a, tp) + require.NoError(t, err) + require.Equal(t, now, time.Time(a.Tp)) + } + + { + type ts struct { + Tp *Timestamp + } + + type ts2 struct { + Tp *Timestamp + } + + tp := Timestamp(now) + a := &ts{} + b := &ts2{&tp} + err := copier.Copy(a, b) + require.NoError(t, err) + // t.Logf("a:%v, b:%v", a, b) + require.Equal(t, now, time.Time(*a.Tp)) + } + + { + type ts struct { + Tp Timestamp + } + + type ts2 struct { + Tp time.Time + } + + a := &ts{} + b := &ts2{now} + err := copier.Copy(a, b) + require.NoError(t, err) + // t.Logf("a:%v, b:%v", a, b) + require.Equal(t, now, time.Time(a.Tp)) + } + + { + type ts struct { + Tp *Timestamp + } + + type ts2 struct { + Tp time.Time + } + + a := &ts{} + b := &ts2{now} + err := copier.Copy(a, b) + require.NoError(t, err) + // t.Logf("a:%v, b:%v", a, b) + require.Equal(t, now, time.Time(*a.Tp)) + } + + { + type ts struct { + Tp *Timestamp + } + + type ts2 struct { + Tp *time.Time + } + + a := &ts{} + b := &ts2{&now} + err := copier.Copy(a, b) + require.NoError(t, err) + // t.Logf("a:%v, b:%v", a, b) + require.Equal(t, now, time.Time(*a.Tp)) + } + +} diff --git 
a/pkg/echox/validator.go b/pkg/echox/validator.go new file mode 100644 index 0000000..20b867c --- /dev/null +++ b/pkg/echox/validator.go @@ -0,0 +1,20 @@ +package echox + +import ( + "github.com/go-playground/validator" + "github.com/labstack/echo/v4" +) + +// ConfigValidator enable echo use ext validation framework +// ref: https://github.com/go-playground/validator +func ConfigValidator(e *echo.Echo) { + e.Validator = &echoValidator{validator: validator.New()} +} + +type echoValidator struct { + validator *validator.Validate +} + +func (t *echoValidator) Validate(i interface{}) error { + return t.validator.Struct(i) +} diff --git a/pkg/echox/validator_test.go b/pkg/echox/validator_test.go new file mode 100644 index 0000000..6997e93 --- /dev/null +++ b/pkg/echox/validator_test.go @@ -0,0 +1,145 @@ +package echox + +import ( + "testing" + "time" + + "github.com/go-playground/validator" +) + +// test deep validate +func TestValidator_validate(t *testing.T) { + type Inner struct { + InnerName string `validate:"required"` + } + + type inner struct { + InnerName string `validate:"required"` + } + + tests := []struct { + name string + obj interface{} + wantErr bool + }{ + { + name: "simple required 1", + obj: &struct { + Name string `validate:"required"` + }{Name: "name"}, + wantErr: false, + }, + + { + name: "simple required 2", + obj: &struct { + Name string `validate:"required"` + }{}, + wantErr: true, + }, + + { + name: "Inner required 1", + obj: &struct { + Name string `validate:"required"` + Inner + }{"name", Inner{InnerName: "Inner"}}, + wantErr: false, + }, + + { + name: "Inner required 2", + obj: &struct { + Name string `validate:"required"` + Inner + }{"", Inner{InnerName: "Inner"}}, + wantErr: true, + }, + + { + name: "Inner required 3", + obj: &struct { + Inner + }{}, + wantErr: true, + }, + + { + name: "Inner required 4", + obj: &struct { + Inner Inner + }{}, + wantErr: true, + }, + + { + name: "Inner required 5", + obj: &struct { + Inner + }{Inner{InnerName: "ok"}}, + wantErr: false, + }, + + { + name: "Inner required 6", + obj: &struct { + Inner Inner + }{Inner: Inner{InnerName: "ok"}}, + wantErr: false, + }, + + { + name: "unexported inner", + obj: &struct { + inner Inner + }{inner: Inner{InnerName: "ok"}}, + wantErr: false, + }, + + { + name: "unexported inner 2", + obj: &struct { + inner + }{inner: inner{InnerName: ""}}, + wantErr: true, + }, + + { + name: "point innter 1", + obj: &struct { + Inner *Inner `json:"inner" validate:"required"` + }{}, + wantErr: true, + }, + + { + name: "point innter 2", + obj: &struct { + Inner *Inner `json:"inner" validate:"required"` + }{Inner: &Inner{InnerName: "OK"}}, + wantErr: false, + }, + + { + name: "time struct 1", + obj: struct { + Date time.Time `validate:"required"` + }{Date: time.Now()}, + wantErr: false, + }, + + { + name: "time struct 2", + obj: struct { + Date time.Time `validate:"required"` + }{Date: time.Time{}}, + wantErr: true, + }, + } + for _, tt := range tests { + v := &echoValidator{validator: validator.New()} + if err := v.Validate(tt.obj); (err != nil) != tt.wantErr { + t.Errorf("%q. 
Validator.validate() error = %v, wantErr %v", tt.name, err, tt.wantErr) + } + } +} diff --git a/pkg/gormx/config.go b/pkg/gormx/config.go new file mode 100644 index 0000000..adcc6e8 --- /dev/null +++ b/pkg/gormx/config.go @@ -0,0 +1,112 @@ +package gormx + +import ( + "fmt" + "time" + + "github.com/spf13/viper" + "go.uber.org/zap" + _ "gorm.io/datatypes" + "gorm.io/driver/mysql" + "gorm.io/gorm" + "gorm.io/gorm/schema" + gormopentracing "gorm.io/plugin/opentracing" + + "github.com/xinpianchang/xservice/pkg/log" +) + +var ( + dbs map[string]*gorm.DB +) + +type DbConfig struct { + Name string `yaml:"name"` + Uri string `yaml:"uri"` + MaxConn int `yaml:"maxConn"` + MaxIdleConn int `yaml:"maxIdleConn"` + ConnMaxLifetimeInMillisecond int `yaml:"connMaxLifetimeInMillisecond"` + QueryFields bool `yaml:"queryFields"` + CreateBatchSize int `yaml:"createBatchSize"` +} + +type ConfigureFn func(DbConfig) *gorm.DB + +// Config config db, default use mysql +func Config(v *viper.Viper, configureFn ...ConfigureFn) { + var cfg []DbConfig + if err := v.UnmarshalKey("database", &cfg); err != nil { + log.Fatal("read database config", zap.Error(err)) + } + + dbs = make(map[string]*gorm.DB, len(cfg)) + for _, c := range cfg { + if c.MaxConn <= 0 { + c.MaxConn = 100 + } + + if c.MaxIdleConn <= 0 { + c.MaxIdleConn = 0 + } + + if c.ConnMaxLifetimeInMillisecond <= 0 { + c.ConnMaxLifetimeInMillisecond = int((time.Minute * 5).Milliseconds()) + } + + if c.CreateBatchSize <= 0 { + c.CreateBatchSize = 1000 + } + + var db *gorm.DB + if len(configureFn) > 0 { + db = configureFn[0](c) + } else { + db = MySQLDbConfig(c) + } + if db == nil { + continue + } + if err := db.Use(gormopentracing.New()); err != nil { + log.Error("apply db opentracing", zap.Error(err)) + } + dbs[c.Name] = db + } +} + +// MySQLDbConfig for mysql config +func MySQLDbConfig(cfg DbConfig) *gorm.DB { + db, err := gorm.Open(mysql.Open(cfg.Uri), &gorm.Config{ + PrepareStmt: true, + QueryFields: cfg.QueryFields, + CreateBatchSize: cfg.CreateBatchSize, + NamingStrategy: schema.NamingStrategy{ + SingularTable: true, + }, + }) + + if err != nil { + log.Fatal("open db failed", zap.String("name", cfg.Name), zap.Error(err)) + } + + sqlDB, err := db.DB() + if err != nil { + log.Fatal("get db failed", zap.Error(err)) + } + + sqlDB.SetMaxOpenConns(cfg.MaxConn) + sqlDB.SetMaxIdleConns(cfg.MaxIdleConn) + sqlDB.SetConnMaxLifetime(time.Millisecond * time.Duration(cfg.ConnMaxLifetimeInMillisecond)) + + logger, _ := log.NewLogger(fmt.Sprint("sql-", cfg.Name, ".log")) + db.Logger = &dbLogger{logger: logger.Named(cfg.Name)} + + err = sqlDB.Ping() + if err != nil { + log.Fatal("db ping failed", zap.Error(err)) + } + + return db +} + +func Get(name string) *gorm.DB { + return dbs[name] +} diff --git a/pkg/gormx/logger.go b/pkg/gormx/logger.go new file mode 100644 index 0000000..ee9e8c3 --- /dev/null +++ b/pkg/gormx/logger.go @@ -0,0 +1,45 @@ +package gormx + +import ( + "context" + "errors" + "fmt" + "time" + + "go.uber.org/zap" + "gorm.io/gorm" + "gorm.io/gorm/logger" + + "github.com/xinpianchang/xservice/pkg/log" +) + +type dbLogger struct { + logger log.Logger +} + +func (t *dbLogger) LogMode(logger.LogLevel) logger.Interface { + return t +} + +func (t *dbLogger) Info(ctx context.Context, msg string, data ...interface{}) { + t.logger.For(ctx).CallerSkip(2).Info(fmt.Sprintf(msg, data...)) +} + +func (t *dbLogger) Warn(ctx context.Context, msg string, data ...interface{}) { + t.logger.For(ctx).CallerSkip(2).Warn(fmt.Sprintf(msg, data...)) +} + +func (t *dbLogger) Error(ctx 
context.Context, msg string, data ...interface{}) { + t.logger.For(ctx).CallerSkip(2).Error(fmt.Sprintf(msg, data...)) +} + +func (t *dbLogger) Trace(ctx context.Context, begin time.Time, fc func() (sql string, rowsAffected int64), err error) { + l := t.logger.For(ctx).CallerSkip(2) + sql, rows := fc() + l = l.With(zap.Duration("elapsed", time.Since(begin)), zap.Int64("rows", rows)) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + l.Warn(sql, zap.Error(err)) + } else { + l.Info(sql) + } +} diff --git a/pkg/grpcx/validate.go b/pkg/grpcx/validate.go new file mode 100644 index 0000000..02dcda5 --- /dev/null +++ b/pkg/grpcx/validate.go @@ -0,0 +1,36 @@ +package grpcx + +import ( + "context" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +type Validator interface { + Validate() error +} + +func EnvoyproxyValidatorStreamServerInterceptor() grpc.StreamServerInterceptor { + return func(srv interface{}, stream grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error { + if v, ok := srv.(Validator); ok { + if err := v.Validate(); err != nil { + return status.Error(codes.InvalidArgument, err.Error()) + } + } + return handler(srv, stream) + } +} + +func EnvoyproxyValidatorUnaryServerInterceptor() grpc.UnaryServerInterceptor { + return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { + if v, ok := req.(Validator); ok { + if err := v.Validate(); err != nil { + return nil, status.Error(codes.InvalidArgument, err.Error()) + } + } + + return handler(ctx, req) + } +} diff --git a/pkg/iox/copy.go b/pkg/iox/copy.go new file mode 100644 index 0000000..0c57fef --- /dev/null +++ b/pkg/iox/copy.go @@ -0,0 +1,68 @@ +package iox + +import ( + "fmt" + "io" + "io/ioutil" + "os" + "path" +) + +// File copies a single file from src to dst +func CopyFile(src, dst string) error { + var err error + var srcFile *os.File + var dstFile *os.File + var srcinfo os.FileInfo + + if srcFile, err = os.Open(src); err != nil { + return err + } + defer srcFile.Close() + + if dstFile, err = os.Create(dst); err != nil { + return err + } + defer dstFile.Close() + + if _, err = io.Copy(dstFile, srcFile); err != nil { + return err + } + if srcinfo, err = os.Stat(src); err != nil { + return err + } + return os.Chmod(dst, srcinfo.Mode()) +} + +func CopyDir(src string, dst string) error { + var err error + var fds []os.FileInfo + var srcinfo os.FileInfo + + if srcinfo, err = os.Stat(src); err != nil { + return err + } + + if err = os.MkdirAll(dst, srcinfo.Mode()); err != nil { + return err + } + + if fds, err = ioutil.ReadDir(src); err != nil { + return err + } + for _, fd := range fds { + srcfp := path.Join(src, fd.Name()) + dstfp := path.Join(dst, fd.Name()) + + if fd.IsDir() { + if err = CopyDir(srcfp, dstfp); err != nil { + fmt.Println(err) + } + } else { + if err = CopyFile(srcfp, dstfp); err != nil { + fmt.Println(err) + } + } + } + return nil +} diff --git a/pkg/iox/textlinescanner.go b/pkg/iox/textlinescanner.go new file mode 100644 index 0000000..ade9049 --- /dev/null +++ b/pkg/iox/textlinescanner.go @@ -0,0 +1,42 @@ +package iox + +import ( + "bufio" + "io" + "strings" +) + +type TextLineScanner struct { + reader *bufio.Reader + hasNext bool + line string + err error +} + +func NewTextLineScanner(reader io.Reader) *TextLineScanner { + return &TextLineScanner{ + reader: bufio.NewReader(reader), + hasNext: true, + } +} + +func (scanner *TextLineScanner) Scan() bool { + if 
!scanner.hasNext { + return false + } + + line, err := scanner.reader.ReadString('\n') + scanner.line = strings.TrimRight(line, "\n") + if err == io.EOF { + scanner.hasNext = false + return true + } else if err != nil { + scanner.err = err + return false + } + return true +} + +func (scanner *TextLineScanner) Line() (string, error) { + return scanner.line, scanner.err +} diff --git a/pkg/kafkax/kafka.go b/pkg/kafkax/kafka.go new file mode 100644 index 0000000..e1daa78 --- /dev/null +++ b/pkg/kafkax/kafka.go @@ -0,0 +1,198 @@ +package kafkax + +import ( + "context" + "fmt" + "os" + "time" + + "github.com/Shopify/sarama" + "github.com/getsentry/sentry-go" + "github.com/opentracing/opentracing-go" + "github.com/opentracing/opentracing-go/ext" + "github.com/pkg/errors" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" + "github.com/spf13/viper" + "go.uber.org/zap" + + "github.com/xinpianchang/xservice/core" + "github.com/xinpianchang/xservice/pkg/log" +) + +var ( + clients map[string]*clientWrapper + topicCountVec *prometheus.CounterVec + sendDurationsHistogram prometheus.Histogram +) + +type config struct { + Name string `yaml:"name"` + Version string `yaml:"version"` + Broker []string `yaml:"broker"` +} + +type clientWrapper struct { + client sarama.Client + name string + producer sarama.SyncProducer +} + +func Config(v *viper.Viper) { + var configs []config + if err := v.UnmarshalKey("mq", &configs); err != nil { + log.Fatal("unmarshal kafka config", zap.Error(err)) + } + + serviceName := os.Getenv(core.EnvServiceName) + + topicCountVec = promauto.NewCounterVec(prometheus.CounterOpts{ + Namespace: serviceName, + Subsystem: "kafka", + Name: "send_total", + Help: "Number of kafka message sent", + }, []string{"topic"}) + + sendDurationsHistogram = promauto.NewHistogram(prometheus.HistogramOpts{ + Namespace: serviceName, + Subsystem: "kafka", + Name: "send_duration_millisecond", + Help: "Send duration", + Buckets: []float64{20, 50, 100, 200, 300, 500, 1000, 2000, 3000, 5000}, + }) + + clients = make(map[string]*clientWrapper, len(configs)) + + hostname, _ := os.Hostname() + clientId := fmt.Sprint(serviceName, "_", hostname, "_", os.Getpid()) + + for _, c := range configs { + config := sarama.NewConfig() + if version, err := sarama.ParseKafkaVersion(c.Version); err != nil { + log.Fatal("kafka config", zap.Error(err)) + } else { + config.Version = version + } + + config.Consumer.Return.Errors = true + config.Producer.Return.Successes = true + config.ClientID = clientId + var ( + client sarama.Client + producer sarama.SyncProducer + err error + ) + if client, err = sarama.NewClient(c.Broker, config); err != nil { + log.Fatal("init kafka client error", zap.Error(err), zap.String("name", c.Name)) + } + + if producer, err = sarama.NewSyncProducerFromClient(client); err != nil { + log.Fatal("init kafka producer error", zap.Error(err), zap.String("name", c.Name)) + } + + clients[c.Name] = &clientWrapper{ + name: c.Name, + client: client, + producer: producer, + } + } +} + +func GetClient(name ...string) sarama.Client { + wrap := getClientWrap(name...) + if wrap != nil { + return wrap.client + } + return nil +} + +func GetProducer(name ...string) sarama.SyncProducer { + wrap := getClientWrap(name...) 
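+	// when no name is passed, getClientWrap (below) falls back to the client registered as "default"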
+ if wrap != nil { + return wrap.producer + } + return nil +} + +func getClientWrap(name ...string) *clientWrapper { + var wrap *clientWrapper + if name != nil { + wrap = clients[name[0]] + } else { + wrap = clients["default"] + } + return wrap +} + +func SendMessage(ctx context.Context, message *sarama.ProducerMessage, clientName ...string) (err error) { + if span := opentracing.SpanFromContext(ctx); span != nil { + ctx = opentracing.ContextWithSpan(context.Background(), span) + span, _ = opentracing.StartSpanFromContext(ctx, "kafka_send") + defer func() { + if err != nil { + ext.Error.Set(span, true) + span.LogKV("err", err, "clientName", clientName) + } + span.Finish() + }() + } + + producer := GetProducer(clientName...) + if producer == nil { + err = errors.Errorf("producer not found, clientName:%v", clientName) + return + } + start := time.Now() + topicCountVec.WithLabelValues(message.Topic).Inc() + _, _, err = producer.SendMessage(message) + duration := time.Since(start).Milliseconds() + sendDurationsHistogram.Observe(float64(duration)) + if err != nil { + sentry.CaptureException(errors.WithMessage(err, fmt.Sprint("kafka send message to topic:", message.Topic))) + } + return +} + +func StartGroupConsume(group string, topics []string, handler sarama.ConsumerGroupHandler, name ...string) { + l := log.Named("kafka consumer").With(zap.String("groupID", group), zap.Strings("topics", topics)) + + if group == "" || len(topics) == 0 { + return + } + + client := GetClient(name...) + if client == nil { + log.Fatal("kafkaClient not found", zap.Any("name", name)) + return + } + + consumer, err := sarama.NewConsumerGroupFromClient(group, client) + if err != nil { + l.Fatal("init consume group error", zap.Error(err)) + return + } + + go func() { + defer func() { + if x := recover(); x != nil { + l.Error("start consumer error", zap.Error(err)) + } + }() + + go func() { + for it := range consumer.Errors() { + l.Warn("client error", zap.Error(it)) + } + }() + + ctx := context.Background() + for { + l.Debug("start consume client ...") + err = consumer.Consume(ctx, topics, handler) + if err != nil { + l.Warn("consume", zap.Error(err)) + time.Sleep(time.Second * 5) + } + } + }() +} diff --git a/pkg/log/config.go b/pkg/log/config.go new file mode 100644 index 0000000..ac88220 --- /dev/null +++ b/pkg/log/config.go @@ -0,0 +1,167 @@ +package log + +import ( + "os" + "path/filepath" + "strings" + "sync" + "time" + + "github.com/spf13/viper" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" + "gopkg.in/natefinch/lumberjack.v2" +) + +var ( + logger Logger + zaplogger *zap.Logger + cfg Cfg + + loggerFileMap map[string]*zap.Logger = make(map[string]*zap.Logger) + loggerFileMapMutex sync.Mutex +) + +type Cfg struct { + Level string `yaml:"level"` + File string `yaml:"file"` + Stdout bool `yaml:"stdout"` + Format string `yaml:"format"` + Caller bool `yaml:"caller"` + MaxSize int `yaml:"maxSize"` + MaxDays int `yaml:"maxDays"` +} + +func init() { + v := viper.New() + v.SetDefault("log", map[string]interface{}{ + "level": "info", + "format": "console", + "stdout": true, + "caller": true, + }) + Config(v) +} + +func Config(v *viper.Viper) { + if err := v.UnmarshalKey("log", &cfg); err != nil { + Fatal("parse log config", zap.Error(err)) + } + + if cfg.Level == "" { + cfg.Level = "info" + } + + if cfg.Format == "" { + cfg.Format = "console" + } + + if l, err := buildZapLogger(cfg); err != nil { + Fatal("config log", zap.Error(err)) + } else { + zaplogger = l + } + + logger = newLogger(zaplogger) +} + +func 
NewLogger(file string) (Logger, error) { + c := cfg + if c.File == file { + return logger, nil + } + + if file != "" && c.File != "" { + c.File = filepath.Join(filepath.Dir(cfg.File), file) + } + + if l, err := buildZapLogger(cfg); err != nil { + return nil, err + } else { + return newLogger(l), nil + } +} + +func buildZapLogger(cfg Cfg) (*zap.Logger, error) { + loggerFileMapMutex.Lock() + defer loggerFileMapMutex.Unlock() + + if l, ok := loggerFileMap[cfg.File]; ok { + return l, nil + } + + if cfg.Level == "" { + cfg.Level = "info" + } + + if cfg.Format == "" { + cfg.Format = "console" + } + + ws := make([]zapcore.WriteSyncer, 0, 2) + + if cfg.Stdout && os.Getenv("XSERVICE_DISABLE_STDOUT") == "" { + ws = append(ws, zapcore.AddSync(os.Stdout)) + } + + if cfg.File != "" { + if cfg.MaxSize <= 0 { + cfg.MaxSize = 1024 + } + + if cfg.MaxDays <= 0 { + cfg.MaxDays = 7 + } + } + + if cfg.File != "" { + rotateLogger := &lumberjack.Logger{ + Filename: cfg.File, + MaxSize: cfg.MaxSize, + MaxAge: cfg.MaxDays, + LocalTime: true, + Compress: true, + } + ws = append(ws, zapcore.AddSync(rotateLogger)) + + go scheduleRotate(rotateLogger) + } + + var level zapcore.Level + err := level.UnmarshalText([]byte(cfg.Level)) + if err != nil { + Error("parse level", zap.Error(err)) + level = zapcore.InfoLevel + } + atomicLevel := zap.NewAtomicLevelAt(level) + + writeSynced := zapcore.NewMultiWriteSyncer(ws...) + + encoding := zap.NewProductionEncoderConfig() + encoding.EncodeTime = zapcore.ISO8601TimeEncoder + + var encoder zapcore.Encoder + if strings.ToLower(cfg.Format) == "json" { + encoder = zapcore.NewJSONEncoder(encoding) + } else { + encoder = zapcore.NewConsoleEncoder(encoding) + } + core := zapcore.NewCore(encoder, writeSynced, atomicLevel) + + options := make([]zap.Option, 0, 3) + options = append(options, zap.AddStacktrace(zapcore.ErrorLevel)) + if cfg.Caller { + options = append(options, zap.AddCaller(), zap.AddCallerSkip(2)) + } + return zap.New(core, options...), nil +} + +func scheduleRotate(log *lumberjack.Logger) { + for { + n := time.Now().Add(time.Hour * 24) + next := time.Date(n.Year(), n.Month(), n.Day(), 0, 0, 0, 0, time.Local) + d := time.Until(next) + time.Sleep(d) + _ = log.Rotate() + } +} diff --git a/pkg/log/echo_logger.go b/pkg/log/echo_logger.go new file mode 100644 index 0000000..9688777 --- /dev/null +++ b/pkg/log/echo_logger.go @@ -0,0 +1,140 @@ +package log + +import ( + "context" + "encoding/json" + "fmt" + "io" + "strings" + + "github.com/labstack/gommon/log" +) + +type EchoLogger struct { + prefix string + logger Logger +} + +func NewEchoLogger() *EchoLogger { + return &EchoLogger{ + logger: Named(""), + } +} + +func (t *EchoLogger) For(ctx context.Context) *EchoLogger { + return &EchoLogger{prefix: t.prefix, logger: t.logger.For(ctx)} +} + +func (t *EchoLogger) Output() io.Writer { + return nil +} + +func (t *EchoLogger) SetOutput(w io.Writer) {} + +func (t *EchoLogger) Prefix() string { + return t.prefix +} + +func (t *EchoLogger) SetPrefix(p string) { + t.prefix = p + t.logger = Named(p) +} + +func (t *EchoLogger) Level() log.Lvl { + return log.INFO +} + +func (t *EchoLogger) SetLevel(v log.Lvl) {} + +func (t *EchoLogger) SetHeader(h string) {} + +func (t *EchoLogger) Print(i ...interface{}) { + t.logger.Info(strings.TrimSpace(fmt.Sprintln(i...))) +} + +func (t *EchoLogger) Printf(format string, args ...interface{}) { + t.logger.Info(strings.TrimSpace(fmt.Sprintf(format, args...))) +} + +func (t *EchoLogger) Printj(j log.JSON) { + b, _ := json.Marshal(j) + 
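+	// emit the marshaled JSON map as a single info entry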
t.logger.Info(strings.TrimSpace(string(b))) +} + +func (t *EchoLogger) Debug(i ...interface{}) { + t.logger.Debug(strings.TrimSpace(fmt.Sprintln(i...))) +} + +func (t *EchoLogger) Debugf(format string, args ...interface{}) { + t.logger.Debug(strings.TrimSpace(fmt.Sprintf(format, args...))) +} + +func (t *EchoLogger) Debugj(j log.JSON) { + b, _ := json.Marshal(j) + t.logger.Debug(strings.TrimSpace(string(b))) +} + +func (t *EchoLogger) Info(i ...interface{}) { + t.logger.Info(strings.TrimSpace(fmt.Sprintln(i...))) +} + +func (t *EchoLogger) Infof(format string, args ...interface{}) { + t.logger.Info(strings.TrimSpace(fmt.Sprintf(format, args...))) +} + +func (t *EchoLogger) Infoj(j log.JSON) { + b, _ := json.Marshal(j) + t.logger.Info(strings.TrimSpace(string(b))) +} + +func (t *EchoLogger) Warn(i ...interface{}) { + t.logger.Warn(strings.TrimSpace(fmt.Sprintln(i...))) +} + +func (t *EchoLogger) Warnf(format string, args ...interface{}) { + t.logger.Warn(strings.TrimSpace(fmt.Sprintf(format, args...))) +} + +func (t *EchoLogger) Warnj(j log.JSON) { + b, _ := json.Marshal(j) + t.logger.Warn(strings.TrimSpace(string(b))) +} + +func (t *EchoLogger) Error(i ...interface{}) { + t.logger.Error(strings.TrimSpace(fmt.Sprintln(i...))) +} + +func (t *EchoLogger) Errorf(format string, args ...interface{}) { + t.logger.Error(strings.TrimSpace(fmt.Sprintf(format, args...))) +} + +func (t *EchoLogger) Errorj(j log.JSON) { + b, _ := json.Marshal(j) + t.logger.Error(strings.TrimSpace(string(b))) +} + +func (t *EchoLogger) Fatal(i ...interface{}) { + t.logger.Error(strings.TrimSpace(fmt.Sprintln(i...))) +} + +func (t *EchoLogger) Fatalj(j log.JSON) { + b, _ := json.Marshal(j) + t.logger.Error(strings.TrimSpace(string(b))) +} + +func (t *EchoLogger) Fatalf(format string, args ...interface{}) { + t.logger.Error(strings.TrimSpace(fmt.Sprintf(format, args...))) +} + +func (t *EchoLogger) Panic(i ...interface{}) { + t.logger.Fatal(strings.TrimSpace(fmt.Sprintln(i...))) +} + +func (t *EchoLogger) Panicj(j log.JSON) { + b, _ := json.Marshal(j) + t.logger.Fatal(strings.TrimSpace(string(b))) +} + +func (t *EchoLogger) Panicf(format string, args ...interface{}) { + t.logger.Fatal(strings.TrimSpace(fmt.Sprintf(format, args...))) +} diff --git a/pkg/log/log.go b/pkg/log/log.go new file mode 100644 index 0000000..c935c69 --- /dev/null +++ b/pkg/log/log.go @@ -0,0 +1,43 @@ +package log + +import ( + "context" + + "go.uber.org/zap" +) + +func Named(name string) Logger { + return logger.Named(name) +} + +func Debug(msg string, fields ...zap.Field) { + logger.Debug(msg, fields...) +} + +func Info(msg string, fields ...zap.Field) { + logger.Info(msg, fields...) +} + +func Warn(msg string, fields ...zap.Field) { + logger.Warn(msg, fields...) +} + +func Error(msg string, fields ...zap.Field) { + logger.Error(msg, fields...) +} + +func Fatal(msg string, fields ...zap.Field) { + logger.Fatal(msg, fields...) +} + +func With(fields ...zap.Field) Logger { + return logger.With(fields...) 
+} + +func For(ctx context.Context) Logger { + return logger.For(ctx) +} + +func Get() *zap.Logger { + return zaplogger +} diff --git a/pkg/log/logger.go b/pkg/log/logger.go new file mode 100644 index 0000000..dbbf2de --- /dev/null +++ b/pkg/log/logger.go @@ -0,0 +1,97 @@ +package log + +import ( + "context" + + "github.com/opentracing/opentracing-go" + "github.com/uber/jaeger-client-go" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +type Logger interface { + Named(name string) Logger + Debug(msg string, fields ...zapcore.Field) + Info(msg string, fields ...zapcore.Field) + Warn(msg string, fields ...zapcore.Field) + Error(msg string, fields ...zapcore.Field) + Fatal(msg string, fields ...zapcore.Field) + With(fields ...zapcore.Field) Logger + For(ctx context.Context) Logger + CallerSkip(int) Logger +} + +type defaultLogger struct { + logger *zap.Logger + additionalFields []zapcore.Field + skiped bool +} + +func newLogger(l *zap.Logger) Logger { + return &defaultLogger{ + logger: l, + additionalFields: make([]zapcore.Field, 0, 8), + } +} + +func (t defaultLogger) Named(name string) Logger { + t.logger = t.logger.Named(name) + return t +} + +func (t defaultLogger) Debug(msg string, fields ...zapcore.Field) { + t.logger.Debug(msg, fields...) +} + +func (t defaultLogger) Info(msg string, fields ...zapcore.Field) { + t.logger.Info(msg, fields...) +} + +func (t defaultLogger) Warn(msg string, fields ...zapcore.Field) { + t.logger.Warn(msg, fields...) +} + +func (t defaultLogger) Error(msg string, fields ...zapcore.Field) { + t.logger.Error(msg, fields...) +} + +func (t defaultLogger) Fatal(msg string, fields ...zapcore.Field) { + t.logger.Fatal(msg, fields...) +} + +func (t defaultLogger) With(fields ...zapcore.Field) Logger { + return defaultLogger{ + logger: t.logger.WithOptions(zap.AddCallerSkip(t.skip())).With(fields...), + additionalFields: append(t.additionalFields, fields...), + skiped: true, + } +} + +func (t defaultLogger) For(ctx context.Context) Logger { + if span := opentracing.SpanFromContext(ctx); span != nil { + l := spanLogger{span: span, logger: t.logger.WithOptions(zap.AddCallerSkip(t.skip())), additionalFields: t.additionalFields} + + if jaegerCtx, ok := span.Context().(jaeger.SpanContext); ok { + l.spanFields = []zapcore.Field{ + zap.String("trace_id", jaegerCtx.TraceID().String()), + zap.String("span_id", jaegerCtx.SpanID().String()), + } + } + + return l + } + + return defaultLogger{logger: t.logger.WithOptions(zap.AddCallerSkip(-1))} +} + +func (t defaultLogger) CallerSkip(skip int) Logger { + t.logger = t.logger.WithOptions(zap.AddCallerSkip(skip)) + return t +} + +func (t defaultLogger) skip() int { + if t.skiped { + return 0 + } + return -1 +} diff --git a/pkg/log/spanlogger.go b/pkg/log/spanlogger.go new file mode 100644 index 0000000..137b86e --- /dev/null +++ b/pkg/log/spanlogger.go @@ -0,0 +1,167 @@ +package log + +import ( + "context" + "time" + + "github.com/opentracing/opentracing-go" + "github.com/opentracing/opentracing-go/ext" + "github.com/opentracing/opentracing-go/log" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +type spanLogger struct { + logger *zap.Logger + span opentracing.Span + spanFields []zapcore.Field + additionalFields []zapcore.Field +} + +func (t spanLogger) Named(name string) Logger { + t.logger = t.logger.Named(name) + return t +} + +func (t spanLogger) Debug(msg string, fields ...zapcore.Field) { + if t.logger.Core().Enabled(zap.DebugLevel) { + t.logToSpan("debug", msg, append(t.additionalFields, fields...)...) 
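+		// mirror the entry onto the tracing span only while debug level is enabled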
+ } + t.logger.Debug(msg, append(t.spanFields, fields...)...) +} + +func (t spanLogger) Info(msg string, fields ...zapcore.Field) { + if t.logger.Core().Enabled(zap.InfoLevel) { + t.logToSpan("info", msg, append(t.additionalFields, fields...)...) + } + t.logger.Info(msg, append(t.spanFields, fields...)...) +} + +func (t spanLogger) Warn(msg string, fields ...zapcore.Field) { + t.logToSpan("warn", msg, append(t.additionalFields, fields...)...) + t.logger.Warn(msg, append(t.spanFields, fields...)...) +} + +func (t spanLogger) Error(msg string, fields ...zapcore.Field) { + t.logToSpan("error", msg, fields...) + ext.Error.Set(t.span, true) + t.logger.Error(msg, append(t.spanFields, fields...)...) +} + +func (t spanLogger) Fatal(msg string, fields ...zapcore.Field) { + t.logToSpan("fatal", msg, append(t.additionalFields, fields...)...) + ext.Error.Set(t.span, true) + t.logger.Fatal(msg, append(t.spanFields, fields...)...) +} + +func (t spanLogger) With(fields ...zapcore.Field) Logger { + return spanLogger{ + logger: t.logger.With(fields...), + span: t.span, + spanFields: t.spanFields, + additionalFields: append(t.additionalFields, fields...), + } +} + +func (t spanLogger) For(context.Context) Logger { + return t +} + +func (t spanLogger) CallerSkip(skip int) Logger { + t.logger = t.logger.WithOptions(zap.AddCallerSkip(skip)) + return t +} + +func (t spanLogger) logToSpan(level string, msg string, fields ...zapcore.Field) { + fa := fieldAdapter(make([]log.Field, 0, 2+len(fields))) + fa = append(fa, log.String("msg", msg)) + fa = append(fa, log.String("level", level)) + for _, field := range fields { + field.AddTo(&fa) + } + t.span.LogFields(fa...) +} + +type fieldAdapter []log.Field + +func (fa *fieldAdapter) AddBool(key string, value bool) { + *fa = append(*fa, log.Bool(key, value)) +} + +func (fa *fieldAdapter) AddFloat64(key string, value float64) { + *fa = append(*fa, log.Float64(key, value)) +} + +func (fa *fieldAdapter) AddFloat32(key string, value float32) { + *fa = append(*fa, log.Float64(key, float64(value))) +} + +func (fa *fieldAdapter) AddInt(key string, value int) { + *fa = append(*fa, log.Int(key, value)) +} + +func (fa *fieldAdapter) AddInt64(key string, value int64) { + *fa = append(*fa, log.Int64(key, value)) +} + +func (fa *fieldAdapter) AddInt32(key string, value int32) { + *fa = append(*fa, log.Int64(key, int64(value))) +} + +func (fa *fieldAdapter) AddInt16(key string, value int16) { + *fa = append(*fa, log.Int64(key, int64(value))) +} + +func (fa *fieldAdapter) AddInt8(key string, value int8) { + *fa = append(*fa, log.Int64(key, int64(value))) +} + +func (fa *fieldAdapter) AddUint(key string, value uint) { + *fa = append(*fa, log.Uint64(key, uint64(value))) +} + +func (fa *fieldAdapter) AddUint64(key string, value uint64) { + *fa = append(*fa, log.Uint64(key, value)) +} + +func (fa *fieldAdapter) AddUint32(key string, value uint32) { + *fa = append(*fa, log.Uint64(key, uint64(value))) +} + +func (fa *fieldAdapter) AddUint16(key string, value uint16) { + *fa = append(*fa, log.Uint64(key, uint64(value))) +} + +func (fa *fieldAdapter) AddUint8(key string, value uint8) { + *fa = append(*fa, log.Uint64(key, uint64(value))) +} + +func (fa *fieldAdapter) AddUintptr(key string, value uintptr) {} +func (fa *fieldAdapter) AddArray(key string, marshaler zapcore.ArrayMarshaler) error { return nil } +func (fa *fieldAdapter) AddComplex128(key string, value complex128) {} +func (fa *fieldAdapter) AddComplex64(key string, value complex64) {} +func (fa *fieldAdapter) AddObject(key string, 
value zapcore.ObjectMarshaler) error { return nil } +func (fa *fieldAdapter) AddReflected(key string, value interface{}) error { return nil } +func (fa *fieldAdapter) OpenNamespace(key string) {} + +func (fa *fieldAdapter) AddDuration(key string, value time.Duration) { + *fa = append(*fa, log.String(key, value.String())) +} + +func (fa *fieldAdapter) AddTime(key string, value time.Time) { + *fa = append(*fa, log.String(key, value.String())) +} + +func (fa *fieldAdapter) AddBinary(key string, value []byte) { + *fa = append(*fa, log.Object(key, value)) +} + +func (fa *fieldAdapter) AddByteString(key string, value []byte) { + *fa = append(*fa, log.Object(key, value)) +} + +func (fa *fieldAdapter) AddString(key, value string) { + if key != "" && value != "" { + *fa = append(*fa, log.String(key, value)) + } +} diff --git a/pkg/netx/ip.go b/pkg/netx/ip.go new file mode 100644 index 0000000..d01166c --- /dev/null +++ b/pkg/netx/ip.go @@ -0,0 +1,41 @@ +package netx + +import "net" + +// InternalIp returns an internal ip. +// copy from go-zero +func InternalIp() string { + infs, err := net.Interfaces() + if err != nil { + return "" + } + + for _, inf := range infs { + if isEthDown(inf.Flags) || isLoopback(inf.Flags) { + continue + } + + addrs, err := inf.Addrs() + if err != nil { + continue + } + + for _, addr := range addrs { + if ipnet, ok := addr.(*net.IPNet); ok && !ipnet.IP.IsLoopback() { + if ipnet.IP.To4() != nil { + return ipnet.IP.String() + } + } + } + } + + return "" +} + +func isEthDown(f net.Flags) bool { + return f&net.FlagUp != net.FlagUp +} + +func isLoopback(f net.Flags) bool { + return f&net.FlagLoopback == net.FlagLoopback +} diff --git a/pkg/netx/ip_test.go b/pkg/netx/ip_test.go new file mode 100644 index 0000000..3d13ea5 --- /dev/null +++ b/pkg/netx/ip_test.go @@ -0,0 +1,11 @@ +package netx + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestInternalIp(t *testing.T) { + assert.True(t, len(InternalIp()) > 0) +} diff --git a/pkg/redisx/config.go b/pkg/redisx/config.go new file mode 100644 index 0000000..a7090aa --- /dev/null +++ b/pkg/redisx/config.go @@ -0,0 +1,131 @@ +package redisx + +import ( + "context" + "crypto/md5" + "encoding/hex" + "fmt" + "strings" + "time" + + "github.com/bsm/redislock" + "github.com/go-redis/redis/v8" + "github.com/spf13/viper" + "go.uber.org/zap" + + "github.com/xinpianchang/xservice/pkg/log" +) + +var ( + clients map[string]*redis.Client + Locker *redislock.Client + cfgMap map[*redis.Client]*redisConfig +) + +type redisConfig struct { + Name string `yaml:"name"` + Addr string `yaml:"addr"` + Password string `yaml:"password"` + ReadTimeout int `yaml:"readTimeout"` + DB int `yaml:"db"` + PoolSize int `yaml:"poolSize"` + MaxRetries int `yaml:"maxRetries"` + MinIdleConns int `yaml:"minIdleConns"` + MaxConnAge int `yaml:"maxConnAge"` + Prefix string `yaml:"prefix"` +} + +func Config(v *viper.Viper) { + var cfg []*redisConfig + + if err := v.UnmarshalKey("redis", &cfg); err != nil { + log.Fatal("read redis config", zap.Error(err)) + } + + clients = make(map[string]*redis.Client, len(cfg)) + cfgMap = make(map[*redis.Client]*redisConfig, len(cfg)) + for _, c := range cfg { + if c.MinIdleConns <= 0 { + c.MinIdleConns = 0 + } + if c.MaxRetries < 0 { + c.MaxRetries = 2 + } + if c.PoolSize < 0 { + c.PoolSize = 500 + } + if c.ReadTimeout <= 0 { + c.ReadTimeout = 5 + } + if c.MaxConnAge <= 0 { + c.MaxConnAge = 300 + } + + if c.Prefix == "" { + data := []byte(fmt.Sprint(time.Now().UnixNano())) + hash := md5.Sum(data) + prefix := 
hex.EncodeToString(hash[:4]) + c.Prefix = prefix + } + + client := redis.NewClient(&redis.Options{ + Addr: c.Addr, + Password: c.Password, + DB: c.DB, + ReadTimeout: time.Second * time.Duration(c.ReadTimeout), + MaxRetries: c.MaxRetries, + MinIdleConns: c.MinIdleConns, + MaxConnAge: time.Second * time.Duration(c.MaxConnAge), + PoolSize: c.PoolSize, + }) + r := client.Ping(context.TODO()) + if err := r.Err(); err != nil { + log.Fatal("ping", zap.String("name", c.Name), zap.Error(err)) + } + + log.Debug(fmt.Sprint("redis ", c.Name, " ping"), + zap.String("rsp", r.Val()), zap.String("addr", c.Addr), zap.Int("db", c.DB)) + + client.AddHook(&redisTracing{}) + + clients[c.Name] = client + cfgMap[client] = c + + // as main redis + if c.Name == "redis" && Locker == nil { + Locker = redislock.New(client) + } + } +} + +func GetClient(name string) *redis.Client { + return clients[name] +} + +func NewLocker(client *redis.Client) *redislock.Client { + return redislock.New(client) +} + +func Key(client *redis.Client, keys ...string) string { + if c, ok := cfgMap[client]; ok { + keys = append(append(make([]string, 0, len(keys)+1), c.Prefix), keys...) + } + return strings.Join(keys, ":") +} + +type ClientWrapper struct { + *redis.Client +} + +func Get(name string) *ClientWrapper { + return &ClientWrapper{GetClient(name)} +} + +func (t *ClientWrapper) Key(keys ...string) string { + return Key(t.Client, keys...) +} + +func (t *ClientWrapper) Obtain(ctx context.Context, key string, ttl time.Duration, opt *redislock.Options) (*redislock.Lock, error) { + locker := NewLocker(t.Client) + return locker.Obtain(ctx, key, ttl, opt) +} diff --git a/pkg/redisx/config_test.go b/pkg/redisx/config_test.go new file mode 100644 index 0000000..6db3e51 --- /dev/null +++ b/pkg/redisx/config_test.go @@ -0,0 +1,40 @@ +package redisx + +import ( + "context" + "testing" + + "github.com/spf13/viper" + "github.com/stretchr/testify/assert" +) + +func newViper() *viper.Viper { + v := viper.New() + v.SetDefault("redis", map[string]interface{}{ + "name": "redis", + "addr": "127.0.0.1:6379", + "password": "123456", + }) + + return v +} + +func Test_config(t *testing.T) { + v := newViper() + Config(v) + assert.NotNil(t, Get("redis")) + assert.NotNil(t, Locker) +} + +func Test_wrapper(t *testing.T) { + v := newViper() + Config(v) + + w := Get("redis") + r := w.Ping(context.Background()) + assert.NoError(t, r.Err()) + + ret, err := r.Result() + assert.NoError(t, err) + t.Log("result:", ret) +} diff --git a/pkg/redisx/tracing.go b/pkg/redisx/tracing.go new file mode 100644 index 0000000..a26202a --- /dev/null +++ b/pkg/redisx/tracing.go @@ -0,0 +1,46 @@ +package redisx + +import ( + "context" + + "github.com/go-redis/redis/v8" + "github.com/opentracing/opentracing-go" +) + +type redisTracing struct{} + +func (redisTracing) BeforeProcess(ctx context.Context, cmd redis.Cmder) (context.Context, error) { + if span := opentracing.SpanFromContext(ctx); span == nil { + return ctx, nil + } + span, ctx := opentracing.StartSpanFromContext(ctx, cmd.FullName()) + span.SetTag("db.system", "redis") + span.SetTag("db.statement", cmd.String()) + + return ctx, nil +} + +func (redisTracing) AfterProcess(ctx context.Context, cmd redis.Cmder) error { + if span := opentracing.SpanFromContext(ctx); span != nil { + span.Finish() + } + return nil +} + +func (redisTracing) BeforeProcessPipeline(ctx context.Context, cmds []redis.Cmder) (context.Context, error) { + if span := opentracing.SpanFromContext(ctx); span == nil { + return ctx, nil + } + span, ctx := 
opentracing.StartSpanFromContext(ctx, "pipline") + span.SetTag("db.system", "redis") + span.SetTag("db.cmd_count", len(cmds)) + + return ctx, nil +} + +func (t redisTracing) AfterProcessPipeline(ctx context.Context, cmds []redis.Cmder) error { + if span := opentracing.SpanFromContext(ctx); span != nil { + span.Finish() + } + return nil +} diff --git a/pkg/requests/requests.go b/pkg/requests/requests.go new file mode 100644 index 0000000..da3c3af --- /dev/null +++ b/pkg/requests/requests.go @@ -0,0 +1,284 @@ +package requests + +import ( + "bytes" + "context" + "crypto/tls" + "encoding/json" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/http/httputil" + "net/url" + "strings" + "time" + + "github.com/opentracing/opentracing-go" +) + +const ( + ContentTypeJSON = "application/json; charset=utf-8" + ContentTypeForm = "application/x-www-form-urlencoded; charset=utf-8" + ContentTypeText = "text/plain; charset=utf-8" + + HeaderXRequestID = "X-Request-ID" +) + +var ( + defaultClient = &http.Client{ + Timeout: time.Second * 5, + Transport: &http.Transport{ + MaxIdleConns: 100, + MaxIdleConnsPerHost: 100, + IdleConnTimeout: time.Second * 10, + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + }, + } +) + +type requests struct { + client *http.Client + ctx context.Context + method string + headers map[string]string + uri string + query url.Values + form url.Values + body io.Reader + retry RetryStrategy +} + +type Response struct { + err error + escape time.Duration + response *http.Response + cnt int +} + +func New() *requests { + return &requests{ + client: defaultClient, + ctx: context.Background(), + method: http.MethodGet, + headers: make(map[string]string, 8), + uri: "", + } +} + +func (t *requests) WithClient(client *http.Client) *requests { + t.client = client + return t +} + +func (t *requests) WithContext(ctx context.Context) *requests { + t.ctx = ctx + return t +} + +func (t *requests) Retry(retry RetryStrategy) *requests { + t.retry = retry + return t +} + +func (t *requests) Method(method string) *requests { + t.method = method + return t +} + +func (t *requests) Uri(uri string) *requests { + t.uri = uri + return t +} + +func (t *requests) Get(uri string) *requests { + return t.Method(http.MethodGet).Uri(uri) +} + +func (t *requests) Post(uri string) *requests { + return t.Method(http.MethodPost).Uri(uri) +} + +func (t *requests) Delete(uri string) *requests { + return t.Method(http.MethodDelete).Uri(uri) +} + +func (t *requests) Put(uri string) *requests { + return t.Method(http.MethodPut).Uri(uri) +} + +func (t *requests) Patch(uri string) *requests { + return t.Method(http.MethodPatch).Uri(uri) +} + +func (t *requests) Query(query url.Values) *requests { + t.query = query + return t +} + +func (t *requests) Form(form url.Values) *requests { + t.form = form + return t +} + +func (t *requests) Body(body io.Reader) *requests { + t.body = body + return t +} + +func (t *requests) Data(data []byte) *requests { + return t.Body(bytes.NewReader(data)) +} + +func (t *requests) ContentType(contentType string) *requests { + return t.AddHeader("Content-Type", contentType) +} + +func (t *requests) UserAgent(userAgent string) *requests { + return t.AddHeader("User-Agent", userAgent) +} + +func (t *requests) RequestId(requestId string) *requests { + return t.AddHeader(HeaderXRequestID, requestId) +} + +func (t *requests) AddHeader(key, value string) *requests { + t.headers[key] = value + return t +} + +func (t *requests) buildRequest() (*http.Request, error) { + u, err := url.Parse(t.uri) + 
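+	// a malformed URI aborts the build here; Do records the error on the returned Response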
if err != nil { + return nil, err + } + + q := u.Query() + for k, v := range t.query { + for _, it := range v { + q.Add(k, it) + } + } + + u.RawQuery = q.Encode() + uri := u.String() + + var body io.Reader + if len(t.form) > 0 { + body = strings.NewReader(t.form.Encode()) + } else if t.body != nil { + var buf bytes.Buffer + body = io.TeeReader(t.body, &buf) + defer func() { t.body = &buf }() + } + + req, err := http.NewRequest(t.method, uri, body) + if err != nil { + return nil, err + } + + if span := opentracing.SpanFromContext(t.ctx); span != nil { + carrier := opentracing.HTTPHeadersCarrier(req.Header) + _ = span.Tracer().Inject(span.Context(), opentracing.HTTPHeaders, carrier) + } + + if requestId := t.ctx.Value(HeaderXRequestID); requestId != nil { + req.Header.Set("x-request-id", fmt.Sprint(requestId)) + } + + for k, v := range t.headers { + req.Header.Set(k, v) + } + + return req, nil +} + +func (t *requests) Do() *Response { + start := time.Now() + r := &Response{} + for { + req, err := t.buildRequest() + r.err = err + if err != nil { + return r + } + r.cnt += 1 + rsp, err := t.client.Do(req) + if err != nil { + if t.retry != nil { + backoff := t.retry.NextBackoff() + if backoff > 0 { + // release, prepare for next call + if rsp != nil { + _, _ = ioutil.ReadAll(rsp.Body) + _ = rsp.Body.Close() + } + time.Sleep(backoff) + continue + } + } + } + + r.escape = time.Since(start) + r.response = rsp + if rsp != nil { + r.response.Request.Body = io.NopCloser(t.body) + } + r.err = err + if err != nil { + return r + } + return r + } +} + +func (t *Response) Err() error { + return t.err +} + +func (t *Response) Dump(body bool) map[string]interface{} { + dump := make(map[string]interface{}) + dump["escape"] = t.escape.Milliseconds() + dump["cnt"] = t.cnt + + if t.err != nil { + dump["error"] = t.err + } + + if t.response != nil && t.response.Request != nil { + if b, err := httputil.DumpRequest(t.response.Request, body); err == nil { + dump["request"] = string(b) + } + } + + if t.response != nil { + if b, err := httputil.DumpResponse(t.response, body); err == nil { + dump["response"] = string(b) + } + } + + return dump +} + +func (t *Response) StatusCode() int { + return t.response.StatusCode +} + +func (t *Response) Close() { + if t.response != nil { + _, _ = ioutil.ReadAll(t.response.Body) + _ = t.response.Body.Close() + } +} + +func (t *Response) RawResponse() *http.Response { + return t.response +} + +func (t *Response) JSON(obj interface{}) error { + b, err := io.ReadAll(t.response.Body) + if err != nil { + return err + } + return json.Unmarshal(b, obj) +} diff --git a/pkg/requests/retry.go b/pkg/requests/retry.go new file mode 100644 index 0000000..440f3f2 --- /dev/null +++ b/pkg/requests/retry.go @@ -0,0 +1,73 @@ +package requests + +import "time" + +// RetryStrategy +type RetryStrategy interface { + // NextBackoff returns the next backoff duration. 
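+	// A non-positive value tells the caller to stop retrying: Do (requests.go) only
+	// sleeps and re-issues the request while NextBackoff reports a duration > 0.
+	//
+	// Illustrative call pattern (assumed, composed from the helpers in this package):
+	//   rsp := New().Get("https://example.com/api").Retry(LimitLinearRetry(200*time.Millisecond, 3)).Do()
+	//   defer rsp.Close()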
+ NextBackoff() time.Duration +} + +type linearBackoff time.Duration + +// LinearBackoff allows retries regularly with intervals +func LinearBackoff(backoff time.Duration) RetryStrategy { + return linearBackoff(backoff) +} + +func (r linearBackoff) NextBackoff() time.Duration { + return time.Duration(r) +} + +type limitedRetry struct { + s RetryStrategy + + cnt, max int +} + +// LimitRetry limits the number of retries to max attempts +func LimitRetry(s RetryStrategy, max int) RetryStrategy { + return &limitedRetry{s: s, max: max} +} + +// LimitLinearRetry +func LimitLinearRetry(backoff time.Duration, max int) RetryStrategy { + return LimitRetry(LinearBackoff(backoff), max) +} + +func (r *limitedRetry) NextBackoff() time.Duration { + if r.cnt >= r.max { + return 0 + } + r.cnt++ + return r.s.NextBackoff() +} + +type exponentialBackoff struct { + cnt uint + + min, max time.Duration +} + +// ExponentialBackoff strategy is an optimization strategy with a retry time of 2**n milliseconds (n means number of times). +// You can set a minimum and maximum value, the recommended minimum value is not less than 16ms. +func ExponentialBackoff(min, max time.Duration) RetryStrategy { + return &exponentialBackoff{min: min, max: max} +} + +func (r *exponentialBackoff) NextBackoff() time.Duration { + r.cnt++ + + ms := 2 << 25 + if r.cnt < 25 { + ms = 2 << r.cnt + } + + if d := time.Duration(ms) * time.Millisecond; d < r.min { + return r.min + } else if r.max != 0 && d > r.max { + return r.max + } else { + return d + } +} diff --git a/pkg/responsex/error.go b/pkg/responsex/error.go new file mode 100644 index 0000000..ad706c6 --- /dev/null +++ b/pkg/responsex/error.go @@ -0,0 +1,42 @@ +package responsex + +import ( + "bytes" + "fmt" +) + +type Error struct { + Status int + HttpStatus int + Message string + Internal error +} + +func NewError(status int, message string) *Error { + return &Error{Status: status, Message: message} +} + +func (e *Error) Error() string { + var buf bytes.Buffer + _, _ = fmt.Fprint(&buf, "status:", e.Status) + if e.HttpStatus != 0 { + _, _ = fmt.Fprint(&buf, ", httpStatus:", e.HttpStatus) + } + if e.Message != "" { + _, _ = fmt.Fprint(&buf, ", message:", e.Message) + } + if e.Internal != nil { + _, _ = fmt.Fprint(&buf, ", internal:", e.Internal) + } + return buf.String() +} + +func (e *Error) SetInternal(internal error) *Error { + e.Internal = internal + return e +} + +func (e *Error) SetHttpStatus(httpStatus int) *Error { + e.HttpStatus = httpStatus + return e +} diff --git a/pkg/responsex/response.go b/pkg/responsex/response.go new file mode 100644 index 0000000..cf54610 --- /dev/null +++ b/pkg/responsex/response.go @@ -0,0 +1,53 @@ +package responsex + +import ( + "net/http" + + "github.com/labstack/echo/v4" +) + +type Response struct { + HttpStatus int `json:"-"` + Status int `json:"status"` + Message string `json:"message"` + Data interface{} `json:"data,omitempty"` +} + +func New(status int, msg string, data interface{}) *Response { + return &Response{ + HttpStatus: http.StatusOK, + Status: status, + Message: msg, + Data: data, + } +} + +func (t *Response) SetHttpStatus(httpStatus int) *Response { + t.HttpStatus = httpStatus + return t +} + +func (t *Response) SetStatus(status int) *Response { + t.Status = status + return t +} + +func (t *Response) SetMsg(msg string) *Response { + t.Message = msg + return t +} + +func (t *Response) SetData(data interface{}) *Response { + t.Data = data + return t +} + +// R response code +func R(c echo.Context, response *Response) error { + return 
c.JSON(response.HttpStatus, response) +} + +func Data(c echo.Context, data interface{}) error { + response := New(0, "OK", data) + return c.JSON(response.HttpStatus, response) +} diff --git a/pkg/responsex/status.go b/pkg/responsex/status.go new file mode 100644 index 0000000..4756feb --- /dev/null +++ b/pkg/responsex/status.go @@ -0,0 +1,42 @@ +package responsex + +import ( + "fmt" +) + +type StatusCode int + +var statusMap = make(map[StatusCode]string) + +func Status(status StatusCode, message ...string) *Error { + var m string + if message != nil { + item := make([]interface{}, 0, len(message)) + for _, it := range message { + item = append(item, it) + } + m = fmt.Sprint(item...) + } else { + if v, ok := statusMap[status]; ok { + m = v + } else { + m = fmt.Sprintf("status:%d", status) + } + } + return NewError(int(status), m) +} + +func StatusMessage(status StatusCode, args ...interface{}) *Error { + var m string + if v, ok := statusMap[status]; ok { + m = v + } + if len(args) > 0 { + m = fmt.Sprintf(m, args...) + } + return NewError(int(status), m) +} + +func SetStatusMap(m map[StatusCode]string) { + statusMap = m +} diff --git a/pkg/signalx/shutdown_hook.go b/pkg/signalx/shutdown_hook.go new file mode 100644 index 0000000..8677364 --- /dev/null +++ b/pkg/signalx/shutdown_hook.go @@ -0,0 +1,46 @@ +package signalx + +import ( + "os" + "os/signal" + "sync" + "syscall" +) + +var ( + shutdownHooks = make([]ShutdownHook, 0, 64) + shutdownHooksMutex sync.Mutex +) + +type ShutdownHook func(os.Signal) + +func AddShutdownHook(hook ShutdownHook) { + shutdownHooksMutex.Lock() + defer shutdownHooksMutex.Unlock() + shutdownHooks = append(shutdownHooks, hook) +} + +func ShutdownListen(signals ...os.Signal) { + ch := make(chan os.Signal, 1) + if len(signals) == 0 { + signals = []os.Signal{os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT} + } + signal.Notify(ch, signals...) + sig := <-ch + var wg sync.WaitGroup + for i := len(shutdownHooks) - 1; i >= 0; i-- { + wg.Add(1) + go func(sig os.Signal, fn func(os.Signal)) { + defer wg.Done() + fn(sig) + }(sig, shutdownHooks[i]) + } + wg.Wait() +} + +// Shutdown direct shutdown without signal +func Shutdown() { + for i := len(shutdownHooks) - 1; i >= 0; i-- { + shutdownHooks[i](syscall.SIGTERM) + } +} diff --git a/pkg/stringx/camelcase.go b/pkg/stringx/camelcase.go new file mode 100644 index 0000000..47b09fd --- /dev/null +++ b/pkg/stringx/camelcase.go @@ -0,0 +1,62 @@ +package stringx + +func LowerCamelCase(s string) string { + return camelCaseInitCase(s, true) +} + +func CamelCase(s string) string { + return camelCaseInitCase(s, false) +} + +// CamelCase copy & modify from protobuf +func camelCaseInitCase(s string, lowerFirst bool) string { + if s == "" { + return "" + } + t := make([]byte, 0, len(s)) + i := 0 + if s[0] == '_' { + // Need a capital letter; drop the '_'. + if lowerFirst { + t = append(t, 'x') + } else { + t = append(t, 'X') + } + i++ + } + // Invariant: if the next letter is lower case, it must be converted + // to upper case. + // That is, we process a word at a time, where words are marked by _ or + // upper case letter. Digits are treated as words. + for ; i < len(s); i++ { + c := s[i] + if c == '_' && i+1 < len(s) && isASCIILower(s[i+1]) { + continue // Skip the underscore in s. + } + if isASCIIDigit(c) { + t = append(t, c) + continue + } + // Assume we have a letter now - if not, it's a bogus identifier. + // The next word is a sequence of characters that must start upper case. 
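+		// e.g. the 'i' of "name_if" is upper-cased here, so CamelCase("name_if") yields "NameIf" (see camelcase_test.go)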
+ if isASCIILower(c) && !(i == 0 && lowerFirst) { + c ^= ' ' // Make it a capital letter. + } + t = append(t, c) // Guaranteed not lower case. + // Accept lower case sequence that follows. + for i+1 < len(s) && isASCIILower(s[i+1]) { + i++ + t = append(t, s[i]) + } + } + return string(t) +} + +func isASCIILower(c byte) bool { + return 'a' <= c && c <= 'z' +} + +// Is c an ASCII digit? +func isASCIIDigit(c byte) bool { + return '0' <= c && c <= '9' +} diff --git a/pkg/stringx/camelcase_test.go b/pkg/stringx/camelcase_test.go new file mode 100644 index 0000000..a82621b --- /dev/null +++ b/pkg/stringx/camelcase_test.go @@ -0,0 +1,43 @@ +package stringx + +import ( + "testing" +) + +func TestCamelCase(t *testing.T) { + tests := []struct { + args string + want string + }{ + {"name", "Name"}, + {"name1", "Name1"}, + {"name_if", "NameIf"}, + {"_name_if", "XNameIf"}, + } + for _, tt := range tests { + t.Run(tt.args, func(t *testing.T) { + if got := CamelCase(tt.args); got != tt.want { + t.Errorf("CamelCase() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestLowerCamelCase(t *testing.T) { + tests := []struct { + args string + want string + }{ + {"name", "name"}, + {"name1", "name1"}, + {"name_if", "nameIf"}, + {"_name_if", "xNameIf"}, + } + for _, tt := range tests { + t.Run(tt.args, func(t *testing.T) { + if got := LowerCamelCase(tt.args); got != tt.want { + t.Errorf("CamelCase() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/stringx/hashid.go b/pkg/stringx/hashid.go new file mode 100644 index 0000000..cfbc0dc --- /dev/null +++ b/pkg/stringx/hashid.go @@ -0,0 +1,36 @@ +package stringx + +import "github.com/speps/go-hashids/v2" + +type simpleHashId struct { + hid *hashids.HashID +} + +func NewSimpleHashId(salt string, minLength int, alphabet ...string) *simpleHashId { + hd := hashids.NewData() + hd.Salt = salt + hd.MinLength = minLength + if len(alphabet) > 0 { + hd.Alphabet = alphabet[0] + } + hid, err := hashids.NewWithData(hd) + if err != nil { + panic(err) + } + + return &simpleHashId{ + hid: hid, + } +} + +func (t *simpleHashId) EncodeInt64(id int64) (string, error) { + return t.hid.EncodeInt64([]int64{id}) +} + +func (t *simpleHashId) DecodeInt64(hash string) (int64, error) { + r, err := t.hid.DecodeInt64WithError(hash) + if err != nil { + return 0, err + } + return r[0], nil +} diff --git a/pkg/stringx/hashid_test.go b/pkg/stringx/hashid_test.go new file mode 100644 index 0000000..fbe8294 --- /dev/null +++ b/pkg/stringx/hashid_test.go @@ -0,0 +1,63 @@ +package stringx + +import ( + "math" + "testing" +) + +func Test_simpleHashId(t *testing.T) { + tests := []struct { + name string + instance *simpleHashId + input int64 + wantEncodeErr bool + wantDecodeErr bool + }{ + { + name: "simple hashid 1", + instance: NewSimpleHashId("123", 8), + input: 1, + wantEncodeErr: false, + wantDecodeErr: false, + }, + { + name: "simple hashid max", + instance: NewSimpleHashId("123", 8), + input: math.MaxInt64, + wantEncodeErr: false, + wantDecodeErr: false, + }, + { + name: "simple hashid min", + instance: NewSimpleHashId("123", 8), + input: math.MinInt64, + wantEncodeErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.instance.EncodeInt64(tt.input) + if (err != nil) != tt.wantEncodeErr { + t.Errorf("simpleHashId.EncodeInt64() error = %v, wantErr %v", err, tt.wantEncodeErr) + return + } + + if tt.wantEncodeErr { + return + } + + t.Logf("%s, input: %v, out: %v", tt.name, tt.input, got) + + out, err := tt.instance.DecodeInt64(got) + if 
(err != nil) != tt.wantDecodeErr { + t.Errorf("simpleHashId.DecodeInt64() error = %v, wantErr %v", err, tt.wantEncodeErr) + return + } + + if out != tt.input { + t.Errorf("simpleHashId.DecodeInt64() = %v, want %v", out, tt.input) + return + } + }) + } +} diff --git a/pkg/stringx/strings.go b/pkg/stringx/strings.go new file mode 100644 index 0000000..0070210 --- /dev/null +++ b/pkg/stringx/strings.go @@ -0,0 +1,107 @@ +package stringx + +import "errors" + +var ( + // ErrInvalidStartPosition is an error that indicates the start position is invalid. + ErrInvalidStartPosition = errors.New("start position is invalid") + // ErrInvalidStopPosition is an error that indicates the stop position is invalid. + ErrInvalidStopPosition = errors.New("stop position is invalid") +) + +// Contains checks if str is in list. +func Contains(list []string, str string) bool { + for _, each := range list { + if each == str { + return true + } + } + + return false +} + +// Filter filters chars from s with given filter function. +func Filter(s string, filter func(r rune) bool) string { + var n int + chars := []rune(s) + for i, x := range chars { + if n < i { + chars[n] = x + } + if !filter(x) { + n++ + } + } + + return string(chars[:n]) +} + +// HasEmpty checks if there are empty strings in args. +func HasEmpty(args ...string) bool { + for _, arg := range args { + if len(arg) == 0 { + return true + } + } + + return false +} + +// NotEmpty checks if all strings are not empty in args. +func NotEmpty(args ...string) bool { + return !HasEmpty(args...) +} + +// Remove removes given strs from strings. +func Remove(strings []string, strs ...string) []string { + out := append([]string(nil), strings...) + + for _, str := range strs { + var n int + for _, v := range out { + if v != str { + out[n] = v + n++ + } + } + out = out[:n] + } + + return out +} + +// Reverse reverses s. +func Reverse(s string) string { + runes := []rune(s) + + for from, to := 0, len(runes)-1; from < to; from, to = from+1, to-1 { + runes[from], runes[to] = runes[to], runes[from] + } + + return string(runes) +} + +// Substr returns runes between start and stop [start, stop) regardless of the chars are ascii or utf8. +func Substr(str string, start, stop int) (string, error) { + rs := []rune(str) + length := len(rs) + + if start < 0 || start > length { + return "", ErrInvalidStartPosition + } + + if stop < 0 || stop > length { + return "", ErrInvalidStopPosition + } + + return string(rs[start:stop]), nil +} + +// TakeOne returns valid string if not empty or later one. 
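+// Per the tests below, TakeOne("1", "2") returns "1" while TakeOne("", "1") returns "1".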
+func TakeOne(valid, or string) string { + if len(valid) > 0 { + return valid + } + + return or +} diff --git a/pkg/stringx/strings_test.go b/pkg/stringx/strings_test.go new file mode 100644 index 0000000..1d990b4 --- /dev/null +++ b/pkg/stringx/strings_test.go @@ -0,0 +1,264 @@ +package stringx + +import ( + "path" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestContains(t *testing.T) { + type args struct { + list []string + str string + } + tests := []struct { + args args + want bool + }{ + { + args{[]string{"a", "b"}, "a"}, + true, + }, + { + args{[]string{"a", "b"}, "c"}, + false, + }, + { + args{[]string{"a", "b"}, "aa"}, + false, + }, + } + for _, tt := range tests { + t.Run(path.Join(tt.args.list...), func(t *testing.T) { + if got := Contains(tt.args.list, tt.args.str); got != tt.want { + t.Errorf("Contains() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestFilter(t *testing.T) { + cases := []struct { + input string + ignores []rune + expect string + }{ + {``, nil, ``}, + {`abcd`, nil, `abcd`}, + {`ab,cd,ef`, []rune{','}, `abcdef`}, + {`ab, cd,ef`, []rune{',', ' '}, `abcdef`}, + {`ab, cd, ef`, []rune{',', ' '}, `abcdef`}, + {`ab, cd, ef, `, []rune{',', ' '}, `abcdef`}, + } + + for _, each := range cases { + t.Run(each.input, func(t *testing.T) { + actual := Filter(each.input, func(r rune) bool { + for _, x := range each.ignores { + if x == r { + return true + } + } + return false + }) + assert.Equal(t, each.expect, actual) + }) + } +} + +func TestHasEmpty(t *testing.T) { + cases := []struct { + args []string + expect bool + }{ + { + args: []string{"a", "b", "c"}, + expect: true, + }, + { + args: []string{"a", "", "c"}, + expect: false, + }, + { + args: []string{"a"}, + expect: true, + }, + { + args: []string{""}, + expect: false, + }, + { + args: []string{}, + expect: true, + }, + } + + for _, each := range cases { + t.Run(path.Join(each.args...), func(t *testing.T) { + assert.Equal(t, each.expect, NotEmpty(each.args...)) + }) + } +} + +func TestNotEmpty(t *testing.T) { + type args struct { + args []string + } + tests := []struct { + name string + args args + want bool + }{ + // TODO: Add test cases. 
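+		// A few illustrative cases filling in the TODO above (assumed examples, not part of the original patch):
+		{name: "all non-empty", args: args{args: []string{"a", "b"}}, want: true},
+		{name: "contains empty", args: args{args: []string{"a", ""}}, want: false},
+		{name: "no args", args: args{args: []string{}}, want: true},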
+ } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := NotEmpty(tt.args.args...); got != tt.want { + t.Errorf("NotEmpty() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestRemove(t *testing.T) { + cases := []struct { + input []string + remove []string + expect []string + }{ + { + input: []string{"a", "b", "a", "c"}, + remove: []string{"a", "b"}, + expect: []string{"c"}, + }, + { + input: []string{"b", "c"}, + remove: []string{"a"}, + expect: []string{"b", "c"}, + }, + { + input: []string{"b", "a", "c"}, + remove: []string{"a"}, + expect: []string{"b", "c"}, + }, + { + input: []string{}, + remove: []string{"a"}, + expect: []string{}, + }, + } + + for _, each := range cases { + t.Run(path.Join(each.input...), func(t *testing.T) { + assert.ElementsMatch(t, each.expect, Remove(each.input, each.remove...)) + }) + } +} + +func TestReverse(t *testing.T) { + cases := []struct { + input string + expect string + }{ + { + input: "abcd", + expect: "dcba", + }, + { + input: "", + expect: "", + }, + { + input: "我爱中国", + expect: "国中爱我", + }, + } + + for _, each := range cases { + t.Run(each.input, func(t *testing.T) { + assert.Equal(t, each.expect, Reverse(each.input)) + }) + } +} + +func TestSubstr(t *testing.T) { + cases := []struct { + input string + start int + stop int + err error + expect string + }{ + { + input: "abcdefg", + start: 1, + stop: 4, + expect: "bcd", + }, + { + input: "我爱中国3000遍,even more", + start: 1, + stop: 9, + expect: "爱中国3000遍", + }, + { + input: "abcdefg", + start: -1, + stop: 4, + err: ErrInvalidStartPosition, + expect: "", + }, + { + input: "abcdefg", + start: 100, + stop: 4, + err: ErrInvalidStartPosition, + expect: "", + }, + { + input: "abcdefg", + start: 1, + stop: -1, + err: ErrInvalidStopPosition, + expect: "", + }, + { + input: "abcdefg", + start: 1, + stop: 100, + err: ErrInvalidStopPosition, + expect: "", + }, + } + + for _, each := range cases { + t.Run(each.input, func(t *testing.T) { + val, err := Substr(each.input, each.start, each.stop) + assert.Equal(t, each.err, err) + if err == nil { + assert.Equal(t, each.expect, val) + } + }) + } +} + +func TestTakeOne(t *testing.T) { + cases := []struct { + valid string + or string + expect string + }{ + {"", "", ""}, + {"", "1", "1"}, + {"1", "", "1"}, + {"1", "2", "1"}, + } + + for _, each := range cases { + t.Run(each.valid, func(t *testing.T) { + actual := TakeOne(each.valid, each.or) + assert.Equal(t, each.expect, actual) + }) + } +} diff --git a/pkg/swaggerui/swaggerui.go b/pkg/swaggerui/swaggerui.go new file mode 100644 index 0000000..db66f64 --- /dev/null +++ b/pkg/swaggerui/swaggerui.go @@ -0,0 +1,65 @@ +package swaggerui + +import ( + "bytes" + "embed" + "html/template" + "net/http" + "path" + "strings" + + "github.com/labstack/echo/v4" +) + +var ( + //go:embed ui.html + ui string +) + +const ( + base = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@3.51.0" +) + +func Serve(prefix string, fs embed.FS) echo.MiddlewareFunc { + + tp := template.Must(template.New("index").Parse(ui)) + var index bytes.Buffer + tp.Execute(&index, map[string]interface{}{ + "base": base, + }) + + entries, _ := fs.ReadDir(".") + files := make([]map[string]string, 0, 32) + for _, it := range entries { + name := it.Name() + if !it.IsDir() && strings.HasSuffix(name, ".json") { + files = append(files, map[string]string{ + "name": name, + "url": path.Join(prefix, name), + }) + } + } + + h := http.StripPrefix(prefix, http.FileServer(http.FS(fs))) + + return func(next echo.HandlerFunc) echo.HandlerFunc { + 
return func(c echo.Context) error { + requestPath := strings.TrimPrefix(c.Path(), prefix) + switch requestPath { + case "", "index.html": + tp.Execute(c.Response().Writer, map[string]interface{}{ + "base": base, + "files": files, + }) + return nil + default: + if strings.HasSuffix(requestPath, ".json") { + h.ServeHTTP(c.Response(), c.Request()) + return nil + } + } + + return c.NoContent(http.StatusNotFound) + } + } +} diff --git a/pkg/swaggerui/ui.html b/pkg/swaggerui/ui.html new file mode 100644 index 0000000..04af494 --- /dev/null +++ b/pkg/swaggerui/ui.html @@ -0,0 +1,58 @@ + + + + + Swagger UI + + + + + + + +
+ + + + + + diff --git a/pkg/tracingx/config.go b/pkg/tracingx/config.go new file mode 100644 index 0000000..1a80c62 --- /dev/null +++ b/pkg/tracingx/config.go @@ -0,0 +1,77 @@ +package tracingx + +import ( + "fmt" + "os" + "strings" + + "github.com/opentracing/opentracing-go" + "github.com/spf13/viper" + "github.com/uber/jaeger-client-go/config" + "github.com/uber/jaeger-client-go/rpcmetrics" + "github.com/uber/jaeger-lib/metrics" + "github.com/uber/jaeger-lib/metrics/prometheus" + "go.uber.org/zap" + + "github.com/xinpianchang/xservice/core" + "github.com/xinpianchang/xservice/pkg/log" + "github.com/xinpianchang/xservice/pkg/signalx" +) + +func Config(v *viper.Viper) { + serviceName := os.Getenv(core.EnvServiceName) + + for k, v := range v.GetStringMapString("jaeger") { + os.Setenv(fmt.Sprintf("JAEGER_%s", strings.ToUpper(k)), v) + } + + os.Setenv("JAEGER_SERVICE_NAME", serviceName) + + cfg, err := config.FromEnv() + if err != nil { + panic(err) + } + + if cfg.Sampler.Type == "" { + cfg.Sampler.Type = "const" + } + if cfg.Sampler.Param == 0 { + cfg.Sampler.Param = 1 + } + + var metricsFactory metrics.Factory + metricsFactory = prometheus.New().Namespace(metrics.NSOptions{Name: serviceName, Tags: nil}) + metricsFactory = metricsFactory.Namespace(metrics.NSOptions{Name: cfg.ServiceName, Tags: nil}) + + tracer, closer, err := cfg.NewTracer( + config.Logger(&jaegerLoggerAdapter{}), + config.Metrics(metricsFactory), + config.Observer(rpcmetrics.NewObserver(metricsFactory, rpcmetrics.DefaultNameNormalizer)), + ) + + if err != nil { + log.Fatal("create tracer", zap.Error(err)) + } + + signalx.AddShutdownHook(func(os.Signal) { + _ = closer.Close() + log.Info("shutdown tracer") + }) + + opentracing.SetGlobalTracer(tracer) +} + +type jaegerLoggerAdapter struct{} + +func (l jaegerLoggerAdapter) Error(msg string) { + log.Error(msg) +} + +func (l jaegerLoggerAdapter) Infof(msg string, args ...interface{}) { + log.Info(fmt.Sprintf(msg, args...)) +} + +func (l jaegerLoggerAdapter) Debugf(msg string, args ...interface{}) { + // ignore debug + // log.Debug(fmt.Sprintf(msg, args...)) +} diff --git a/pkg/tracingx/util.go b/pkg/tracingx/util.go new file mode 100644 index 0000000..18ed666 --- /dev/null +++ b/pkg/tracingx/util.go @@ -0,0 +1,18 @@ +package tracingx + +import ( + "context" + + "github.com/opentracing/opentracing-go" + "github.com/uber/jaeger-client-go" +) + +func GetTraceID(ctx context.Context) string { + if span := opentracing.SpanFromContext(ctx); span != nil { + if sc, ok := span.Context().(jaeger.SpanContext); ok { + return sc.TraceID().String() + } + } + + return "" +} diff --git a/tools/xservice/generator/status_map_generator.go b/tools/xservice/generator/status_map_generator.go new file mode 100644 index 0000000..ff663a0 --- /dev/null +++ b/tools/xservice/generator/status_map_generator.go @@ -0,0 +1,76 @@ +package generator + +import ( + "fmt" + "io/ioutil" + "path/filepath" + "sort" + + "github.com/dave/jennifer/jen" + "github.com/spf13/cobra" + "gopkg.in/yaml.v2" +) + +var ( + StatusMapGeneratorCmd = &cobra.Command{ + Use: "statusmap status.yaml", + Short: "generate status from yaml status mapping file", + Args: cobra.ExactArgs(1), + DisableFlagsInUseLine: true, + Run: func(cmd *cobra.Command, args []string) { + generateStatusMap(args[0]) + }, + } +) + +func generateStatusMap(file string) { + b, err := ioutil.ReadFile(file) + if err != nil { + panic(err) + } + var statusMap map[int]string + err = yaml.Unmarshal(b, &statusMap) + if err != nil { + panic(err) + } + + var codes []int + for 
k := range statusMap { + codes = append(codes, k) + } + sort.Ints(codes) + + f := jen.NewFile("dto") + f.HeaderComment("auto generated file DO NOT EDIT") + f.HeaderComment(fmt.Sprintf("generate from file: %s ", file)) + dict := jen.Dict{} + consts := make([]jen.Code, 0, len(codes)) + for i, code := range codes { + codeN := fmt.Sprint(code) + if code < 0 { + codeN = fmt.Sprint("_", -code) + } + + name := fmt.Sprint("StatusCode", codeN) + if i == 0 { + consts = append(consts, jen.Id(name).Qual("github.com/xinpianchang/xservice/pkg/responsex", "StatusCode").Op("=").Lit(code)) + } else { + consts = append(consts, jen.Id(name).Op("=").Lit(code).Comment(statusMap[code])) + } + dict[jen.Id(name)] = jen.Lit(statusMap[code]) + } + f.Add(jen.Const().Defs(consts...)) + f.Func().Id("init").Params().Block( + jen.Comment("init status map"), + jen.Id("responsex.SetStatusMap").Call(jen.Map(jen.Id("responsex.StatusCode")).String().Values(dict)), + ) + fileabs, _ := filepath.Abs(file) + target := filepath.Join(filepath.Dir(fileabs), "d_status_map.go") + err = ioutil.WriteFile(target, []byte(f.GoString()), 0600) + if err != nil { + panic(err) + } + + target, _ = filepath.Abs(target) + fmt.Println("generage statusmap:", target) +} diff --git a/tools/xservice/gogen/assets/.gitignore b/tools/xservice/gogen/assets/.gitignore new file mode 100644 index 0000000..a1d35d9 --- /dev/null +++ b/tools/xservice/gogen/assets/.gitignore @@ -0,0 +1 @@ +!*.* diff --git a/tools/xservice/gogen/assets/assets.go b/tools/xservice/gogen/assets/assets.go new file mode 100644 index 0000000..8d9e254 --- /dev/null +++ b/tools/xservice/gogen/assets/assets.go @@ -0,0 +1,8 @@ +package assets + +import "embed" + +var ( + //go:embed project/*.* project project/internal/model/*.* project/internal/dto/*.* + ProjectFS embed.FS +) diff --git a/tools/xservice/gogen/assets/project/.editorconfig b/tools/xservice/gogen/assets/project/.editorconfig new file mode 100644 index 0000000..3e67235 --- /dev/null +++ b/tools/xservice/gogen/assets/project/.editorconfig @@ -0,0 +1,17 @@ +# EditorConfig is awesome: https://EditorConfig.org + +# top-most EditorConfig file +root = true + +[*] +indent_style = space +indent_size = 2 +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.go] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false diff --git a/tools/xservice/gogen/assets/project/.gitignore b/tools/xservice/gogen/assets/project/.gitignore new file mode 100644 index 0000000..323e8a8 --- /dev/null +++ b/tools/xservice/gogen/assets/project/.gitignore @@ -0,0 +1,36 @@ +*.iml +*.o +*.a +*.so +_obj +_test + +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof + +.DS_Store +.idea/ +coverage.out + +/dist +/config.yaml +logs/*.log +logs/*.gz +__debug_bin +.idea +.venv +go.sum + +!.gitkeep diff --git a/tools/xservice/gogen/assets/project/.golangci.yml b/tools/xservice/gogen/assets/project/.golangci.yml new file mode 100644 index 0000000..e6b7f73 --- /dev/null +++ b/tools/xservice/gogen/assets/project/.golangci.yml @@ -0,0 +1,30 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0.8 + gocyclo: + min-complexity: 10 + maligned: + suggest-new: true + dupl: + threshold: 100 + goconst: + min-len: 2 + min-occurrences: 2 + misspell: + locale: US + lll: + line-length: 140 + gocritic: + enabled-tags: + - performance + - style + - experimental + disabled-checks: + - wrapperFunc + +run: + skip-dirs: + - test + - tmp diff 
--git a/tools/xservice/gogen/assets/project/.vscode/settings.json b/tools/xservice/gogen/assets/project/.vscode/settings.json new file mode 100644 index 0000000..d8e17c6 --- /dev/null +++ b/tools/xservice/gogen/assets/project/.vscode/settings.json @@ -0,0 +1,34 @@ +{ + "cSpell.words": [ + "Cmder", + "Commentf", + "Repanic", + "Syncer", + "clientv", + "cmux", + "cronx", + "echomd", + "echox", + "gogen", + "gops", + "gorm", + "gormopentracing", + "gormx", + "grpc", + "grpcx", + "kafkax", + "mvccpb", + "netx", + "redisx", + "responsex", + "sarama", + "signalx", + "speps", + "stretchr", + "stringx", + "swaggerui", + "tracingx", + "xinpianchang", + "xservice" + ] +} diff --git a/tools/xservice/gogen/assets/project/Dockerfile.tpl b/tools/xservice/gogen/assets/project/Dockerfile.tpl new file mode 100644 index 0000000..fec3902 --- /dev/null +++ b/tools/xservice/gogen/assets/project/Dockerfile.tpl @@ -0,0 +1,15 @@ +FROM golang:alpine AS builder +LABEL stage=gobuilder +WORKDIR /build/xservice +COPY . . +ENV CGO_ENABLED 0 +ENV GOOS linux +ENV GOPROXY https://goproxy.cn,direct +RUN go run mage.go -v build + +FROM alpine +RUN apk update --no-cache && apk add --no-cache ca-certificates tzdata +ENV TZ Asia/Shanghai +WORKDIR /app +COPY --from=builder /build/xservice/{{.Name}}/dist/{{.Name}}-linux-amd64/* /app +CMD ["./{{.Name}}"] diff --git a/tools/xservice/gogen/assets/project/README.md.tpl b/tools/xservice/gogen/assets/project/README.md.tpl new file mode 100644 index 0000000..c136244 --- /dev/null +++ b/tools/xservice/gogen/assets/project/README.md.tpl @@ -0,0 +1,56 @@ +# {{.Name}} + +Robust service powered by `xservice` + +## Prepare + +install protobuf & generator plugins + +```bash +# install specific version (recommended) +go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.26.0 +go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@1.1.0 +go install github.com/envoyproxy/protoc-gen-validate@v0.6.1 + +go install \ + github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-grpc-gateway@v2.5.0 \ + github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2@v2.5.0 + + + +# install latest (not well tested) +go install google.golang.org/protobuf/cmd/protoc-gen-go@latest +go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest +go install github.com/envoyproxy/protoc-gen-validate@latest + +go install \ + github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-grpc-gateway@latest \ + github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2@latest +``` + +install buf + +```bash +go install github.com/bufbuild/buf/cmd/buf/main.go@latest +``` + +## Initialize project + +```bash +buf beta mod update && buf generate +go mod tidy && go mod download +``` + +## Run + +```bash +go run cmd/main.go +``` + +## Resource + +- gRPC generate tool/buf https://buf.build/ +- gRPC validate https://github.com/envoyproxy/protoc-gen-validate +- RESTful validate https://github.com/go-playground/validator +- gRPC-Gateway https://grpc-ecosystem.github.io/grpc-gateway/ +- jaeger https://www.jaegertracing.io/ diff --git a/tools/xservice/gogen/assets/project/buf.gen.yaml b/tools/xservice/gogen/assets/project/buf.gen.yaml new file mode 100644 index 0000000..ee10f73 --- /dev/null +++ b/tools/xservice/gogen/assets/project/buf.gen.yaml @@ -0,0 +1,26 @@ +version: v1beta1 +plugins: + - name: go + out: . + opt: paths=source_relative + - name: go-grpc + out: . + opt: + - paths=source_relative + - require_unimplemented_servers=false + - name: grpc-gateway + out: . 
+ opt: + - paths=source_relative + - allow_repeated_fields_in_body=true + - name: openapiv2 + out: . + opt: + - allow_repeated_fields_in_body=true + - logtostderr=true + - use_go_templates=true + - name: validate + out: . + opt: + - paths=source_relative + - lang=go diff --git a/tools/xservice/gogen/assets/project/buf.yaml.tpl b/tools/xservice/gogen/assets/project/buf.yaml.tpl new file mode 100644 index 0000000..16c3238 --- /dev/null +++ b/tools/xservice/gogen/assets/project/buf.yaml.tpl @@ -0,0 +1,14 @@ +version: v1beta1 +name: buf.build/{{.Repo}}/{{.Name}} +deps: + - buf.build/beta/googleapis + - buf.build/beta/protoc-gen-validate +build: + roots: + - . +lint: + use: + - DEFAULT +breaking: + use: + - FILE diff --git a/tools/xservice/gogen/assets/project/buf/v1/helloworld.proto.tpl b/tools/xservice/gogen/assets/project/buf/v1/helloworld.proto.tpl new file mode 100644 index 0000000..8a00a2d --- /dev/null +++ b/tools/xservice/gogen/assets/project/buf/v1/helloworld.proto.tpl @@ -0,0 +1,32 @@ +syntax = "proto3"; + +package buf.v1; +option go_package = "{{.Module}}/buf/v1"; + +import "google/api/httpbody.proto"; +import "google/api/annotations.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/struct.proto"; +import "validate/validate.proto"; + +// HelloWorldService +service HelloWorldService { + // Hello + // + // {{`{{import "buf/v1/tables.md"}}`}} + rpc Hello(HelloRequest) returns (HelloResponse) { + option(google.api.http) = { + post: "/rpc/v1/hello" + body: "*" + }; + } +} + +message HelloRequest { + string name = 1 [(validate.rules).string = {min_len: 1, max_len: 32}]; +} + +message HelloResponse { + string message = 1; +} diff --git a/tools/xservice/gogen/assets/project/buf/v1/swagger.go b/tools/xservice/gogen/assets/project/buf/v1/swagger.go new file mode 100644 index 0000000..e71b97a --- /dev/null +++ b/tools/xservice/gogen/assets/project/buf/v1/swagger.go @@ -0,0 +1,8 @@ +package v1 + +import "embed" + +var ( + //go:embed *.swagger.json + SwaggerFS embed.FS +) diff --git a/tools/xservice/gogen/assets/project/buf/v1/tables.md b/tools/xservice/gogen/assets/project/buf/v1/tables.md new file mode 100644 index 0000000..3430b01 --- /dev/null +++ b/tools/xservice/gogen/assets/project/buf/v1/tables.md @@ -0,0 +1,11 @@ +## {{.RequestType.Name}} + +| Field ID | Name | Type | Description | +| ----------- | --------- | --------------------------------------------------------- | ---------------------------- | {{range .RequestType.Fields}} +| {{.Number}} | {{.Name}} | {{if eq .Label.String "LABEL_REPEATED"}}[]{{end}}{{.Type}} | {{fieldcomments .Message .}} | {{end}} + +## {{.ResponseType.Name}} + +| Field ID | Name | Type | Description | +| ----------- | --------- | ---------------------------------------------------------- | ---------------------------- | {{range .ResponseType.Fields}} +| {{.Number}} | {{.Name}} | {{if eq .Label.String "LABEL_REPEATED"}}[]{{end}}{{.Type}} | {{fieldcomments .Message .}} | {{end}} diff --git a/tools/xservice/gogen/assets/project/cmd/main.go.tpl b/tools/xservice/gogen/assets/project/cmd/main.go.tpl new file mode 100644 index 0000000..2de1260 --- /dev/null +++ b/tools/xservice/gogen/assets/project/cmd/main.go.tpl @@ -0,0 +1,57 @@ +package main + +import ( + "flag" + "fmt" + "net/http" + "runtime" + + "github.com/labstack/echo/v4" + "github.com/xinpianchang/xservice/core/xservice" + "github.com/xinpianchang/xservice/pkg/swaggerui" + + pb "{{.Module}}/buf/v1" + "{{.Module}}/service" + "{{.Module}}/version" +) + 
+var ( + showVersion = flag.Bool("version", false, "print version") +) + +func main() { + flag.Parse() + if *showVersion { + fmt.Printf("%s version:%s, build:%s, runtime:%s\n", + version.Name, version.Version, version.Build, runtime.Version()) + return + } + + srv := xservice.New( + xservice.Name(version.Name), + xservice.Version(version.Version), + xservice.Description(version.Description), + ) + + server := srv.Server() + + // swagger doc + server.Echo().Group("/swagger/*", swaggerui.Serve("/swagger/", pb.SwaggerFS)) + + // register grpc service + server.GrpcRegister(&pb.HelloWorldService_ServiceDesc, &service.HelloWorldServiceServerImpl{}, pb.RegisterHelloWorldServiceHandler) + + // routes config + routes(server.Echo()) + + if err := server.Serve(); err != nil { + panic(err) + } +} + +// routes for RESTful api +func routes(e *echo.Echo) { + e.GET("/", func(c echo.Context) error { + return c.String(http.StatusOK, "Hello World!") + }) +} diff --git a/tools/xservice/gogen/assets/project/config-example.yaml b/tools/xservice/gogen/assets/project/config-example.yaml new file mode 100644 index 0000000..0f352d5 --- /dev/null +++ b/tools/xservice/gogen/assets/project/config-example.yaml @@ -0,0 +1,53 @@ +http: + address: 0.0.0.0:5000 + # advice address for service discover. eg load balancer, or server external network address + # advice_address: 192.168.8.20:5000 + +# jaeger configuration +# env configuration is ok, refer: https://www.jaegertracing.io/docs/1.23/client-features +# config following here will overwrite env configuration +jaeger: + agent_host: 127.0.0.1 + agent_port: 6831 + +# redis config +# optional +# redis: +# - name: redis +# addr: 127.0.0.1:6379 +# password: "123456" +# db: 0 +# prefix: app + +# database +# optional +# database: +# - name: mall_v2 +# uri: "root:123456@(192.168.4.200:3306)/hello?charset=utf8mb4&parseTime=True&loc=Local" +# maxConn: 100 +# maxIdleConn: 10 +# connMaxLifetimeInMillisecond: 300000 + +# kafka config +# optional +# mq: +# - name: default +# version: 1.1.0 +# broker: +# - 127.0.0.1:9092 + +# log config +# optional, default print to stdout +log: + # zap log level: info, debug, warn, error + level: debug + # format: console, json + format: console + # enable log caller, default true + caller: true + # stdout print, default true + stdout: true + file: logs/mall-v2-server.log + # enable log file rotate, size in MB + maxSize: 1024 + maxDays: 3 diff --git a/tools/xservice/gogen/assets/project/ctl.sh.tpl b/tools/xservice/gogen/assets/project/ctl.sh.tpl new file mode 100644 index 0000000..23a6f35 --- /dev/null +++ b/tools/xservice/gogen/assets/project/ctl.sh.tpl @@ -0,0 +1,136 @@ +#!/bin/bash +set -e + +# uncomment & config for service discovery +# export XSERVICE_ETCD=127.0.0.1:2379 +# export XSERVICE_ETCD_USER=root +# export XSERVICE_ETCD_PASSWORD=123456 + +appName="{{.Name}}" + +########################## + +cd $(dirname $0) + +getpid() { + echo $(ps -ef | grep -E "\s\.?\/${appName}" | awk '{print $2}') +} + +status() { + local pid=$(getpid) + if [ ! -z $pid ]; then + echo "$appName is runing pid: $pid" + + echo "" + echo "ps status" + ps -p "$pid" -o "user,pid,ppid,lstart,etime,rss,%mem,%cpu,command" + else + echo "$appName is not runing" + fi +} + +start() { + local pid=$(getpid) + if [ -z $pid ]; then + echo "starting $appName" + # disable stdlog, bug keep err log for track panic issues. + XSERVICE_DISABLE_STDOUT=true ./$appName &> .err.log & + echo "$appName is runing pid: $!" 
+ else + echo "$appName is already runing pid:$pid" + fi +} + +stop() { + echo "stopping $appName" + local pid=$(getpid) + if [ ! -z $pid ]; then + kill "$pid" + sleep 2s + pid=$(getpid) + if [ ! -z $pid ]; then + echo "$appName is still runing, try force stop!" + kill -9 "$pid" + sleep 2s + fi + fi + echo "$appName stopped" +} + +reload() { + local pid=$(getpid) + if [ ! -z $pid ]; then + kill -USR2 "$pid" + echo "$appName reloaded" + else + echo "$appName is not runing" + fi +} + +startOrReload() { + local pid=$(getpid) + if [ -z $pid ]; then + start ${@:2} + else + echo "reloading $appName" + reload + fi +} + +version() { + ./$appName -version +} + +help() { + ./$appName -h +} + +case "$1" in +status) + status + ;; +start) + start ${@:2} + ;; +stop) + stop + ;; +restart) + stop + start + ;; +reload) + reload + ;; +startOrReload) + startOrReload ${@:2} + ;; +version) + version + ;; +help) + help + ;; +*) + cat < 0 { + log.Error("target should be empty") + return + } + + err = fs.WalkDir(assets.ProjectFS, "project", func(src string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + path := strings.TrimPrefix(src, "project") + if path == "" { + return nil + } + + tf := filepath.Join(target, path) + + if d.IsDir() { + if err = os.MkdirAll(tf, 0755); err != nil { + return err + } + return nil + } + + if strings.HasSuffix(src, ".tpl") { + data, _ := assets.ProjectFS.ReadFile(src) + tp := template.Must(template.New(src).Parse(string(data))) + file, err := os.OpenFile(strings.TrimSuffix(tf, ".tpl"), os.O_CREATE|os.O_WRONLY, 0655) + if err != nil { + return err + } + if err = tp.Execute(file, project); err != nil { + return err + } + } else { + file, err := os.OpenFile(tf, os.O_CREATE|os.O_WRONLY, 0655) + if err != nil { + return err + } + sf, _ := assets.ProjectFS.Open(src) + _, err = io.Copy(file, sf) + if err != nil { + return err + } + } + + return nil + }) + + if err != nil { + log.Error("walk", zap.Error(err)) + } + }, + } +) + +type Project struct { + Module string + Repo string + Name string +} + +func init() { + pf := NewCmd.PersistentFlags() + pf.StringP("target", "t", ".", "output directory") + pf.StringP("module", "m", "", "module name") + viper.BindPFlag("target", pf.Lookup("target")) + viper.BindPFlag("module", pf.Lookup("module")) +} + +func newProject(module string) *Project { + match, err := regexp.MatchString(`^[a-z0-9\.\-_\/]+$`, module) + if err != nil { + log.Error("match module", zap.Error(err)) + return nil + } + if !match { + log.Error("invalid module") + return nil + } + + project := &Project{Module: module} + ms := strings.Split(module, "/") + switch n := len(ms); { + case n == 3: + project.Repo = ms[1] + project.Name = ms[2] + return project + case n == 1: + project.Repo = ms[0] + project.Name = ms[0] + return project + default: + log.Error("invalid module", zap.String("example", "github.com/example/helloworld")) + return nil + } +} diff --git a/tools/xservice/model/model.go b/tools/xservice/model/model.go new file mode 100644 index 0000000..2433606 --- /dev/null +++ b/tools/xservice/model/model.go @@ -0,0 +1,31 @@ +package model + +import ( + "github.com/spf13/cobra" + "github.com/spf13/viper" + + "github.com/xinpianchang/xservice/tools/xservice/model/mysql" +) + +var ( + ModelCmd = &cobra.Command{ + Use: "model", + Short: "generete model from datasource and basic CRUD base on GORM", + } +) + +func init() { + ModelCmd.AddCommand( + mysql.MySQLCmd, + ) + + pf := ModelCmd.PersistentFlags() + pf.StringP("datasource", "d", "", "datasource, valid 
golang SQL DSN, e.g. root:123456@(127.0.0.1:3306)/test") + pf.StringP("filter", "f", "", "filter table via regex") + pf.String("dir", "internal/model", "generate go model files to dir") + pf.String("pkg", "model", "model package name") + viper.BindPFlag("datasource", pf.Lookup("datasource")) + viper.BindPFlag("filter", pf.Lookup("filter")) + viper.BindPFlag("dir", pf.Lookup("dir")) + viper.BindPFlag("pkg", pf.Lookup("pkg")) +} diff --git a/tools/xservice/model/mysql/cmd.go b/tools/xservice/model/mysql/cmd.go new file mode 100644 index 0000000..e10ed84 --- /dev/null +++ b/tools/xservice/model/mysql/cmd.go @@ -0,0 +1,28 @@ +package mysql + +import ( + "github.com/spf13/cobra" + "github.com/spf13/viper" + "go.uber.org/zap" + + "github.com/xinpianchang/xservice/pkg/log" +) + +var ( + MySQLCmd = &cobra.Command{ + Use: "mysql", + Short: "generete model from mysql datasource and basic CRUD base on GORM", + DisableFlagsInUseLine: true, + Run: func(cmd *cobra.Command, args []string) { + datasource := viper.GetString("datasource") + filter := viper.GetString("filter") + dir := viper.GetString("dir") + pkg := viper.GetString("pkg") + + err := NewMySQLGenerator(dir, pkg, filter).Gen(datasource) + if err != nil { + log.Error("generate error", zap.Error(err)) + } + }, + } +) diff --git a/tools/xservice/model/mysql/table.go b/tools/xservice/model/mysql/table.go new file mode 100644 index 0000000..312e0eb --- /dev/null +++ b/tools/xservice/model/mysql/table.go @@ -0,0 +1,620 @@ +package mysql + +import ( + "fmt" + "path/filepath" + "regexp" + "strings" + + "github.com/dave/jennifer/jen" + "gorm.io/driver/mysql" + "gorm.io/gorm" + + "github.com/xinpianchang/xservice/pkg/stringx" +) + +type ( + MySQLGenerator struct { + Dir string + Pkg string + filter *regexp.Regexp + Tables []*Table + } + + Table struct { + Name string + Comment string + Fields []*Field + Statement *jen.Statement + } + + Field struct { + TableName string + ColumnName string + ColumnDefault string + IsNullable bool + DataType string + ColumnType string + ColumnKey string + Extra string + ColumnComment string + GoType string `gorm:"-"` + Statement *jen.Statement `gorm:"-"` + } +) + +var ( + typeMysqlDic = map[string]string{ + "smallint": "int16", + "smallint unsigned": "uint16", + "int": "int", + "int unsigned": "uint", + "bigint": "int64", + "bigint unsigned": "uint64", + "varchar": "string", + "char": "string", + "date": "time.Time", + "datetime": "time.Time", + "bit(1)": "int8", + "tinyint": "int8", + "tinyint unsigned": "uint8", + "tinyint(1)": "int8", + "tinyint(1) unsigned": "uint8", + "json": "string", + "text": "string", + "timestamp": "time.Time", + "double": "float64", + "mediumtext": "string", + "longtext": "string", + "float": "float32", + "tinytext": "string", + "enum": "string", + "time": "time.Time", + "blob": "[]byte", + "tinyblob": "[]byte", + } + + // typeMysqlMatch regexp match types + typeMysqlMatch = [][]string{ + {`^(tinyint)[(]\d+[)] unsigned`, "uint8"}, + {`^(tinyint)[(]\d+[)]`, "int8"}, + {`^(smallint)[(]\d+[)] unsigned`, "uint16"}, + {`^(smallint)[(]\d+[)]`, "int16"}, + {`^(int)[(]\d+[)] unsigned`, "uint"}, + {`^(int)[(]\d+[)]`, "int"}, + {`^(bigint)[(]\d+[)] unsigned`, "uint64"}, + {`^(bigint)[(]\d+[)]`, "int64"}, + {`^(char)[(]\d+[)]`, "string"}, + {`^(enum)[(](.)+[)]`, "string"}, + {`^(set)[(](.)+[)]`, "string"}, + {`^(varchar)[(]\d+[)]`, "string"}, + {`^(varbinary)[(]\d+[)]`, "[]byte"}, + {`^(binary)[(]\d+[)]`, "[]byte"}, + {`^(tinyblob)[(]\d+[)]`, "[]byte"}, + {`^(decimal)[(]\d+,\d+[)]`, "float64"}, + 
{`^(mediumint)[(]\d+[)]`, "string"}, + {`^(double)[(]\d+,\d+[)]`, "float64"}, + {`^(float)[(]\d+,\d+[)]`, "float64"}, + {`^(datetime)[(]\d+[)]`, "time.Time"}, + {`^(timestamp)[(]\d+[)]`, "time.Time"}, + } + + linebreak = regexp.MustCompile(`[\n\r]+`) +) + +func NewMySQLGenerator(dir string, pkg string, filter string) *MySQLGenerator { + var x *regexp.Regexp + if filter != "" { + x = regexp.MustCompile(filter) + } + return &MySQLGenerator{ + Dir: dir, + Pkg: pkg, + filter: x, + Tables: make([]*Table, 0, 512), + } +} + +func (t *MySQLGenerator) Gen(dsn string) error { + if err := t.parse(dsn); err != nil { + return err + } + + c := jen.NewFile(t.Pkg) + c.HeaderComment("auto generated file DO NOT EDIT") + c.Line() + + for _, table := range t.Tables { + c.Add(table.Statement).Line() + } + + file := filepath.Join(t.Dir, "model.gen.go") + err := c.Save(file) + if err == nil { + fmt.Println("generage model:", file) + } + return err +} + +func (t *MySQLGenerator) parse(dsn string) error { + db, err := gorm.Open(mysql.Open(dsn)) + if err != nil { + return err + } + + var tb []map[string]interface{} + err = db.Raw("select table_name, table_comment from information_schema.tables where table_schema = database()").Find(&tb).Error + if err != nil { + return err + } + + names := make([]string, 0, len(tb)) + for _, item := range tb { + name := item["table_name"].(string) + comment := item["table_comment"].(string) + table := &Table{Name: name, Comment: comment} + if t.filter != nil { + if t.filter.MatchString(name) { + t.Tables = append(t.Tables, table) + names = append(names, name) + } + } else { + t.Tables = append(t.Tables, table) + names = append(names, name) + } + } + + var fields = make([]*Field, 0, 1024*8) + err = db.Raw(` + select + table_name, + column_name, + column_default, + lower(is_nullable) = 'yes' is_nullable, + data_type, + lower(column_type) column_type, + column_key, + extra, + column_comment + from + information_schema.columns + where + table_schema = database() + and table_name in(?) 
+ `, names).Scan(&fields).Error + if err != nil { + return err + } + + for _, field := range fields { + if v, ok := typeMysqlDic[field.DataType]; ok { + field.GoType = v + } else { + for _, v := range typeMysqlMatch { + if ok, _ := regexp.MatchString(v[0], field.DataType); ok { + field.GoType = v[1] + } + } + } + + if field.GoType == "" { + panic(fmt.Sprintf("unknown type: %s", field.DataType)) + } + + field.Statement = t.fieldStatement(field) + } + + for _, table := range t.Tables { + for _, field := range fields { + if strings.EqualFold(table.Name, field.TableName) { + if table.Fields == nil { + table.Fields = make([]*Field, 0, 100) + } + table.Fields = append(table.Fields, field) + } + } + + table.Statement = t.tableStatement(table) + } + + return nil +} + +func (t *MySQLGenerator) tableStatement(table *Table) *jen.Statement { + c := jen.Comment(fmt.Sprintf("%s table: %s", stringx.CamelCase(table.Name), table.Name)).Line() + if table.Comment != "" { + c.Comment(table.Comment).Line() + } + + typeName := stringx.CamelCase(table.Name) + + c.Type().Id(typeName).Struct(jen.Do(func(c *jen.Statement) { + for _, field := range table.Fields { + c.Add(field.Statement).Line() + } + t.removeStatement(c, 1) + })).Line() + + // table name + c.Commentf("TableName set table of %v, refer: https://gorm.io/docs/conventions.html", table.Name).Line() + c.Func().Params(jen.Id(typeName)).Id("TableName").Call().String().Block( + jen.Return(jen.Lit(fmt.Sprint(table.Name))), + ).Line().Line() + + // BeforeUpdate Hook + fields := make([]*Field, 0, 2) + for _, field := range table.Fields { + if field.GoType == "time.Time" { + for _, it := range []string{"updated_at", "last_updated_at", "last_changed_at"} { + if strings.EqualFold(field.ColumnName, it) { + fields = append(fields, field) + break + } + } + } + } + if len(fields) > 0 { + c.Comment("BeforeUpdate use for check field has changed, refer: https://gorm.io/docs/update.html").Line() + c.Comment("The Changed method only works with methods Update, Updates,").Line() + c.Comment("and it only checks if the updating value from Update / Updates equals the model value, will return true if it is changed and not omitted").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("BeforeUpdate").Params(jen.Id("tx").Op("*").Id("gorm.DB")).Id("error").Block( + jen.If(jen.Id("tx.Statement.Changed").Call()).Block( + jen.Do(func(c *jen.Statement) { + for _, field := range fields { + c.Id("tx.Statement.SetColumn").Call(jen.Lit(stringx.CamelCase(field.ColumnName)), jen.Id("time.Now").Call()).Line() + } + t.removeStatement(c, 1) + }), + ), + jen.Return(jen.Nil()), + ) + c.Line().Line() + } + + // model basic + c.Add(t.tableDefaultModel(table)) + c.Line() + + return c +} + +func (t *MySQLGenerator) tableDefaultModel(table *Table) *jen.Statement { + typeName := stringx.LowerCamelCase(fmt.Sprint("default_", table.Name, "Model")) + modelName := stringx.CamelCase(table.Name) + + c := jen.Commentf("%s default %sModel implements with basic operation", typeName, stringx.CamelCase(table.Name)).Line() + c.Type().Id(typeName).Struct( + jen.Id("tx").Op("*").Id("gorm.DB"), + ).Line() + + // new model + newModelFn := stringx.CamelCase(fmt.Sprint("New_", table.Name, "Model")) + c.Commentf("%s create new op Model", newModelFn).Line() + c.Func().Id(newModelFn).Params(jen.Id("tx *gorm.DB")).Op("*").Id(typeName).Block( + jen.Return(jen.Op("&").Id(typeName).Block(jen.Id("tx").Op(":").Id("tx.Model").Call(jen.Op("&").Id(modelName).Block()).Op(","))), + ).Line() + + // Model + c.Comment("Model for 
update tx model").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Model").Params(jen.Id("value interface{}")).Op("*").Id(typeName).Block( + jen.Return(jen.Id(newModelFn).Call(jen.Id("t.tx.Model(value)"))), + ).Line() + + // Scopes + c.Comment("Scopes pass current database connection to arguments `func(DB) DB`, which could be used to add conditions dynamically").Line() + c.Comment("refer: https://gorm.io/docs/advanced_query.html#Scopes").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Scopes").Params(jen.Id("fn ...func(tx *gorm.DB) *gorm.DB")).Op("*").Id(typeName).Block( + jen.Return(jen.Id(newModelFn).Call(jen.Id("t.tx.Scopes(fn...)"))), + ).Line() + + // Clauses + c.Comment("Clauses add clauses").Line() + c.Comment("refer: https://gorm.io/docs/sql_builder.html#Clauses").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Clauses").Params(jen.Id("conds").Op("...").Qual("gorm.io/gorm/clause", "Expression")).Op("*").Id(typeName).Block( + jen.Return(jen.Id(newModelFn).Call(jen.Id("t.tx.Clauses(conds...)"))), + ).Line() + + // Where + c.Comment("Where").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Where").Params(jen.Id("query interface{}, args ...interface{}")).Op("*").Id(typeName).Block( + jen.Return(jen.Id(newModelFn).Call(jen.Id("t.tx.Where(query, args...)"))), + ).Line() + + // Order + c.Comment("Order").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Order").Params(jen.Id("value interface{}")).Op("*").Id(typeName).Block( + jen.Return(jen.Id(newModelFn).Call(jen.Id("t.tx.Order(value)"))), + ).Line() + + // Distinct + c.Comment("Distinct").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Distinct").Params(jen.Id("args interface{}")).Op("*").Id(typeName).Block( + jen.Return(jen.Id(newModelFn).Call(jen.Id("t.tx.Distinct(args)"))), + ).Line() + + // Limit + c.Comment("Limit").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Limit").Params(jen.Id("limit int")).Op("*").Id(typeName).Block( + jen.Return(jen.Id(newModelFn).Call(jen.Id("t.tx.Limit(limit)"))), + ).Line() + + // Offset + c.Comment("Offset").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Offset").Params(jen.Id("offset int")).Op("*").Id(typeName).Block( + jen.Return(jen.Id(newModelFn).Call(jen.Id("t.tx.Offset(offset)"))), + ).Line() + + // Select + c.Comment("Select").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Select").Params(jen.Id("query interface{}, args ...interface{}")).Op("*").Id(typeName).Block( + jen.Return(jen.Id(newModelFn).Call(jen.Id("t.tx.Select(query, args...)"))), + ).Line() + + // Scan + c.Comment("Scan scan value to a struct").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Scan").Params(jen.Id("dest interface{}")).Id("error").Block( + jen.Return(jen.Id("t.tx.Scan(dest).Error")), + ).Line() + + // Pluck + c.Comment("Pluck Query single column from database and scan into a slice").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Pluck").Params(jen.Id("column string, dest interface{}")).Id("error").Block( + jen.Return(jen.Id("t.tx.Pluck(column, dest).Error")), + ).Line() + + // Count + c.Comment("Count Get matched records count").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Count").Params().Id("(int64, error)").Block( + jen.Id("var v int64"), + jen.Id("err := t.tx.Count(&v).Error"), + jen.Return(jen.Id("v, err")), + ).Line() + + // CountMust + c.Comment("CountMust Get matched records count if error occurs just log it").Line() + 
c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("CountMust").Params(jen.Id("ctx").Qual("context", "Context")).Id("int64").Block( + jen.Id("var v int64"), + jen.Id("err := t.tx.Count(&v).Error"), + jen.If(jen.Id("err != nil")).Block( + jen.Qual("github.com/xinpianchang/xservice/pkg/log", "For").Id("(ctx).Error").Call(jen.Lit("count"), jen.Qual("go.uber.org/zap", "Error").Call(jen.Id("err"))), + ), + jen.Return(jen.Id("v")), + ).Line() + + // Create + c.Comment("Create insert the value into database").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Create").Params(jen.Id("data *").Id(modelName)).Id("error").Block( + jen.Return(jen.Id("t.tx.Create(data).Error")), + ).Line() + + // CreateAll + c.Comment("CreateAll Batch Insert").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("CreateAll").Params(jen.Id("data []*").Id(modelName)).Id("error").Block( + jen.Return(jen.Id("t.tx.Create(data).Error")), + ).Line() + + // CreateInBatches + c.Comment("CreateInBatches").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("CreateInBatches").Params(jen.Id("data []*").Id(modelName).Id(", batchSize int")).Id("error").Block( + jen.Return(jen.Id("t.tx.CreateInBatches(data, batchSize).Error")), + ).Line() + + // CreateInBatchesMap + c.Comment("CreateInBatchesMap").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("CreateInBatchesMap").Params(jen.Id("data []map[string]interface{}, batchSize int")).Id("error").Block( + jen.Return(jen.Id("t.tx.CreateInBatches(data, batchSize).Error")), + ).Line() + + // CreateMap + c.Comment("CreateMap Create From Map").Line() + c.Comment("common usage eg. Create From SQL Expression/Context Valuer refer https://gorm.io/docs/create.html#Create-From-SQL-Expression-Context-Valuer").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("CreateMap").Params(jen.Id("data []map[string]interface{}")).Id("error").Block( + jen.Return(jen.Id("t.tx.Create(data).Error")), + ).Line() + + // Save + c.Comment("Save update value in database, if the value doesn't have primary key, will insert it").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Save").Params(jen.Id("data *").Id(modelName)).Id("error").Block( + jen.Return(jen.Id("t.tx.Create(data).Error")), + ).Line() + + // SaveAll + c.Comment("SaveAll update all items in database, if the value doesn't have primary key, will insert it").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("SaveAll").Params(jen.Id("data []*").Id(modelName)).Id("error").Block( + jen.Return(jen.Id("t.tx.Create(data).Error")), + ).Line() + + // Update + c.Comment("Update attributes with `struct`, will only update non-zero fields").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Update").Params(jen.Id("data *").Id(modelName)).Id("error").Block( + jen.Return(jen.Id("t.tx.Updates(data).Error")), + ).Line() + + // UpdateForce + c.Comment("UpdateForce force update include zero value fields").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("UpdateForce").Params(jen.Id("data *").Id(modelName)).Id("error").Block( + jen.Return(jen.Id(`t.tx.Model(data).Select("*").Updates(data).Error`)), + ).Line() + + // UpdateMap + c.Comment("UpdateMap update attributes with `map`").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("UpdateMap").Params(jen.Id("data map[string]interface{}")).Id("error").Block( + jen.Return(jen.Id("t.tx.Updates(data).Error")), + ).Line() + + // UpdateColumn + c.Comment("UpdateColumn update only one column").Line() + 
c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("UpdateColumn").Params(jen.Id("column string, value interface{}")).Id("error").Block( + jen.Return(jen.Id("t.tx.UpdateColumn(column, value).Error")), + ).Line() + + // Get + c.Comment("Get retrieving by primary key").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Get").Params(jen.Id("id interface{}")).Op("(*").Id(modelName).Id(",error").Op(")").Block( + jen.Id("var data").Id(modelName), + jen.Id("err := t.tx.Find(&data, id).Error"), + jen.Return(jen.Id("&data, err")), + ).Line() + + c.Comment("Find").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Find").Params(jen.Id("conds ...interface{}")).Op("([]*").Id(modelName).Id(",error").Op(")").Block( + jen.Id("var data []*").Id(modelName), + jen.Id("err := t.tx.Find(&data, conds...).Error"), + jen.Return(jen.Id("data, err")), + ).Line() + + c.Comment("First").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("First").Params(jen.Id("conds ...interface{}")).Op("(*").Id(modelName).Id(",error").Op(")").Block( + jen.Id("var data ").Id(modelName), + jen.Id("err := t.tx.First(&data, conds...).Error"), + jen.Return(jen.Id("&data, err")), + ).Line() + + c.Comment("Last").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Last").Params(jen.Id("conds ...interface{}")).Op("(*").Id(modelName).Id(",error").Op(")").Block( + jen.Id("var data ").Id(modelName), + jen.Id("err := t.tx.Last(&data, conds...).Error"), + jen.Return(jen.Id("&data, err")), + ).Line() + + c.Comment("Take").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Take").Params(jen.Id("conds ...interface{}")).Op("(*").Id(modelName).Id(",error").Op(")").Block( + jen.Id("var data ").Id(modelName), + jen.Id("err := t.tx.Take(&data, conds...).Error"), + jen.Return(jen.Id("&data, err")), + ).Line() + + c.Comment("FindOne").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("FindOne").Params(jen.Id("conds ...interface{}")).Op("(*").Id(modelName).Id(",error").Op(")").Block( + jen.Id("var data ").Id(modelName), + jen.Id("err := t.tx.Take(&data, conds...).Error"), + jen.Return(jen.Id("&data, err")), + ).Line() + + c.Comment("Delete").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("Delete").Params(jen.Id("conds ...interface{}")).Id("error").Block( + jen.Return(jen.Id("t.tx.Delete(&").Id(modelName).Id("{}, conds...).Error")), + ).Line() + + c.Comment("DeletePermanently").Line() + c.Func().Params(jen.Id("t").Op("*").Id(typeName)).Id("DeletePermanently").Params(jen.Id("conds ...interface{}")).Id("error").Block( + jen.Return(jen.Id("t.tx.Unscoped().Delete(&").Id(modelName).Id("{}, conds...).Error")), + ).Line() + + return c +} + +func (t *MySQLGenerator) fieldTypeStatement(field *Field) *jen.Statement { + comment := t.oneline(field.ColumnComment) + idx := strings.Index(comment, "struct:") + if idx >= 0 { + idx := strings.Index(comment, "struct:") + gs := comment[idx+7:] + idx = strings.Index(gs, " ") + if idx > 0 { + gs = gs[:idx] + } + idx = strings.LastIndex(gs, "/") + path := "" + name := gs + if idx > 0 { + path = gs[:idx] + name = gs[idx+1:] + } + return jen.Qual(path, name) + } + + switch field.GoType { + case "int": + return jen.Int() + case "uint": + return jen.Uint() + case "int8": + return jen.Int8() + case "uint8": + return jen.Uint8() + case "int16": + return jen.Int16() + case "uint16": + return jen.Uint16() + case "int32": + return jen.Int32() + case "uint32": + return jen.Uint32() + case "int64": + return jen.Int64() + case "uint64": + return 
jen.Uint64() + case "string": + return jen.String() + case "time.Time": + if strings.EqualFold(field.ColumnName, "deleted_at") { + return jen.Qual("gorm.io/gorm", "DeletedAt") + } else { + return jen.Qual("time", "Time") + } + case "float32": + return jen.Float32() + case "float64": + return jen.Float64() + case "[]byte": + return jen.Op("[]").Byte() + default: + panic(fmt.Sprintf("unknown GoType %s", field.GoType)) + } +} + +func (t *MySQLGenerator) fieldStatement(field *Field) *jen.Statement { + c := jen.Id(stringx.CamelCase(field.ColumnName)) + + if field.IsNullable { + c.Op("*") + } + + c.Add(t.fieldTypeStatement(field)) + + // tags + tag := make(map[string]string, 2) + { + v := fmt.Sprintf(`column:%s;type:%s`, field.ColumnName, field.ColumnType) + if strings.ToUpper(field.ColumnKey) == "PRI" { + v += ";primary_key" + } + if strings.EqualFold(field.Extra, "auto_increment") { + v += fmt.Sprint(";autoIncrement", field.ColumnDefault) + } + if !field.IsNullable { + v += ";not null" + } + if field.ColumnDefault != "" { + v += fmt.Sprint(";default:", field.ColumnDefault) + } + tag["gorm"] = v + } + + { + if !strings.Contains(field.ColumnComment, "json_hidden") { + v := stringx.LowerCamelCase(field.ColumnName) + if field.IsNullable { + v += ",omitempty" + } + tag["json"] = v + } + } + + c.Tag(tag) + + if comment := t.oneline(field.ColumnComment); comment != "" { + c.Comment(comment) + } + + return c +} + +func (t *MySQLGenerator) removeStatement(c *jen.Statement, count int) { + *c = (*c)[:len(*c)-count] +} + +func (t *MySQLGenerator) oneline(str string) string { + return linebreak.ReplaceAllString(str, "") +} diff --git a/tools/xservice/xservice.go b/tools/xservice/xservice.go new file mode 100644 index 0000000..f91bc41 --- /dev/null +++ b/tools/xservice/xservice.go @@ -0,0 +1,32 @@ +package main + +import ( + "fmt" + + "github.com/spf13/cobra" + + "github.com/xinpianchang/xservice" + "github.com/xinpianchang/xservice/tools/xservice/generator" + "github.com/xinpianchang/xservice/tools/xservice/gogen" + "github.com/xinpianchang/xservice/tools/xservice/model" +) + +var ( + rootCmd = &cobra.Command{ + Use: "xservice", + Short: "xservice toolset", + Version: xservice.Version, + } +) + +func main() { + rootCmd.AddCommand( + gogen.NewCmd, + model.ModelCmd, + generator.StatusMapGeneratorCmd, + ) + + if err := rootCmd.Execute(); err != nil { + fmt.Println(err) + } +} diff --git a/version.go b/version.go new file mode 100644 index 0000000..faffd65 --- /dev/null +++ b/version.go @@ -0,0 +1,3 @@ +package xservice + +const Version = "v1.0.0"
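Beyond the patch itself, a minimal usage sketch of the `pkg/stringx` helpers introduced above (`CamelCase`, `NewSimpleHashId`, `Substr`, `TakeOne`) may help reviewers; the salt, inputs, and printed values below are illustrative assumptions, not taken from the patch.

```go
package main

import (
	"fmt"

	"github.com/xinpianchang/xservice/pkg/stringx"
)

func main() {
	// snake_case to exported CamelCase, as used by the code generators.
	fmt.Println(stringx.CamelCase("user_profile")) // UserProfile

	// Obfuscate numeric IDs; salt and minimum length are illustrative values.
	h := stringx.NewSimpleHashId("example-salt", 8)
	hash, err := h.EncodeInt64(42)
	if err != nil {
		panic(err)
	}
	id, _ := h.DecodeInt64(hash)
	fmt.Println(hash, id) // e.g. "xxxxxxxx" 42

	// Rune-safe substring and a simple fallback helper.
	s, _ := stringx.Substr("我爱中国3000遍", 1, 4)
	fmt.Println(s)                              // 爱中国
	fmt.Println(stringx.TakeOne("", "default")) // default
}
```

Note that `NewSimpleHashId` panics if the hashids configuration is invalid, so it is best created once at startup rather than per request.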
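Similarly, `tracingx.GetTraceID` is useful for surfacing the Jaeger trace ID in handlers or logs. A small sketch, assuming the `Trace` middleware has already attached the span to the request context; the handler name and response shape are made up for illustration.

```go
package handler

import (
	"net/http"

	"github.com/labstack/echo/v4"

	"github.com/xinpianchang/xservice/pkg/tracingx"
)

// Ping returns the current request's Jaeger trace ID (empty when tracing is
// not configured), which is handy for correlating client reports with traces.
func Ping(c echo.Context) error {
	traceID := tracingx.GetTraceID(c.Request().Context())
	return c.JSON(http.StatusOK, map[string]string{"traceId": traceID})
}
```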
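Finally, to make the jennifer-based generator in `tools/xservice/model/mysql/table.go` easier to review, here is a rough, hand-written approximation of what `xservice model mysql -d <dsn>` emits for a hypothetical `user` table. The table, columns, and tags are assumptions for illustration only; the real `model.gen.go` depends entirely on your schema and also includes a `BeforeUpdate` hook plus many more CRUD helpers (Order, Limit, Count, Create, Update, Delete, ...).

```go
// auto generated file DO NOT EDIT (approximate shape for a hypothetical `user` table)
package model

import (
	"time"

	"gorm.io/gorm"
)

// User table: user
type User struct {
	Id        uint64    `gorm:"column:id;type:bigint unsigned;primary_key;autoIncrement;not null" json:"id"`
	Name      string    `gorm:"column:name;type:varchar(64);not null" json:"name"`
	CreatedAt time.Time `gorm:"column:created_at;type:datetime;not null" json:"createdAt"`
	UpdatedAt time.Time `gorm:"column:updated_at;type:datetime;not null" json:"updatedAt"`
}

// TableName set table of user, refer: https://gorm.io/docs/conventions.html
func (User) TableName() string { return "user" }

// defaultUserModel default UserModel implements with basic operation
type defaultUserModel struct{ tx *gorm.DB }

// NewUserModel create new op Model
func NewUserModel(tx *gorm.DB) *defaultUserModel {
	return &defaultUserModel{tx: tx.Model(&User{})}
}

// Where narrows the query; the generator emits similar chainable wrappers
// for Order, Limit, Offset, Select, Scopes, Clauses, and so on.
func (t *defaultUserModel) Where(query interface{}, args ...interface{}) *defaultUserModel {
	return NewUserModel(t.tx.Where(query, args...))
}

// Find returns all rows matching conds.
func (t *defaultUserModel) Find(conds ...interface{}) ([]*User, error) {
	var data []*User
	err := t.tx.Find(&data, conds...).Error
	return data, err
}
```

A typical call site would then look like `model.NewUserModel(db).Where("name = ?", "foo").Find()`, keeping query composition on the generated wrapper rather than on raw `*gorm.DB` values.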