", result)
+ }
+ }
+
+ func() {
+ ctrl.TplName = "file2.tpl"
+ defer func() {
+ if r := recover(); r == nil {
+ t.Fatal("TestAdditionalViewPaths expected error")
+ }
+ }()
+ ctrl.RenderString()
+ }()
+
+ ctrl.TplName = "file2.tpl"
+ ctrl.ViewPath = dir2
+ ctrl.RenderString()
+}
diff --git a/src/vendor/github.com/astaxie/beego/error.go b/src/vendor/github.com/astaxie/beego/error.go
index 4f48fab21..b913db39d 100644
--- a/src/vendor/github.com/astaxie/beego/error.go
+++ b/src/vendor/github.com/astaxie/beego/error.go
@@ -93,7 +93,11 @@ func showErr(err interface{}, ctx *context.Context, stack string) {
"BeegoVersion": VERSION,
"GoVersion": runtime.Version(),
}
- ctx.ResponseWriter.WriteHeader(500)
+ if ctx.Output.Status != 0 {
+ ctx.ResponseWriter.WriteHeader(ctx.Output.Status)
+ } else {
+ ctx.ResponseWriter.WriteHeader(500)
+ }
t.Execute(ctx.ResponseWriter, data)
}
@@ -210,159 +214,163 @@ var ErrorMaps = make(map[string]*errorInfo, 10)
// show 401 unauthorized error.
func unauthorized(rw http.ResponseWriter, r *http.Request) {
- t, _ := template.New("beegoerrortemp").Parse(errtpl)
- data := map[string]interface{}{
- "Title": http.StatusText(401),
- "BeegoVersion": VERSION,
- }
- data["Content"] = template.HTML(" The page you have requested can't be authorized." +
- " Perhaps you are here because:" +
- "
" +
- " The credentials you supplied are incorrect" +
- " There are errors in the website address" +
- "
")
- t.Execute(rw, data)
+ responseError(rw, r,
+ 401,
+ " The page you have requested can't be authorized."+
+ " Perhaps you are here because:"+
+ "
"+
+ " The credentials you supplied are incorrect"+
+ " There are errors in the website address"+
+ "
",
+ )
}
// show 402 Payment Required
func paymentRequired(rw http.ResponseWriter, r *http.Request) {
- t, _ := template.New("beegoerrortemp").Parse(errtpl)
- data := map[string]interface{}{
- "Title": http.StatusText(402),
- "BeegoVersion": VERSION,
- }
- data["Content"] = template.HTML(" The page you have requested Payment Required." +
- " Perhaps you are here because:" +
- "
" +
- " The credentials you supplied are incorrect" +
- " There are errors in the website address" +
- "
")
- t.Execute(rw, data)
+ responseError(rw, r,
+ 402,
+ " The page you have requested Payment Required."+
+ " Perhaps you are here because:"+
+ "
"+
+ " The credentials you supplied are incorrect"+
+ " There are errors in the website address"+
+ "
",
+ )
}
// show 403 forbidden error.
func forbidden(rw http.ResponseWriter, r *http.Request) {
- t, _ := template.New("beegoerrortemp").Parse(errtpl)
- data := map[string]interface{}{
- "Title": http.StatusText(403),
- "BeegoVersion": VERSION,
- }
- data["Content"] = template.HTML(" The page you have requested is forbidden." +
- " Perhaps you are here because:" +
- "
" +
- " Your address may be blocked" +
- " The site may be disabled" +
- " You need to log in" +
- "
")
- t.Execute(rw, data)
+ responseError(rw, r,
+ 403,
+ " The page you have requested is forbidden."+
+ " Perhaps you are here because:"+
+ "
"+
+ " Your address may be blocked"+
+ " The site may be disabled"+
+ " You need to log in"+
+ "
",
+ )
}
-// show 404 notfound error.
+// show 422 missing xsrf token
+func missingxsrf(rw http.ResponseWriter, r *http.Request) {
+ responseError(rw, r,
+ 422,
+ " The page you have requested is forbidden."+
+ " Perhaps you are here because:"+
+ "
"+
+ " '_xsrf' argument missing from POST"+
+ "
",
+ )
+}
+
+// show 417 invalid xsrf token
+func invalidxsrf(rw http.ResponseWriter, r *http.Request) {
+ responseError(rw, r,
+ 417,
+ " The page you have requested is forbidden."+
+ " Perhaps you are here because:"+
+ "
"+
+ " expected XSRF not found"+
+ "
",
+ )
+}
+
+// show 404 not found error.
func notFound(rw http.ResponseWriter, r *http.Request) {
- t, _ := template.New("beegoerrortemp").Parse(errtpl)
- data := map[string]interface{}{
- "Title": http.StatusText(404),
- "BeegoVersion": VERSION,
- }
- data["Content"] = template.HTML(" The page you have requested has flown the coop." +
- " Perhaps you are here because:" +
- "
" +
- " The page has moved" +
- " The page no longer exists" +
- " You were looking for your puppy and got lost" +
- " You like 404 pages" +
- "
")
- t.Execute(rw, data)
+ responseError(rw, r,
+ 404,
+ " The page you have requested has flown the coop."+
+ " Perhaps you are here because:"+
+ "
"+
+ " The page has moved"+
+ " The page no longer exists"+
+ " You were looking for your puppy and got lost"+
+ " You like 404 pages"+
+ "
",
+ )
}
// show 405 Method Not Allowed
func methodNotAllowed(rw http.ResponseWriter, r *http.Request) {
- t, _ := template.New("beegoerrortemp").Parse(errtpl)
- data := map[string]interface{}{
- "Title": http.StatusText(405),
- "BeegoVersion": VERSION,
- }
- data["Content"] = template.HTML(" The method you have requested Not Allowed." +
- " Perhaps you are here because:" +
- "
" +
- " The method specified in the Request-Line is not allowed for the resource identified by the Request-URI" +
- " The response MUST include an Allow header containing a list of valid methods for the requested resource." +
- "
")
- t.Execute(rw, data)
+ responseError(rw, r,
+ 405,
+ " The method you have requested Not Allowed."+
+ " Perhaps you are here because:"+
+ "
"+
+ " The method specified in the Request-Line is not allowed for the resource identified by the Request-URI"+
+ " The response MUST include an Allow header containing a list of valid methods for the requested resource."+
+ "
",
+ )
}
// show 500 internal server error.
func internalServerError(rw http.ResponseWriter, r *http.Request) {
- t, _ := template.New("beegoerrortemp").Parse(errtpl)
- data := map[string]interface{}{
- "Title": http.StatusText(500),
- "BeegoVersion": VERSION,
- }
- data["Content"] = template.HTML(" The page you have requested is down right now." +
- "
" +
- " Please try again later and report the error to the website administrator" +
- "
")
- t.Execute(rw, data)
+ responseError(rw, r,
+ 500,
+ " The page you have requested is down right now."+
+ "
"+
+ " Please try again later and report the error to the website administrator"+
+ "
",
+ )
}
// show 501 Not Implemented.
func notImplemented(rw http.ResponseWriter, r *http.Request) {
- t, _ := template.New("beegoerrortemp").Parse(errtpl)
- data := map[string]interface{}{
- "Title": http.StatusText(504),
- "BeegoVersion": VERSION,
- }
- data["Content"] = template.HTML(" The page you have requested is Not Implemented." +
- "
" +
- " Please try again later and report the error to the website administrator" +
- "
")
- t.Execute(rw, data)
+ responseError(rw, r,
+ 501,
+ " The page you have requested is Not Implemented."+
+ "
"+
+ " Please try again later and report the error to the website administrator"+
+ "
",
+ )
}
// show 502 Bad Gateway.
func badGateway(rw http.ResponseWriter, r *http.Request) {
- t, _ := template.New("beegoerrortemp").Parse(errtpl)
- data := map[string]interface{}{
- "Title": http.StatusText(502),
- "BeegoVersion": VERSION,
- }
- data["Content"] = template.HTML(" The page you have requested is down right now." +
- "
" +
- " The server, while acting as a gateway or proxy, received an invalid response from the upstream server it accessed in attempting to fulfill the request." +
- " Please try again later and report the error to the website administrator" +
- "
")
- t.Execute(rw, data)
+ responseError(rw, r,
+ 502,
+ " The page you have requested is down right now."+
+ "
"+
+ " The server, while acting as a gateway or proxy, received an invalid response from the upstream server it accessed in attempting to fulfill the request."+
+ " Please try again later and report the error to the website administrator"+
+ "
",
+ )
}
// show 503 service unavailable error.
func serviceUnavailable(rw http.ResponseWriter, r *http.Request) {
- t, _ := template.New("beegoerrortemp").Parse(errtpl)
- data := map[string]interface{}{
- "Title": http.StatusText(503),
- "BeegoVersion": VERSION,
- }
- data["Content"] = template.HTML(" The page you have requested is unavailable." +
- " Perhaps you are here because:" +
- "
" +
- "
The page is overloaded" +
- " Please try again later." +
- "
")
- t.Execute(rw, data)
+ responseError(rw, r,
+ 503,
+ " The page you have requested is unavailable."+
+ " Perhaps you are here because:"+
+ "
"+
+ "
The page is overloaded"+
+ " Please try again later."+
+ "
",
+ )
}
// show 504 Gateway Timeout.
func gatewayTimeout(rw http.ResponseWriter, r *http.Request) {
+ responseError(rw, r,
+ 504,
+ " The page you have requested is unavailable"+
+ " Perhaps you are here because:"+
+ "
"+
+ "
The server, while acting as a gateway or proxy, did not receive a timely response from the upstream server specified by the URI."+
+ " Please try again later."+
+ "
",
+ )
+}
+
+func responseError(rw http.ResponseWriter, r *http.Request, errCode int, errContent string) {
t, _ := template.New("beegoerrortemp").Parse(errtpl)
data := map[string]interface{}{
- "Title": http.StatusText(504),
+ "Title": http.StatusText(errCode),
"BeegoVersion": VERSION,
+ "Content": template.HTML(errContent),
}
- data["Content"] = template.HTML(" The page you have requested is unavailable." +
- " Perhaps you are here because:" +
- "
" +
- "
The server, while acting as a gateway or proxy, did not receive a timely response from the upstream server specified by the URI." +
- " Please try again later." +
- "
")
t.Execute(rw, data)
}
@@ -400,6 +408,11 @@ func ErrorController(c ControllerInterface) *App {
return BeeApp
}
+// Exception writes the HTTP status given by errCode and executes the matching error handler if one exists.
+func Exception(errCode uint64, ctx *context.Context) {
+ exception(strconv.FormatUint(errCode, 10), ctx)
+}
+
// show error string as simple text message.
// if error string is empty, show 503 or 500 error as default.
func exception(errCode string, ctx *context.Context) {
@@ -408,7 +421,10 @@ func exception(errCode string, ctx *context.Context) {
if err == nil {
return v
}
- return 503
+ if ctx.Output.Status == 0 {
+ return 503
+ }
+ return ctx.Output.Status
}
for _, ec := range []string{errCode, "503", "500"} {
diff --git a/src/vendor/github.com/astaxie/beego/error_test.go b/src/vendor/github.com/astaxie/beego/error_test.go
new file mode 100644
index 000000000..378aa9538
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/error_test.go
@@ -0,0 +1,88 @@
+// Copyright 2016 beego Author. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package beego
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "strconv"
+ "strings"
+ "testing"
+)
+
+type errorTestController struct {
+ Controller
+}
+
+const parseCodeError = "parse code error"
+
+func (ec *errorTestController) Get() {
+ errorCode, err := ec.GetInt("code")
+ if err != nil {
+ ec.Abort(parseCodeError)
+ }
+ if errorCode != 0 {
+ ec.CustomAbort(errorCode, ec.GetString("code"))
+ }
+ ec.Abort("404")
+}
+
+func TestErrorCode_01(t *testing.T) {
+ registerDefaultErrorHandler()
+ for k := range ErrorMaps {
+ r, _ := http.NewRequest("GET", "/error?code="+k, nil)
+ w := httptest.NewRecorder()
+
+ handler := NewControllerRegister()
+ handler.Add("/error", &errorTestController{})
+ handler.ServeHTTP(w, r)
+ code, _ := strconv.Atoi(k)
+ if w.Code != code {
+ t.Fail()
+ }
+ if !strings.Contains(w.Body.String(), http.StatusText(code)) {
+ t.Fail()
+ }
+ }
+}
+
+func TestErrorCode_02(t *testing.T) {
+ registerDefaultErrorHandler()
+ r, _ := http.NewRequest("GET", "/error?code=0", nil)
+ w := httptest.NewRecorder()
+
+ handler := NewControllerRegister()
+ handler.Add("/error", &errorTestController{})
+ handler.ServeHTTP(w, r)
+ if w.Code != 404 {
+ t.Fail()
+ }
+}
+
+func TestErrorCode_03(t *testing.T) {
+ registerDefaultErrorHandler()
+ r, _ := http.NewRequest("GET", "/error?code=panic", nil)
+ w := httptest.NewRecorder()
+
+ handler := NewControllerRegister()
+ handler.Add("/error", &errorTestController{})
+ handler.ServeHTTP(w, r)
+ if w.Code != 200 {
+ t.Fail()
+ }
+ if w.Body.String() != parseCodeError {
+ t.Fail()
+ }
+}
diff --git a/src/vendor/github.com/astaxie/beego/filter.go b/src/vendor/github.com/astaxie/beego/filter.go
index 863223f79..9cc6e9134 100644
--- a/src/vendor/github.com/astaxie/beego/filter.go
+++ b/src/vendor/github.com/astaxie/beego/filter.go
@@ -27,6 +27,7 @@ type FilterRouter struct {
tree *Tree
pattern string
returnOnOutput bool
+ resetParams bool
}
// ValidRouter checks if the current request is matched by this filter.
diff --git a/src/vendor/github.com/astaxie/beego/filter_test.go b/src/vendor/github.com/astaxie/beego/filter_test.go
index d9928d8d7..4ca4d2b84 100644
--- a/src/vendor/github.com/astaxie/beego/filter_test.go
+++ b/src/vendor/github.com/astaxie/beego/filter_test.go
@@ -20,14 +20,8 @@ import (
"testing"
"github.com/astaxie/beego/context"
- "github.com/astaxie/beego/logs"
)
-func init() {
- BeeLogger = logs.NewLogger(10000)
- BeeLogger.SetLogger("console", "")
-}
-
var FilterUser = func(ctx *context.Context) {
ctx.Output.Body([]byte("i am " + ctx.Input.Param(":last") + ctx.Input.Param(":first")))
}
diff --git a/src/vendor/github.com/astaxie/beego/flash_test.go b/src/vendor/github.com/astaxie/beego/flash_test.go
index 640d54de6..d5e9608dc 100644
--- a/src/vendor/github.com/astaxie/beego/flash_test.go
+++ b/src/vendor/github.com/astaxie/beego/flash_test.go
@@ -48,7 +48,7 @@ func TestFlashHeader(t *testing.T) {
// match for the expected header
res := strings.Contains(sc, "BEEGO_FLASH=%00notice%23BEEGOFLASH%23TestFlashString%00")
// validate the assertion
- if res != true {
+ if !res {
t.Errorf("TestFlashHeader() unable to validate flash message")
}
}
diff --git a/src/vendor/github.com/astaxie/beego/grace/conn.go b/src/vendor/github.com/astaxie/beego/grace/conn.go
index 6807e1ace..e020f8507 100644
--- a/src/vendor/github.com/astaxie/beego/grace/conn.go
+++ b/src/vendor/github.com/astaxie/beego/grace/conn.go
@@ -3,14 +3,17 @@ package grace
import (
"errors"
"net"
+ "sync"
)
type graceConn struct {
net.Conn
server *Server
+ m sync.Mutex
+ closed bool
}
-func (c graceConn) Close() (err error) {
+func (c *graceConn) Close() (err error) {
defer func() {
if r := recover(); r != nil {
switch x := r.(type) {
@@ -23,6 +26,14 @@ func (c graceConn) Close() (err error) {
}
}
}()
+
+ c.m.Lock()
+ if c.closed {
+ c.m.Unlock()
+ return
+ }
c.server.wg.Done()
+ c.closed = true
+ c.m.Unlock()
return c.Conn.Close()
}
diff --git a/src/vendor/github.com/astaxie/beego/grace/grace.go b/src/vendor/github.com/astaxie/beego/grace/grace.go
index af4e90683..6ebf8455f 100644
--- a/src/vendor/github.com/astaxie/beego/grace/grace.go
+++ b/src/vendor/github.com/astaxie/beego/grace/grace.go
@@ -85,23 +85,31 @@ var (
isChild bool
socketOrder string
- once sync.Once
+
+ hookableSignals []os.Signal
)
-func onceInit() {
- regLock = &sync.Mutex{}
+func init() {
flag.BoolVar(&isChild, "graceful", false, "listen on open fd (after forking)")
flag.StringVar(&socketOrder, "socketorder", "", "previous initialization order - used when more than one listener was started")
+
+ regLock = &sync.Mutex{}
runningServers = make(map[string]*Server)
runningServersOrder = []string{}
socketPtrOffsetMap = make(map[string]uint)
+
+ hookableSignals = []os.Signal{
+ syscall.SIGHUP,
+ syscall.SIGINT,
+ syscall.SIGTERM,
+ }
}
// NewServer returns a new graceServer.
func NewServer(addr string, handler http.Handler) (srv *Server) {
- once.Do(onceInit)
regLock.Lock()
defer regLock.Unlock()
+
if !flag.Parsed() {
flag.Parse()
}
diff --git a/src/vendor/github.com/astaxie/beego/grace/listener.go b/src/vendor/github.com/astaxie/beego/grace/listener.go
index 5439d0b20..7ede63a30 100644
--- a/src/vendor/github.com/astaxie/beego/grace/listener.go
+++ b/src/vendor/github.com/astaxie/beego/grace/listener.go
@@ -21,7 +21,7 @@ func newGraceListener(l net.Listener, srv *Server) (el *graceListener) {
server: srv,
}
go func() {
- _ = <-el.stop
+ <-el.stop
el.stopped = true
el.stop <- el.Listener.Close()
}()
@@ -37,7 +37,7 @@ func (gl *graceListener) Accept() (c net.Conn, err error) {
tc.SetKeepAlive(true)
tc.SetKeepAlivePeriod(3 * time.Minute)
- c = graceConn{
+ c = &graceConn{
Conn: tc,
server: gl.server,
}
diff --git a/src/vendor/github.com/astaxie/beego/grace/server.go b/src/vendor/github.com/astaxie/beego/grace/server.go
index 101bda56d..b82423353 100644
--- a/src/vendor/github.com/astaxie/beego/grace/server.go
+++ b/src/vendor/github.com/astaxie/beego/grace/server.go
@@ -162,9 +162,7 @@ func (srv *Server) handleSignals() {
signal.Notify(
srv.sigChan,
- syscall.SIGHUP,
- syscall.SIGINT,
- syscall.SIGTERM,
+ hookableSignals...,
)
pid := syscall.Getpid()
@@ -198,7 +196,6 @@ func (srv *Server) signalHooks(ppFlag int, sig os.Signal) {
for _, f := range srv.SignalHooks[ppFlag][sig] {
f()
}
- return
}
// shutdown closes the listener so that no new connections are accepted. it also
@@ -290,3 +287,19 @@ func (srv *Server) fork() (err error) {
return
}
+
+// RegisterSignalHook registers a function to be run PreSignal or PostSignal for a given signal.
+func (srv *Server) RegisterSignalHook(ppFlag int, sig os.Signal, f func()) (err error) {
+ if ppFlag != PreSignal && ppFlag != PostSignal {
+ err = fmt.Errorf("Invalid ppFlag argument. Must be either grace.PreSignal or grace.PostSignal")
+ return
+ }
+ for _, s := range hookableSignals {
+ if s == sig {
+ srv.SignalHooks[ppFlag][sig] = append(srv.SignalHooks[ppFlag][sig], f)
+ return
+ }
+ }
+ err = fmt.Errorf("Signal '%v' is not supported", sig)
+ return
+}
diff --git a/src/vendor/github.com/astaxie/beego/hooks.go b/src/vendor/github.com/astaxie/beego/hooks.go
index 59b10b32f..c5ec8e2dd 100644
--- a/src/vendor/github.com/astaxie/beego/hooks.go
+++ b/src/vendor/github.com/astaxie/beego/hooks.go
@@ -6,6 +6,8 @@ import (
"net/http"
"path/filepath"
+ "github.com/astaxie/beego/context"
+ "github.com/astaxie/beego/logs"
"github.com/astaxie/beego/session"
)
@@ -30,6 +32,8 @@ func registerDefaultErrorHandler() error {
"502": badGateway,
"503": serviceUnavailable,
"504": gatewayTimeout,
+ "417": invalidxsrf,
+ "422": missingxsrf,
}
for e, h := range m {
if _, ok := ErrorMaps[e]; !ok {
@@ -43,23 +47,25 @@ func registerSession() error {
if BConfig.WebConfig.Session.SessionOn {
var err error
sessionConfig := AppConfig.String("sessionConfig")
+ conf := new(session.ManagerConfig)
if sessionConfig == "" {
- conf := map[string]interface{}{
- "cookieName": BConfig.WebConfig.Session.SessionName,
- "gclifetime": BConfig.WebConfig.Session.SessionGCMaxLifetime,
- "providerConfig": filepath.ToSlash(BConfig.WebConfig.Session.SessionProviderConfig),
- "secure": BConfig.Listen.EnableHTTPS,
- "enableSetCookie": BConfig.WebConfig.Session.SessionAutoSetCookie,
- "domain": BConfig.WebConfig.Session.SessionDomain,
- "cookieLifeTime": BConfig.WebConfig.Session.SessionCookieLifeTime,
- }
- confBytes, err := json.Marshal(conf)
- if err != nil {
+ conf.CookieName = BConfig.WebConfig.Session.SessionName
+ conf.EnableSetCookie = BConfig.WebConfig.Session.SessionAutoSetCookie
+ conf.Gclifetime = BConfig.WebConfig.Session.SessionGCMaxLifetime
+ conf.Secure = BConfig.Listen.EnableHTTPS
+ conf.CookieLifeTime = BConfig.WebConfig.Session.SessionCookieLifeTime
+ conf.ProviderConfig = filepath.ToSlash(BConfig.WebConfig.Session.SessionProviderConfig)
+ conf.DisableHTTPOnly = BConfig.WebConfig.Session.SessionDisableHTTPOnly
+ conf.Domain = BConfig.WebConfig.Session.SessionDomain
+ conf.EnableSidInHTTPHeader = BConfig.WebConfig.Session.SessionEnableSidInHTTPHeader
+ conf.SessionNameInHTTPHeader = BConfig.WebConfig.Session.SessionNameInHTTPHeader
+ conf.EnableSidInURLQuery = BConfig.WebConfig.Session.SessionEnableSidInURLQuery
+ } else {
+ if err = json.Unmarshal([]byte(sessionConfig), conf); err != nil {
return err
}
- sessionConfig = string(confBytes)
}
- if GlobalSessions, err = session.NewManager(BConfig.WebConfig.Session.SessionProvider, sessionConfig); err != nil {
+ if GlobalSessions, err = session.NewManager(BConfig.WebConfig.Session.SessionProvider, conf); err != nil {
return err
}
go GlobalSessions.GC()
@@ -68,26 +74,30 @@ func registerSession() error {
}
func registerTemplate() error {
- if err := BuildTemplate(BConfig.WebConfig.ViewsPath); err != nil {
+ defer lockViewPaths()
+ if err := AddViewPath(BConfig.WebConfig.ViewsPath); err != nil {
if BConfig.RunMode == DEV {
- Warn(err)
+ logs.Warn(err)
}
return err
}
return nil
}
-func registerDocs() error {
- if BConfig.WebConfig.EnableDocs {
- Get("/docs", serverDocs)
- Get("/docs/*", serverDocs)
- }
- return nil
-}
-
func registerAdmin() error {
if BConfig.Listen.EnableAdmin {
go beeAdminApp.Run()
}
return nil
}
+
+func registerGzip() error {
+ if BConfig.EnableGzip {
+ context.InitGzip(
+ AppConfig.DefaultInt("gzipMinLength", -1),
+ AppConfig.DefaultInt("gzipCompressLevel", -1),
+ AppConfig.DefaultStrings("includedMethods", []string{"GET"}),
+ )
+ }
+ return nil
+}
diff --git a/src/vendor/github.com/astaxie/beego/httplib/README.md b/src/vendor/github.com/astaxie/beego/httplib/README.md
index 6a72cf7cf..97df8e6b9 100644
--- a/src/vendor/github.com/astaxie/beego/httplib/README.md
+++ b/src/vendor/github.com/astaxie/beego/httplib/README.md
@@ -32,7 +32,7 @@ The default timeout is `60` seconds, function prototype:
SetTimeout(connectTimeout, readWriteTimeout time.Duration)
-Exmaple:
+Example:
// GET
httplib.Get("http://beego.me/").SetTimeout(100 * time.Second, 30 * time.Second)
diff --git a/src/vendor/github.com/astaxie/beego/httplib/httplib.go b/src/vendor/github.com/astaxie/beego/httplib/httplib.go
index 769841226..4fd572d68 100644
--- a/src/vendor/github.com/astaxie/beego/httplib/httplib.go
+++ b/src/vendor/github.com/astaxie/beego/httplib/httplib.go
@@ -136,9 +136,11 @@ type BeegoHTTPSettings struct {
TLSClientConfig *tls.Config
Proxy func(*http.Request) (*url.URL, error)
Transport http.RoundTripper
+ CheckRedirect func(req *http.Request, via []*http.Request) error
EnableCookie bool
Gzip bool
DumpBody bool
+ Retries int // if set to -1 means will retry forever
}
// BeegoHTTPRequest provides more useful methods for requesting one url than http.Request.
@@ -188,6 +190,15 @@ func (b *BeegoHTTPRequest) Debug(isdebug bool) *BeegoHTTPRequest {
return b
}
+// Retries sets the retry count.
+// The default is 0, meaning the request is not retried.
+// -1 means retry forever until success.
+// Any other value means retry that many times.
+func (b *BeegoHTTPRequest) Retries(times int) *BeegoHTTPRequest {
+ b.setting.Retries = times
+ return b
+}
+
// DumpBody setting whether need to Dump the Body.
func (b *BeegoHTTPRequest) DumpBody(isdump bool) *BeegoHTTPRequest {
b.setting.DumpBody = isdump
@@ -265,6 +276,15 @@ func (b *BeegoHTTPRequest) SetProxy(proxy func(*http.Request) (*url.URL, error))
return b
}
+// SetCheckRedirect specifies the policy for handling redirects.
+//
+// If CheckRedirect is nil, the Client uses its default policy,
+// which is to stop after 10 consecutive requests.
+func (b *BeegoHTTPRequest) SetCheckRedirect(redirect func(req *http.Request, via []*http.Request) error) *BeegoHTTPRequest {
+ b.setting.CheckRedirect = redirect
+ return b
+}
+
// Param adds query param in to request.
// params build query string as ?key1=value1&key2=value2...
func (b *BeegoHTTPRequest) Param(key, value string) *BeegoHTTPRequest {
@@ -315,7 +335,7 @@ func (b *BeegoHTTPRequest) JSONBody(obj interface{}) (*BeegoHTTPRequest, error)
func (b *BeegoHTTPRequest) buildURL(paramBody string) {
// build GET url with query string
if b.req.Method == "GET" && len(paramBody) > 0 {
- if strings.Index(b.url, "?") != -1 {
+ if strings.Contains(b.url, "?") {
b.url += "&" + paramBody
} else {
b.url = b.url + "?" + paramBody
@@ -324,7 +344,7 @@ func (b *BeegoHTTPRequest) buildURL(paramBody string) {
}
// build POST/PUT/PATCH url and body
- if (b.req.Method == "POST" || b.req.Method == "PUT" || b.req.Method == "PATCH") && b.req.Body == nil {
+ if (b.req.Method == "POST" || b.req.Method == "PUT" || b.req.Method == "PATCH" || b.req.Method == "DELETE") && b.req.Body == nil {
// with files
if len(b.files) > 0 {
pr, pw := io.Pipe()
@@ -380,7 +400,7 @@ func (b *BeegoHTTPRequest) getResponse() (*http.Response, error) {
}
// DoRequest will do the client.Do
-func (b *BeegoHTTPRequest) DoRequest() (*http.Response, error) {
+func (b *BeegoHTTPRequest) DoRequest() (resp *http.Response, err error) {
var paramBody string
if len(b.params) > 0 {
var buf bytes.Buffer
@@ -409,9 +429,10 @@ func (b *BeegoHTTPRequest) DoRequest() (*http.Response, error) {
if trans == nil {
// create default transport
trans = &http.Transport{
- TLSClientConfig: b.setting.TLSClientConfig,
- Proxy: b.setting.Proxy,
- Dial: TimeoutDialer(b.setting.ConnectTimeout, b.setting.ReadWriteTimeout),
+ TLSClientConfig: b.setting.TLSClientConfig,
+ Proxy: b.setting.Proxy,
+ Dial: TimeoutDialer(b.setting.ConnectTimeout, b.setting.ReadWriteTimeout),
+ MaxIdleConnsPerHost: -1,
}
} else {
// if b.transport is *http.Transport then set the settings.
@@ -445,6 +466,10 @@ func (b *BeegoHTTPRequest) DoRequest() (*http.Response, error) {
b.req.Header.Set("User-Agent", b.setting.UserAgent)
}
+ if b.setting.CheckRedirect != nil {
+ client.CheckRedirect = b.setting.CheckRedirect
+ }
+
if b.setting.ShowDebug {
dump, err := httputil.DumpRequest(b.req, b.setting.DumpBody)
if err != nil {
@@ -452,7 +477,16 @@ func (b *BeegoHTTPRequest) DoRequest() (*http.Response, error) {
}
b.dump = dump
}
- return client.Do(b.req)
+ // retries defaults to 0, so the request runs once.
+ // If retries is -1, it keeps retrying until success.
+ // If retries is set to a positive value, it retries that fixed number of times.
+ for i := 0; b.setting.Retries == -1 || i <= b.setting.Retries; i++ {
+ resp, err = client.Do(b.req)
+ if err == nil {
+ break
+ }
+ }
+ return resp, err
}
// String returns the body string in response.
@@ -486,9 +520,9 @@ func (b *BeegoHTTPRequest) Bytes() ([]byte, error) {
return nil, err
}
b.body, err = ioutil.ReadAll(reader)
- } else {
- b.body, err = ioutil.ReadAll(resp.Body)
+ return b.body, err
}
+ b.body, err = ioutil.ReadAll(resp.Body)
return b.body, err
}
diff --git a/src/vendor/github.com/astaxie/beego/httplib/httplib_test.go b/src/vendor/github.com/astaxie/beego/httplib/httplib_test.go
index 058150547..32d3e7f68 100644
--- a/src/vendor/github.com/astaxie/beego/httplib/httplib_test.go
+++ b/src/vendor/github.com/astaxie/beego/httplib/httplib_test.go
@@ -102,6 +102,14 @@ func TestSimpleDelete(t *testing.T) {
t.Log(str)
}
+func TestSimpleDeleteParam(t *testing.T) {
+ str, err := Delete("http://httpbin.org/delete").Param("key", "val").String()
+ if err != nil {
+ t.Fatal(err)
+ }
+ t.Log(str)
+}
+
func TestWithCookie(t *testing.T) {
v := "smallfish"
str, err := Get("http://httpbin.org/cookies/set?k1=" + v).SetEnableCookie(true).String()
diff --git a/src/vendor/github.com/astaxie/beego/log.go b/src/vendor/github.com/astaxie/beego/log.go
index 46ec57dd8..e9412f920 100644
--- a/src/vendor/github.com/astaxie/beego/log.go
+++ b/src/vendor/github.com/astaxie/beego/log.go
@@ -33,82 +33,77 @@ const (
)
// BeeLogger references the used application logger.
-var BeeLogger = logs.NewLogger(100)
+var BeeLogger = logs.GetBeeLogger()
// SetLevel sets the global log level used by the simple logger.
func SetLevel(l int) {
- BeeLogger.SetLevel(l)
+ logs.SetLevel(l)
}
// SetLogFuncCall set the CallDepth, default is 3
func SetLogFuncCall(b bool) {
- BeeLogger.EnableFuncCallDepth(b)
- BeeLogger.SetLogFuncCallDepth(3)
+ logs.SetLogFuncCall(b)
}
// SetLogger sets a new logger.
func SetLogger(adaptername string, config string) error {
- err := BeeLogger.SetLogger(adaptername, config)
- if err != nil {
- return err
- }
- return nil
+ return logs.SetLogger(adaptername, config)
}
// Emergency logs a message at emergency level.
func Emergency(v ...interface{}) {
- BeeLogger.Emergency(generateFmtStr(len(v)), v...)
+ logs.Emergency(generateFmtStr(len(v)), v...)
}
// Alert logs a message at alert level.
func Alert(v ...interface{}) {
- BeeLogger.Alert(generateFmtStr(len(v)), v...)
+ logs.Alert(generateFmtStr(len(v)), v...)
}
// Critical logs a message at critical level.
func Critical(v ...interface{}) {
- BeeLogger.Critical(generateFmtStr(len(v)), v...)
+ logs.Critical(generateFmtStr(len(v)), v...)
}
// Error logs a message at error level.
func Error(v ...interface{}) {
- BeeLogger.Error(generateFmtStr(len(v)), v...)
+ logs.Error(generateFmtStr(len(v)), v...)
}
// Warning logs a message at warning level.
func Warning(v ...interface{}) {
- BeeLogger.Warning(generateFmtStr(len(v)), v...)
+ logs.Warning(generateFmtStr(len(v)), v...)
}
// Warn compatibility alias for Warning()
func Warn(v ...interface{}) {
- BeeLogger.Warn(generateFmtStr(len(v)), v...)
+ logs.Warn(generateFmtStr(len(v)), v...)
}
// Notice logs a message at notice level.
func Notice(v ...interface{}) {
- BeeLogger.Notice(generateFmtStr(len(v)), v...)
+ logs.Notice(generateFmtStr(len(v)), v...)
}
// Informational logs a message at info level.
func Informational(v ...interface{}) {
- BeeLogger.Informational(generateFmtStr(len(v)), v...)
+ logs.Informational(generateFmtStr(len(v)), v...)
}
// Info compatibility alias for Warning()
func Info(v ...interface{}) {
- BeeLogger.Info(generateFmtStr(len(v)), v...)
+ logs.Info(generateFmtStr(len(v)), v...)
}
// Debug logs a message at debug level.
func Debug(v ...interface{}) {
- BeeLogger.Debug(generateFmtStr(len(v)), v...)
+ logs.Debug(generateFmtStr(len(v)), v...)
}
// Trace logs a message at trace level.
// compatibility alias for Warning()
func Trace(v ...interface{}) {
- BeeLogger.Trace(generateFmtStr(len(v)), v...)
+ logs.Trace(generateFmtStr(len(v)), v...)
}
func generateFmtStr(n int) string {
diff --git a/src/vendor/github.com/astaxie/beego/logs/alils/alils.go b/src/vendor/github.com/astaxie/beego/logs/alils/alils.go
new file mode 100644
index 000000000..867ff4cb5
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/alils/alils.go
@@ -0,0 +1,186 @@
+package alils
+
+import (
+ "encoding/json"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/astaxie/beego/logs"
+ "github.com/gogo/protobuf/proto"
+)
+
+const (
+ // CacheSize set the flush size
+ CacheSize int = 64
+ // Delimiter define the topic delimiter
+ Delimiter string = "##"
+)
+
+// Config is the Config for Ali Log
+type Config struct {
+ Project string `json:"project"`
+ Endpoint string `json:"endpoint"`
+ KeyID string `json:"key_id"`
+ KeySecret string `json:"key_secret"`
+ LogStore string `json:"log_store"`
+ Topics []string `json:"topics"`
+ Source string `json:"source"`
+ Level int `json:"level"`
+ FlushWhen int `json:"flush_when"`
+}
+
+// aliLSWriter implements LoggerInterface.
+// it writes messages in keep-live tcp connection.
+type aliLSWriter struct {
+ store *LogStore
+ group []*LogGroup
+ withMap bool
+ groupMap map[string]*LogGroup
+ lock *sync.Mutex
+ Config
+}
+
+// NewAliLS create a new Logger
+func NewAliLS() logs.Logger {
+ alils := new(aliLSWriter)
+ alils.Level = logs.LevelTrace
+ return alils
+}
+
+// Init parse config and init struct
+func (c *aliLSWriter) Init(jsonConfig string) (err error) {
+
+ json.Unmarshal([]byte(jsonConfig), c)
+
+ if c.FlushWhen > CacheSize {
+ c.FlushWhen = CacheSize
+ }
+
+ prj := &LogProject{
+ Name: c.Project,
+ Endpoint: c.Endpoint,
+ AccessKeyID: c.KeyID,
+ AccessKeySecret: c.KeySecret,
+ }
+
+ c.store, err = prj.GetLogStore(c.LogStore)
+ if err != nil {
+ return err
+ }
+
+ // Create default Log Group
+ c.group = append(c.group, &LogGroup{
+ Topic: proto.String(""),
+ Source: proto.String(c.Source),
+ Logs: make([]*Log, 0, c.FlushWhen),
+ })
+
+ // Create other Log Group
+ c.groupMap = make(map[string]*LogGroup)
+ for _, topic := range c.Topics {
+
+ lg := &LogGroup{
+ Topic: proto.String(topic),
+ Source: proto.String(c.Source),
+ Logs: make([]*Log, 0, c.FlushWhen),
+ }
+
+ c.group = append(c.group, lg)
+ c.groupMap[topic] = lg
+ }
+
+ if len(c.group) == 1 {
+ c.withMap = false
+ } else {
+ c.withMap = true
+ }
+
+ c.lock = &sync.Mutex{}
+
+ return nil
+}
+
+// WriteMsg write message in connection.
+// if connection is down, try to re-connect.
+func (c *aliLSWriter) WriteMsg(when time.Time, msg string, level int) (err error) {
+
+ if level > c.Level {
+ return nil
+ }
+
+ var topic string
+ var content string
+ var lg *LogGroup
+ if c.withMap {
+
+ // Topic,LogGroup
+ strs := strings.SplitN(msg, Delimiter, 2)
+ if len(strs) == 2 {
+ pos := strings.LastIndex(strs[0], " ")
+ topic = strs[0][pos+1 : len(strs[0])]
+ content = strs[0][0:pos] + strs[1]
+ lg = c.groupMap[topic]
+ }
+
+ // send to empty Topic
+ if lg == nil {
+ content = msg
+ lg = c.group[0]
+ }
+ } else {
+ content = msg
+ lg = c.group[0]
+ }
+
+ c1 := &LogContent{
+ Key: proto.String("msg"),
+ Value: proto.String(content),
+ }
+
+ l := &Log{
+ Time: proto.Uint32(uint32(when.Unix())),
+ Contents: []*LogContent{
+ c1,
+ },
+ }
+
+ c.lock.Lock()
+ lg.Logs = append(lg.Logs, l)
+ c.lock.Unlock()
+
+ if len(lg.Logs) >= c.FlushWhen {
+ c.flush(lg)
+ }
+
+ return nil
+}
+
+// Flush implementing method. empty.
+func (c *aliLSWriter) Flush() {
+
+ // flush all group
+ for _, lg := range c.group {
+ c.flush(lg)
+ }
+}
+
+// Destroy destroy connection writer and close tcp listener.
+func (c *aliLSWriter) Destroy() {
+}
+
+func (c *aliLSWriter) flush(lg *LogGroup) {
+
+ c.lock.Lock()
+ defer c.lock.Unlock()
+ err := c.store.PutLogs(lg)
+ if err != nil {
+ return
+ }
+
+ lg.Logs = make([]*Log, 0, c.FlushWhen)
+}
+
+func init() {
+ logs.Register(logs.AdapterAliLS, NewAliLS)
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/alils/config.go b/src/vendor/github.com/astaxie/beego/logs/alils/config.go
new file mode 100755
index 000000000..e8c24448f
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/alils/config.go
@@ -0,0 +1,13 @@
+package alils
+
+const (
+ version = "0.5.0" // SDK version
+ signatureMethod = "hmac-sha1" // Signature method
+
+ // OffsetNewest stands for the log head offset, i.e. the offset that will be
+ // assigned to the next message that will be produced to the shard.
+ OffsetNewest = "end"
+ // OffsetOldest stands for the oldest offset available on the logstore for a
+ // shard.
+ OffsetOldest = "begin"
+)
diff --git a/src/vendor/github.com/astaxie/beego/logs/alils/log.pb.go b/src/vendor/github.com/astaxie/beego/logs/alils/log.pb.go
new file mode 100755
index 000000000..601b0d78d
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/alils/log.pb.go
@@ -0,0 +1,1038 @@
+package alils
+
+import (
+ "fmt"
+ "io"
+ "math"
+
+ "github.com/gogo/protobuf/proto"
+ github_com_gogo_protobuf_proto "github.com/gogo/protobuf/proto"
+)
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+var (
+ // ErrInvalidLengthLog invalid proto
+ ErrInvalidLengthLog = fmt.Errorf("proto: negative length found during unmarshaling")
+ // ErrIntOverflowLog overflow
+ ErrIntOverflowLog = fmt.Errorf("proto: integer overflow")
+)
+
+// Log define the proto Log
+type Log struct {
+ Time *uint32 `protobuf:"varint,1,req,name=Time" json:"Time,omitempty"`
+ Contents []*LogContent `protobuf:"bytes,2,rep,name=Contents" json:"Contents,omitempty"`
+ XXXUnrecognized []byte `json:"-"`
+}
+
+// Reset the Log
+func (m *Log) Reset() { *m = Log{} }
+
+// String return the Compact Log
+func (m *Log) String() string { return proto.CompactTextString(m) }
+
+// ProtoMessage not implemented
+func (*Log) ProtoMessage() {}
+
+// GetTime return the Log's Time
+func (m *Log) GetTime() uint32 {
+ if m != nil && m.Time != nil {
+ return *m.Time
+ }
+ return 0
+}
+
+// GetContents return the Log's Contents
+func (m *Log) GetContents() []*LogContent {
+ if m != nil {
+ return m.Contents
+ }
+ return nil
+}
+
+// LogContent define the Log content struct
+type LogContent struct {
+ Key *string `protobuf:"bytes,1,req,name=Key" json:"Key,omitempty"`
+ Value *string `protobuf:"bytes,2,req,name=Value" json:"Value,omitempty"`
+ XXXUnrecognized []byte `json:"-"`
+}
+
+// Reset LogContent
+func (m *LogContent) Reset() { *m = LogContent{} }
+
+// String return the compact text
+func (m *LogContent) String() string { return proto.CompactTextString(m) }
+
+// ProtoMessage not implemented
+func (*LogContent) ProtoMessage() {}
+
+// GetKey return the Key
+func (m *LogContent) GetKey() string {
+ if m != nil && m.Key != nil {
+ return *m.Key
+ }
+ return ""
+}
+
+// GetValue return the Value
+func (m *LogContent) GetValue() string {
+ if m != nil && m.Value != nil {
+ return *m.Value
+ }
+ return ""
+}
+
+// LogGroup define the logs struct
+type LogGroup struct {
+ Logs []*Log `protobuf:"bytes,1,rep,name=Logs" json:"Logs,omitempty"`
+ Reserved *string `protobuf:"bytes,2,opt,name=Reserved" json:"Reserved,omitempty"`
+ Topic *string `protobuf:"bytes,3,opt,name=Topic" json:"Topic,omitempty"`
+ Source *string `protobuf:"bytes,4,opt,name=Source" json:"Source,omitempty"`
+ XXXUnrecognized []byte `json:"-"`
+}
+
+// Reset LogGroup
+func (m *LogGroup) Reset() { *m = LogGroup{} }
+
+// String return the compact text
+func (m *LogGroup) String() string { return proto.CompactTextString(m) }
+
+// ProtoMessage not implemented
+func (*LogGroup) ProtoMessage() {}
+
+// GetLogs return the loggroup logs
+func (m *LogGroup) GetLogs() []*Log {
+ if m != nil {
+ return m.Logs
+ }
+ return nil
+}
+
+// GetReserved return Reserved
+func (m *LogGroup) GetReserved() string {
+ if m != nil && m.Reserved != nil {
+ return *m.Reserved
+ }
+ return ""
+}
+
+// GetTopic return Topic
+func (m *LogGroup) GetTopic() string {
+ if m != nil && m.Topic != nil {
+ return *m.Topic
+ }
+ return ""
+}
+
+// GetSource return Source
+func (m *LogGroup) GetSource() string {
+ if m != nil && m.Source != nil {
+ return *m.Source
+ }
+ return ""
+}
+
+// LogGroupList define the LogGroups
+type LogGroupList struct {
+ LogGroups []*LogGroup `protobuf:"bytes,1,rep,name=logGroups" json:"logGroups,omitempty"`
+ XXXUnrecognized []byte `json:"-"`
+}
+
+// Reset LogGroupList
+func (m *LogGroupList) Reset() { *m = LogGroupList{} }
+
+// String return compact text
+func (m *LogGroupList) String() string { return proto.CompactTextString(m) }
+
+// ProtoMessage not implemented
+func (*LogGroupList) ProtoMessage() {}
+
+// GetLogGroups return the LogGroups
+func (m *LogGroupList) GetLogGroups() []*LogGroup {
+ if m != nil {
+ return m.LogGroups
+ }
+ return nil
+}
+
+// Marshal the logs to byte slice
+func (m *Log) Marshal() (data []byte, err error) {
+ size := m.Size()
+ data = make([]byte, size)
+ n, err := m.MarshalTo(data)
+ if err != nil {
+ return nil, err
+ }
+ return data[:n], nil
+}
+
+// MarshalTo data
+func (m *Log) MarshalTo(data []byte) (int, error) {
+ var i int
+ _ = i
+ var l int
+ _ = l
+ if m.Time == nil {
+ return 0, github_com_gogo_protobuf_proto.NewRequiredNotSetError("Time")
+ }
+ data[i] = 0x8
+ i++
+ i = encodeVarintLog(data, i, uint64(*m.Time))
+ if len(m.Contents) > 0 {
+ for _, msg := range m.Contents {
+ data[i] = 0x12
+ i++
+ i = encodeVarintLog(data, i, uint64(msg.Size()))
+ n, err := msg.MarshalTo(data[i:])
+ if err != nil {
+ return 0, err
+ }
+ i += n
+ }
+ }
+ if m.XXXUnrecognized != nil {
+ i += copy(data[i:], m.XXXUnrecognized)
+ }
+ return i, nil
+}
+
+// Marshal LogContent
+func (m *LogContent) Marshal() (data []byte, err error) {
+ size := m.Size()
+ data = make([]byte, size)
+ n, err := m.MarshalTo(data)
+ if err != nil {
+ return nil, err
+ }
+ return data[:n], nil
+}
+
+// MarshalTo logcontent to data
+func (m *LogContent) MarshalTo(data []byte) (int, error) {
+ var i int
+ _ = i
+ var l int
+ _ = l
+ if m.Key == nil {
+ return 0, github_com_gogo_protobuf_proto.NewRequiredNotSetError("Key")
+ }
+ data[i] = 0xa
+ i++
+ i = encodeVarintLog(data, i, uint64(len(*m.Key)))
+ i += copy(data[i:], *m.Key)
+
+ if m.Value == nil {
+ return 0, github_com_gogo_protobuf_proto.NewRequiredNotSetError("Value")
+ }
+ data[i] = 0x12
+ i++
+ i = encodeVarintLog(data, i, uint64(len(*m.Value)))
+ i += copy(data[i:], *m.Value)
+ if m.XXXUnrecognized != nil {
+ i += copy(data[i:], m.XXXUnrecognized)
+ }
+ return i, nil
+}
+
+// Marshal LogGroup
+func (m *LogGroup) Marshal() (data []byte, err error) {
+ size := m.Size()
+ data = make([]byte, size)
+ n, err := m.MarshalTo(data)
+ if err != nil {
+ return nil, err
+ }
+ return data[:n], nil
+}
+
+// MarshalTo LogGroup to data
+func (m *LogGroup) MarshalTo(data []byte) (int, error) {
+ var i int
+ _ = i
+ var l int
+ _ = l
+ if len(m.Logs) > 0 {
+ for _, msg := range m.Logs {
+ data[i] = 0xa
+ i++
+ i = encodeVarintLog(data, i, uint64(msg.Size()))
+ n, err := msg.MarshalTo(data[i:])
+ if err != nil {
+ return 0, err
+ }
+ i += n
+ }
+ }
+ if m.Reserved != nil {
+ data[i] = 0x12
+ i++
+ i = encodeVarintLog(data, i, uint64(len(*m.Reserved)))
+ i += copy(data[i:], *m.Reserved)
+ }
+ if m.Topic != nil {
+ data[i] = 0x1a
+ i++
+ i = encodeVarintLog(data, i, uint64(len(*m.Topic)))
+ i += copy(data[i:], *m.Topic)
+ }
+ if m.Source != nil {
+ data[i] = 0x22
+ i++
+ i = encodeVarintLog(data, i, uint64(len(*m.Source)))
+ i += copy(data[i:], *m.Source)
+ }
+ if m.XXXUnrecognized != nil {
+ i += copy(data[i:], m.XXXUnrecognized)
+ }
+ return i, nil
+}
+
+// Marshal LogGroupList
+func (m *LogGroupList) Marshal() (data []byte, err error) {
+ size := m.Size()
+ data = make([]byte, size)
+ n, err := m.MarshalTo(data)
+ if err != nil {
+ return nil, err
+ }
+ return data[:n], nil
+}
+
+// MarshalTo LogGroupList to data
+func (m *LogGroupList) MarshalTo(data []byte) (int, error) {
+ var i int
+ _ = i
+ var l int
+ _ = l
+ if len(m.LogGroups) > 0 {
+ for _, msg := range m.LogGroups {
+ data[i] = 0xa
+ i++
+ i = encodeVarintLog(data, i, uint64(msg.Size()))
+ n, err := msg.MarshalTo(data[i:])
+ if err != nil {
+ return 0, err
+ }
+ i += n
+ }
+ }
+ if m.XXXUnrecognized != nil {
+ i += copy(data[i:], m.XXXUnrecognized)
+ }
+ return i, nil
+}
+
+func encodeFixed64Log(data []byte, offset int, v uint64) int {
+ data[offset] = uint8(v)
+ data[offset+1] = uint8(v >> 8)
+ data[offset+2] = uint8(v >> 16)
+ data[offset+3] = uint8(v >> 24)
+ data[offset+4] = uint8(v >> 32)
+ data[offset+5] = uint8(v >> 40)
+ data[offset+6] = uint8(v >> 48)
+ data[offset+7] = uint8(v >> 56)
+ return offset + 8
+}
+func encodeFixed32Log(data []byte, offset int, v uint32) int {
+ data[offset] = uint8(v)
+ data[offset+1] = uint8(v >> 8)
+ data[offset+2] = uint8(v >> 16)
+ data[offset+3] = uint8(v >> 24)
+ return offset + 4
+}
+func encodeVarintLog(data []byte, offset int, v uint64) int {
+ for v >= 1<<7 {
+ data[offset] = uint8(v&0x7f | 0x80)
+ v >>= 7
+ offset++
+ }
+ data[offset] = uint8(v)
+ return offset + 1
+}
+
+// Size return the log's size
+func (m *Log) Size() (n int) {
+ var l int
+ _ = l
+ if m.Time != nil {
+ n += 1 + sovLog(uint64(*m.Time))
+ }
+ if len(m.Contents) > 0 {
+ for _, e := range m.Contents {
+ l = e.Size()
+ n += 1 + l + sovLog(uint64(l))
+ }
+ }
+ if m.XXXUnrecognized != nil {
+ n += len(m.XXXUnrecognized)
+ }
+ return n
+}
+
+// Size return LogContent size based on Key and Value
+func (m *LogContent) Size() (n int) {
+ var l int
+ _ = l
+ if m.Key != nil {
+ l = len(*m.Key)
+ n += 1 + l + sovLog(uint64(l))
+ }
+ if m.Value != nil {
+ l = len(*m.Value)
+ n += 1 + l + sovLog(uint64(l))
+ }
+ if m.XXXUnrecognized != nil {
+ n += len(m.XXXUnrecognized)
+ }
+ return n
+}
+
+// Size return LogGroup size based on Logs
+func (m *LogGroup) Size() (n int) {
+ var l int
+ _ = l
+ if len(m.Logs) > 0 {
+ for _, e := range m.Logs {
+ l = e.Size()
+ n += 1 + l + sovLog(uint64(l))
+ }
+ }
+ if m.Reserved != nil {
+ l = len(*m.Reserved)
+ n += 1 + l + sovLog(uint64(l))
+ }
+ if m.Topic != nil {
+ l = len(*m.Topic)
+ n += 1 + l + sovLog(uint64(l))
+ }
+ if m.Source != nil {
+ l = len(*m.Source)
+ n += 1 + l + sovLog(uint64(l))
+ }
+ if m.XXXUnrecognized != nil {
+ n += len(m.XXXUnrecognized)
+ }
+ return n
+}
+
+// Size return LogGroupList size
+func (m *LogGroupList) Size() (n int) {
+ var l int
+ _ = l
+ if len(m.LogGroups) > 0 {
+ for _, e := range m.LogGroups {
+ l = e.Size()
+ n += 1 + l + sovLog(uint64(l))
+ }
+ }
+ if m.XXXUnrecognized != nil {
+ n += len(m.XXXUnrecognized)
+ }
+ return n
+}
+
+func sovLog(x uint64) (n int) {
+ for {
+ n++
+ x >>= 7
+ if x == 0 {
+ break
+ }
+ }
+ return n
+}
+func sozLog(x uint64) (n int) {
+ return sovLog((x << 1) ^ (x >> 63))
+}
+
+// Unmarshal data to log
+func (m *Log) Unmarshal(data []byte) error {
+ var hasFields [1]uint64
+ l := len(data)
+ iNdEx := 0
+ for iNdEx < l {
+ preIndex := iNdEx
+ var wire uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ wire |= (uint64(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ fieldNum := int32(wire >> 3)
+ wireType := int(wire & 0x7)
+ if wireType == 4 {
+ return fmt.Errorf("proto: Log: wiretype end group for non-group")
+ }
+ if fieldNum <= 0 {
+ return fmt.Errorf("proto: Log: illegal tag %d (wire type %d)", fieldNum, wire)
+ }
+ switch fieldNum {
+ case 1:
+ if wireType != 0 {
+ return fmt.Errorf("proto: wrong wireType = %d for field Time", wireType)
+ }
+ var v uint32
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ v |= (uint32(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ m.Time = &v
+ hasFields[0] |= uint64(0x00000001)
+ case 2:
+ if wireType != 2 {
+ return fmt.Errorf("proto: wrong wireType = %d for field Contents", wireType)
+ }
+ var msglen int
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ msglen |= (int(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ if msglen < 0 {
+ return ErrInvalidLengthLog
+ }
+ postIndex := iNdEx + msglen
+ if postIndex > l {
+ return io.ErrUnexpectedEOF
+ }
+ m.Contents = append(m.Contents, &LogContent{})
+ if err := m.Contents[len(m.Contents)-1].Unmarshal(data[iNdEx:postIndex]); err != nil {
+ return err
+ }
+ iNdEx = postIndex
+ default:
+ iNdEx = preIndex
+ skippy, err := skipLog(data[iNdEx:])
+ if err != nil {
+ return err
+ }
+ if skippy < 0 {
+ return ErrInvalidLengthLog
+ }
+ if (iNdEx + skippy) > l {
+ return io.ErrUnexpectedEOF
+ }
+ m.XXXUnrecognized = append(m.XXXUnrecognized, data[iNdEx:iNdEx+skippy]...)
+ iNdEx += skippy
+ }
+ }
+ if hasFields[0]&uint64(0x00000001) == 0 {
+ return github_com_gogo_protobuf_proto.NewRequiredNotSetError("Time")
+ }
+
+ if iNdEx > l {
+ return io.ErrUnexpectedEOF
+ }
+ return nil
+}
+
+// Unmarshal data to LogContent
+func (m *LogContent) Unmarshal(data []byte) error {
+ var hasFields [1]uint64
+ l := len(data)
+ iNdEx := 0
+ for iNdEx < l {
+ preIndex := iNdEx
+ var wire uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ wire |= (uint64(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ fieldNum := int32(wire >> 3)
+ wireType := int(wire & 0x7)
+ if wireType == 4 {
+ return fmt.Errorf("proto: Content: wiretype end group for non-group")
+ }
+ if fieldNum <= 0 {
+ return fmt.Errorf("proto: Content: illegal tag %d (wire type %d)", fieldNum, wire)
+ }
+ switch fieldNum {
+ case 1:
+ if wireType != 2 {
+ return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType)
+ }
+ var stringLen uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ stringLen |= (uint64(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ intStringLen := int(stringLen)
+ if intStringLen < 0 {
+ return ErrInvalidLengthLog
+ }
+ postIndex := iNdEx + intStringLen
+ if postIndex > l {
+ return io.ErrUnexpectedEOF
+ }
+ s := string(data[iNdEx:postIndex])
+ m.Key = &s
+ iNdEx = postIndex
+ hasFields[0] |= uint64(0x00000001)
+ case 2:
+ if wireType != 2 {
+ return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
+ }
+ var stringLen uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ stringLen |= (uint64(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ intStringLen := int(stringLen)
+ if intStringLen < 0 {
+ return ErrInvalidLengthLog
+ }
+ postIndex := iNdEx + intStringLen
+ if postIndex > l {
+ return io.ErrUnexpectedEOF
+ }
+ s := string(data[iNdEx:postIndex])
+ m.Value = &s
+ iNdEx = postIndex
+ hasFields[0] |= uint64(0x00000002)
+ default:
+ iNdEx = preIndex
+ skippy, err := skipLog(data[iNdEx:])
+ if err != nil {
+ return err
+ }
+ if skippy < 0 {
+ return ErrInvalidLengthLog
+ }
+ if (iNdEx + skippy) > l {
+ return io.ErrUnexpectedEOF
+ }
+ m.XXXUnrecognized = append(m.XXXUnrecognized, data[iNdEx:iNdEx+skippy]...)
+ iNdEx += skippy
+ }
+ }
+ if hasFields[0]&uint64(0x00000001) == 0 {
+ return github_com_gogo_protobuf_proto.NewRequiredNotSetError("Key")
+ }
+ if hasFields[0]&uint64(0x00000002) == 0 {
+ return github_com_gogo_protobuf_proto.NewRequiredNotSetError("Value")
+ }
+
+ if iNdEx > l {
+ return io.ErrUnexpectedEOF
+ }
+ return nil
+}
+
+// Unmarshal data to LogGroup
+func (m *LogGroup) Unmarshal(data []byte) error {
+ l := len(data)
+ iNdEx := 0
+ for iNdEx < l {
+ preIndex := iNdEx
+ var wire uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ wire |= (uint64(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ fieldNum := int32(wire >> 3)
+ wireType := int(wire & 0x7)
+ if wireType == 4 {
+ return fmt.Errorf("proto: LogGroup: wiretype end group for non-group")
+ }
+ if fieldNum <= 0 {
+ return fmt.Errorf("proto: LogGroup: illegal tag %d (wire type %d)", fieldNum, wire)
+ }
+ switch fieldNum {
+ case 1:
+ if wireType != 2 {
+ return fmt.Errorf("proto: wrong wireType = %d for field Logs", wireType)
+ }
+ var msglen int
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ msglen |= (int(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ if msglen < 0 {
+ return ErrInvalidLengthLog
+ }
+ postIndex := iNdEx + msglen
+ if postIndex > l {
+ return io.ErrUnexpectedEOF
+ }
+ m.Logs = append(m.Logs, &Log{})
+ if err := m.Logs[len(m.Logs)-1].Unmarshal(data[iNdEx:postIndex]); err != nil {
+ return err
+ }
+ iNdEx = postIndex
+ case 2:
+ if wireType != 2 {
+ return fmt.Errorf("proto: wrong wireType = %d for field Reserved", wireType)
+ }
+ var stringLen uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ stringLen |= (uint64(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ intStringLen := int(stringLen)
+ if intStringLen < 0 {
+ return ErrInvalidLengthLog
+ }
+ postIndex := iNdEx + intStringLen
+ if postIndex > l {
+ return io.ErrUnexpectedEOF
+ }
+ s := string(data[iNdEx:postIndex])
+ m.Reserved = &s
+ iNdEx = postIndex
+ case 3:
+ if wireType != 2 {
+ return fmt.Errorf("proto: wrong wireType = %d for field Topic", wireType)
+ }
+ var stringLen uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ stringLen |= (uint64(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ intStringLen := int(stringLen)
+ if intStringLen < 0 {
+ return ErrInvalidLengthLog
+ }
+ postIndex := iNdEx + intStringLen
+ if postIndex > l {
+ return io.ErrUnexpectedEOF
+ }
+ s := string(data[iNdEx:postIndex])
+ m.Topic = &s
+ iNdEx = postIndex
+ case 4:
+ if wireType != 2 {
+ return fmt.Errorf("proto: wrong wireType = %d for field Source", wireType)
+ }
+ var stringLen uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ stringLen |= (uint64(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ intStringLen := int(stringLen)
+ if intStringLen < 0 {
+ return ErrInvalidLengthLog
+ }
+ postIndex := iNdEx + intStringLen
+ if postIndex > l {
+ return io.ErrUnexpectedEOF
+ }
+ s := string(data[iNdEx:postIndex])
+ m.Source = &s
+ iNdEx = postIndex
+ default:
+ iNdEx = preIndex
+ skippy, err := skipLog(data[iNdEx:])
+ if err != nil {
+ return err
+ }
+ if skippy < 0 {
+ return ErrInvalidLengthLog
+ }
+ if (iNdEx + skippy) > l {
+ return io.ErrUnexpectedEOF
+ }
+ m.XXXUnrecognized = append(m.XXXUnrecognized, data[iNdEx:iNdEx+skippy]...)
+ iNdEx += skippy
+ }
+ }
+
+ if iNdEx > l {
+ return io.ErrUnexpectedEOF
+ }
+ return nil
+}
+
+// Unmarshal data to LogGroupList
+func (m *LogGroupList) Unmarshal(data []byte) error {
+ l := len(data)
+ iNdEx := 0
+ for iNdEx < l {
+ preIndex := iNdEx
+ var wire uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ wire |= (uint64(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ fieldNum := int32(wire >> 3)
+ wireType := int(wire & 0x7)
+ if wireType == 4 {
+ return fmt.Errorf("proto: LogGroupList: wiretype end group for non-group")
+ }
+ if fieldNum <= 0 {
+ return fmt.Errorf("proto: LogGroupList: illegal tag %d (wire type %d)", fieldNum, wire)
+ }
+ switch fieldNum {
+ case 1:
+ if wireType != 2 {
+ return fmt.Errorf("proto: wrong wireType = %d for field LogGroups", wireType)
+ }
+ var msglen int
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ msglen |= (int(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ if msglen < 0 {
+ return ErrInvalidLengthLog
+ }
+ postIndex := iNdEx + msglen
+ if postIndex > l {
+ return io.ErrUnexpectedEOF
+ }
+ m.LogGroups = append(m.LogGroups, &LogGroup{})
+ if err := m.LogGroups[len(m.LogGroups)-1].Unmarshal(data[iNdEx:postIndex]); err != nil {
+ return err
+ }
+ iNdEx = postIndex
+ default:
+ iNdEx = preIndex
+ skippy, err := skipLog(data[iNdEx:])
+ if err != nil {
+ return err
+ }
+ if skippy < 0 {
+ return ErrInvalidLengthLog
+ }
+ if (iNdEx + skippy) > l {
+ return io.ErrUnexpectedEOF
+ }
+ m.XXXUnrecognized = append(m.XXXUnrecognized, data[iNdEx:iNdEx+skippy]...)
+ iNdEx += skippy
+ }
+ }
+
+ if iNdEx > l {
+ return io.ErrUnexpectedEOF
+ }
+ return nil
+}
+
+func skipLog(data []byte) (n int, err error) {
+ l := len(data)
+ iNdEx := 0
+ for iNdEx < l {
+ var wire uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return 0, ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return 0, io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ wire |= (uint64(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ wireType := int(wire & 0x7)
+ switch wireType {
+ case 0:
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return 0, ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return 0, io.ErrUnexpectedEOF
+ }
+ iNdEx++
+ if data[iNdEx-1] < 0x80 {
+ break
+ }
+ }
+ return iNdEx, nil
+ case 1:
+ iNdEx += 8
+ return iNdEx, nil
+ case 2:
+ var length int
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return 0, ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return 0, io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ length |= (int(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ iNdEx += length
+ if length < 0 {
+ return 0, ErrInvalidLengthLog
+ }
+ return iNdEx, nil
+ case 3:
+ for {
+ var innerWire uint64
+ var start = iNdEx
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return 0, ErrIntOverflowLog
+ }
+ if iNdEx >= l {
+ return 0, io.ErrUnexpectedEOF
+ }
+ b := data[iNdEx]
+ iNdEx++
+ innerWire |= (uint64(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ innerWireType := int(innerWire & 0x7)
+ if innerWireType == 4 {
+ break
+ }
+ next, err := skipLog(data[start:])
+ if err != nil {
+ return 0, err
+ }
+ iNdEx = start + next
+ }
+ return iNdEx, nil
+ case 4:
+ return iNdEx, nil
+ case 5:
+ iNdEx += 4
+ return iNdEx, nil
+ default:
+ return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
+ }
+ }
+ panic("unreachable")
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/alils/log_config.go b/src/vendor/github.com/astaxie/beego/logs/alils/log_config.go
new file mode 100755
index 000000000..e8564efbd
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/alils/log_config.go
@@ -0,0 +1,42 @@
+package alils
+
+// InputDetail define log detail
+type InputDetail struct {
+ LogType string `json:"logType"`
+ LogPath string `json:"logPath"`
+ FilePattern string `json:"filePattern"`
+ LocalStorage bool `json:"localStorage"`
+ TimeFormat string `json:"timeFormat"`
+ LogBeginRegex string `json:"logBeginRegex"`
+ Regex string `json:"regex"`
+ Keys []string `json:"key"`
+ FilterKeys []string `json:"filterKey"`
+ FilterRegex []string `json:"filterRegex"`
+ TopicFormat string `json:"topicFormat"`
+}
+
+// OutputDetail define the output detail
+type OutputDetail struct {
+ Endpoint string `json:"endpoint"`
+ LogStoreName string `json:"logstoreName"`
+}
+
+// LogConfig define Log Config
+type LogConfig struct {
+ Name string `json:"configName"`
+ InputType string `json:"inputType"`
+ InputDetail InputDetail `json:"inputDetail"`
+ OutputType string `json:"outputType"`
+ OutputDetail OutputDetail `json:"outputDetail"`
+
+ CreateTime uint32
+ LastModifyTime uint32
+
+ project *LogProject
+}
+
+// GetAppliedMachineGroup returns applied machine group of this config.
+func (c *LogConfig) GetAppliedMachineGroup(confName string) (groupNames []string, err error) {
+ groupNames, err = c.project.GetAppliedMachineGroups(c.Name)
+ return
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/alils/log_project.go b/src/vendor/github.com/astaxie/beego/logs/alils/log_project.go
new file mode 100755
index 000000000..59db8cbf7
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/alils/log_project.go
@@ -0,0 +1,819 @@
+/*
+Package alils implements the SDK(v0.5.0) of Simple Log Service(abbr. SLS).
+
+For more description about SLS, please read this article:
+http://gitlab.alibaba-inc.com/sls/doc.
+*/
+package alils
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "net/http/httputil"
+)
+
// errorMessage is the JSON error payload SLS returns on non-200
// responses; used by every request wrapper to surface server errors.
type errorMessage struct {
	Code    string `json:"errorCode"`
	Message string `json:"errorMessage"`
}

// LogProject Define the Ali Project detail. It is the client handle
// for all project-level API calls; credentials are used by the
// request/signature helpers.
type LogProject struct {
	Name            string // Project name
	Endpoint        string // IP or hostname of SLS endpoint
	AccessKeyID     string
	AccessKeySecret string
}
+
+// NewLogProject creates a new SLS project.
+func NewLogProject(name, endpoint, AccessKeyID, accessKeySecret string) (p *LogProject, err error) {
+ p = &LogProject{
+ Name: name,
+ Endpoint: endpoint,
+ AccessKeyID: AccessKeyID,
+ AccessKeySecret: accessKeySecret,
+ }
+ return p, nil
+}
+
+// ListLogStore returns all logstore names of project p.
+func (p *LogProject) ListLogStore() (storeNames []string, err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ uri := fmt.Sprintf("/logstores")
+ r, err := request(p, "GET", uri, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to list logstore")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ type Body struct {
+ Count int
+ LogStores []string
+ }
+ body := &Body{}
+
+ err = json.Unmarshal(buf, body)
+ if err != nil {
+ return
+ }
+
+ storeNames = body.LogStores
+
+ return
+}
+
+// GetLogStore returns logstore according by logstore name.
+func (p *LogProject) GetLogStore(name string) (s *LogStore, err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ r, err := request(p, "GET", "/logstores/"+name, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to get logstore")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ s = &LogStore{}
+ err = json.Unmarshal(buf, s)
+ if err != nil {
+ return
+ }
+ s.project = p
+ return
+}
+
+// CreateLogStore creates a new logstore in SLS,
+// where name is logstore name,
+// and ttl is time-to-live(in day) of logs,
+// and shardCnt is the number of shards.
+func (p *LogProject) CreateLogStore(name string, ttl, shardCnt int) (err error) {
+
+ type Body struct {
+ Name string `json:"logstoreName"`
+ TTL int `json:"ttl"`
+ ShardCount int `json:"shardCount"`
+ }
+
+ store := &Body{
+ Name: name,
+ TTL: ttl,
+ ShardCount: shardCnt,
+ }
+
+ body, err := json.Marshal(store)
+ if err != nil {
+ return
+ }
+
+ h := map[string]string{
+ "x-sls-bodyrawsize": fmt.Sprintf("%v", len(body)),
+ "Content-Type": "application/json",
+ "Accept-Encoding": "deflate", // TODO: support lz4
+ }
+
+ r, err := request(p, "POST", "/logstores", h, body)
+ if err != nil {
+ return
+ }
+
+ body, err = ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(body, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to create logstore")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ return
+}
+
+// DeleteLogStore deletes a logstore according by logstore name.
+func (p *LogProject) DeleteLogStore(name string) (err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ r, err := request(p, "DELETE", "/logstores/"+name, h, nil)
+ if err != nil {
+ return
+ }
+
+ body, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(body, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to delete logstore")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+ return
+}
+
+// UpdateLogStore updates a logstore according by logstore name,
+// obviously we can't modify the logstore name itself.
+func (p *LogProject) UpdateLogStore(name string, ttl, shardCnt int) (err error) {
+
+ type Body struct {
+ Name string `json:"logstoreName"`
+ TTL int `json:"ttl"`
+ ShardCount int `json:"shardCount"`
+ }
+
+ store := &Body{
+ Name: name,
+ TTL: ttl,
+ ShardCount: shardCnt,
+ }
+
+ body, err := json.Marshal(store)
+ if err != nil {
+ return
+ }
+
+ h := map[string]string{
+ "x-sls-bodyrawsize": fmt.Sprintf("%v", len(body)),
+ "Content-Type": "application/json",
+ "Accept-Encoding": "deflate", // TODO: support lz4
+ }
+
+ r, err := request(p, "PUT", "/logstores", h, body)
+ if err != nil {
+ return
+ }
+
+ body, err = ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(body, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to update logstore")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ return
+}
+
+// ListMachineGroup returns machine group name list and the total number of machine groups.
+// The offset starts from 0 and the size is the max number of machine groups could be returned.
+func (p *LogProject) ListMachineGroup(offset, size int) (m []string, total int, err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ if size <= 0 {
+ size = 500
+ }
+
+ uri := fmt.Sprintf("/machinegroups?offset=%v&size=%v", offset, size)
+ r, err := request(p, "GET", uri, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to list machine group")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ type Body struct {
+ MachineGroups []string
+ Count int
+ Total int
+ }
+ body := &Body{}
+
+ err = json.Unmarshal(buf, body)
+ if err != nil {
+ return
+ }
+
+ m = body.MachineGroups
+ total = body.Total
+
+ return
+}
+
+// GetMachineGroup retruns machine group according by machine group name.
+func (p *LogProject) GetMachineGroup(name string) (m *MachineGroup, err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ r, err := request(p, "GET", "/machinegroups/"+name, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to get machine group:%v", name)
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ m = &MachineGroup{}
+ err = json.Unmarshal(buf, m)
+ if err != nil {
+ return
+ }
+ m.project = p
+ return
+}
+
+// CreateMachineGroup creates a new machine group in SLS.
+func (p *LogProject) CreateMachineGroup(m *MachineGroup) (err error) {
+
+ body, err := json.Marshal(m)
+ if err != nil {
+ return
+ }
+
+ h := map[string]string{
+ "x-sls-bodyrawsize": fmt.Sprintf("%v", len(body)),
+ "Content-Type": "application/json",
+ "Accept-Encoding": "deflate", // TODO: support lz4
+ }
+
+ r, err := request(p, "POST", "/machinegroups", h, body)
+ if err != nil {
+ return
+ }
+
+ body, err = ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(body, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to create machine group")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ return
+}
+
+// UpdateMachineGroup updates a machine group.
+func (p *LogProject) UpdateMachineGroup(m *MachineGroup) (err error) {
+
+ body, err := json.Marshal(m)
+ if err != nil {
+ return
+ }
+
+ h := map[string]string{
+ "x-sls-bodyrawsize": fmt.Sprintf("%v", len(body)),
+ "Content-Type": "application/json",
+ "Accept-Encoding": "deflate", // TODO: support lz4
+ }
+
+ r, err := request(p, "PUT", "/machinegroups/"+m.Name, h, body)
+ if err != nil {
+ return
+ }
+
+ body, err = ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(body, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to update machine group")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ return
+}
+
+// DeleteMachineGroup deletes machine group according machine group name.
+func (p *LogProject) DeleteMachineGroup(name string) (err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ r, err := request(p, "DELETE", "/machinegroups/"+name, h, nil)
+ if err != nil {
+ return
+ }
+
+ body, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(body, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to delete machine group")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+ return
+}
+
+// ListConfig returns config names list and the total number of configs.
+// The offset starts from 0 and the size is the max number of configs could be returned.
+func (p *LogProject) ListConfig(offset, size int) (cfgNames []string, total int, err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ if size <= 0 {
+ size = 100
+ }
+
+ uri := fmt.Sprintf("/configs?offset=%v&size=%v", offset, size)
+ r, err := request(p, "GET", uri, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to delete machine group")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ type Body struct {
+ Total int
+ Configs []string
+ }
+ body := &Body{}
+
+ err = json.Unmarshal(buf, body)
+ if err != nil {
+ return
+ }
+
+ cfgNames = body.Configs
+ total = body.Total
+ return
+}
+
+// GetConfig returns config according by config name.
+func (p *LogProject) GetConfig(name string) (c *LogConfig, err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ r, err := request(p, "GET", "/configs/"+name, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to delete config")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ c = &LogConfig{}
+ err = json.Unmarshal(buf, c)
+ if err != nil {
+ return
+ }
+ c.project = p
+ return
+}
+
+// UpdateConfig updates a config.
+func (p *LogProject) UpdateConfig(c *LogConfig) (err error) {
+
+ body, err := json.Marshal(c)
+ if err != nil {
+ return
+ }
+
+ h := map[string]string{
+ "x-sls-bodyrawsize": fmt.Sprintf("%v", len(body)),
+ "Content-Type": "application/json",
+ "Accept-Encoding": "deflate", // TODO: support lz4
+ }
+
+ r, err := request(p, "PUT", "/configs/"+c.Name, h, body)
+ if err != nil {
+ return
+ }
+
+ body, err = ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(body, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to update config")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ return
+}
+
+// CreateConfig creates a new config in SLS.
+func (p *LogProject) CreateConfig(c *LogConfig) (err error) {
+
+ body, err := json.Marshal(c)
+ if err != nil {
+ return
+ }
+
+ h := map[string]string{
+ "x-sls-bodyrawsize": fmt.Sprintf("%v", len(body)),
+ "Content-Type": "application/json",
+ "Accept-Encoding": "deflate", // TODO: support lz4
+ }
+
+ r, err := request(p, "POST", "/configs", h, body)
+ if err != nil {
+ return
+ }
+
+ body, err = ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(body, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to update config")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ return
+}
+
+// DeleteConfig deletes a config according by config name.
+func (p *LogProject) DeleteConfig(name string) (err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ r, err := request(p, "DELETE", "/configs/"+name, h, nil)
+ if err != nil {
+ return
+ }
+
+ body, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(body, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to delete config")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+ return
+}
+
+// GetAppliedMachineGroups returns applied machine group names list according config name.
+func (p *LogProject) GetAppliedMachineGroups(confName string) (groupNames []string, err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ uri := fmt.Sprintf("/configs/%v/machinegroups", confName)
+ r, err := request(p, "GET", uri, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to get applied machine groups")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ type Body struct {
+ Count int
+ Machinegroups []string
+ }
+
+ body := &Body{}
+ err = json.Unmarshal(buf, body)
+ if err != nil {
+ return
+ }
+
+ groupNames = body.Machinegroups
+ return
+}
+
+// GetAppliedConfigs returns applied config names list according machine group name groupName.
+func (p *LogProject) GetAppliedConfigs(groupName string) (confNames []string, err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ uri := fmt.Sprintf("/machinegroups/%v/configs", groupName)
+ r, err := request(p, "GET", uri, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to applied configs")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ type Cfg struct {
+ Count int `json:"count"`
+ Configs []string `json:"configs"`
+ }
+
+ body := &Cfg{}
+ err = json.Unmarshal(buf, body)
+ if err != nil {
+ return
+ }
+
+ confNames = body.Configs
+ return
+}
+
+// ApplyConfigToMachineGroup applies config to machine group.
+func (p *LogProject) ApplyConfigToMachineGroup(confName, groupName string) (err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ uri := fmt.Sprintf("/machinegroups/%v/configs/%v", groupName, confName)
+ r, err := request(p, "PUT", uri, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to apply config to machine group")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+ return
+}
+
+// RemoveConfigFromMachineGroup removes config from machine group.
+func (p *LogProject) RemoveConfigFromMachineGroup(confName, groupName string) (err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ uri := fmt.Sprintf("/machinegroups/%v/configs/%v", groupName, confName)
+ r, err := request(p, "DELETE", uri, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to remove config from machine group")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Printf("%s\n", dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+ return
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/alils/log_store.go b/src/vendor/github.com/astaxie/beego/logs/alils/log_store.go
new file mode 100755
index 000000000..fa5027364
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/alils/log_store.go
@@ -0,0 +1,271 @@
+package alils
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "net/http/httputil"
+ "strconv"
+
+ lz4 "github.com/cloudflare/golz4"
+ "github.com/gogo/protobuf/proto"
+)
+
// LogStore Store the logs: a handle to one logstore inside a project,
// used for shard listing, cursor fetching, and putting/getting logs.
type LogStore struct {
	Name       string `json:"logstoreName"`
	TTL        int // time-to-live of logs, in days (see CreateLogStore)
	ShardCount int

	// Server-assigned timestamps.
	CreateTime     uint32
	LastModifyTime uint32

	// project is the owning LogProject; set by LogProject.GetLogStore.
	project *LogProject
}

// Shard define the Log Shard: the JSON shape of one entry returned by
// the shards listing endpoint.
type Shard struct {
	ShardID int `json:"shardID"`
}
+
+// ListShards returns shard id list of this logstore.
+func (s *LogStore) ListShards() (shardIDs []int, err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ uri := fmt.Sprintf("/logstores/%v/shards", s.Name)
+ r, err := request(s.project, "GET", uri, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to list logstore")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Println(dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ var shards []*Shard
+ err = json.Unmarshal(buf, &shards)
+ if err != nil {
+ return
+ }
+
+ for _, v := range shards {
+ shardIDs = append(shardIDs, v.ShardID)
+ }
+ return
+}
+
+// PutLogs put logs into logstore.
+// The callers should transform user logs into LogGroup.
+func (s *LogStore) PutLogs(lg *LogGroup) (err error) {
+ body, err := proto.Marshal(lg)
+ if err != nil {
+ return
+ }
+
+ // Compresse body with lz4
+ out := make([]byte, lz4.CompressBound(body))
+ n, err := lz4.Compress(body, out)
+ if err != nil {
+ return
+ }
+
+ h := map[string]string{
+ "x-sls-compresstype": "lz4",
+ "x-sls-bodyrawsize": fmt.Sprintf("%v", len(body)),
+ "Content-Type": "application/x-protobuf",
+ }
+
+ uri := fmt.Sprintf("/logstores/%v", s.Name)
+ r, err := request(s.project, "POST", uri, h, out[:n])
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to put logs")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Println(dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+ return
+}
+
+// GetCursor gets log cursor of one shard specified by shardID.
+// The from can be in three form: a) unix timestamp in seccond, b) "begin", c) "end".
+// For more detail please read: http://gitlab.alibaba-inc.com/sls/doc/blob/master/api/shard.md#logstore
+func (s *LogStore) GetCursor(shardID int, from string) (cursor string, err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ uri := fmt.Sprintf("/logstores/%v/shards/%v?type=cursor&from=%v",
+ s.Name, shardID, from)
+
+ r, err := request(s.project, "GET", uri, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to get cursor")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Println(dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ type Body struct {
+ Cursor string
+ }
+ body := &Body{}
+
+ err = json.Unmarshal(buf, body)
+ if err != nil {
+ return
+ }
+ cursor = body.Cursor
+ return
+}
+
+// GetLogsBytes gets logs binary data from shard specified by shardID according cursor.
+// The logGroupMaxCount is the max number of logGroup could be returned.
+// The nextCursor is the next curosr can be used to read logs at next time.
+func (s *LogStore) GetLogsBytes(shardID int, cursor string,
+ logGroupMaxCount int) (out []byte, nextCursor string, err error) {
+
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ "Accept": "application/x-protobuf",
+ "Accept-Encoding": "lz4",
+ }
+
+ uri := fmt.Sprintf("/logstores/%v/shards/%v?type=logs&cursor=%v&count=%v",
+ s.Name, shardID, cursor, logGroupMaxCount)
+
+ r, err := request(s.project, "GET", uri, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to get cursor")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Println(dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ v, ok := r.Header["X-Sls-Compresstype"]
+ if !ok || len(v) == 0 {
+ err = fmt.Errorf("can't find 'x-sls-compresstype' header")
+ return
+ }
+ if v[0] != "lz4" {
+ err = fmt.Errorf("unexpected compress type:%v", v[0])
+ return
+ }
+
+ v, ok = r.Header["X-Sls-Cursor"]
+ if !ok || len(v) == 0 {
+ err = fmt.Errorf("can't find 'x-sls-cursor' header")
+ return
+ }
+ nextCursor = v[0]
+
+ v, ok = r.Header["X-Sls-Bodyrawsize"]
+ if !ok || len(v) == 0 {
+ err = fmt.Errorf("can't find 'x-sls-bodyrawsize' header")
+ return
+ }
+ bodyRawSize, err := strconv.Atoi(v[0])
+ if err != nil {
+ return
+ }
+
+ out = make([]byte, bodyRawSize)
+ err = lz4.Uncompress(buf, out)
+ if err != nil {
+ return
+ }
+
+ return
+}
+
+// LogsBytesDecode decodes logs binary data retruned by GetLogsBytes API
+func LogsBytesDecode(data []byte) (gl *LogGroupList, err error) {
+
+ gl = &LogGroupList{}
+ err = proto.Unmarshal(data, gl)
+ if err != nil {
+ return
+ }
+
+ return
+}
+
+// GetLogs gets logs from shard specified by shardID according cursor.
+// The logGroupMaxCount is the max number of logGroup could be returned.
+// The nextCursor is the next curosr can be used to read logs at next time.
+func (s *LogStore) GetLogs(shardID int, cursor string,
+ logGroupMaxCount int) (gl *LogGroupList, nextCursor string, err error) {
+
+ out, nextCursor, err := s.GetLogsBytes(shardID, cursor, logGroupMaxCount)
+ if err != nil {
+ return
+ }
+
+ gl, err = LogsBytesDecode(out)
+ if err != nil {
+ return
+ }
+
+ return
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/alils/machine_group.go b/src/vendor/github.com/astaxie/beego/logs/alils/machine_group.go
new file mode 100755
index 000000000..b6c69a141
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/alils/machine_group.go
@@ -0,0 +1,91 @@
+package alils
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "net/http/httputil"
+)
+
// MachineGroupAttribute define the Attribute block of a machine group
// (external name and group topic, per the JSON tags).
type MachineGroupAttribute struct {
	ExternalName string `json:"externalName"`
	TopicName    string `json:"groupTopic"`
}

// MachineGroup define the machine Group: a named set of machines
// (identified by IP or user-defined id) that configs are applied to.
type MachineGroup struct {
	Name          string   `json:"groupName"`
	Type          string   `json:"groupType"`
	MachineIDType string   `json:"machineIdentifyType"`
	MachineIDList []string `json:"machineList"`

	Attribute MachineGroupAttribute `json:"groupAttribute"`

	// Server-assigned timestamps (not serialized with json tags).
	CreateTime     uint32
	LastModifyTime uint32

	// project is the owning LogProject; set by LogProject.GetMachineGroup.
	project *LogProject
}

// Machine define the Machine entry returned by the machines listing.
type Machine struct {
	IP            string
	UniqueID      string `json:"machine-uniqueid"`
	UserdefinedID string `json:"userdefined-id"`
}

// MachineList define the Machine List response body.
type MachineList struct {
	Total    int
	Machines []*Machine
}
+
+// ListMachines returns machine list of this machine group.
+func (m *MachineGroup) ListMachines() (ms []*Machine, total int, err error) {
+ h := map[string]string{
+ "x-sls-bodyrawsize": "0",
+ }
+
+ uri := fmt.Sprintf("/machinegroups/%v/machines", m.Name)
+ r, err := request(m.project, "GET", uri, h, nil)
+ if err != nil {
+ return
+ }
+
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return
+ }
+
+ if r.StatusCode != http.StatusOK {
+ errMsg := &errorMessage{}
+ err = json.Unmarshal(buf, errMsg)
+ if err != nil {
+ err = fmt.Errorf("failed to remove config from machine group")
+ dump, _ := httputil.DumpResponse(r, true)
+ fmt.Println(dump)
+ return
+ }
+ err = fmt.Errorf("%v:%v", errMsg.Code, errMsg.Message)
+ return
+ }
+
+ body := &MachineList{}
+ err = json.Unmarshal(buf, body)
+ if err != nil {
+ return
+ }
+
+ ms = body.Machines
+ total = body.Total
+
+ return
+}
+
+// GetAppliedConfigs returns applied configs of this machine group.
+func (m *MachineGroup) GetAppliedConfigs() (confNames []string, err error) {
+ confNames, err = m.project.GetAppliedConfigs(m.Name)
+ return
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/alils/request.go b/src/vendor/github.com/astaxie/beego/logs/alils/request.go
new file mode 100755
index 000000000..50d9c43c5
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/alils/request.go
@@ -0,0 +1,62 @@
+package alils
+
+import (
+ "bytes"
+ "crypto/md5"
+ "fmt"
+ "net/http"
+)
+
// request sends an authenticated HTTP request to the SLS endpoint of
// the given project and returns the raw *http.Response.
//
// The caller must supply the "x-sls-bodyrawsize" header (and
// "Content-Type" when body is non-nil). This function adds the SLS
// public headers (Host, Date, api version, signature method,
// Content-MD5) and the Authorization signature before dispatching.
//
// NOTE(review): the caller is responsible for closing resp.Body.
func request(project *LogProject, method, uri string, headers map[string]string,
	body []byte) (resp *http.Response, err error) {

	// The caller should provide 'x-sls-bodyrawsize' header
	if _, ok := headers["x-sls-bodyrawsize"]; !ok {
		err = fmt.Errorf("Can't find 'x-sls-bodyrawsize' header")
		return
	}

	// SLS public request headers
	headers["Host"] = project.Name + "." + project.Endpoint
	headers["Date"] = nowRFC1123()
	headers["x-sls-apiversion"] = version
	headers["x-sls-signaturemethod"] = signatureMethod
	if body != nil {
		// Content-MD5 is the uppercase hex MD5 of the request body,
		// required by the SLS signing scheme.
		bodyMD5 := fmt.Sprintf("%X", md5.Sum(body))
		headers["Content-MD5"] = bodyMD5

		if _, ok := headers["Content-Type"]; !ok {
			err = fmt.Errorf("Can't find 'Content-Type' header")
			return
		}
	}

	// Calc Authorization
	// Authorization = "SLS <AccessKeyID>:<signature>"
	digest, err := signature(project, method, uri, headers)
	if err != nil {
		return
	}
	auth := fmt.Sprintf("SLS %v:%v", project.AccessKeyID, digest)
	headers["Authorization"] = auth

	// Initialize http request
	reader := bytes.NewReader(body)
	urlStr := fmt.Sprintf("http://%v.%v%v", project.Name, project.Endpoint, uri)
	req, err := http.NewRequest(method, urlStr, reader)
	if err != nil {
		return
	}
	for k, v := range headers {
		req.Header.Add(k, v)
	}

	// Get ready to do request
	resp, err = http.DefaultClient.Do(req)
	if err != nil {
		return
	}

	return
}
diff --git a/src/vendor/github.com/astaxie/beego/logs/alils/signature.go b/src/vendor/github.com/astaxie/beego/logs/alils/signature.go
new file mode 100755
index 000000000..2d6113076
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/alils/signature.go
@@ -0,0 +1,111 @@
+package alils
+
+import (
+ "crypto/hmac"
+ "crypto/sha1"
+ "encoding/base64"
+ "fmt"
+ "net/url"
+ "sort"
+ "strings"
+ "time"
+)
+
+// GMT location
+var gmtLoc = time.FixedZone("GMT", 0)
+
+// NowRFC1123 returns now time in RFC1123 format with GMT timezone,
+// eg. "Mon, 02 Jan 2006 15:04:05 GMT".
+func nowRFC1123() string {
+ return time.Now().In(gmtLoc).Format(time.RFC1123)
+}
+
+// signature calculates a request's signature digest.
+func signature(project *LogProject, method, uri string,
+ headers map[string]string) (digest string, err error) {
+ var contentMD5, contentType, date, canoHeaders, canoResource string
+ var slsHeaderKeys sort.StringSlice
+
+ // SignString = VERB + "\n"
+ // + CONTENT-MD5 + "\n"
+ // + CONTENT-TYPE + "\n"
+ // + DATE + "\n"
+ // + CanonicalizedSLSHeaders + "\n"
+ // + CanonicalizedResource
+
+ if val, ok := headers["Content-MD5"]; ok {
+ contentMD5 = val
+ }
+
+ if val, ok := headers["Content-Type"]; ok {
+ contentType = val
+ }
+
+ date, ok := headers["Date"]
+ if !ok {
+ err = fmt.Errorf("Can't find 'Date' header")
+ return
+ }
+
+ // Calc CanonicalizedSLSHeaders
+ slsHeaders := make(map[string]string, len(headers))
+ for k, v := range headers {
+ l := strings.TrimSpace(strings.ToLower(k))
+ if strings.HasPrefix(l, "x-sls-") {
+ slsHeaders[l] = strings.TrimSpace(v)
+ slsHeaderKeys = append(slsHeaderKeys, l)
+ }
+ }
+
+ sort.Sort(slsHeaderKeys)
+ for i, k := range slsHeaderKeys {
+ canoHeaders += k + ":" + slsHeaders[k]
+ if i+1 < len(slsHeaderKeys) {
+ canoHeaders += "\n"
+ }
+ }
+
+ // Calc CanonicalizedResource
+ u, err := url.Parse(uri)
+ if err != nil {
+ return
+ }
+
+ canoResource += url.QueryEscape(u.Path)
+ if u.RawQuery != "" {
+ var keys sort.StringSlice
+
+ vals := u.Query()
+ for k := range vals {
+ keys = append(keys, k)
+ }
+
+ sort.Sort(keys)
+ canoResource += "?"
+ for i, k := range keys {
+ if i > 0 {
+ canoResource += "&"
+ }
+
+ for _, v := range vals[k] {
+ canoResource += k + "=" + v
+ }
+ }
+ }
+
+ signStr := method + "\n" +
+ contentMD5 + "\n" +
+ contentType + "\n" +
+ date + "\n" +
+ canoHeaders + "\n" +
+ canoResource
+
+ // Signature = base64(hmac-sha1(UTF8-Encoding-Of(SignString),AccessKeySecret))
+ mac := hmac.New(sha1.New, []byte(project.AccessKeySecret))
+ _, err = mac.Write([]byte(signStr))
+ if err != nil {
+ return
+ }
+ digest = base64.StdEncoding.EncodeToString(mac.Sum(nil))
+ return
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/color.go b/src/vendor/github.com/astaxie/beego/logs/color.go
new file mode 100644
index 000000000..41d23638a
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/color.go
@@ -0,0 +1,28 @@
+// Copyright 2014 beego Author. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build !windows
+
+package logs
+
+import "io"
+
// ansiColorWriter is the non-Windows variant: terminals interpret ANSI
// escape codes natively, so writes pass straight through to the
// wrapped writer.
type ansiColorWriter struct {
	w    io.Writer
	mode outputMode
}

// Write forwards p unchanged to the underlying writer.
func (cw *ansiColorWriter) Write(p []byte) (int, error) {
	return cw.w.Write(p)
}
diff --git a/src/vendor/github.com/astaxie/beego/logs/color_windows.go b/src/vendor/github.com/astaxie/beego/logs/color_windows.go
new file mode 100644
index 000000000..4e28f1888
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/color_windows.go
@@ -0,0 +1,428 @@
+// Copyright 2014 beego Author. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build windows
+
+package logs
+
+import (
+ "bytes"
+ "io"
+ "strings"
+ "syscall"
+ "unsafe"
+)
+
+type (
+ csiState int
+ parseResult int
+)
+
+const (
+ outsideCsiCode csiState = iota
+ firstCsiCode
+ secondCsiCode
+)
+
+const (
+ noConsole parseResult = iota
+ changedColor
+ unknown
+)
+
+type ansiColorWriter struct {
+ w io.Writer
+ mode outputMode
+ state csiState
+ paramStartBuf bytes.Buffer
+ paramBuf bytes.Buffer
+}
+
+const (
+ firstCsiChar byte = '\x1b'
+ secondeCsiChar byte = '['
+ separatorChar byte = ';'
+ sgrCode byte = 'm'
+)
+
+const (
+ foregroundBlue = uint16(0x0001)
+ foregroundGreen = uint16(0x0002)
+ foregroundRed = uint16(0x0004)
+ foregroundIntensity = uint16(0x0008)
+ backgroundBlue = uint16(0x0010)
+ backgroundGreen = uint16(0x0020)
+ backgroundRed = uint16(0x0040)
+ backgroundIntensity = uint16(0x0080)
+ underscore = uint16(0x8000)
+
+ foregroundMask = foregroundBlue | foregroundGreen | foregroundRed | foregroundIntensity
+ backgroundMask = backgroundBlue | backgroundGreen | backgroundRed | backgroundIntensity
+)
+
+const (
+ ansiReset = "0"
+ ansiIntensityOn = "1"
+ ansiIntensityOff = "21"
+ ansiUnderlineOn = "4"
+ ansiUnderlineOff = "24"
+ ansiBlinkOn = "5"
+ ansiBlinkOff = "25"
+
+ ansiForegroundBlack = "30"
+ ansiForegroundRed = "31"
+ ansiForegroundGreen = "32"
+ ansiForegroundYellow = "33"
+ ansiForegroundBlue = "34"
+ ansiForegroundMagenta = "35"
+ ansiForegroundCyan = "36"
+ ansiForegroundWhite = "37"
+ ansiForegroundDefault = "39"
+
+ ansiBackgroundBlack = "40"
+ ansiBackgroundRed = "41"
+ ansiBackgroundGreen = "42"
+ ansiBackgroundYellow = "43"
+ ansiBackgroundBlue = "44"
+ ansiBackgroundMagenta = "45"
+ ansiBackgroundCyan = "46"
+ ansiBackgroundWhite = "47"
+ ansiBackgroundDefault = "49"
+
+ ansiLightForegroundGray = "90"
+ ansiLightForegroundRed = "91"
+ ansiLightForegroundGreen = "92"
+ ansiLightForegroundYellow = "93"
+ ansiLightForegroundBlue = "94"
+ ansiLightForegroundMagenta = "95"
+ ansiLightForegroundCyan = "96"
+ ansiLightForegroundWhite = "97"
+
+ ansiLightBackgroundGray = "100"
+ ansiLightBackgroundRed = "101"
+ ansiLightBackgroundGreen = "102"
+ ansiLightBackgroundYellow = "103"
+ ansiLightBackgroundBlue = "104"
+ ansiLightBackgroundMagenta = "105"
+ ansiLightBackgroundCyan = "106"
+ ansiLightBackgroundWhite = "107"
+)
+
+type drawType int
+
+const (
+ foreground drawType = iota
+ background
+)
+
+type winColor struct {
+ code uint16
+ drawType drawType
+}
+
+var colorMap = map[string]winColor{
+ ansiForegroundBlack: {0, foreground},
+ ansiForegroundRed: {foregroundRed, foreground},
+ ansiForegroundGreen: {foregroundGreen, foreground},
+ ansiForegroundYellow: {foregroundRed | foregroundGreen, foreground},
+ ansiForegroundBlue: {foregroundBlue, foreground},
+ ansiForegroundMagenta: {foregroundRed | foregroundBlue, foreground},
+ ansiForegroundCyan: {foregroundGreen | foregroundBlue, foreground},
+ ansiForegroundWhite: {foregroundRed | foregroundGreen | foregroundBlue, foreground},
+ ansiForegroundDefault: {foregroundRed | foregroundGreen | foregroundBlue, foreground},
+
+ ansiBackgroundBlack: {0, background},
+ ansiBackgroundRed: {backgroundRed, background},
+ ansiBackgroundGreen: {backgroundGreen, background},
+ ansiBackgroundYellow: {backgroundRed | backgroundGreen, background},
+ ansiBackgroundBlue: {backgroundBlue, background},
+ ansiBackgroundMagenta: {backgroundRed | backgroundBlue, background},
+ ansiBackgroundCyan: {backgroundGreen | backgroundBlue, background},
+ ansiBackgroundWhite: {backgroundRed | backgroundGreen | backgroundBlue, background},
+ ansiBackgroundDefault: {0, background},
+
+ ansiLightForegroundGray: {foregroundIntensity, foreground},
+ ansiLightForegroundRed: {foregroundIntensity | foregroundRed, foreground},
+ ansiLightForegroundGreen: {foregroundIntensity | foregroundGreen, foreground},
+ ansiLightForegroundYellow: {foregroundIntensity | foregroundRed | foregroundGreen, foreground},
+ ansiLightForegroundBlue: {foregroundIntensity | foregroundBlue, foreground},
+ ansiLightForegroundMagenta: {foregroundIntensity | foregroundRed | foregroundBlue, foreground},
+ ansiLightForegroundCyan: {foregroundIntensity | foregroundGreen | foregroundBlue, foreground},
+ ansiLightForegroundWhite: {foregroundIntensity | foregroundRed | foregroundGreen | foregroundBlue, foreground},
+
+ ansiLightBackgroundGray: {backgroundIntensity, background},
+ ansiLightBackgroundRed: {backgroundIntensity | backgroundRed, background},
+ ansiLightBackgroundGreen: {backgroundIntensity | backgroundGreen, background},
+ ansiLightBackgroundYellow: {backgroundIntensity | backgroundRed | backgroundGreen, background},
+ ansiLightBackgroundBlue: {backgroundIntensity | backgroundBlue, background},
+ ansiLightBackgroundMagenta: {backgroundIntensity | backgroundRed | backgroundBlue, background},
+ ansiLightBackgroundCyan: {backgroundIntensity | backgroundGreen | backgroundBlue, background},
+ ansiLightBackgroundWhite: {backgroundIntensity | backgroundRed | backgroundGreen | backgroundBlue, background},
+}
+
+var (
+ kernel32 = syscall.NewLazyDLL("kernel32.dll")
+ procSetConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute")
+ procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")
+ defaultAttr *textAttributes
+)
+
+func init() {
+ screenInfo := getConsoleScreenBufferInfo(uintptr(syscall.Stdout))
+ if screenInfo != nil {
+ colorMap[ansiForegroundDefault] = winColor{
+ screenInfo.WAttributes & (foregroundRed | foregroundGreen | foregroundBlue),
+ foreground,
+ }
+ colorMap[ansiBackgroundDefault] = winColor{
+ screenInfo.WAttributes & (backgroundRed | backgroundGreen | backgroundBlue),
+ background,
+ }
+ defaultAttr = convertTextAttr(screenInfo.WAttributes)
+ }
+}
+
+type coord struct {
+ X, Y int16
+}
+
+type smallRect struct {
+ Left, Top, Right, Bottom int16
+}
+
+type consoleScreenBufferInfo struct {
+ DwSize coord
+ DwCursorPosition coord
+ WAttributes uint16
+ SrWindow smallRect
+ DwMaximumWindowSize coord
+}
+
+func getConsoleScreenBufferInfo(hConsoleOutput uintptr) *consoleScreenBufferInfo {
+ var csbi consoleScreenBufferInfo
+ ret, _, _ := procGetConsoleScreenBufferInfo.Call(
+ hConsoleOutput,
+ uintptr(unsafe.Pointer(&csbi)))
+ if ret == 0 {
+ return nil
+ }
+ return &csbi
+}
+
+func setConsoleTextAttribute(hConsoleOutput uintptr, wAttributes uint16) bool {
+ ret, _, _ := procSetConsoleTextAttribute.Call(
+ hConsoleOutput,
+ uintptr(wAttributes))
+ return ret != 0
+}
+
+type textAttributes struct {
+ foregroundColor uint16
+ backgroundColor uint16
+ foregroundIntensity uint16
+ backgroundIntensity uint16
+ underscore uint16
+ otherAttributes uint16
+}
+
+func convertTextAttr(winAttr uint16) *textAttributes {
+ fgColor := winAttr & (foregroundRed | foregroundGreen | foregroundBlue)
+ bgColor := winAttr & (backgroundRed | backgroundGreen | backgroundBlue)
+ fgIntensity := winAttr & foregroundIntensity
+ bgIntensity := winAttr & backgroundIntensity
+ underline := winAttr & underscore
+ otherAttributes := winAttr &^ (foregroundMask | backgroundMask | underscore)
+ return &textAttributes{fgColor, bgColor, fgIntensity, bgIntensity, underline, otherAttributes}
+}
+
+func convertWinAttr(textAttr *textAttributes) uint16 {
+ var winAttr uint16
+ winAttr |= textAttr.foregroundColor
+ winAttr |= textAttr.backgroundColor
+ winAttr |= textAttr.foregroundIntensity
+ winAttr |= textAttr.backgroundIntensity
+ winAttr |= textAttr.underscore
+ winAttr |= textAttr.otherAttributes
+ return winAttr
+}
+
+func changeColor(param []byte) parseResult {
+ screenInfo := getConsoleScreenBufferInfo(uintptr(syscall.Stdout))
+ if screenInfo == nil {
+ return noConsole
+ }
+
+ winAttr := convertTextAttr(screenInfo.WAttributes)
+ strParam := string(param)
+ if len(strParam) <= 0 {
+ strParam = "0"
+ }
+ csiParam := strings.Split(strParam, string(separatorChar))
+ for _, p := range csiParam {
+ c, ok := colorMap[p]
+ switch {
+ case !ok:
+ switch p {
+ case ansiReset:
+ winAttr.foregroundColor = defaultAttr.foregroundColor
+ winAttr.backgroundColor = defaultAttr.backgroundColor
+ winAttr.foregroundIntensity = defaultAttr.foregroundIntensity
+ winAttr.backgroundIntensity = defaultAttr.backgroundIntensity
+ winAttr.underscore = 0
+ winAttr.otherAttributes = 0
+ case ansiIntensityOn:
+ winAttr.foregroundIntensity = foregroundIntensity
+ case ansiIntensityOff:
+ winAttr.foregroundIntensity = 0
+ case ansiUnderlineOn:
+ winAttr.underscore = underscore
+ case ansiUnderlineOff:
+ winAttr.underscore = 0
+ case ansiBlinkOn:
+ winAttr.backgroundIntensity = backgroundIntensity
+ case ansiBlinkOff:
+ winAttr.backgroundIntensity = 0
+ default:
+ // unknown code
+ }
+ case c.drawType == foreground:
+ winAttr.foregroundColor = c.code
+ case c.drawType == background:
+ winAttr.backgroundColor = c.code
+ }
+ }
+ winTextAttribute := convertWinAttr(winAttr)
+ setConsoleTextAttribute(uintptr(syscall.Stdout), winTextAttribute)
+
+ return changedColor
+}
+
+func parseEscapeSequence(command byte, param []byte) parseResult {
+ if defaultAttr == nil {
+ return noConsole
+ }
+
+ switch command {
+ case sgrCode:
+ return changeColor(param)
+ default:
+ return unknown
+ }
+}
+
+func (cw *ansiColorWriter) flushBuffer() (int, error) {
+ return cw.flushTo(cw.w)
+}
+
+func (cw *ansiColorWriter) resetBuffer() (int, error) {
+ return cw.flushTo(nil)
+}
+
+func (cw *ansiColorWriter) flushTo(w io.Writer) (int, error) {
+ var n1, n2 int
+ var err error
+
+ startBytes := cw.paramStartBuf.Bytes()
+ cw.paramStartBuf.Reset()
+ if w != nil {
+ n1, err = cw.w.Write(startBytes)
+ if err != nil {
+ return n1, err
+ }
+ } else {
+ n1 = len(startBytes)
+ }
+ paramBytes := cw.paramBuf.Bytes()
+ cw.paramBuf.Reset()
+ if w != nil {
+ n2, err = cw.w.Write(paramBytes)
+ if err != nil {
+ return n1 + n2, err
+ }
+ } else {
+ n2 = len(paramBytes)
+ }
+ return n1 + n2, nil
+}
+
+func isParameterChar(b byte) bool {
+ return ('0' <= b && b <= '9') || b == separatorChar
+}
+
+func (cw *ansiColorWriter) Write(p []byte) (int, error) {
+ var r, nw, first, last int
+ if cw.mode != DiscardNonColorEscSeq {
+ cw.state = outsideCsiCode
+ cw.resetBuffer()
+ }
+
+ var err error
+ for i, ch := range p {
+ switch cw.state {
+ case outsideCsiCode:
+ if ch == firstCsiChar {
+ cw.paramStartBuf.WriteByte(ch)
+ cw.state = firstCsiCode
+ }
+ case firstCsiCode:
+ switch ch {
+ case firstCsiChar:
+ cw.paramStartBuf.WriteByte(ch)
+ break
+ case secondeCsiChar:
+ cw.paramStartBuf.WriteByte(ch)
+ cw.state = secondCsiCode
+ last = i - 1
+ default:
+ cw.resetBuffer()
+ cw.state = outsideCsiCode
+ }
+ case secondCsiCode:
+ if isParameterChar(ch) {
+ cw.paramBuf.WriteByte(ch)
+ } else {
+ nw, err = cw.w.Write(p[first:last])
+ r += nw
+ if err != nil {
+ return r, err
+ }
+ first = i + 1
+ result := parseEscapeSequence(ch, cw.paramBuf.Bytes())
+ if result == noConsole || (cw.mode == OutputNonColorEscSeq && result == unknown) {
+ cw.paramBuf.WriteByte(ch)
+ nw, err := cw.flushBuffer()
+ if err != nil {
+ return r, err
+ }
+ r += nw
+ } else {
+ n, _ := cw.resetBuffer()
+ // Add one more to the size of the buffer for the last ch
+ r += n + 1
+ }
+
+ cw.state = outsideCsiCode
+ }
+ default:
+ cw.state = outsideCsiCode
+ }
+ }
+
+ if cw.mode != DiscardNonColorEscSeq || cw.state == outsideCsiCode {
+ nw, err = cw.w.Write(p[first:])
+ r += nw
+ }
+
+ return r, err
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/color_windows_test.go b/src/vendor/github.com/astaxie/beego/logs/color_windows_test.go
new file mode 100644
index 000000000..5074841ac
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/color_windows_test.go
@@ -0,0 +1,294 @@
+// Copyright 2014 beego Author. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build windows
+
+package logs
+
+import (
+ "bytes"
+ "fmt"
+ "syscall"
+ "testing"
+)
+
+var GetConsoleScreenBufferInfo = getConsoleScreenBufferInfo
+
+func ChangeColor(color uint16) {
+ setConsoleTextAttribute(uintptr(syscall.Stdout), color)
+}
+
+func ResetColor() {
+ ChangeColor(uint16(0x0007))
+}
+
+func TestWritePlanText(t *testing.T) {
+ inner := bytes.NewBufferString("")
+ w := NewAnsiColorWriter(inner)
+ expected := "plain text"
+ fmt.Fprintf(w, expected)
+ actual := inner.String()
+ if actual != expected {
+ t.Errorf("Get %q, want %q", actual, expected)
+ }
+}
+
+func TestWriteParseText(t *testing.T) {
+ inner := bytes.NewBufferString("")
+ w := NewAnsiColorWriter(inner)
+
+ inputTail := "\x1b[0mtail text"
+ expectedTail := "tail text"
+ fmt.Fprintf(w, inputTail)
+ actualTail := inner.String()
+ inner.Reset()
+ if actualTail != expectedTail {
+ t.Errorf("Get %q, want %q", actualTail, expectedTail)
+ }
+
+ inputHead := "head text\x1b[0m"
+ expectedHead := "head text"
+ fmt.Fprintf(w, inputHead)
+ actualHead := inner.String()
+ inner.Reset()
+ if actualHead != expectedHead {
+ t.Errorf("Get %q, want %q", actualHead, expectedHead)
+ }
+
+ inputBothEnds := "both ends \x1b[0m text"
+ expectedBothEnds := "both ends text"
+ fmt.Fprintf(w, inputBothEnds)
+ actualBothEnds := inner.String()
+ inner.Reset()
+ if actualBothEnds != expectedBothEnds {
+ t.Errorf("Get %q, want %q", actualBothEnds, expectedBothEnds)
+ }
+
+ inputManyEsc := "\x1b\x1b\x1b\x1b[0m many esc"
+ expectedManyEsc := "\x1b\x1b\x1b many esc"
+ fmt.Fprintf(w, inputManyEsc)
+ actualManyEsc := inner.String()
+ inner.Reset()
+ if actualManyEsc != expectedManyEsc {
+ t.Errorf("Get %q, want %q", actualManyEsc, expectedManyEsc)
+ }
+
+ expectedSplit := "split text"
+ for _, ch := range "split \x1b[0m text" {
+ fmt.Fprintf(w, string(ch))
+ }
+ actualSplit := inner.String()
+ inner.Reset()
+ if actualSplit != expectedSplit {
+ t.Errorf("Get %q, want %q", actualSplit, expectedSplit)
+ }
+}
+
+type screenNotFoundError struct {
+ error
+}
+
+func writeAnsiColor(expectedText, colorCode string) (actualText string, actualAttributes uint16, err error) {
+ inner := bytes.NewBufferString("")
+ w := NewAnsiColorWriter(inner)
+ fmt.Fprintf(w, "\x1b[%sm%s", colorCode, expectedText)
+
+ actualText = inner.String()
+ screenInfo := GetConsoleScreenBufferInfo(uintptr(syscall.Stdout))
+ if screenInfo != nil {
+ actualAttributes = screenInfo.WAttributes
+ } else {
+ err = &screenNotFoundError{}
+ }
+ return
+}
+
+type testParam struct {
+ text string
+ attributes uint16
+ ansiColor string
+}
+
+func TestWriteAnsiColorText(t *testing.T) {
+ screenInfo := GetConsoleScreenBufferInfo(uintptr(syscall.Stdout))
+ if screenInfo == nil {
+ t.Fatal("Could not get ConsoleScreenBufferInfo")
+ }
+ defer ChangeColor(screenInfo.WAttributes)
+ defaultFgColor := screenInfo.WAttributes & uint16(0x0007)
+ defaultBgColor := screenInfo.WAttributes & uint16(0x0070)
+ defaultFgIntensity := screenInfo.WAttributes & uint16(0x0008)
+ defaultBgIntensity := screenInfo.WAttributes & uint16(0x0080)
+
+ fgParam := []testParam{
+ {"foreground black ", uint16(0x0000 | 0x0000), "30"},
+ {"foreground red ", uint16(0x0004 | 0x0000), "31"},
+ {"foreground green ", uint16(0x0002 | 0x0000), "32"},
+ {"foreground yellow ", uint16(0x0006 | 0x0000), "33"},
+ {"foreground blue ", uint16(0x0001 | 0x0000), "34"},
+ {"foreground magenta", uint16(0x0005 | 0x0000), "35"},
+ {"foreground cyan ", uint16(0x0003 | 0x0000), "36"},
+ {"foreground white ", uint16(0x0007 | 0x0000), "37"},
+ {"foreground default", defaultFgColor | 0x0000, "39"},
+ {"foreground light gray ", uint16(0x0000 | 0x0008 | 0x0000), "90"},
+ {"foreground light red ", uint16(0x0004 | 0x0008 | 0x0000), "91"},
+ {"foreground light green ", uint16(0x0002 | 0x0008 | 0x0000), "92"},
+ {"foreground light yellow ", uint16(0x0006 | 0x0008 | 0x0000), "93"},
+ {"foreground light blue ", uint16(0x0001 | 0x0008 | 0x0000), "94"},
+ {"foreground light magenta", uint16(0x0005 | 0x0008 | 0x0000), "95"},
+ {"foreground light cyan ", uint16(0x0003 | 0x0008 | 0x0000), "96"},
+ {"foreground light white ", uint16(0x0007 | 0x0008 | 0x0000), "97"},
+ }
+
+ bgParam := []testParam{
+ {"background black ", uint16(0x0007 | 0x0000), "40"},
+ {"background red ", uint16(0x0007 | 0x0040), "41"},
+ {"background green ", uint16(0x0007 | 0x0020), "42"},
+ {"background yellow ", uint16(0x0007 | 0x0060), "43"},
+ {"background blue ", uint16(0x0007 | 0x0010), "44"},
+ {"background magenta", uint16(0x0007 | 0x0050), "45"},
+ {"background cyan ", uint16(0x0007 | 0x0030), "46"},
+ {"background white ", uint16(0x0007 | 0x0070), "47"},
+ {"background default", uint16(0x0007) | defaultBgColor, "49"},
+ {"background light gray ", uint16(0x0007 | 0x0000 | 0x0080), "100"},
+ {"background light red ", uint16(0x0007 | 0x0040 | 0x0080), "101"},
+ {"background light green ", uint16(0x0007 | 0x0020 | 0x0080), "102"},
+ {"background light yellow ", uint16(0x0007 | 0x0060 | 0x0080), "103"},
+ {"background light blue ", uint16(0x0007 | 0x0010 | 0x0080), "104"},
+ {"background light magenta", uint16(0x0007 | 0x0050 | 0x0080), "105"},
+ {"background light cyan ", uint16(0x0007 | 0x0030 | 0x0080), "106"},
+ {"background light white ", uint16(0x0007 | 0x0070 | 0x0080), "107"},
+ }
+
+ resetParam := []testParam{
+ {"all reset", defaultFgColor | defaultBgColor | defaultFgIntensity | defaultBgIntensity, "0"},
+ {"all reset", defaultFgColor | defaultBgColor | defaultFgIntensity | defaultBgIntensity, ""},
+ }
+
+ boldParam := []testParam{
+ {"bold on", uint16(0x0007 | 0x0008), "1"},
+ {"bold off", uint16(0x0007), "21"},
+ }
+
+ underscoreParam := []testParam{
+ {"underscore on", uint16(0x0007 | 0x8000), "4"},
+ {"underscore off", uint16(0x0007), "24"},
+ }
+
+ blinkParam := []testParam{
+ {"blink on", uint16(0x0007 | 0x0080), "5"},
+ {"blink off", uint16(0x0007), "25"},
+ }
+
+ mixedParam := []testParam{
+ {"both black, bold, underline, blink", uint16(0x0000 | 0x0000 | 0x0008 | 0x8000 | 0x0080), "30;40;1;4;5"},
+ {"both red, bold, underline, blink", uint16(0x0004 | 0x0040 | 0x0008 | 0x8000 | 0x0080), "31;41;1;4;5"},
+ {"both green, bold, underline, blink", uint16(0x0002 | 0x0020 | 0x0008 | 0x8000 | 0x0080), "32;42;1;4;5"},
+ {"both yellow, bold, underline, blink", uint16(0x0006 | 0x0060 | 0x0008 | 0x8000 | 0x0080), "33;43;1;4;5"},
+ {"both blue, bold, underline, blink", uint16(0x0001 | 0x0010 | 0x0008 | 0x8000 | 0x0080), "34;44;1;4;5"},
+ {"both magenta, bold, underline, blink", uint16(0x0005 | 0x0050 | 0x0008 | 0x8000 | 0x0080), "35;45;1;4;5"},
+ {"both cyan, bold, underline, blink", uint16(0x0003 | 0x0030 | 0x0008 | 0x8000 | 0x0080), "36;46;1;4;5"},
+ {"both white, bold, underline, blink", uint16(0x0007 | 0x0070 | 0x0008 | 0x8000 | 0x0080), "37;47;1;4;5"},
+ {"both default, bold, underline, blink", uint16(defaultFgColor | defaultBgColor | 0x0008 | 0x8000 | 0x0080), "39;49;1;4;5"},
+ }
+
+ assertTextAttribute := func(expectedText string, expectedAttributes uint16, ansiColor string) {
+ actualText, actualAttributes, err := writeAnsiColor(expectedText, ansiColor)
+ if actualText != expectedText {
+ t.Errorf("Get %q, want %q", actualText, expectedText)
+ }
+ if err != nil {
+ t.Fatal("Could not get ConsoleScreenBufferInfo")
+ }
+ if actualAttributes != expectedAttributes {
+ t.Errorf("Text: %q, Get 0x%04x, want 0x%04x", expectedText, actualAttributes, expectedAttributes)
+ }
+ }
+
+ for _, v := range fgParam {
+ ResetColor()
+ assertTextAttribute(v.text, v.attributes, v.ansiColor)
+ }
+
+ for _, v := range bgParam {
+ ChangeColor(uint16(0x0070 | 0x0007))
+ assertTextAttribute(v.text, v.attributes, v.ansiColor)
+ }
+
+ for _, v := range resetParam {
+ ChangeColor(uint16(0x0000 | 0x0070 | 0x0008))
+ assertTextAttribute(v.text, v.attributes, v.ansiColor)
+ }
+
+ ResetColor()
+ for _, v := range boldParam {
+ assertTextAttribute(v.text, v.attributes, v.ansiColor)
+ }
+
+ ResetColor()
+ for _, v := range underscoreParam {
+ assertTextAttribute(v.text, v.attributes, v.ansiColor)
+ }
+
+ ResetColor()
+ for _, v := range blinkParam {
+ assertTextAttribute(v.text, v.attributes, v.ansiColor)
+ }
+
+ for _, v := range mixedParam {
+ ResetColor()
+ assertTextAttribute(v.text, v.attributes, v.ansiColor)
+ }
+}
+
+func TestIgnoreUnknownSequences(t *testing.T) {
+ inner := bytes.NewBufferString("")
+ w := NewModeAnsiColorWriter(inner, OutputNonColorEscSeq)
+
+ inputText := "\x1b[=decpath mode"
+ expectedTail := inputText
+ fmt.Fprintf(w, inputText)
+ actualTail := inner.String()
+ inner.Reset()
+ if actualTail != expectedTail {
+ t.Errorf("Get %q, want %q", actualTail, expectedTail)
+ }
+
+ inputText = "\x1b[=tailing esc and bracket\x1b["
+ expectedTail = inputText
+ fmt.Fprintf(w, inputText)
+ actualTail = inner.String()
+ inner.Reset()
+ if actualTail != expectedTail {
+ t.Errorf("Get %q, want %q", actualTail, expectedTail)
+ }
+
+ inputText = "\x1b[?tailing esc\x1b"
+ expectedTail = inputText
+ fmt.Fprintf(w, inputText)
+ actualTail = inner.String()
+ inner.Reset()
+ if actualTail != expectedTail {
+ t.Errorf("Get %q, want %q", actualTail, expectedTail)
+ }
+
+ inputText = "\x1b[1h;3punended color code invalid\x1b3"
+ expectedTail = inputText
+ fmt.Fprintf(w, inputText)
+ actualTail = inner.String()
+ inner.Reset()
+ if actualTail != expectedTail {
+ t.Errorf("Get %q, want %q", actualTail, expectedTail)
+ }
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/conn.go b/src/vendor/github.com/astaxie/beego/logs/conn.go
index 1db1a427c..6d5bf6bfc 100644
--- a/src/vendor/github.com/astaxie/beego/logs/conn.go
+++ b/src/vendor/github.com/astaxie/beego/logs/conn.go
@@ -113,5 +113,5 @@ func (c *connWriter) needToConnectOnMsg() bool {
}
func init() {
- Register("conn", NewConn)
+ Register(AdapterConn, NewConn)
}
diff --git a/src/vendor/github.com/astaxie/beego/logs/console.go b/src/vendor/github.com/astaxie/beego/logs/console.go
index 05d08a42e..e75f2a1b1 100644
--- a/src/vendor/github.com/astaxie/beego/logs/console.go
+++ b/src/vendor/github.com/astaxie/beego/logs/console.go
@@ -41,7 +41,7 @@ var colors = []brush{
newBrush("1;33"), // Warning yellow
newBrush("1;32"), // Notice green
newBrush("1;34"), // Informational blue
- newBrush("1;34"), // Debug blue
+ newBrush("1;44"), // Debug Background blue
}
// consoleWriter implements LoggerInterface and writes messages to terminal.
@@ -56,7 +56,7 @@ func NewConsole() Logger {
cw := &consoleWriter{
lg: newLogWriter(os.Stdout),
Level: LevelDebug,
- Colorful: true,
+ Colorful: runtime.GOOS != "windows",
}
return cw
}
@@ -97,5 +97,5 @@ func (c *consoleWriter) Flush() {
}
func init() {
- Register("console", NewConsole)
+ Register(AdapterConsole, NewConsole)
}
diff --git a/src/vendor/github.com/astaxie/beego/logs/es/es.go b/src/vendor/github.com/astaxie/beego/logs/es/es.go
index 397ca2eff..22f4f650d 100644
--- a/src/vendor/github.com/astaxie/beego/logs/es/es.go
+++ b/src/vendor/github.com/astaxie/beego/logs/es/es.go
@@ -76,5 +76,5 @@ func (el *esLogger) Flush() {
}
func init() {
- logs.Register("es", NewES)
+ logs.Register(logs.AdapterEs, NewES)
}
diff --git a/src/vendor/github.com/astaxie/beego/logs/file.go b/src/vendor/github.com/astaxie/beego/logs/file.go
index 9d3f78a05..e8c1f37e8 100644
--- a/src/vendor/github.com/astaxie/beego/logs/file.go
+++ b/src/vendor/github.com/astaxie/beego/logs/file.go
@@ -22,6 +22,7 @@ import (
"io"
"os"
"path/filepath"
+ "strconv"
"strings"
"sync"
"time"
@@ -30,7 +31,7 @@ import (
// fileLogWriter implements LoggerInterface.
// It writes messages by lines limit, file size limit, or time frequency.
type fileLogWriter struct {
- sync.Mutex // write log order by order and atomic incr maxLinesCurLines and maxSizeCurSize
+ sync.RWMutex // write log order by order and atomic incr maxLinesCurLines and maxSizeCurSize
// The opened file
Filename string `json:"filename"`
fileWriter *os.File
@@ -47,12 +48,15 @@ type fileLogWriter struct {
Daily bool `json:"daily"`
MaxDays int64 `json:"maxdays"`
dailyOpenDate int
+ dailyOpenTime time.Time
Rotate bool `json:"rotate"`
Level int `json:"level"`
- Perm os.FileMode `json:"perm"`
+ Perm string `json:"perm"`
+
+ RotatePerm string `json:"rotateperm"`
fileNameOnly, suffix string // like "project.log", project is fileNameOnly and .log is suffix
}
@@ -60,14 +64,12 @@ type fileLogWriter struct {
// newFileWriter create a FileLogWriter returning as LoggerInterface.
func newFileWriter() Logger {
w := &fileLogWriter{
- Filename: "",
- MaxLines: 1000000,
- MaxSize: 1 << 28, //256 MB
- Daily: true,
- MaxDays: 7,
- Rotate: true,
- Level: LevelTrace,
- Perm: 0660,
+ Daily: true,
+ MaxDays: 7,
+ Rotate: true,
+ RotatePerm: "0440",
+ Level: LevelTrace,
+ Perm: "0660",
}
return w
}
@@ -77,11 +79,11 @@ func newFileWriter() Logger {
// {
// "filename":"logs/beego.log",
// "maxLines":10000,
-// "maxsize":1<<30,
+// "maxsize":1024,
// "daily":true,
// "maxDays":15,
// "rotate":true,
-// "perm":0600
+// "perm":"0600"
// }
func (w *fileLogWriter) Init(jsonConfig string) error {
err := json.Unmarshal([]byte(jsonConfig), w)
@@ -128,7 +130,9 @@ func (w *fileLogWriter) WriteMsg(when time.Time, msg string, level int) error {
h, d := formatTimeHeader(when)
msg = string(h) + msg + "\n"
if w.Rotate {
+ w.RLock()
if w.needRotate(len(msg), d) {
+ w.RUnlock()
w.Lock()
if w.needRotate(len(msg), d) {
if err := w.doRotate(when); err != nil {
@@ -136,6 +140,8 @@ func (w *fileLogWriter) WriteMsg(when time.Time, msg string, level int) error {
}
}
w.Unlock()
+ } else {
+ w.RUnlock()
}
}
@@ -151,7 +157,15 @@ func (w *fileLogWriter) WriteMsg(when time.Time, msg string, level int) error {
func (w *fileLogWriter) createLogFile() (*os.File, error) {
// Open the log file
- fd, err := os.OpenFile(w.Filename, os.O_WRONLY|os.O_APPEND|os.O_CREATE, w.Perm)
+ perm, err := strconv.ParseInt(w.Perm, 8, 64)
+ if err != nil {
+ return nil, err
+ }
+ fd, err := os.OpenFile(w.Filename, os.O_WRONLY|os.O_APPEND|os.O_CREATE, os.FileMode(perm))
+ if err == nil {
+ // Ensure the file ends up with the user-specified perm, because os.OpenFile applies the process umask
+ os.Chmod(w.Filename, os.FileMode(perm))
+ }
return fd, err
}
@@ -159,11 +173,15 @@ func (w *fileLogWriter) initFd() error {
fd := w.fileWriter
fInfo, err := fd.Stat()
if err != nil {
- return fmt.Errorf("get stat err: %s\n", err)
+ return fmt.Errorf("get stat err: %s", err)
}
w.maxSizeCurSize = int(fInfo.Size())
- w.dailyOpenDate = time.Now().Day()
+ w.dailyOpenTime = time.Now()
+ w.dailyOpenDate = w.dailyOpenTime.Day()
w.maxLinesCurLines = 0
+ if w.Daily {
+ go w.dailyRotate(w.dailyOpenTime)
+ }
if fInfo.Size() > 0 {
count, err := w.lines()
if err != nil {
@@ -174,6 +192,20 @@ func (w *fileLogWriter) initFd() error {
return nil
}
+func (w *fileLogWriter) dailyRotate(openTime time.Time) {
+ y, m, d := openTime.Add(24 * time.Hour).Date()
+ nextDay := time.Date(y, m, d, 0, 0, 0, 0, openTime.Location())
+ tm := time.NewTimer(time.Duration(nextDay.UnixNano() - openTime.UnixNano() + 100))
+ <-tm.C
+ w.Lock()
+ if w.needRotate(0, time.Now().Day()) {
+ if err := w.doRotate(time.Now()); err != nil {
+ fmt.Fprintf(os.Stderr, "FileLogWriter(%q): %s\n", w.Filename, err)
+ }
+ }
+ w.Unlock()
+}
+
func (w *fileLogWriter) lines() (int, error) {
fd, err := os.Open(w.Filename)
if err != nil {
@@ -204,26 +236,37 @@ func (w *fileLogWriter) lines() (int, error) {
// DoRotate means it need to write file in new file.
// new file name like xx.2013-01-01.log (daily) or xx.001.log (by line or size)
func (w *fileLogWriter) doRotate(logTime time.Time) error {
- _, err := os.Lstat(w.Filename)
- if err != nil {
- return err
- }
// file exists
// Find the next available number
num := 1
fName := ""
+ rotatePerm, err := strconv.ParseInt(w.RotatePerm, 8, 64)
+ if err != nil {
+ return err
+ }
+
+ _, err = os.Lstat(w.Filename)
+ if err != nil {
+ // even if the file does not exist (or Lstat failed for another reason), we should RESTART the logger
+ goto RESTART_LOGGER
+ }
+
if w.MaxLines > 0 || w.MaxSize > 0 {
for ; err == nil && num <= 999; num++ {
fName = w.fileNameOnly + fmt.Sprintf(".%s.%03d%s", logTime.Format("2006-01-02"), num, w.suffix)
_, err = os.Lstat(fName)
}
} else {
- fName = fmt.Sprintf("%s.%s%s", w.fileNameOnly, logTime.Format("2006-01-02"), w.suffix)
+ fName = fmt.Sprintf("%s.%s%s", w.fileNameOnly, w.dailyOpenTime.Format("2006-01-02"), w.suffix)
_, err = os.Lstat(fName)
+ for ; err == nil && num <= 999; num++ {
+ fName = w.fileNameOnly + fmt.Sprintf(".%s.%03d%s", w.dailyOpenTime.Format("2006-01-02"), num, w.suffix)
+ _, err = os.Lstat(fName)
+ }
}
// return error if the last file checked still existed
if err == nil {
- return fmt.Errorf("Rotate: Cannot find free log number to rename %s\n", w.Filename)
+ return fmt.Errorf("Rotate: Cannot find free log number to rename %s", w.Filename)
}
// close fileWriter before rename
@@ -231,19 +274,25 @@ func (w *fileLogWriter) doRotate(logTime time.Time) error {
// Rename the file to its new found name
// even if occurs error,we MUST guarantee to restart new logger
- renameErr := os.Rename(w.Filename, fName)
- // re-start logger
+ err = os.Rename(w.Filename, fName)
+ if err != nil {
+ goto RESTART_LOGGER
+ }
+
+ err = os.Chmod(fName, os.FileMode(rotatePerm))
+
+RESTART_LOGGER:
+
startLoggerErr := w.startLogger()
go w.deleteOldLog()
if startLoggerErr != nil {
- return fmt.Errorf("Rotate StartLogger: %s\n", startLoggerErr)
+ return fmt.Errorf("Rotate StartLogger: %s", startLoggerErr)
}
- if renameErr != nil {
- return fmt.Errorf("Rotate: %s\n", renameErr)
+ if err != nil {
+ return fmt.Errorf("Rotate: %s", err)
}
return nil
-
}
func (w *fileLogWriter) deleteOldLog() {
@@ -255,8 +304,12 @@ func (w *fileLogWriter) deleteOldLog() {
}
}()
- if !info.IsDir() && info.ModTime().Unix() < (time.Now().Unix()-60*60*24*w.MaxDays) {
- if strings.HasPrefix(filepath.Base(path), w.fileNameOnly) &&
+ if info == nil {
+ return
+ }
+
+ if !info.IsDir() && info.ModTime().Add(24*time.Hour*time.Duration(w.MaxDays)).Before(time.Now()) {
+ if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
strings.HasSuffix(filepath.Base(path), w.suffix) {
os.Remove(path)
}
@@ -278,5 +331,5 @@ func (w *fileLogWriter) Flush() {
}
func init() {
- Register("file", newFileWriter)
+ Register(AdapterFile, newFileWriter)
}
diff --git a/src/vendor/github.com/astaxie/beego/logs/file_test.go b/src/vendor/github.com/astaxie/beego/logs/file_test.go
index 1fa6cdaa4..626521b9d 100644
--- a/src/vendor/github.com/astaxie/beego/logs/file_test.go
+++ b/src/vendor/github.com/astaxie/beego/logs/file_test.go
@@ -17,12 +17,35 @@ package logs
import (
"bufio"
"fmt"
+ "io/ioutil"
"os"
"strconv"
"testing"
"time"
)
+func TestFilePerm(t *testing.T) {
+ log := NewLogger(10000)
+ // use 0666 as test perm cause the default umask is 022
+ log.SetLogger("file", `{"filename":"test.log", "perm": "0666"}`)
+ log.Debug("debug")
+ log.Informational("info")
+ log.Notice("notice")
+ log.Warning("warning")
+ log.Error("error")
+ log.Alert("alert")
+ log.Critical("critical")
+ log.Emergency("emergency")
+ file, err := os.Stat("test.log")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if file.Mode() != 0666 {
+ t.Fatal("unexpected log file permission")
+ }
+ os.Remove("test.log")
+}
+
func TestFile1(t *testing.T) {
log := NewLogger(10000)
log.SetLogger("file", `{"filename":"test.log"}`)
@@ -89,7 +112,7 @@ func TestFile2(t *testing.T) {
os.Remove("test2.log")
}
-func TestFileRotate(t *testing.T) {
+func TestFileRotate_01(t *testing.T) {
log := NewLogger(10000)
log.SetLogger("file", `{"filename":"test3.log","maxlines":4}`)
log.Debug("debug")
@@ -110,6 +133,112 @@ func TestFileRotate(t *testing.T) {
os.Remove("test3.log")
}
+func TestFileRotate_02(t *testing.T) {
+ fn1 := "rotate_day.log"
+ fn2 := "rotate_day." + time.Now().Add(-24*time.Hour).Format("2006-01-02") + ".log"
+ testFileRotate(t, fn1, fn2)
+}
+
+func TestFileRotate_03(t *testing.T) {
+ fn1 := "rotate_day.log"
+ fn := "rotate_day." + time.Now().Add(-24*time.Hour).Format("2006-01-02") + ".log"
+ os.Create(fn)
+ fn2 := "rotate_day." + time.Now().Add(-24*time.Hour).Format("2006-01-02") + ".001.log"
+ testFileRotate(t, fn1, fn2)
+ os.Remove(fn)
+}
+
+func TestFileRotate_04(t *testing.T) {
+ fn1 := "rotate_day.log"
+ fn2 := "rotate_day." + time.Now().Add(-24*time.Hour).Format("2006-01-02") + ".log"
+ testFileDailyRotate(t, fn1, fn2)
+}
+
+func TestFileRotate_05(t *testing.T) {
+ fn1 := "rotate_day.log"
+ fn := "rotate_day." + time.Now().Add(-24*time.Hour).Format("2006-01-02") + ".log"
+ os.Create(fn)
+ fn2 := "rotate_day." + time.Now().Add(-24*time.Hour).Format("2006-01-02") + ".001.log"
+ testFileDailyRotate(t, fn1, fn2)
+ os.Remove(fn)
+}
+func TestFileRotate_06(t *testing.T) { //test file mode
+ log := NewLogger(10000)
+ log.SetLogger("file", `{"filename":"test3.log","maxlines":4}`)
+ log.Debug("debug")
+ log.Info("info")
+ log.Notice("notice")
+ log.Warning("warning")
+ log.Error("error")
+ log.Alert("alert")
+ log.Critical("critical")
+ log.Emergency("emergency")
+ rotateName := "test3" + fmt.Sprintf(".%s.%03d", time.Now().Format("2006-01-02"), 1) + ".log"
+ s, _ := os.Lstat(rotateName)
+ if s.Mode() != 0440 {
+ os.Remove(rotateName)
+ os.Remove("test3.log")
+ t.Fatal("rotate file mode error")
+ }
+ os.Remove(rotateName)
+ os.Remove("test3.log")
+}
+func testFileRotate(t *testing.T, fn1, fn2 string) {
+ fw := &fileLogWriter{
+ Daily: true,
+ MaxDays: 7,
+ Rotate: true,
+ Level: LevelTrace,
+ Perm: "0660",
+ RotatePerm: "0440",
+ }
+ fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
+ fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
+ fw.dailyOpenDate = fw.dailyOpenTime.Day()
+ fw.WriteMsg(time.Now(), "this is a msg for test", LevelDebug)
+
+ for _, file := range []string{fn1, fn2} {
+ _, err := os.Stat(file)
+ if err != nil {
+ t.FailNow()
+ }
+ os.Remove(file)
+ }
+ fw.Destroy()
+}
+
+func testFileDailyRotate(t *testing.T, fn1, fn2 string) {
+ fw := &fileLogWriter{
+ Daily: true,
+ MaxDays: 7,
+ Rotate: true,
+ Level: LevelTrace,
+ Perm: "0660",
+ RotatePerm: "0440",
+ }
+ fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
+ fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
+ fw.dailyOpenDate = fw.dailyOpenTime.Day()
+ today, _ := time.ParseInLocation("2006-01-02", time.Now().Format("2006-01-02"), fw.dailyOpenTime.Location())
+ today = today.Add(-1 * time.Second)
+ fw.dailyRotate(today)
+ for _, file := range []string{fn1, fn2} {
+ _, err := os.Stat(file)
+ if err != nil {
+ t.FailNow()
+ }
+ content, err := ioutil.ReadFile(file)
+ if err != nil {
+ t.FailNow()
+ }
+ if len(content) > 0 {
+ t.FailNow()
+ }
+ os.Remove(file)
+ }
+ fw.Destroy()
+}
+
func exists(path string) (bool, error) {
_, err := os.Stat(path)
if err == nil {
diff --git a/src/vendor/github.com/astaxie/beego/logs/jianliao.go b/src/vendor/github.com/astaxie/beego/logs/jianliao.go
new file mode 100644
index 000000000..88ba0f9af
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/jianliao.go
@@ -0,0 +1,72 @@
+package logs
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "net/url"
+ "time"
+)
+
+// JLWriter implements beego LoggerInterface and is used to send jianliao webhook
+type JLWriter struct {
+ AuthorName string `json:"authorname"`
+ Title string `json:"title"`
+ WebhookURL string `json:"webhookurl"`
+ RedirectURL string `json:"redirecturl,omitempty"`
+ ImageURL string `json:"imageurl,omitempty"`
+ Level int `json:"level"`
+}
+
+// newJLWriter creates a jianliao writer.
+func newJLWriter() Logger {
+ return &JLWriter{Level: LevelTrace}
+}
+
+// Init JLWriter with json config string
+func (s *JLWriter) Init(jsonconfig string) error {
+ return json.Unmarshal([]byte(jsonconfig), s)
+}
+
+// WriteMsg writes a message to the jianliao webhook.
+// it will post the formatted message text to the configured webhook URL.
+func (s *JLWriter) WriteMsg(when time.Time, msg string, level int) error {
+ if level > s.Level {
+ return nil
+ }
+
+ text := fmt.Sprintf("%s %s", when.Format("2006-01-02 15:04:05"), msg)
+
+ form := url.Values{}
+ form.Add("authorName", s.AuthorName)
+ form.Add("title", s.Title)
+ form.Add("text", text)
+ if s.RedirectURL != "" {
+ form.Add("redirectUrl", s.RedirectURL)
+ }
+ if s.ImageURL != "" {
+ form.Add("imageUrl", s.ImageURL)
+ }
+
+ resp, err := http.PostForm(s.WebhookURL, form)
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+ if resp.StatusCode != http.StatusOK {
+ return fmt.Errorf("Post webhook failed %s %d", resp.Status, resp.StatusCode)
+ }
+ return nil
+}
+
+// Flush implementing method. empty.
+func (s *JLWriter) Flush() {
+}
+
+// Destroy implementing method. empty.
+func (s *JLWriter) Destroy() {
+}
+
+func init() {
+ Register(AdapterJianLiao, newJLWriter)
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/log.go b/src/vendor/github.com/astaxie/beego/logs/log.go
index 3b3e22081..0e97a70e3 100644
--- a/src/vendor/github.com/astaxie/beego/logs/log.go
+++ b/src/vendor/github.com/astaxie/beego/logs/log.go
@@ -35,10 +35,12 @@ package logs
import (
"fmt"
+ "log"
"os"
"path"
"runtime"
"strconv"
+ "strings"
"sync"
"time"
)
@@ -55,16 +57,31 @@ const (
LevelDebug
)
-// Legacy loglevel constants to ensure backwards compatibility.
-//
-// Deprecated: will be removed in 1.5.0.
+// levelLoggerImpl is defined to implement log.Logger
+// the real log level will be LevelEmergency
+const levelLoggerImpl = -1
+
+// Name for adapter with beego official support
+const (
+ AdapterConsole = "console"
+ AdapterFile = "file"
+ AdapterMultiFile = "multifile"
+ AdapterMail = "smtp"
+ AdapterConn = "conn"
+ AdapterEs = "es"
+ AdapterJianLiao = "jianliao"
+ AdapterSlack = "slack"
+ AdapterAliLS = "alils"
+)
+
+// Legacy log level constants to ensure backwards compatibility.
const (
LevelInfo = LevelInformational
LevelTrace = LevelDebug
LevelWarn = LevelWarning
)
-type loggerType func() Logger
+type newLoggerFunc func() Logger
// Logger defines the behavior of a log provider.
type Logger interface {
@@ -74,12 +91,13 @@ type Logger interface {
Flush()
}
-var adapters = make(map[string]loggerType)
+var adapters = make(map[string]newLoggerFunc)
+var levelPrefix = [LevelDebug + 1]string{"[M] ", "[A] ", "[C] ", "[E] ", "[W] ", "[N] ", "[I] ", "[D] "}
// Register makes a log provide available by the provided name.
// If Register is called twice with the same name or if driver is nil,
// it panics.
-func Register(name string, log loggerType) {
+func Register(name string, log newLoggerFunc) {
if log == nil {
panic("logs: Register provide is nil")
}
@@ -94,15 +112,19 @@ func Register(name string, log loggerType) {
type BeeLogger struct {
lock sync.Mutex
level int
+ init bool
enableFuncCallDepth bool
loggerFuncCallDepth int
asynchronous bool
+ msgChanLen int64
msgChan chan *logMsg
signalChan chan string
wg sync.WaitGroup
outputs []*nameLogger
}
+const defaultAsyncMsgLen = 1e3
+
type nameLogger struct {
Logger
name string
@@ -119,18 +141,31 @@ var logMsgPool *sync.Pool
// NewLogger returns a new BeeLogger.
// channelLen means the number of messages in chan(used where asynchronous is true).
// if the buffering chan is full, logger adapters write to file or other way.
-func NewLogger(channelLen int64) *BeeLogger {
+func NewLogger(channelLens ...int64) *BeeLogger {
bl := new(BeeLogger)
bl.level = LevelDebug
bl.loggerFuncCallDepth = 2
- bl.msgChan = make(chan *logMsg, channelLen)
+ bl.msgChanLen = append(channelLens, 0)[0]
+ if bl.msgChanLen <= 0 {
+ bl.msgChanLen = defaultAsyncMsgLen
+ }
bl.signalChan = make(chan string, 1)
+ bl.setLogger(AdapterConsole)
return bl
}
// Async set the log to asynchronous and start the goroutine
-func (bl *BeeLogger) Async() *BeeLogger {
+func (bl *BeeLogger) Async(msgLen ...int64) *BeeLogger {
+ bl.lock.Lock()
+ defer bl.lock.Unlock()
+ if bl.asynchronous {
+ return bl
+ }
bl.asynchronous = true
+ if len(msgLen) > 0 && msgLen[0] > 0 {
+ bl.msgChanLen = msgLen[0]
+ }
+ bl.msgChan = make(chan *logMsg, bl.msgChanLen)
logMsgPool = &sync.Pool{
New: func() interface{} {
return &logMsg{}
@@ -143,16 +178,14 @@ func (bl *BeeLogger) Async() *BeeLogger {
// SetLogger provides a given logger adapter into BeeLogger with config string.
// config need to be correct JSON as string: {"interval":360}.
-func (bl *BeeLogger) SetLogger(adapterName string, config string) error {
- bl.lock.Lock()
- defer bl.lock.Unlock()
-
+func (bl *BeeLogger) setLogger(adapterName string, configs ...string) error {
+ config := append(configs, "{}")[0]
for _, l := range bl.outputs {
if l.name == adapterName {
return fmt.Errorf("logs: duplicate adaptername %q (you have set this logger before)", adapterName)
}
}
-
+
log, ok := adapters[adapterName]
if !ok {
return fmt.Errorf("logs: unknown adaptername %q (forgotten Register?)", adapterName)
@@ -168,6 +201,18 @@ func (bl *BeeLogger) SetLogger(adapterName string, config string) error {
return nil
}
+// SetLogger provides a given logger adapter into BeeLogger with config string.
+// config need to be correct JSON as string: {"interval":360}.
+func (bl *BeeLogger) SetLogger(adapterName string, configs ...string) error {
+ bl.lock.Lock()
+ defer bl.lock.Unlock()
+ if !bl.init {
+ bl.outputs = []*nameLogger{}
+ bl.init = true
+ }
+ return bl.setLogger(adapterName, configs...)
+}
+
// DelLogger remove a logger adapter in BeeLogger.
func (bl *BeeLogger) DelLogger(adapterName string) error {
bl.lock.Lock()
@@ -196,7 +241,32 @@ func (bl *BeeLogger) writeToLoggers(when time.Time, msg string, level int) {
}
}
-func (bl *BeeLogger) writeMsg(logLevel int, msg string) error {
+func (bl *BeeLogger) Write(p []byte) (n int, err error) {
+ if len(p) == 0 {
+ return 0, nil
+ }
+ // writeMsg will always add a '\n' character
+ if p[len(p)-1] == '\n' {
+ p = p[0 : len(p)-1]
+ }
+ // set levelLoggerImpl to ensure all log message will be write out
+ err = bl.writeMsg(levelLoggerImpl, string(p))
+ if err == nil {
+ return len(p), err
+ }
+ return 0, err
+}
+
+func (bl *BeeLogger) writeMsg(logLevel int, msg string, v ...interface{}) error {
+ if !bl.init {
+ bl.lock.Lock()
+ bl.setLogger(AdapterConsole)
+ bl.lock.Unlock()
+ }
+
+ if len(v) > 0 {
+ msg = fmt.Sprintf(msg, v...)
+ }
when := time.Now()
if bl.enableFuncCallDepth {
_, file, line, ok := runtime.Caller(bl.loggerFuncCallDepth)
@@ -205,8 +275,17 @@ func (bl *BeeLogger) writeMsg(logLevel int, msg string) error {
line = 0
}
_, filename := path.Split(file)
- msg = "[" + filename + ":" + strconv.FormatInt(int64(line), 10) + "]" + msg
+ msg = "[" + filename + ":" + strconv.Itoa(line) + "] " + msg
}
+
+ //set level info in front of filename info
+ if logLevel == levelLoggerImpl {
+ // set to emergency to ensure all log will be print out correctly
+ logLevel = LevelEmergency
+ } else {
+ msg = levelPrefix[logLevel] + msg
+ }
+
if bl.asynchronous {
lm := logMsgPool.Get().(*logMsg)
lm.level = logLevel
@@ -273,8 +352,7 @@ func (bl *BeeLogger) Emergency(format string, v ...interface{}) {
if LevelEmergency > bl.level {
return
}
- msg := fmt.Sprintf("[M] "+format, v...)
- bl.writeMsg(LevelEmergency, msg)
+ bl.writeMsg(LevelEmergency, format, v...)
}
// Alert Log ALERT level message.
@@ -282,8 +360,7 @@ func (bl *BeeLogger) Alert(format string, v ...interface{}) {
if LevelAlert > bl.level {
return
}
- msg := fmt.Sprintf("[A] "+format, v...)
- bl.writeMsg(LevelAlert, msg)
+ bl.writeMsg(LevelAlert, format, v...)
}
// Critical Log CRITICAL level message.
@@ -291,8 +368,7 @@ func (bl *BeeLogger) Critical(format string, v ...interface{}) {
if LevelCritical > bl.level {
return
}
- msg := fmt.Sprintf("[C] "+format, v...)
- bl.writeMsg(LevelCritical, msg)
+ bl.writeMsg(LevelCritical, format, v...)
}
// Error Log ERROR level message.
@@ -300,17 +376,15 @@ func (bl *BeeLogger) Error(format string, v ...interface{}) {
if LevelError > bl.level {
return
}
- msg := fmt.Sprintf("[E] "+format, v...)
- bl.writeMsg(LevelError, msg)
+ bl.writeMsg(LevelError, format, v...)
}
// Warning Log WARNING level message.
func (bl *BeeLogger) Warning(format string, v ...interface{}) {
- if LevelWarning > bl.level {
+ if LevelWarn > bl.level {
return
}
- msg := fmt.Sprintf("[W] "+format, v...)
- bl.writeMsg(LevelWarning, msg)
+ bl.writeMsg(LevelWarn, format, v...)
}
// Notice Log NOTICE level message.
@@ -318,17 +392,15 @@ func (bl *BeeLogger) Notice(format string, v ...interface{}) {
if LevelNotice > bl.level {
return
}
- msg := fmt.Sprintf("[N] "+format, v...)
- bl.writeMsg(LevelNotice, msg)
+ bl.writeMsg(LevelNotice, format, v...)
}
// Informational Log INFORMATIONAL level message.
func (bl *BeeLogger) Informational(format string, v ...interface{}) {
- if LevelInformational > bl.level {
+ if LevelInfo > bl.level {
return
}
- msg := fmt.Sprintf("[I] "+format, v...)
- bl.writeMsg(LevelInformational, msg)
+ bl.writeMsg(LevelInfo, format, v...)
}
// Debug Log DEBUG level message.
@@ -336,28 +408,25 @@ func (bl *BeeLogger) Debug(format string, v ...interface{}) {
if LevelDebug > bl.level {
return
}
- msg := fmt.Sprintf("[D] "+format, v...)
- bl.writeMsg(LevelDebug, msg)
+ bl.writeMsg(LevelDebug, format, v...)
}
// Warn Log WARN level message.
// compatibility alias for Warning()
func (bl *BeeLogger) Warn(format string, v ...interface{}) {
- if LevelWarning > bl.level {
+ if LevelWarn > bl.level {
return
}
- msg := fmt.Sprintf("[W] "+format, v...)
- bl.writeMsg(LevelWarning, msg)
+ bl.writeMsg(LevelWarn, format, v...)
}
// Info Log INFO level message.
// compatibility alias for Informational()
func (bl *BeeLogger) Info(format string, v ...interface{}) {
- if LevelInformational > bl.level {
+ if LevelInfo > bl.level {
return
}
- msg := fmt.Sprintf("[I] "+format, v...)
- bl.writeMsg(LevelInformational, msg)
+ bl.writeMsg(LevelInfo, format, v...)
}
// Trace Log TRACE level message.
@@ -366,8 +435,7 @@ func (bl *BeeLogger) Trace(format string, v ...interface{}) {
if LevelDebug > bl.level {
return
}
- msg := fmt.Sprintf("[D] "+format, v...)
- bl.writeMsg(LevelDebug, msg)
+ bl.writeMsg(LevelDebug, format, v...)
}
// Flush flush all chan data.
@@ -386,6 +454,7 @@ func (bl *BeeLogger) Close() {
if bl.asynchronous {
bl.signalChan <- "close"
bl.wg.Wait()
+ close(bl.msgChan)
} else {
bl.flush()
for _, l := range bl.outputs {
@@ -393,7 +462,6 @@ func (bl *BeeLogger) Close() {
}
bl.outputs = nil
}
- close(bl.msgChan)
close(bl.signalChan)
}
@@ -407,16 +475,172 @@ func (bl *BeeLogger) Reset() {
}
func (bl *BeeLogger) flush() {
- for {
- if len(bl.msgChan) > 0 {
- bm := <-bl.msgChan
- bl.writeToLoggers(bm.when, bm.msg, bm.level)
- logMsgPool.Put(bm)
- continue
+ if bl.asynchronous {
+ for {
+ if len(bl.msgChan) > 0 {
+ bm := <-bl.msgChan
+ bl.writeToLoggers(bm.when, bm.msg, bm.level)
+ logMsgPool.Put(bm)
+ continue
+ }
+ break
}
- break
}
for _, l := range bl.outputs {
l.Flush()
}
}
+
+// beeLogger references the used application logger.
+var beeLogger = NewLogger()
+
+// GetBeeLogger returns the default BeeLogger
+func GetBeeLogger() *BeeLogger {
+ return beeLogger
+}
+
+var beeLoggerMap = struct {
+ sync.RWMutex
+ logs map[string]*log.Logger
+}{
+ logs: map[string]*log.Logger{},
+}
+
+// GetLogger returns a *log.Logger writing to the default BeeLogger with the given prefix
+func GetLogger(prefixes ...string) *log.Logger {
+ prefix := append(prefixes, "")[0]
+ if prefix != "" {
+ prefix = fmt.Sprintf(`[%s] `, strings.ToUpper(prefix))
+ }
+ beeLoggerMap.RLock()
+ l, ok := beeLoggerMap.logs[prefix]
+ if ok {
+ beeLoggerMap.RUnlock()
+ return l
+ }
+ beeLoggerMap.RUnlock()
+ beeLoggerMap.Lock()
+ defer beeLoggerMap.Unlock()
+ l, ok = beeLoggerMap.logs[prefix]
+ if !ok {
+ l = log.New(beeLogger, prefix, 0)
+ beeLoggerMap.logs[prefix] = l
+ }
+ return l
+}
+
+// Reset will remove all the adapter
+func Reset() {
+ beeLogger.Reset()
+}
+
+// Async set the beelogger with Async mode and hold msglen messages
+func Async(msgLen ...int64) *BeeLogger {
+ return beeLogger.Async(msgLen...)
+}
+
+// SetLevel sets the global log level used by the simple logger.
+func SetLevel(l int) {
+ beeLogger.SetLevel(l)
+}
+
+// EnableFuncCallDepth enable log funcCallDepth
+func EnableFuncCallDepth(b bool) {
+ beeLogger.enableFuncCallDepth = b
+}
+
+// SetLogFuncCall set the CallDepth, default is 4
+func SetLogFuncCall(b bool) {
+ beeLogger.EnableFuncCallDepth(b)
+ beeLogger.SetLogFuncCallDepth(4)
+}
+
+// SetLogFuncCallDepth set log funcCallDepth
+func SetLogFuncCallDepth(d int) {
+ beeLogger.loggerFuncCallDepth = d
+}
+
+// SetLogger sets a new logger.
+func SetLogger(adapter string, config ...string) error {
+ return beeLogger.SetLogger(adapter, config...)
+}
+
+// Emergency logs a message at emergency level.
+func Emergency(f interface{}, v ...interface{}) {
+ beeLogger.Emergency(formatLog(f, v...))
+}
+
+// Alert logs a message at alert level.
+func Alert(f interface{}, v ...interface{}) {
+ beeLogger.Alert(formatLog(f, v...))
+}
+
+// Critical logs a message at critical level.
+func Critical(f interface{}, v ...interface{}) {
+ beeLogger.Critical(formatLog(f, v...))
+}
+
+// Error logs a message at error level.
+func Error(f interface{}, v ...interface{}) {
+ beeLogger.Error(formatLog(f, v...))
+}
+
+// Warning logs a message at warning level.
+func Warning(f interface{}, v ...interface{}) {
+ beeLogger.Warn(formatLog(f, v...))
+}
+
+// Warn compatibility alias for Warning()
+func Warn(f interface{}, v ...interface{}) {
+ beeLogger.Warn(formatLog(f, v...))
+}
+
+// Notice logs a message at notice level.
+func Notice(f interface{}, v ...interface{}) {
+ beeLogger.Notice(formatLog(f, v...))
+}
+
+// Informational logs a message at info level.
+func Informational(f interface{}, v ...interface{}) {
+ beeLogger.Info(formatLog(f, v...))
+}
+
+// Info compatibility alias for Informational()
+func Info(f interface{}, v ...interface{}) {
+ beeLogger.Info(formatLog(f, v...))
+}
+
+// Debug logs a message at debug level.
+func Debug(f interface{}, v ...interface{}) {
+ beeLogger.Debug(formatLog(f, v...))
+}
+
+// Trace logs a message at trace level.
+// compatibility alias for Debug()
+func Trace(f interface{}, v ...interface{}) {
+ beeLogger.Trace(formatLog(f, v...))
+}
+
+func formatLog(f interface{}, v ...interface{}) string {
+ var msg string
+ switch f.(type) {
+ case string:
+ msg = f.(string)
+ if len(v) == 0 {
+ return msg
+ }
+ if strings.Contains(msg, "%") && !strings.Contains(msg, "%%") {
+ //format string
+ } else {
+ //do not contain format char
+ msg += strings.Repeat(" %v", len(v))
+ }
+ default:
+ msg = fmt.Sprint(f)
+ if len(v) == 0 {
+ return msg
+ }
+ msg += strings.Repeat(" %v", len(v))
+ }
+ return fmt.Sprintf(msg, v...)
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/logger.go b/src/vendor/github.com/astaxie/beego/logs/logger.go
index 323c41c56..b5d7255f0 100644
--- a/src/vendor/github.com/astaxie/beego/logs/logger.go
+++ b/src/vendor/github.com/astaxie/beego/logs/logger.go
@@ -12,11 +12,12 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-
package logs
import (
+ "fmt"
"io"
+ "os"
"sync"
"time"
)
@@ -37,44 +38,164 @@ func (lg *logWriter) println(when time.Time, msg string) {
lg.Unlock()
}
+type outputMode int
+
+// DiscardNonColorEscSeq supports the divided color escape sequence.
+// But non-color escape sequence is not output.
+// Please use the OutputNonColorEscSeq If you want to output a non-color
+// escape sequences such as ncurses. However, it does not support the divided
+// color escape sequence.
+const (
+ _ outputMode = iota
+ DiscardNonColorEscSeq
+ OutputNonColorEscSeq
+)
+
+// NewAnsiColorWriter creates and initializes a new ansiColorWriter
+// using io.Writer w as its initial contents.
+// In the console of Windows, which change the foreground and background
+// colors of the text by the escape sequence.
+// In the console of other systems, which writes to w all text.
+func NewAnsiColorWriter(w io.Writer) io.Writer {
+ return NewModeAnsiColorWriter(w, DiscardNonColorEscSeq)
+}
+
+// NewModeAnsiColorWriter create and initializes a new ansiColorWriter
+// by specifying the outputMode.
+func NewModeAnsiColorWriter(w io.Writer, mode outputMode) io.Writer {
+ if _, ok := w.(*ansiColorWriter); !ok {
+ return &ansiColorWriter{
+ w: w,
+ mode: mode,
+ }
+ }
+ return w
+}
+
+const (
+ y1 = `0123456789`
+ y2 = `0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789`
+ y3 = `0000000000111111111122222222223333333333444444444455555555556666666666777777777788888888889999999999`
+ y4 = `0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789`
+ mo1 = `000000000111`
+ mo2 = `123456789012`
+ d1 = `0000000001111111111222222222233`
+ d2 = `1234567890123456789012345678901`
+ h1 = `000000000011111111112222`
+ h2 = `012345678901234567890123`
+ mi1 = `000000000011111111112222222222333333333344444444445555555555`
+ mi2 = `012345678901234567890123456789012345678901234567890123456789`
+ s1 = `000000000011111111112222222222333333333344444444445555555555`
+ s2 = `012345678901234567890123456789012345678901234567890123456789`
+)
+
func formatTimeHeader(when time.Time) ([]byte, int) {
y, mo, d := when.Date()
h, mi, s := when.Clock()
- //len(2006/01/02 15:03:04)==19
+ //len("2006/01/02 15:04:05 ")==20
var buf [20]byte
- t := 3
- for y >= 10 {
- p := y / 10
- buf[t] = byte('0' + y - p*10)
- y = p
- t--
- }
- buf[0] = byte('0' + y)
+
+ buf[0] = y1[y/1000%10]
+ buf[1] = y2[y/100]
+ buf[2] = y3[y-y/100*100]
+ buf[3] = y4[y-y/100*100]
buf[4] = '/'
- if mo > 9 {
- buf[5] = '1'
- buf[6] = byte('0' + mo - 9)
- } else {
- buf[5] = '0'
- buf[6] = byte('0' + mo)
- }
+ buf[5] = mo1[mo-1]
+ buf[6] = mo2[mo-1]
buf[7] = '/'
- t = d / 10
- buf[8] = byte('0' + t)
- buf[9] = byte('0' + d - t*10)
+ buf[8] = d1[d-1]
+ buf[9] = d2[d-1]
buf[10] = ' '
- t = h / 10
- buf[11] = byte('0' + t)
- buf[12] = byte('0' + h - t*10)
+ buf[11] = h1[h]
+ buf[12] = h2[h]
buf[13] = ':'
- t = mi / 10
- buf[14] = byte('0' + t)
- buf[15] = byte('0' + mi - t*10)
+ buf[14] = mi1[mi]
+ buf[15] = mi2[mi]
buf[16] = ':'
- t = s / 10
- buf[17] = byte('0' + t)
- buf[18] = byte('0' + s - t*10)
+ buf[17] = s1[s]
+ buf[18] = s2[s]
buf[19] = ' '
return buf[0:], d
}
+
+var (
+ green = string([]byte{27, 91, 57, 55, 59, 52, 50, 109})
+ white = string([]byte{27, 91, 57, 48, 59, 52, 55, 109})
+ yellow = string([]byte{27, 91, 57, 55, 59, 52, 51, 109})
+ red = string([]byte{27, 91, 57, 55, 59, 52, 49, 109})
+ blue = string([]byte{27, 91, 57, 55, 59, 52, 52, 109})
+ magenta = string([]byte{27, 91, 57, 55, 59, 52, 53, 109})
+ cyan = string([]byte{27, 91, 57, 55, 59, 52, 54, 109})
+
+ w32Green = string([]byte{27, 91, 52, 50, 109})
+ w32White = string([]byte{27, 91, 52, 55, 109})
+ w32Yellow = string([]byte{27, 91, 52, 51, 109})
+ w32Red = string([]byte{27, 91, 52, 49, 109})
+ w32Blue = string([]byte{27, 91, 52, 52, 109})
+ w32Magenta = string([]byte{27, 91, 52, 53, 109})
+ w32Cyan = string([]byte{27, 91, 52, 54, 109})
+
+ reset = string([]byte{27, 91, 48, 109})
+)
+
+// ColorByStatus return color by http code
+// 2xx return Green
+// 3xx return White
+// 4xx return Yellow
+// 5xx return Red
+func ColorByStatus(cond bool, code int) string {
+ switch {
+ case code >= 200 && code < 300:
+ return map[bool]string{true: green, false: w32Green}[cond]
+ case code >= 300 && code < 400:
+ return map[bool]string{true: white, false: w32White}[cond]
+ case code >= 400 && code < 500:
+ return map[bool]string{true: yellow, false: w32Yellow}[cond]
+ default:
+ return map[bool]string{true: red, false: w32Red}[cond]
+ }
+}
+
+// ColorByMethod return color by http method
+// GET return Blue
+// POST return Cyan
+// PUT return Yellow
+// DELETE return Red
+// PATCH return Green
+// HEAD return Magenta
+// OPTIONS return WHITE
+func ColorByMethod(cond bool, method string) string {
+ switch method {
+ case "GET":
+ return map[bool]string{true: blue, false: w32Blue}[cond]
+ case "POST":
+ return map[bool]string{true: cyan, false: w32Cyan}[cond]
+ case "PUT":
+ return map[bool]string{true: yellow, false: w32Yellow}[cond]
+ case "DELETE":
+ return map[bool]string{true: red, false: w32Red}[cond]
+ case "PATCH":
+ return map[bool]string{true: green, false: w32Green}[cond]
+ case "HEAD":
+ return map[bool]string{true: magenta, false: w32Magenta}[cond]
+ case "OPTIONS":
+ return map[bool]string{true: white, false: w32White}[cond]
+ default:
+ return reset
+ }
+}
+
+// mu guards W32Debug so that its colored output is written atomically
+var mu sync.Mutex
+
+// W32Debug Helper method to output colored logs in Windows terminals
+func W32Debug(msg string) {
+ mu.Lock()
+ defer mu.Unlock()
+
+ current := time.Now()
+ w := NewAnsiColorWriter(os.Stdout)
+
+ fmt.Fprintf(w, "[beego] %v %s\n", current.Format("2006/01/02 - 15:04:05"), msg)
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/logger_test.go b/src/vendor/github.com/astaxie/beego/logs/logger_test.go
new file mode 100644
index 000000000..119b7bd31
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/logger_test.go
@@ -0,0 +1,75 @@
+// Copyright 2016 beego Author. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package logs
+
+import (
+ "bytes"
+ "testing"
+ "time"
+)
+
+func TestFormatHeader_0(t *testing.T) {
+ tm := time.Now()
+ if tm.Year() >= 2100 {
+ t.FailNow()
+ }
+ dur := time.Second
+ for {
+ if tm.Year() >= 2100 {
+ break
+ }
+ h, _ := formatTimeHeader(tm)
+ if tm.Format("2006/01/02 15:04:05 ") != string(h) {
+ t.Log(tm)
+ t.FailNow()
+ }
+ tm = tm.Add(dur)
+ dur *= 2
+ }
+}
+
+func TestFormatHeader_1(t *testing.T) {
+ tm := time.Now()
+ year := tm.Year()
+ dur := time.Second
+ for {
+ if tm.Year() >= year+1 {
+ break
+ }
+ h, _ := formatTimeHeader(tm)
+ if tm.Format("2006/01/02 15:04:05 ") != string(h) {
+ t.Log(tm)
+ t.FailNow()
+ }
+ tm = tm.Add(dur)
+ }
+}
+
+func TestNewAnsiColor1(t *testing.T) {
+ inner := bytes.NewBufferString("")
+ w := NewAnsiColorWriter(inner)
+ if w == inner {
+ t.Errorf("Get %#v, want %#v", w, inner)
+ }
+}
+
+func TestNewAnsiColor2(t *testing.T) {
+ inner := bytes.NewBufferString("")
+ w1 := NewAnsiColorWriter(inner)
+ w2 := NewAnsiColorWriter(w1)
+ if w1 != w2 {
+ t.Errorf("Get %#v, want %#v", w1, w2)
+ }
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/multifile.go b/src/vendor/github.com/astaxie/beego/logs/multifile.go
index b82ba2741..63204e176 100644
--- a/src/vendor/github.com/astaxie/beego/logs/multifile.go
+++ b/src/vendor/github.com/astaxie/beego/logs/multifile.go
@@ -112,5 +112,5 @@ func newFilesWriter() Logger {
}
func init() {
- Register("multifile", newFilesWriter)
+ Register(AdapterMultiFile, newFilesWriter)
}
diff --git a/src/vendor/github.com/astaxie/beego/logs/slack.go b/src/vendor/github.com/astaxie/beego/logs/slack.go
new file mode 100644
index 000000000..1cd2e5aee
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/logs/slack.go
@@ -0,0 +1,60 @@
+package logs
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "net/url"
+ "time"
+)
+
+// SLACKWriter implements beego LoggerInterface and is used to send slack webhook
+type SLACKWriter struct {
+ WebhookURL string `json:"webhookurl"`
+ Level int `json:"level"`
+}
+
+// newSLACKWriter creates a slack writer.
+func newSLACKWriter() Logger {
+ return &SLACKWriter{Level: LevelTrace}
+}
+
+// Init SLACKWriter with json config string
+func (s *SLACKWriter) Init(jsonconfig string) error {
+ return json.Unmarshal([]byte(jsonconfig), s)
+}
+
+// WriteMsg writes a message to the slack webhook.
+// it will post the formatted message text to the configured webhook URL.
+func (s *SLACKWriter) WriteMsg(when time.Time, msg string, level int) error {
+ if level > s.Level {
+ return nil
+ }
+
+ text := fmt.Sprintf("{\"text\": \"%s %s\"}", when.Format("2006-01-02 15:04:05"), msg)
+
+ form := url.Values{}
+ form.Add("payload", text)
+
+ resp, err := http.PostForm(s.WebhookURL, form)
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+ if resp.StatusCode != http.StatusOK {
+ return fmt.Errorf("Post webhook failed %s %d", resp.Status, resp.StatusCode)
+ }
+ return nil
+}
+
+// Flush implementing method. empty.
+func (s *SLACKWriter) Flush() {
+}
+
+// Destroy implementing method. empty.
+func (s *SLACKWriter) Destroy() {
+}
+
+func init() {
+ Register(AdapterSlack, newSLACKWriter)
+}
diff --git a/src/vendor/github.com/astaxie/beego/logs/smtp.go b/src/vendor/github.com/astaxie/beego/logs/smtp.go
index 47f5a0c69..6208d7b85 100644
--- a/src/vendor/github.com/astaxie/beego/logs/smtp.go
+++ b/src/vendor/github.com/astaxie/beego/logs/smtp.go
@@ -52,11 +52,7 @@ func newSMTPWriter() Logger {
// "level":LevelError
// }
func (s *SMTPWriter) Init(jsonconfig string) error {
- err := json.Unmarshal([]byte(jsonconfig), s)
- if err != nil {
- return err
- }
- return nil
+ return json.Unmarshal([]byte(jsonconfig), s)
}
func (s *SMTPWriter) getSMTPAuth(host string) smtp.Auth {
@@ -106,7 +102,7 @@ func (s *SMTPWriter) sendMail(hostAddressWithPort string, auth smtp.Auth, fromAd
if err != nil {
return err
}
- _, err = w.Write([]byte(msgContent))
+ _, err = w.Write(msgContent)
if err != nil {
return err
}
@@ -116,12 +112,7 @@ func (s *SMTPWriter) sendMail(hostAddressWithPort string, auth smtp.Auth, fromAd
return err
}
- err = client.Quit()
- if err != nil {
- return err
- }
-
- return nil
+ return client.Quit()
}
// WriteMsg write message in smtp writer.
@@ -147,14 +138,12 @@ func (s *SMTPWriter) WriteMsg(when time.Time, msg string, level int) error {
// Flush implementing method. empty.
func (s *SMTPWriter) Flush() {
- return
}
// Destroy implementing method. empty.
func (s *SMTPWriter) Destroy() {
- return
}
func init() {
- Register("smtp", newSMTPWriter)
+ Register(AdapterMail, newSMTPWriter)
}
diff --git a/src/vendor/github.com/astaxie/beego/migration/ddl.go b/src/vendor/github.com/astaxie/beego/migration/ddl.go
index 51243337f..cea103558 100644
--- a/src/vendor/github.com/astaxie/beego/migration/ddl.go
+++ b/src/vendor/github.com/astaxie/beego/migration/ddl.go
@@ -14,40 +14,382 @@
package migration
-// Table store the tablename and Column
-type Table struct {
- TableName string
- Columns []*Column
+import (
+ "fmt"
+
+ "github.com/astaxie/beego"
+)
+
+// Index struct defines the structure of Index Columns
+type Index struct {
+ Name string
}
-// Create return the create sql
-func (t *Table) Create() string {
- return ""
+// Unique struct defines a single unique key combination
+type Unique struct {
+ Definition string
+ Columns []*Column
}
-// Drop return the drop sql
-func (t *Table) Drop() string {
- return ""
-}
-
-// Column define the columns name type and Default
+//Column struct defines a single column of a table
type Column struct {
- Name string
- Type string
- Default interface{}
+ Name string
+ Inc string
+ Null string
+ Default string
+ Unsign string
+ DataType string
+ remove bool
+ Modify bool
}
-// Create return create sql with the provided tbname and columns
-func Create(tbname string, columns ...Column) string {
- return ""
+// Foreign struct defines a single foreign relationship
+type Foreign struct {
+ ForeignTable string
+ ForeignColumn string
+ OnDelete string
+ OnUpdate string
+ Column
}
-// Drop return the drop sql with the provided tbname and columns
-func Drop(tbname string, columns ...Column) string {
- return ""
+// RenameColumn struct allows renaming of columns
+type RenameColumn struct {
+ OldName string
+ OldNull string
+ OldDefault string
+ OldUnsign string
+ OldDataType string
+ NewName string
+ Column
}
-// TableDDL is still in think
-func TableDDL(tbname string, columns ...Column) string {
- return ""
+// CreateTable creates the table on system
+func (m *Migration) CreateTable(tablename, engine, charset string, p ...func()) {
+ m.TableName = tablename
+ m.Engine = engine
+ m.Charset = charset
+ m.ModifyType = "create"
+}
+
+// AlterTable set the ModifyType to alter
+func (m *Migration) AlterTable(tablename string) {
+ m.TableName = tablename
+ m.ModifyType = "alter"
+}
+
+// NewCol creates a new standard column and attaches it to m struct
+func (m *Migration) NewCol(name string) *Column {
+ col := &Column{Name: name}
+ m.AddColumns(col)
+ return col
+}
+
+//PriCol creates a new primary column and attaches it to m struct
+func (m *Migration) PriCol(name string) *Column {
+ col := &Column{Name: name}
+ m.AddColumns(col)
+ m.AddPrimary(col)
+ return col
+}
+
+//UniCol creates / appends columns to specified unique key and attaches it to m struct
+func (m *Migration) UniCol(uni, name string) *Column {
+ col := &Column{Name: name}
+ m.AddColumns(col)
+
+ uniqueOriginal := &Unique{}
+
+ for _, unique := range m.Uniques {
+ if unique.Definition == uni {
+ unique.AddColumnsToUnique(col)
+ uniqueOriginal = unique
+ }
+ }
+ if uniqueOriginal.Definition == "" {
+ unique := &Unique{Definition: uni}
+ unique.AddColumnsToUnique(col)
+ m.AddUnique(unique)
+ }
+
+ return col
+}
+
+//ForeignCol creates a new foreign column and returns the instance of column
+func (m *Migration) ForeignCol(colname, foreigncol, foreigntable string) (foreign *Foreign) {
+
+ foreign = &Foreign{ForeignColumn: foreigncol, ForeignTable: foreigntable}
+ foreign.Name = colname
+ m.AddForeign(foreign)
+ return foreign
+}
+
+//SetOnDelete sets the on delete of foreign
+func (foreign *Foreign) SetOnDelete(del string) *Foreign {
+ foreign.OnDelete = "ON DELETE" + del
+ return foreign
+}
+
+//SetOnUpdate sets the on update of foreign
+func (foreign *Foreign) SetOnUpdate(update string) *Foreign {
+ foreign.OnUpdate = "ON UPDATE" + update
+ return foreign
+}
+
+//Remove marks the columns to be removed.
+//it allows reverse m to create the column.
+func (c *Column) Remove() {
+ c.remove = true
+}
+
+//SetAuto enables auto_increment of column (can be used once)
+func (c *Column) SetAuto(inc bool) *Column {
+ if inc {
+ c.Inc = "auto_increment"
+ }
+ return c
+}
+
+//SetNullable sets the column to be null
+func (c *Column) SetNullable(null bool) *Column {
+ if null {
+ c.Null = ""
+
+ } else {
+ c.Null = "NOT NULL"
+ }
+ return c
+}
+
+//SetDefault sets the default value, prepended with "DEFAULT "
+func (c *Column) SetDefault(def string) *Column {
+ c.Default = "DEFAULT " + def
+ return c
+}
+
+//SetUnsigned sets the column to be unsigned int
+func (c *Column) SetUnsigned(unsign bool) *Column {
+ if unsign {
+ c.Unsign = "UNSIGNED"
+ }
+ return c
+}
+
+//SetDataType sets the dataType of the column
+func (c *Column) SetDataType(dataType string) *Column {
+ c.DataType = dataType
+ return c
+}
+
+//SetOldNullable allows reverting to previous nullable on reverse ms
+func (c *RenameColumn) SetOldNullable(null bool) *RenameColumn {
+ if null {
+ c.OldNull = ""
+
+ } else {
+ c.OldNull = "NOT NULL"
+ }
+ return c
+}
+
+//SetOldDefault allows reverting to previous default on reverse ms
+func (c *RenameColumn) SetOldDefault(def string) *RenameColumn {
+ c.OldDefault = def
+ return c
+}
+
+//SetOldUnsigned allows reverting to previous unsigned on reverse ms
+func (c *RenameColumn) SetOldUnsigned(unsign bool) *RenameColumn {
+ if unsign {
+ c.OldUnsign = "UNSIGNED"
+ }
+ return c
+}
+
+//SetOldDataType allows reverting to previous datatype on reverse ms
+func (c *RenameColumn) SetOldDataType(dataType string) *RenameColumn {
+ c.OldDataType = dataType
+ return c
+}
+
+//SetPrimary adds the column to the primary key (can be used any number of times, but within only one migration)
+func (c *Column) SetPrimary(m *Migration) *Column {
+ m.Primary = append(m.Primary, c)
+ return c
+}
+
+//AddColumnsToUnique adds the columns to Unique Struct
+func (unique *Unique) AddColumnsToUnique(columns ...*Column) *Unique {
+
+ unique.Columns = append(unique.Columns, columns...)
+
+ return unique
+}
+
+//AddColumns adds columns to m struct
+func (m *Migration) AddColumns(columns ...*Column) *Migration {
+
+ m.Columns = append(m.Columns, columns...)
+
+ return m
+}
+
+//AddPrimary adds the column to primary in m struct
+func (m *Migration) AddPrimary(primary *Column) *Migration {
+ m.Primary = append(m.Primary, primary)
+ return m
+}
+
+//AddUnique adds the column to unique in m struct
+func (m *Migration) AddUnique(unique *Unique) *Migration {
+ m.Uniques = append(m.Uniques, unique)
+ return m
+}
+
+//AddForeign adds the column to foreign in m struct
+func (m *Migration) AddForeign(foreign *Foreign) *Migration {
+ m.Foreigns = append(m.Foreigns, foreign)
+ return m
+}
+
+//AddIndex adds the column to index in m struct
+func (m *Migration) AddIndex(index *Index) *Migration {
+ m.Indexes = append(m.Indexes, index)
+ return m
+}
+
+//RenameColumn allows renaming of columns
+func (m *Migration) RenameColumn(from, to string) *RenameColumn {
+ rename := &RenameColumn{OldName: from, NewName: to}
+ m.Renames = append(m.Renames, rename)
+ return rename
+}
+
+//GetSQL returns the generated sql depending on ModifyType
+func (m *Migration) GetSQL() (sql string) {
+ sql = ""
+ switch m.ModifyType {
+ case "create":
+ {
+ sql += fmt.Sprintf("CREATE TABLE `%s` (", m.TableName)
+ for index, column := range m.Columns {
+ sql += fmt.Sprintf("\n `%s` %s %s %s %s %s", column.Name, column.DataType, column.Unsign, column.Null, column.Inc, column.Default)
+ if len(m.Columns) > index+1 {
+ sql += ","
+ }
+ }
+
+ if len(m.Primary) > 0 {
+ sql += fmt.Sprintf(",\n PRIMARY KEY( ")
+ }
+ for index, column := range m.Primary {
+ sql += fmt.Sprintf(" `%s`", column.Name)
+ if len(m.Primary) > index+1 {
+ sql += ","
+ }
+
+ }
+ if len(m.Primary) > 0 {
+ sql += fmt.Sprintf(")")
+ }
+
+ for _, unique := range m.Uniques {
+ sql += fmt.Sprintf(",\n UNIQUE KEY `%s`( ", unique.Definition)
+ for index, column := range unique.Columns {
+ sql += fmt.Sprintf(" `%s`", column.Name)
+ if len(unique.Columns) > index+1 {
+ sql += ","
+ }
+ }
+ sql += fmt.Sprintf(")")
+ }
+ for _, foreign := range m.Foreigns {
+ sql += fmt.Sprintf(",\n `%s` %s %s %s %s %s", foreign.Name, foreign.DataType, foreign.Unsign, foreign.Null, foreign.Inc, foreign.Default)
+ sql += fmt.Sprintf(",\n KEY `%s_%s_foreign`(`%s`),", m.TableName, foreign.Column.Name, foreign.Column.Name)
+ sql += fmt.Sprintf("\n CONSTRAINT `%s_%s_foreign` FOREIGN KEY (`%s`) REFERENCES `%s` (`%s`) %s %s", m.TableName, foreign.Column.Name, foreign.Column.Name, foreign.ForeignTable, foreign.ForeignColumn, foreign.OnDelete, foreign.OnUpdate)
+
+ }
+ sql += fmt.Sprintf(")ENGINE=%s DEFAULT CHARSET=%s;", m.Engine, m.Charset)
+ break
+ }
+ case "alter":
+ {
+ sql += fmt.Sprintf("ALTER TABLE `%s` ", m.TableName)
+ for index, column := range m.Columns {
+ if !column.remove {
+ beego.BeeLogger.Info("col")
+ sql += fmt.Sprintf("\n ADD `%s` %s %s %s %s %s", column.Name, column.DataType, column.Unsign, column.Null, column.Inc, column.Default)
+ } else {
+ sql += fmt.Sprintf("\n DROP COLUMN `%s`", column.Name)
+ }
+
+ if len(m.Columns) > index {
+ sql += ","
+ }
+ }
+ for index, column := range m.Renames {
+ sql += fmt.Sprintf("CHANGE COLUMN `%s` `%s` %s %s %s %s %s", column.OldName, column.NewName, column.DataType, column.Unsign, column.Null, column.Inc, column.Default)
+ if len(m.Renames) > index+1 {
+ sql += ","
+ }
+ }
+
+ for index, foreign := range m.Foreigns {
+ sql += fmt.Sprintf("ADD `%s` %s %s %s %s %s", foreign.Name, foreign.DataType, foreign.Unsign, foreign.Null, foreign.Inc, foreign.Default)
+ sql += fmt.Sprintf(",\n ADD KEY `%s_%s_foreign`(`%s`)", m.TableName, foreign.Column.Name, foreign.Column.Name)
+ sql += fmt.Sprintf(",\n ADD CONSTRAINT `%s_%s_foreign` FOREIGN KEY (`%s`) REFERENCES `%s` (`%s`) %s %s", m.TableName, foreign.Column.Name, foreign.Column.Name, foreign.ForeignTable, foreign.ForeignColumn, foreign.OnDelete, foreign.OnUpdate)
+ if len(m.Foreigns) > index+1 {
+ sql += ","
+ }
+ }
+ sql += ";"
+
+ break
+ }
+ case "reverse":
+ {
+
+ sql += fmt.Sprintf("ALTER TABLE `%s`", m.TableName)
+ for index, column := range m.Columns {
+ if column.remove {
+ sql += fmt.Sprintf("\n ADD `%s` %s %s %s %s %s", column.Name, column.DataType, column.Unsign, column.Null, column.Inc, column.Default)
+ } else {
+ sql += fmt.Sprintf("\n DROP COLUMN `%s`", column.Name)
+ }
+ if len(m.Columns) > index {
+ sql += ","
+ }
+ }
+
+ if len(m.Primary) > 0 {
+ sql += fmt.Sprintf("\n DROP PRIMARY KEY,")
+ }
+
+ for index, unique := range m.Uniques {
+ sql += fmt.Sprintf("\n DROP KEY `%s`", unique.Definition)
+ if len(m.Uniques) > index {
+ sql += ","
+ }
+
+ }
+ for index, column := range m.Renames {
+ sql += fmt.Sprintf("\n CHANGE COLUMN `%s` `%s` %s %s %s %s", column.NewName, column.OldName, column.OldDataType, column.OldUnsign, column.OldNull, column.OldDefault)
+ if len(m.Renames) > index {
+ sql += ","
+ }
+ }
+
+ for _, foreign := range m.Foreigns {
+ sql += fmt.Sprintf("\n DROP KEY `%s_%s_foreign`", m.TableName, foreign.Column.Name)
+ sql += fmt.Sprintf(",\n DROP FOREIGN KEY `%s_%s_foreign`", m.TableName, foreign.Column.Name)
+ sql += fmt.Sprintf(",\n DROP COLUMN `%s`", foreign.Name)
+ }
+ sql += ";"
+ }
+ case "delete":
+ {
+ sql += fmt.Sprintf("DROP TABLE IF EXISTS `%s`;", m.TableName)
+ }
+ }
+
+ return
}
diff --git a/src/vendor/github.com/astaxie/beego/migration/doc.go b/src/vendor/github.com/astaxie/beego/migration/doc.go
new file mode 100644
index 000000000..0c6564d4d
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/migration/doc.go
@@ -0,0 +1,32 @@
+// Package migration enables you to generate migrations back and forth. It generates both migrations.
+//
+// //Creates a table
+// m.CreateTable("tablename","InnoDB","utf8");
+//
+// //Alter a table
+// m.AlterTable("tablename")
+//
+// Standard Column Methods
+// * SetDataType
+// * SetNullable
+// * SetDefault
+// * SetUnsigned (use only on integer types unless produces error)
+//
+// //Sets a primary column, multiple calls allowed, standard column methods available
+// m.PriCol("id").SetAuto(true).SetNullable(false).SetDataType("INT(10)").SetUnsigned(true)
+//
+// //UniCol Can be used multiple times, allows standard Column methods. Use same "index" string to add to same index
+// m.UniCol("index","column")
+//
+// //Standard Column Initialisation, can call .Remove() after NewCol("") on alter to remove
+// m.NewCol("name").SetDataType("VARCHAR(255) COLLATE utf8_unicode_ci").SetNullable(false)
+// m.NewCol("value").SetDataType("DOUBLE(8,2)").SetNullable(false)
+//
+// //Rename Columns, only use with Alter table, doesn't work with Create, prefix standard column methods with "Old" to
+// //create a true reversible migration eg: SetOldDataType("DOUBLE(12,3)")
+// m.RenameColumn("from","to")...
+//
+// //Foreign Columns: only single columns are supported; SetOnDelete & SetOnUpdate are available, call appropriately.
+// //Supports standard column methods, automatic reverse.
+// m.ForeignCol("local_col","foreign_col","foreign_table")
+package migration
diff --git a/src/vendor/github.com/astaxie/beego/migration/migration.go b/src/vendor/github.com/astaxie/beego/migration/migration.go
index 1591bc50d..510537142 100644
--- a/src/vendor/github.com/astaxie/beego/migration/migration.go
+++ b/src/vendor/github.com/astaxie/beego/migration/migration.go
@@ -33,7 +33,7 @@ import (
"strings"
"time"
- "github.com/astaxie/beego"
+ "github.com/astaxie/beego/logs"
"github.com/astaxie/beego/orm"
)
@@ -52,6 +52,26 @@ type Migrationer interface {
GetCreated() int64
}
+//Migration defines the migrations by either SQL or DDL
+type Migration struct {
+ sqls []string
+ Created string
+ TableName string
+ Engine string
+ Charset string
+ ModifyType string
+ Columns []*Column
+ Indexes []*Index
+ Primary []*Column
+ Uniques []*Unique
+ Foreigns []*Foreign
+ Renames []*RenameColumn
+ RemoveColumns []*Column
+ RemoveIndexes []*Index
+ RemoveUniques []*Unique
+ RemoveForeigns []*Foreign
+}
+
var (
migrationMap map[string]Migrationer
)
@@ -60,20 +80,34 @@ func init() {
migrationMap = make(map[string]Migrationer)
}
-// Migration the basic type which will implement the basic type
-type Migration struct {
- sqls []string
- Created string
-}
-
// Up implement in the Inheritance struct for upgrade
func (m *Migration) Up() {
+ switch m.ModifyType {
+ case "reverse":
+ m.ModifyType = "alter"
+ case "delete":
+ m.ModifyType = "create"
+ }
+ m.sqls = append(m.sqls, m.GetSQL())
}
// Down implement in the Inheritance struct for down
func (m *Migration) Down() {
+ switch m.ModifyType {
+ case "alter":
+ m.ModifyType = "reverse"
+ case "create":
+ m.ModifyType = "delete"
+ }
+ m.sqls = append(m.sqls, m.GetSQL())
+}
+
+//Migrate adds the SQL to the execution list
+func (m *Migration) Migrate(migrationType string) {
+ m.ModifyType = migrationType
+ m.sqls = append(m.sqls, m.GetSQL())
}
// SQL add sql want to execute
@@ -90,7 +124,7 @@ func (m *Migration) Reset() {
func (m *Migration) Exec(name, status string) error {
o := orm.NewOrm()
for _, s := range m.sqls {
- beego.Info("exec sql:", s)
+ logs.Info("exec sql:", s)
r := o.Raw(s)
_, err := r.Exec()
if err != nil {
@@ -144,20 +178,20 @@ func Upgrade(lasttime int64) error {
i := 0
for _, v := range sm {
if v.created > lasttime {
- beego.Info("start upgrade", v.name)
+ logs.Info("start upgrade", v.name)
v.m.Reset()
v.m.Up()
err := v.m.Exec(v.name, "up")
if err != nil {
- beego.Error("execute error:", err)
+ logs.Error("execute error:", err)
time.Sleep(2 * time.Second)
return err
}
- beego.Info("end upgrade:", v.name)
+ logs.Info("end upgrade:", v.name)
i++
}
}
- beego.Info("total success upgrade:", i, " migration")
+ logs.Info("total success upgrade:", i, " migration")
time.Sleep(2 * time.Second)
return nil
}
@@ -165,20 +199,20 @@ func Upgrade(lasttime int64) error {
// Rollback rollback the migration by the name
func Rollback(name string) error {
if v, ok := migrationMap[name]; ok {
- beego.Info("start rollback")
+ logs.Info("start rollback")
v.Reset()
v.Down()
err := v.Exec(name, "down")
if err != nil {
- beego.Error("execute error:", err)
+ logs.Error("execute error:", err)
time.Sleep(2 * time.Second)
return err
}
- beego.Info("end rollback")
+ logs.Info("end rollback")
time.Sleep(2 * time.Second)
return nil
}
- beego.Error("not exist the migrationMap name:" + name)
+ logs.Error("not exist the migrationMap name:" + name)
time.Sleep(2 * time.Second)
return errors.New("not exist the migrationMap name:" + name)
}
@@ -191,23 +225,23 @@ func Reset() error {
for j := len(sm) - 1; j >= 0; j-- {
v := sm[j]
if isRollBack(v.name) {
- beego.Info("skip the", v.name)
+ logs.Info("skip the", v.name)
time.Sleep(1 * time.Second)
continue
}
- beego.Info("start reset:", v.name)
+ logs.Info("start reset:", v.name)
v.m.Reset()
v.m.Down()
err := v.m.Exec(v.name, "down")
if err != nil {
- beego.Error("execute error:", err)
+ logs.Error("execute error:", err)
time.Sleep(2 * time.Second)
return err
}
i++
- beego.Info("end reset:", v.name)
+ logs.Info("end reset:", v.name)
}
- beego.Info("total success reset:", i, " migration")
+ logs.Info("total success reset:", i, " migration")
time.Sleep(2 * time.Second)
return nil
}
@@ -216,7 +250,7 @@ func Reset() error {
func Refresh() error {
err := Reset()
if err != nil {
- beego.Error("execute error:", err)
+ logs.Error("execute error:", err)
time.Sleep(2 * time.Second)
return err
}
@@ -265,7 +299,7 @@ func isRollBack(name string) bool {
var maps []orm.Params
num, err := o.Raw("select * from migrations where `name` = ? order by id_migration desc", name).Values(&maps)
if err != nil {
- beego.Info("get name has error", err)
+ logs.Info("get name has error", err)
return false
}
if num <= 0 {
diff --git a/src/vendor/github.com/astaxie/beego/mime.go b/src/vendor/github.com/astaxie/beego/mime.go
index e85fcb2ac..ca2878ab2 100644
--- a/src/vendor/github.com/astaxie/beego/mime.go
+++ b/src/vendor/github.com/astaxie/beego/mime.go
@@ -339,7 +339,7 @@ var mimemaps = map[string]string{
".pvu": "paleovu/x-pv",
".pwz": "application/vndms-powerpoint",
".py": "text/x-scriptphyton",
- ".pyc": "applicaiton/x-bytecodepython",
+ ".pyc": "application/x-bytecodepython",
".qcp": "audio/vndqcelp",
".qd3": "x-world/x-3dmf",
".qd3d": "x-world/x-3dmf",
diff --git a/src/vendor/github.com/astaxie/beego/namespace.go b/src/vendor/github.com/astaxie/beego/namespace.go
index 4007d44cc..72f22a720 100644
--- a/src/vendor/github.com/astaxie/beego/namespace.go
+++ b/src/vendor/github.com/astaxie/beego/namespace.go
@@ -44,7 +44,7 @@ func NewNamespace(prefix string, params ...LinkNamespace) *Namespace {
return ns
}
-// Cond set condtion function
+// Cond set condition function
// if cond return true can run this namespace, else can't
// usage:
// ns.Cond(func (ctx *context.Context) bool{
@@ -60,7 +60,7 @@ func (n *Namespace) Cond(cond namespaceCond) *Namespace {
exception("405", ctx)
}
}
- if v, ok := n.handlers.filters[BeforeRouter]; ok {
+ if v := n.handlers.filters[BeforeRouter]; len(v) > 0 {
mr := new(FilterRouter)
mr.tree = NewTree()
mr.pattern = "*"
@@ -267,13 +267,12 @@ func addPrefix(t *Tree, prefix string) {
addPrefix(t.wildcard, prefix)
}
for _, l := range t.leaves {
- if c, ok := l.runObject.(*controllerInfo); ok {
+ if c, ok := l.runObject.(*ControllerInfo); ok {
if !strings.HasPrefix(c.pattern, prefix) {
c.pattern = prefix + c.pattern
}
}
}
-
}
// NSCond is Namespace Condition
@@ -284,16 +283,16 @@ func NSCond(cond namespaceCond) LinkNamespace {
}
// NSBefore Namespace BeforeRouter filter
-func NSBefore(filiterList ...FilterFunc) LinkNamespace {
+func NSBefore(filterList ...FilterFunc) LinkNamespace {
return func(ns *Namespace) {
- ns.Filter("before", filiterList...)
+ ns.Filter("before", filterList...)
}
}
// NSAfter add Namespace FinishRouter filter
-func NSAfter(filiterList ...FilterFunc) LinkNamespace {
+func NSAfter(filterList ...FilterFunc) LinkNamespace {
return func(ns *Namespace) {
- ns.Filter("after", filiterList...)
+ ns.Filter("after", filterList...)
}
}
diff --git a/src/vendor/github.com/astaxie/beego/namespace_test.go b/src/vendor/github.com/astaxie/beego/namespace_test.go
index a92ae3efd..b3f20dff2 100644
--- a/src/vendor/github.com/astaxie/beego/namespace_test.go
+++ b/src/vendor/github.com/astaxie/beego/namespace_test.go
@@ -61,8 +61,8 @@ func TestNamespaceNest(t *testing.T) {
ns.Namespace(
NewNamespace("/admin").
Get("/order", func(ctx *context.Context) {
- ctx.Output.Body([]byte("order"))
- }),
+ ctx.Output.Body([]byte("order"))
+ }),
)
AddNamespace(ns)
BeeApp.Handlers.ServeHTTP(w, r)
@@ -79,8 +79,8 @@ func TestNamespaceNestParam(t *testing.T) {
ns.Namespace(
NewNamespace("/admin").
Get("/order/:id", func(ctx *context.Context) {
- ctx.Output.Body([]byte(ctx.Input.Param(":id")))
- }),
+ ctx.Output.Body([]byte(ctx.Input.Param(":id")))
+ }),
)
AddNamespace(ns)
BeeApp.Handlers.ServeHTTP(w, r)
@@ -124,8 +124,8 @@ func TestNamespaceFilter(t *testing.T) {
ctx.Output.Body([]byte("this is Filter"))
}).
Get("/user/:id", func(ctx *context.Context) {
- ctx.Output.Body([]byte(ctx.Input.Param(":id")))
- })
+ ctx.Output.Body([]byte(ctx.Input.Param(":id")))
+ })
AddNamespace(ns)
BeeApp.Handlers.ServeHTTP(w, r)
if w.Body.String() != "this is Filter" {
@@ -139,10 +139,7 @@ func TestNamespaceCond(t *testing.T) {
ns := NewNamespace("/v2")
ns.Cond(func(ctx *context.Context) bool {
- if ctx.Input.Domain() == "beego.me" {
- return true
- }
- return false
+ return ctx.Input.Domain() == "beego.me"
}).
AutoRouter(&TestController{})
AddNamespace(ns)
diff --git a/src/vendor/github.com/astaxie/beego/orm/cmd.go b/src/vendor/github.com/astaxie/beego/orm/cmd.go
index 3638a75cf..0ff4dc40d 100644
--- a/src/vendor/github.com/astaxie/beego/orm/cmd.go
+++ b/src/vendor/github.com/astaxie/beego/orm/cmd.go
@@ -150,7 +150,7 @@ func (d *commandSyncDb) Run() error {
}
for _, fi := range mi.fields.fieldsDB {
- if _, ok := columns[fi.column]; ok == false {
+ if _, ok := columns[fi.column]; !ok {
fields = append(fields, fi)
}
}
@@ -175,7 +175,7 @@ func (d *commandSyncDb) Run() error {
}
for _, idx := range indexes[mi.table] {
- if d.al.DbBaser.IndexExists(db, idx.Table, idx.Name) == false {
+ if !d.al.DbBaser.IndexExists(db, idx.Table, idx.Name) {
if !d.noInfo {
fmt.Printf("create index `%s` for table `%s`\n", idx.Name, idx.Table)
}
diff --git a/src/vendor/github.com/astaxie/beego/orm/cmd_utils.go b/src/vendor/github.com/astaxie/beego/orm/cmd_utils.go
index da0ee8ab4..de47cb023 100644
--- a/src/vendor/github.com/astaxie/beego/orm/cmd_utils.go
+++ b/src/vendor/github.com/astaxie/beego/orm/cmd_utils.go
@@ -52,9 +52,15 @@ checkColumn:
case TypeBooleanField:
col = T["bool"]
case TypeCharField:
- col = fmt.Sprintf(T["string"], fieldSize)
+ if al.Driver == DRPostgres && fi.toText {
+ col = T["string-text"]
+ } else {
+ col = fmt.Sprintf(T["string"], fieldSize)
+ }
case TypeTextField:
col = T["string-text"]
+ case TypeTimeField:
+ col = T["time.Time-clock"]
case TypeDateField:
col = T["time.Time-date"]
case TypeDateTimeField:
@@ -83,11 +89,23 @@ checkColumn:
col = T["float64"]
case TypeDecimalField:
s := T["float64-decimal"]
- if strings.Index(s, "%d") == -1 {
+ if !strings.Contains(s, "%d") {
col = s
} else {
col = fmt.Sprintf(s, fi.digits, fi.decimals)
}
+ case TypeJSONField:
+ if al.Driver != DRPostgres {
+ fieldType = TypeCharField
+ goto checkColumn
+ }
+ col = T["json"]
+ case TypeJsonbField:
+ if al.Driver != DRPostgres {
+ fieldType = TypeCharField
+ goto checkColumn
+ }
+ col = T["jsonb"]
case RelForeignKey, RelOneToOne:
fieldType = fi.relModelInfo.fields.pk.fieldType
fieldSize = fi.relModelInfo.fields.pk.size
@@ -102,7 +120,7 @@ func getColumnAddQuery(al *alias, fi *fieldInfo) string {
Q := al.DbBaser.TableQuote()
typ := getColumnTyp(al, fi)
- if fi.null == false {
+ if !fi.null {
typ += " " + "NOT NULL"
}
@@ -154,7 +172,7 @@ func getDbCreateSQL(al *alias) (sqls []string, tableIndexes map[string][]dbIndex
} else {
column += col
- if fi.null == false {
+ if !fi.null {
column += " " + "NOT NULL"
}
@@ -174,7 +192,7 @@ func getDbCreateSQL(al *alias) (sqls []string, tableIndexes map[string][]dbIndex
}
}
- if strings.Index(column, "%COL%") != -1 {
+ if strings.Contains(column, "%COL%") {
column = strings.Replace(column, "%COL%", fi.column, -1)
}
@@ -264,7 +282,7 @@ func getColumnDefault(fi *fieldInfo) string {
// These defaults will be useful if there no config value orm:"default" and NOT NULL is on
switch fi.fieldType {
- case TypeDateField, TypeDateTimeField, TypeTextField:
+ case TypeTimeField, TypeDateField, TypeDateTimeField, TypeTextField:
return v
case TypeBitField, TypeSmallIntegerField, TypeIntegerField,
@@ -276,6 +294,8 @@ func getColumnDefault(fi *fieldInfo) string {
case TypeBooleanField:
t = " DEFAULT %s "
d = "FALSE"
+ case TypeJSONField, TypeJsonbField:
+ d = "{}"
}
if fi.colDefault {
diff --git a/src/vendor/github.com/astaxie/beego/orm/db.go b/src/vendor/github.com/astaxie/beego/orm/db.go
index 314c3535e..12f0f54d2 100644
--- a/src/vendor/github.com/astaxie/beego/orm/db.go
+++ b/src/vendor/github.com/astaxie/beego/orm/db.go
@@ -24,6 +24,7 @@ import (
)
const (
+ formatTime = "15:04:05"
formatDate = "2006-01-02"
formatDateTime = "2006-01-02 15:04:05"
)
@@ -47,6 +48,7 @@ var (
"lte": true,
"eq": true,
"nq": true,
+ "ne": true,
"startswith": true,
"endswith": true,
"istartswith": true,
@@ -71,12 +73,12 @@ type dbBase struct {
var _ dbBaser = new(dbBase)
// get struct columns values as interface slice.
-func (d *dbBase) collectValues(mi *modelInfo, ind reflect.Value, cols []string, skipAuto bool, insert bool, names *[]string, tz *time.Location) (values []interface{}, err error) {
- var columns []string
-
- if names != nil {
- columns = *names
+func (d *dbBase) collectValues(mi *modelInfo, ind reflect.Value, cols []string, skipAuto bool, insert bool, names *[]string, tz *time.Location) (values []interface{}, autoFields []string, err error) {
+ if names == nil {
+ ns := make([]string, 0, len(cols))
+ names = &ns
}
+ values = make([]interface{}, 0, len(cols))
for _, column := range cols {
var fi *fieldInfo
@@ -85,23 +87,29 @@ func (d *dbBase) collectValues(mi *modelInfo, ind reflect.Value, cols []string,
} else {
panic(fmt.Errorf("wrong db field/column name `%s` for model `%s`", column, mi.fullName))
}
- if fi.dbcol == false || fi.auto && skipAuto {
+ if !fi.dbcol || fi.auto && skipAuto {
continue
}
value, err := d.collectFieldValue(mi, fi, ind, insert, tz)
if err != nil {
- return nil, err
+ return nil, nil, err
}
- if names != nil {
- columns = append(columns, column)
+ // ignore empty value auto field
+ if insert && fi.auto {
+ if fi.fieldType&IsPositiveIntegerField > 0 {
+ if vu, ok := value.(uint64); !ok || vu == 0 {
+ continue
+ }
+ } else {
+ if vu, ok := value.(int64); !ok || vu == 0 {
+ continue
+ }
+ }
+ autoFields = append(autoFields, fi.column)
}
- values = append(values, value)
- }
-
- if names != nil {
- *names = columns
+ *names, values = append(*names, column), append(values, value)
}
return
@@ -134,7 +142,7 @@ func (d *dbBase) collectFieldValue(mi *modelInfo, fi *fieldInfo, ind reflect.Val
} else {
value = field.Bool()
}
- case TypeCharField, TypeTextField:
+ case TypeCharField, TypeTextField, TypeJSONField, TypeJsonbField:
if ns, ok := field.Interface().(sql.NullString); ok {
value = nil
if ns.Valid {
@@ -169,7 +177,7 @@ func (d *dbBase) collectFieldValue(mi *modelInfo, fi *fieldInfo, ind reflect.Val
value = field.Float()
}
}
- case TypeDateField, TypeDateTimeField:
+ case TypeTimeField, TypeDateField, TypeDateTimeField:
value = field.Interface()
if t, ok := value.(time.Time); ok {
d.ins.TimeToDB(&t, tz)
@@ -181,7 +189,7 @@ func (d *dbBase) collectFieldValue(mi *modelInfo, fi *fieldInfo, ind reflect.Val
}
default:
switch {
- case fi.fieldType&IsPostiveIntegerField > 0:
+ case fi.fieldType&IsPositiveIntegerField > 0:
if field.Kind() == reflect.Ptr {
if field.IsNil() {
value = nil
@@ -216,14 +224,14 @@ func (d *dbBase) collectFieldValue(mi *modelInfo, fi *fieldInfo, ind reflect.Val
value = nil
}
}
- if fi.null == false && value == nil {
+ if !fi.null && value == nil {
return nil, fmt.Errorf("field `%s` cannot be NULL", fi.fullName)
}
}
}
}
switch fi.fieldType {
- case TypeDateField, TypeDateTimeField:
+ case TypeTimeField, TypeDateField, TypeDateTimeField:
if fi.autoNow || fi.autoNowAdd && insert {
if insert {
if t, ok := value.(time.Time); ok && !t.IsZero() {
@@ -236,10 +244,21 @@ func (d *dbBase) collectFieldValue(mi *modelInfo, fi *fieldInfo, ind reflect.Val
if fi.isFielder {
f := field.Addr().Interface().(Fielder)
f.SetRaw(tnow.In(DefaultTimeLoc))
+ } else if field.Kind() == reflect.Ptr {
+ v := tnow.In(DefaultTimeLoc)
+ field.Set(reflect.ValueOf(&v))
} else {
field.Set(reflect.ValueOf(tnow.In(DefaultTimeLoc)))
}
}
+ case TypeJSONField, TypeJsonbField:
+ if s, ok := value.(string); (ok && len(s) == 0) || value == nil {
+ if fi.colDefault && fi.initial.Exist() {
+ value = fi.initial.String()
+ } else {
+ value = nil
+ }
+ }
}
}
return value, nil
@@ -252,7 +271,7 @@ func (d *dbBase) PrepareInsert(q dbQuerier, mi *modelInfo) (stmtQuerier, string,
dbcols := make([]string, 0, len(mi.fields.dbcols))
marks := make([]string, 0, len(mi.fields.dbcols))
for _, fi := range mi.fields.fieldsDB {
- if fi.auto == false {
+ if !fi.auto {
dbcols = append(dbcols, fi.column)
marks = append(marks, "?")
}
@@ -273,7 +292,7 @@ func (d *dbBase) PrepareInsert(q dbQuerier, mi *modelInfo) (stmtQuerier, string,
// insert struct with prepared statement and given struct reflect value.
func (d *dbBase) InsertStmt(stmt stmtQuerier, mi *modelInfo, ind reflect.Value, tz *time.Location) (int64, error) {
- values, err := d.collectValues(mi, ind, mi.fields.dbcols, true, true, nil, tz)
+ values, _, err := d.collectValues(mi, ind, mi.fields.dbcols, true, true, nil, tz)
if err != nil {
return 0, err
}
@@ -292,7 +311,7 @@ func (d *dbBase) InsertStmt(stmt stmtQuerier, mi *modelInfo, ind reflect.Value,
}
// query sql ,read records and persist in dbBaser.
-func (d *dbBase) Read(q dbQuerier, mi *modelInfo, ind reflect.Value, tz *time.Location, cols []string) error {
+func (d *dbBase) Read(q dbQuerier, mi *modelInfo, ind reflect.Value, tz *time.Location, cols []string, isForUpdate bool) error {
var whereCols []string
var args []interface{}
@@ -300,14 +319,14 @@ func (d *dbBase) Read(q dbQuerier, mi *modelInfo, ind reflect.Value, tz *time.Lo
if len(cols) > 0 {
var err error
whereCols = make([]string, 0, len(cols))
- args, err = d.collectValues(mi, ind, cols, false, false, &whereCols, tz)
+ args, _, err = d.collectValues(mi, ind, cols, false, false, &whereCols, tz)
if err != nil {
return err
}
} else {
// default use pk value as where condtion.
pkColumn, pkValue, ok := getExistPk(mi, ind)
- if ok == false {
+ if !ok {
return ErrMissPK
}
whereCols = []string{pkColumn}
@@ -323,7 +342,12 @@ func (d *dbBase) Read(q dbQuerier, mi *modelInfo, ind reflect.Value, tz *time.Lo
sep = fmt.Sprintf("%s = ? AND %s", Q, Q)
wheres := strings.Join(whereCols, sep)
- query := fmt.Sprintf("SELECT %s%s%s FROM %s%s%s WHERE %s%s%s = ?", Q, sels, Q, Q, mi.table, Q, Q, wheres, Q)
+ forUpdate := ""
+ if isForUpdate {
+ forUpdate = "FOR UPDATE"
+ }
+
+ query := fmt.Sprintf("SELECT %s%s%s FROM %s%s%s WHERE %s%s%s = ? %s", Q, sels, Q, Q, mi.table, Q, Q, wheres, Q, forUpdate)
refs := make([]interface{}, colsNum)
for i := range refs {
@@ -349,13 +373,21 @@ func (d *dbBase) Read(q dbQuerier, mi *modelInfo, ind reflect.Value, tz *time.Lo
// execute insert sql dbQuerier with given struct reflect.Value.
func (d *dbBase) Insert(q dbQuerier, mi *modelInfo, ind reflect.Value, tz *time.Location) (int64, error) {
- names := make([]string, 0, len(mi.fields.dbcols)-1)
- values, err := d.collectValues(mi, ind, mi.fields.dbcols, true, true, &names, tz)
+ names := make([]string, 0, len(mi.fields.dbcols))
+ values, autoFields, err := d.collectValues(mi, ind, mi.fields.dbcols, false, true, &names, tz)
if err != nil {
return 0, err
}
- return d.InsertValue(q, mi, false, names, values)
+ id, err := d.InsertValue(q, mi, false, names, values)
+ if err != nil {
+ return 0, err
+ }
+
+ if len(autoFields) > 0 {
+ err = d.ins.setval(q, mi, autoFields)
+ }
+ return id, err
}
// multi-insert sql with given slice struct reflect.Value.
@@ -369,7 +401,7 @@ func (d *dbBase) InsertMulti(q dbQuerier, mi *modelInfo, sind reflect.Value, bul
// typ := reflect.Indirect(mi.addrField).Type()
- length := sind.Len()
+ length, autoFields := sind.Len(), make([]string, 0, 1)
for i := 1; i <= length; i++ {
@@ -381,16 +413,18 @@ func (d *dbBase) InsertMulti(q dbQuerier, mi *modelInfo, sind reflect.Value, bul
// }
if i == 1 {
- vus, err := d.collectValues(mi, ind, mi.fields.dbcols, true, true, &names, tz)
+ var (
+ vus []interface{}
+ err error
+ )
+ vus, autoFields, err = d.collectValues(mi, ind, mi.fields.dbcols, false, true, &names, tz)
if err != nil {
return cnt, err
}
values = make([]interface{}, bulk*len(vus))
nums += copy(values, vus)
-
} else {
-
- vus, err := d.collectValues(mi, ind, mi.fields.dbcols, true, true, nil, tz)
+ vus, _, err := d.collectValues(mi, ind, mi.fields.dbcols, false, true, nil, tz)
if err != nil {
return cnt, err
}
@@ -412,7 +446,12 @@ func (d *dbBase) InsertMulti(q dbQuerier, mi *modelInfo, sind reflect.Value, bul
}
}
- return cnt, nil
+ var err error
+ if len(autoFields) > 0 {
+ err = d.ins.setval(q, mi, autoFields)
+ }
+
+ return cnt, err
}
// execute insert sql with given struct and given values.
@@ -455,10 +494,113 @@ func (d *dbBase) InsertValue(q dbQuerier, mi *modelInfo, isMulti bool, names []s
return id, err
}
+// InsertOrUpdate a row
+// If your primary key or unique column conflicts, the row will be updated;
+// otherwise it will be inserted.
+func (d *dbBase) InsertOrUpdate(q dbQuerier, mi *modelInfo, ind reflect.Value, a *alias, args ...string) (int64, error) {
+ args0 := ""
+ iouStr := ""
+ argsMap := map[string]string{}
+ switch a.Driver {
+ case DRMySQL:
+ iouStr = "ON DUPLICATE KEY UPDATE"
+ case DRPostgres:
+ if len(args) == 0 {
+ return 0, fmt.Errorf("`%s` use InsertOrUpdate must have a conflict column", a.DriverName)
+ }
+ args0 = strings.ToLower(args[0])
+ iouStr = fmt.Sprintf("ON CONFLICT (%s) DO UPDATE SET", args0)
+ default:
+ return 0, fmt.Errorf("`%s` nonsupport InsertOrUpdate in beego", a.DriverName)
+ }
+
+	// Get the key-value pairs
+ for _, v := range args {
+ kv := strings.Split(v, "=")
+ if len(kv) == 2 {
+ argsMap[strings.ToLower(kv[0])] = kv[1]
+ }
+ }
+
+ isMulti := false
+ names := make([]string, 0, len(mi.fields.dbcols)-1)
+ Q := d.ins.TableQuote()
+ values, _, err := d.collectValues(mi, ind, mi.fields.dbcols, true, true, &names, a.TZ)
+
+ if err != nil {
+ return 0, err
+ }
+
+ marks := make([]string, len(names))
+ updateValues := make([]interface{}, 0)
+ updates := make([]string, len(names))
+ var conflitValue interface{}
+ for i, v := range names {
+ marks[i] = "?"
+ valueStr := argsMap[strings.ToLower(v)]
+ if v == args0 {
+ conflitValue = values[i]
+ }
+ if valueStr != "" {
+ switch a.Driver {
+ case DRMySQL:
+ updates[i] = v + "=" + valueStr
+ case DRPostgres:
+ if conflitValue != nil {
+				// postgres ON CONFLICT DO UPDATE SET can't use col=col+values
+ updates[i] = fmt.Sprintf("%s=(select %s from %s where %s = ? )", v, valueStr, mi.table, args0)
+ updateValues = append(updateValues, conflitValue)
+ } else {
+ return 0, fmt.Errorf("`%s` must be in front of `%s` in your struct", args0, v)
+ }
+ }
+ } else {
+ updates[i] = v + "=?"
+ updateValues = append(updateValues, values[i])
+ }
+ }
+
+ values = append(values, updateValues...)
+
+ sep := fmt.Sprintf("%s, %s", Q, Q)
+ qmarks := strings.Join(marks, ", ")
+ qupdates := strings.Join(updates, ", ")
+ columns := strings.Join(names, sep)
+
+ multi := len(values) / len(names)
+
+ if isMulti {
+ qmarks = strings.Repeat(qmarks+"), (", multi-1) + qmarks
+ }
+	// conflitValue may be an int; can't use fmt.Sprintf
+ query := fmt.Sprintf("INSERT INTO %s%s%s (%s%s%s) VALUES (%s) %s "+qupdates, Q, mi.table, Q, Q, columns, Q, qmarks, iouStr)
+
+ d.ins.ReplaceMarks(&query)
+
+ if isMulti || !d.ins.HasReturningID(mi, &query) {
+ res, err := q.Exec(query, values...)
+ if err == nil {
+ if isMulti {
+ return res.RowsAffected()
+ }
+ return res.LastInsertId()
+ }
+ return 0, err
+ }
+
+ row := q.QueryRow(query, values...)
+ var id int64
+ err = row.Scan(&id)
+ if err != nil && err.Error() == `pq: syntax error at or near "ON"` {
+ err = fmt.Errorf("postgres version must 9.5 or higher")
+ }
+ return id, err
+}
+
// execute update sql dbQuerier with given struct reflect.Value.
func (d *dbBase) Update(q dbQuerier, mi *modelInfo, ind reflect.Value, tz *time.Location, cols []string) (int64, error) {
pkName, pkValue, ok := getExistPk(mi, ind)
- if ok == false {
+ if !ok {
return 0, ErrMissPK
}
@@ -472,7 +614,7 @@ func (d *dbBase) Update(q dbQuerier, mi *modelInfo, ind reflect.Value, tz *time.
setNames = make([]string, 0, len(cols))
}
- setValues, err := d.collectValues(mi, ind, cols, true, false, &setNames, tz)
+ setValues, _, err := d.collectValues(mi, ind, cols, true, false, &setNames, tz)
if err != nil {
return 0, err
}
@@ -497,18 +639,36 @@ func (d *dbBase) Update(q dbQuerier, mi *modelInfo, ind reflect.Value, tz *time.
// execute delete sql dbQuerier with given struct reflect.Value.
// delete index is pk.
-func (d *dbBase) Delete(q dbQuerier, mi *modelInfo, ind reflect.Value, tz *time.Location) (int64, error) {
- pkName, pkValue, ok := getExistPk(mi, ind)
- if ok == false {
- return 0, ErrMissPK
+func (d *dbBase) Delete(q dbQuerier, mi *modelInfo, ind reflect.Value, tz *time.Location, cols []string) (int64, error) {
+ var whereCols []string
+ var args []interface{}
+ // if specify cols length > 0, then use it for where condition.
+ if len(cols) > 0 {
+ var err error
+ whereCols = make([]string, 0, len(cols))
+ args, _, err = d.collectValues(mi, ind, cols, false, false, &whereCols, tz)
+ if err != nil {
+ return 0, err
+ }
+ } else {
+		// default: use pk value as where condition.
+ pkColumn, pkValue, ok := getExistPk(mi, ind)
+ if !ok {
+ return 0, ErrMissPK
+ }
+ whereCols = []string{pkColumn}
+ args = append(args, pkValue)
}
Q := d.ins.TableQuote()
- query := fmt.Sprintf("DELETE FROM %s%s%s WHERE %s%s%s = ?", Q, mi.table, Q, Q, pkName, Q)
+ sep := fmt.Sprintf("%s = ? AND %s", Q, Q)
+ wheres := strings.Join(whereCols, sep)
+
+ query := fmt.Sprintf("DELETE FROM %s%s%s WHERE %s%s%s = ?", Q, mi.table, Q, Q, wheres, Q)
d.ins.ReplaceMarks(&query)
- res, err := q.Exec(query, pkValue)
+ res, err := q.Exec(query, args...)
if err == nil {
num, err := res.RowsAffected()
if err != nil {
@@ -516,13 +676,13 @@ func (d *dbBase) Delete(q dbQuerier, mi *modelInfo, ind reflect.Value, tz *time.
}
if num > 0 {
if mi.fields.pk.auto {
- if mi.fields.pk.fieldType&IsPostiveIntegerField > 0 {
+ if mi.fields.pk.fieldType&IsPositiveIntegerField > 0 {
ind.FieldByIndex(mi.fields.pk.fieldIndex).SetUint(0)
} else {
ind.FieldByIndex(mi.fields.pk.fieldIndex).SetInt(0)
}
}
- err := d.deleteRels(q, mi, []interface{}{pkValue}, tz)
+ err := d.deleteRels(q, mi, args, tz)
if err != nil {
return num, err
}
@@ -538,7 +698,7 @@ func (d *dbBase) UpdateBatch(q dbQuerier, qs *querySet, mi *modelInfo, cond *Con
columns := make([]string, 0, len(params))
values := make([]interface{}, 0, len(params))
for col, val := range params {
- if fi, ok := mi.fields.GetByAny(col); ok == false || fi.dbcol == false {
+ if fi, ok := mi.fields.GetByAny(col); !ok || !fi.dbcol {
panic(fmt.Errorf("wrong field/column name `%s`", col))
} else {
columns = append(columns, fi.column)
@@ -673,7 +833,11 @@ func (d *dbBase) DeleteBatch(q dbQuerier, qs *querySet, mi *modelInfo, cond *Con
if err := rs.Scan(&ref); err != nil {
return 0, err
}
- args = append(args, reflect.ValueOf(ref).Interface())
+ pkValue, err := d.convertValueFromDB(mi.fields.pk, reflect.ValueOf(ref).Interface(), tz)
+ if err != nil {
+ return 0, err
+ }
+ args = append(args, pkValue)
cnt++
}
@@ -768,7 +932,7 @@ func (d *dbBase) ReadBatch(q dbQuerier, qs *querySet, mi *modelInfo, cond *Condi
if hasRel {
for _, fi := range mi.fields.fieldsDB {
if fi.fieldType&IsRelField > 0 {
- if maps[fi.column] == false {
+ if !maps[fi.column] {
tCols = append(tCols, fi.column)
}
}
@@ -826,7 +990,7 @@ func (d *dbBase) ReadBatch(q dbQuerier, qs *querySet, mi *modelInfo, cond *Condi
var cnt int64
for rs.Next() {
- if one && cnt == 0 || one == false {
+ if one && cnt == 0 || !one {
if err := rs.Scan(refs...); err != nil {
return 0, err
}
@@ -906,7 +1070,7 @@ func (d *dbBase) ReadBatch(q dbQuerier, qs *querySet, mi *modelInfo, cond *Condi
cnt++
}
- if one == false {
+ if !one {
if cnt > 0 {
ind.Set(slice)
} else {
@@ -927,12 +1091,17 @@ func (d *dbBase) Count(q dbQuerier, qs *querySet, mi *modelInfo, cond *Condition
tables.parseRelated(qs.related, qs.relDepth)
where, args := tables.getCondSQL(cond, false, tz)
+ groupBy := tables.getGroupSQL(qs.groups)
tables.getOrderSQL(qs.orders)
join := tables.getJoinSQL()
Q := d.ins.TableQuote()
- query := fmt.Sprintf("SELECT COUNT(*) FROM %s%s%s T0 %s%s", Q, mi.table, Q, join, where)
+ query := fmt.Sprintf("SELECT COUNT(*) FROM %s%s%s T0 %s%s%s", Q, mi.table, Q, join, where, groupBy)
+
+ if groupBy != "" {
+ query = fmt.Sprintf("SELECT COUNT(*) FROM (%s) AS T", query)
+ }
d.ins.ReplaceMarks(&query)
@@ -944,7 +1113,7 @@ func (d *dbBase) Count(q dbQuerier, qs *querySet, mi *modelInfo, cond *Condition
// generate sql with replacing operator string placeholders and replaced values.
func (d *dbBase) GenerateOperatorSQL(mi *modelInfo, fi *fieldInfo, operator string, args []interface{}, tz *time.Location) (string, []interface{}) {
- sql := ""
+ var sql string
params := getFlatParams(fi, args, tz)
if len(params) == 0 {
@@ -1071,13 +1240,13 @@ setValue:
}
value = b
}
- case fieldType == TypeCharField || fieldType == TypeTextField:
+ case fieldType == TypeCharField || fieldType == TypeTextField || fieldType == TypeJSONField || fieldType == TypeJsonbField:
if str == nil {
value = ToStr(val)
} else {
value = str.String()
}
- case fieldType == TypeDateField || fieldType == TypeDateTimeField:
+ case fieldType == TypeTimeField || fieldType == TypeDateField || fieldType == TypeDateTimeField:
if str == nil {
switch t := val.(type) {
case time.Time:
@@ -1097,15 +1266,20 @@ setValue:
if len(s) >= 19 {
s = s[:19]
t, err = time.ParseInLocation(formatDateTime, s, tz)
- } else {
+ } else if len(s) >= 10 {
if len(s) > 10 {
s = s[:10]
}
t, err = time.ParseInLocation(formatDate, s, tz)
+ } else if len(s) >= 8 {
+ if len(s) > 8 {
+ s = s[:8]
+ }
+ t, err = time.ParseInLocation(formatTime, s, tz)
}
t = t.In(DefaultTimeLoc)
- if err != nil && s != "0000-00-00" && s != "0000-00-00 00:00:00" {
+ if err != nil && s != "00:00:00" && s != "0000-00-00" && s != "0000-00-00 00:00:00" {
tErr = err
goto end
}
@@ -1140,7 +1314,7 @@ setValue:
tErr = err
goto end
}
- if fieldType&IsPostiveIntegerField > 0 {
+ if fieldType&IsPositiveIntegerField > 0 {
v, _ := str.Uint64()
value = v
} else {
@@ -1186,7 +1360,7 @@ end:
func (d *dbBase) setFieldValue(fi *fieldInfo, value interface{}, field reflect.Value) (interface{}, error) {
fieldType := fi.fieldType
- isNative := fi.isFielder == false
+ isNative := !fi.isFielder
setValue:
switch {
@@ -1212,7 +1386,7 @@ setValue:
field.SetBool(value.(bool))
}
}
- case fieldType == TypeCharField || fieldType == TypeTextField:
+ case fieldType == TypeCharField || fieldType == TypeTextField || fieldType == TypeJSONField || fieldType == TypeJsonbField:
if isNative {
if ns, ok := field.Interface().(sql.NullString); ok {
if value == nil {
@@ -1234,12 +1408,18 @@ setValue:
field.SetString(value.(string))
}
}
- case fieldType == TypeDateField || fieldType == TypeDateTimeField:
+ case fieldType == TypeTimeField || fieldType == TypeDateField || fieldType == TypeDateTimeField:
if isNative {
if value == nil {
value = time.Time{}
+ } else if field.Kind() == reflect.Ptr {
+ if value != nil {
+ v := value.(time.Time)
+ field.Set(reflect.ValueOf(&v))
+ }
+ } else {
+ field.Set(reflect.ValueOf(value))
}
- field.Set(reflect.ValueOf(value))
}
case fieldType == TypePositiveBitField && field.Kind() == reflect.Ptr:
if value != nil {
@@ -1292,7 +1472,7 @@ setValue:
field.Set(reflect.ValueOf(&v))
}
case fieldType&IsIntegerField > 0:
- if fieldType&IsPostiveIntegerField > 0 {
+ if fieldType&IsPositiveIntegerField > 0 {
if isNative {
if value == nil {
value = uint64(0)
@@ -1356,7 +1536,7 @@ setValue:
}
}
- if isNative == false {
+ if !isNative {
fd := field.Addr().Interface().(Fielder)
err := fd.SetRaw(value)
if err != nil {
@@ -1417,7 +1597,7 @@ func (d *dbBase) ReadValues(q dbQuerier, qs *querySet, mi *modelInfo, cond *Cond
infos = make([]*fieldInfo, 0, len(exprs))
for _, ex := range exprs {
index, name, fi, suc := tables.parseExprs(mi, strings.Split(ex, ExprSep))
- if suc == false {
+ if !suc {
panic(fmt.Errorf("unknown field/column name `%s`", ex))
}
cols = append(cols, fmt.Sprintf("%s.%s%s%s %s%s%s", index, Q, fi.column, Q, Q, name, Q))
@@ -1440,7 +1620,11 @@ func (d *dbBase) ReadValues(q dbQuerier, qs *querySet, mi *modelInfo, cond *Cond
sels := strings.Join(cols, ", ")
- query := fmt.Sprintf("SELECT %s FROM %s%s%s T0 %s%s%s%s%s", sels, Q, mi.table, Q, join, where, groupBy, orderBy, limit)
+ sqlSelect := "SELECT"
+ if qs.distinct {
+ sqlSelect += " DISTINCT"
+ }
+ query := fmt.Sprintf("%s %s FROM %s%s%s T0 %s%s%s%s%s", sqlSelect, sels, Q, mi.table, Q, join, where, groupBy, orderBy, limit)
d.ins.ReplaceMarks(&query)
@@ -1552,7 +1736,7 @@ func (d *dbBase) TableQuote() string {
return "`"
}
-// replace value placeholer in parametered sql string.
+// replace value placeholder in parametered sql string.
func (d *dbBase) ReplaceMarks(query *string) {
// default use `?` as mark, do nothing
}
@@ -1562,6 +1746,11 @@ func (d *dbBase) HasReturningID(*modelInfo, *string) bool {
return false
}
+// sync auto key
+func (d *dbBase) setval(db dbQuerier, mi *modelInfo, autoFields []string) error {
+ return nil
+}
+
// convert time from db.
func (d *dbBase) TimeFromDB(t *time.Time, tz *time.Location) {
*t = t.In(tz)
diff --git a/src/vendor/github.com/astaxie/beego/orm/db_alias.go b/src/vendor/github.com/astaxie/beego/orm/db_alias.go
index b6c833a71..c70892392 100644
--- a/src/vendor/github.com/astaxie/beego/orm/db_alias.go
+++ b/src/vendor/github.com/astaxie/beego/orm/db_alias.go
@@ -60,6 +60,8 @@ var (
"sqlite3": DRSqlite,
"tidb": DRTiDB,
"oracle": DROracle,
+ "oci8": DROracle, // github.com/mattn/go-oci8
+ "ora": DROracle, //https://github.com/rana/ora
}
dbBasers = map[DriverType]dbBaser{
DRMySQL: newdbBaseMysql(),
@@ -80,7 +82,7 @@ type _dbCache struct {
func (ac *_dbCache) add(name string, al *alias) (added bool) {
ac.mux.Lock()
defer ac.mux.Unlock()
- if _, ok := ac.cache[name]; ok == false {
+ if _, ok := ac.cache[name]; !ok {
ac.cache[name] = al
added = true
}
@@ -186,7 +188,7 @@ func addAliasWthDB(aliasName, driverName string, db *sql.DB) (*alias, error) {
return nil, fmt.Errorf("register db Ping `%s`, %s", aliasName, err.Error())
}
- if dataBaseCache.add(aliasName, al) == false {
+ if !dataBaseCache.add(aliasName, al) {
return nil, fmt.Errorf("DataBase alias name `%s` already registered, cannot reuse", aliasName)
}
@@ -244,11 +246,11 @@ end:
// RegisterDriver Register a database driver use specify driver name, this can be definition the driver is which database type.
func RegisterDriver(driverName string, typ DriverType) error {
- if t, ok := drivers[driverName]; ok == false {
+ if t, ok := drivers[driverName]; !ok {
drivers[driverName] = typ
} else {
if t != typ {
- return fmt.Errorf("driverName `%s` db driver already registered and is other type\n", driverName)
+ return fmt.Errorf("driverName `%s` db driver already registered and is other type", driverName)
}
}
return nil
@@ -259,7 +261,7 @@ func SetDataBaseTZ(aliasName string, tz *time.Location) error {
if al, ok := dataBaseCache.get(aliasName); ok {
al.TZ = tz
} else {
- return fmt.Errorf("DataBase alias name `%s` not registered\n", aliasName)
+ return fmt.Errorf("DataBase alias name `%s` not registered", aliasName)
}
return nil
}
@@ -294,5 +296,5 @@ func GetDB(aliasNames ...string) (*sql.DB, error) {
if ok {
return al.DB, nil
}
- return nil, fmt.Errorf("DataBase of alias name `%s` not found\n", name)
+ return nil, fmt.Errorf("DataBase of alias name `%s` not found", name)
}
diff --git a/src/vendor/github.com/astaxie/beego/orm/db_mysql.go b/src/vendor/github.com/astaxie/beego/orm/db_mysql.go
index 10fe26571..51185563f 100644
--- a/src/vendor/github.com/astaxie/beego/orm/db_mysql.go
+++ b/src/vendor/github.com/astaxie/beego/orm/db_mysql.go
@@ -16,6 +16,8 @@ package orm
import (
"fmt"
+ "reflect"
+ "strings"
)
// mysql operators.
@@ -96,6 +98,82 @@ func (d *dbBaseMysql) IndexExists(db dbQuerier, table string, name string) bool
return cnt > 0
}
+// InsertOrUpdate a row
+// If your primary key or unique column conflict will update
+// If no will insert
+// Add "`" for mysql sql building
+func (d *dbBaseMysql) InsertOrUpdate(q dbQuerier, mi *modelInfo, ind reflect.Value, a *alias, args ...string) (int64, error) {
+ var iouStr string
+ argsMap := map[string]string{}
+
+ iouStr = "ON DUPLICATE KEY UPDATE"
+
+	// Get the key-value pairs
+ for _, v := range args {
+ kv := strings.Split(v, "=")
+ if len(kv) == 2 {
+ argsMap[strings.ToLower(kv[0])] = kv[1]
+ }
+ }
+
+ isMulti := false
+ names := make([]string, 0, len(mi.fields.dbcols)-1)
+ Q := d.ins.TableQuote()
+ values, _, err := d.collectValues(mi, ind, mi.fields.dbcols, true, true, &names, a.TZ)
+
+ if err != nil {
+ return 0, err
+ }
+
+ marks := make([]string, len(names))
+ updateValues := make([]interface{}, 0)
+ updates := make([]string, len(names))
+
+ for i, v := range names {
+ marks[i] = "?"
+ valueStr := argsMap[strings.ToLower(v)]
+ if valueStr != "" {
+ updates[i] = "`" + v + "`" + "=" + valueStr
+ } else {
+ updates[i] = "`" + v + "`" + "=?"
+ updateValues = append(updateValues, values[i])
+ }
+ }
+
+ values = append(values, updateValues...)
+
+ sep := fmt.Sprintf("%s, %s", Q, Q)
+ qmarks := strings.Join(marks, ", ")
+ qupdates := strings.Join(updates, ", ")
+ columns := strings.Join(names, sep)
+
+ multi := len(values) / len(names)
+
+ if isMulti {
+ qmarks = strings.Repeat(qmarks+"), (", multi-1) + qmarks
+ }
+	// values may be of any type; can't use fmt.Sprintf
+ query := fmt.Sprintf("INSERT INTO %s%s%s (%s%s%s) VALUES (%s) %s "+qupdates, Q, mi.table, Q, Q, columns, Q, qmarks, iouStr)
+
+ d.ins.ReplaceMarks(&query)
+
+ if isMulti || !d.ins.HasReturningID(mi, &query) {
+ res, err := q.Exec(query, values...)
+ if err == nil {
+ if isMulti {
+ return res.RowsAffected()
+ }
+ return res.LastInsertId()
+ }
+ return 0, err
+ }
+
+ row := q.QueryRow(query, values...)
+ var id int64
+ err = row.Scan(&id)
+ return id, err
+}
+
// create new mysql dbBaser.
func newdbBaseMysql() dbBaser {
b := new(dbBaseMysql)
diff --git a/src/vendor/github.com/astaxie/beego/orm/db_oracle.go b/src/vendor/github.com/astaxie/beego/orm/db_oracle.go
index deca36ad6..f5d6aaa26 100644
--- a/src/vendor/github.com/astaxie/beego/orm/db_oracle.go
+++ b/src/vendor/github.com/astaxie/beego/orm/db_oracle.go
@@ -94,3 +94,43 @@ func (d *dbBaseOracle) IndexExists(db dbQuerier, table string, name string) bool
row.Scan(&cnt)
return cnt > 0
}
+
+// execute insert sql with given struct and given values.
+// insert the given values, not the field values in struct.
+func (d *dbBaseOracle) InsertValue(q dbQuerier, mi *modelInfo, isMulti bool, names []string, values []interface{}) (int64, error) {
+ Q := d.ins.TableQuote()
+
+ marks := make([]string, len(names))
+ for i := range marks {
+ marks[i] = ":" + names[i]
+ }
+
+ sep := fmt.Sprintf("%s, %s", Q, Q)
+ qmarks := strings.Join(marks, ", ")
+ columns := strings.Join(names, sep)
+
+ multi := len(values) / len(names)
+
+ if isMulti {
+ qmarks = strings.Repeat(qmarks+"), (", multi-1) + qmarks
+ }
+
+ query := fmt.Sprintf("INSERT INTO %s%s%s (%s%s%s) VALUES (%s)", Q, mi.table, Q, Q, columns, Q, qmarks)
+
+ d.ins.ReplaceMarks(&query)
+
+ if isMulti || !d.ins.HasReturningID(mi, &query) {
+ res, err := q.Exec(query, values...)
+ if err == nil {
+ if isMulti {
+ return res.RowsAffected()
+ }
+ return res.LastInsertId()
+ }
+ return 0, err
+ }
+ row := q.QueryRow(query, values...)
+ var id int64
+ err := row.Scan(&id)
+ return id, err
+}
diff --git a/src/vendor/github.com/astaxie/beego/orm/db_postgres.go b/src/vendor/github.com/astaxie/beego/orm/db_postgres.go
index 7dbef95a2..e972c4a25 100644
--- a/src/vendor/github.com/astaxie/beego/orm/db_postgres.go
+++ b/src/vendor/github.com/astaxie/beego/orm/db_postgres.go
@@ -56,6 +56,8 @@ var postgresTypes = map[string]string{
"uint64": `bigint CHECK("%COL%" >= 0)`,
"float64": "double precision",
"float64-decimal": "numeric(%d, %d)",
+ "json": "json",
+ "jsonb": "jsonb",
}
// postgresql dbBaser.
@@ -123,14 +125,35 @@ func (d *dbBasePostgres) ReplaceMarks(query *string) {
}
// make returning sql support for postgresql.
-func (d *dbBasePostgres) HasReturningID(mi *modelInfo, query *string) (has bool) {
- if mi.fields.pk.auto {
- if query != nil {
- *query = fmt.Sprintf(`%s RETURNING "%s"`, *query, mi.fields.pk.column)
- }
- has = true
+func (d *dbBasePostgres) HasReturningID(mi *modelInfo, query *string) bool {
+ fi := mi.fields.pk
+ if fi.fieldType&IsPositiveIntegerField == 0 && fi.fieldType&IsIntegerField == 0 {
+ return false
}
- return
+
+ if query != nil {
+ *query = fmt.Sprintf(`%s RETURNING "%s"`, *query, fi.column)
+ }
+ return true
+}
+
+// sync auto key
+func (d *dbBasePostgres) setval(db dbQuerier, mi *modelInfo, autoFields []string) error {
+ if len(autoFields) == 0 {
+ return nil
+ }
+
+ Q := d.ins.TableQuote()
+ for _, name := range autoFields {
+ query := fmt.Sprintf("SELECT setval(pg_get_serial_sequence('%s', '%s'), (SELECT MAX(%s%s%s) FROM %s%s%s));",
+ mi.table, name,
+ Q, name, Q,
+ Q, mi.table, Q)
+ if _, err := db.Exec(query); err != nil {
+ return err
+ }
+ }
+ return nil
}
// show table sql for postgresql.
diff --git a/src/vendor/github.com/astaxie/beego/orm/db_sqlite.go b/src/vendor/github.com/astaxie/beego/orm/db_sqlite.go
index a3cb69a79..a43a5594c 100644
--- a/src/vendor/github.com/astaxie/beego/orm/db_sqlite.go
+++ b/src/vendor/github.com/astaxie/beego/orm/db_sqlite.go
@@ -134,7 +134,7 @@ func (d *dbBaseSqlite) IndexExists(db dbQuerier, table string, name string) bool
defer rows.Close()
for rows.Next() {
var tmp, index sql.NullString
- rows.Scan(&tmp, &index, &tmp)
+ rows.Scan(&tmp, &index, &tmp, &tmp, &tmp)
if name == index.String {
return true
}
diff --git a/src/vendor/github.com/astaxie/beego/orm/db_tables.go b/src/vendor/github.com/astaxie/beego/orm/db_tables.go
index e4c74acee..42be5550e 100644
--- a/src/vendor/github.com/astaxie/beego/orm/db_tables.go
+++ b/src/vendor/github.com/astaxie/beego/orm/db_tables.go
@@ -63,7 +63,7 @@ func (t *dbTables) set(names []string, mi *modelInfo, fi *fieldInfo, inner bool)
// add table info to collection.
func (t *dbTables) add(names []string, mi *modelInfo, fi *fieldInfo, inner bool) (*dbTable, bool) {
name := strings.Join(names, ExprSep)
- if _, ok := t.tablesM[name]; ok == false {
+ if _, ok := t.tablesM[name]; !ok {
i := len(t.tables) + 1
jt := &dbTable{i, fmt.Sprintf("T%d", i), name, names, false, inner, mi, fi, nil}
t.tablesM[name] = jt
@@ -261,7 +261,7 @@ loopFor:
fiN, okN = mmi.fields.GetByAny(exprs[i+1])
}
- if isRel && (fi.mi.isThrough == false || num != i) {
+ if isRel && (!fi.mi.isThrough || num != i) {
if fi.null || t.skipEnd {
inner = false
}
@@ -364,7 +364,7 @@ func (t *dbTables) getCondSQL(cond *Condition, sub bool, tz *time.Location) (whe
}
index, _, fi, suc := t.parseExprs(mi, exprs)
- if suc == false {
+ if !suc {
panic(fmt.Errorf("unknown field/column name `%s`", strings.Join(p.exprs, ExprSep)))
}
@@ -383,7 +383,7 @@ func (t *dbTables) getCondSQL(cond *Condition, sub bool, tz *time.Location) (whe
}
}
- if sub == false && where != "" {
+ if !sub && where != "" {
where = "WHERE " + where
}
@@ -403,7 +403,7 @@ func (t *dbTables) getGroupSQL(groups []string) (groupSQL string) {
exprs := strings.Split(group, ExprSep)
index, _, fi, suc := t.parseExprs(t.mi, exprs)
- if suc == false {
+ if !suc {
panic(fmt.Errorf("unknown field/column name `%s`", strings.Join(exprs, ExprSep)))
}
@@ -432,7 +432,7 @@ func (t *dbTables) getOrderSQL(orders []string) (orderSQL string) {
exprs := strings.Split(order, ExprSep)
index, _, fi, suc := t.parseExprs(t.mi, exprs)
- if suc == false {
+ if !suc {
panic(fmt.Errorf("unknown field/column name `%s`", strings.Join(exprs, ExprSep)))
}
diff --git a/src/vendor/github.com/astaxie/beego/orm/db_utils.go b/src/vendor/github.com/astaxie/beego/orm/db_utils.go
index c97caf361..7ae10ca5e 100644
--- a/src/vendor/github.com/astaxie/beego/orm/db_utils.go
+++ b/src/vendor/github.com/astaxie/beego/orm/db_utils.go
@@ -33,14 +33,16 @@ func getExistPk(mi *modelInfo, ind reflect.Value) (column string, value interfac
fi := mi.fields.pk
v := ind.FieldByIndex(fi.fieldIndex)
- if fi.fieldType&IsPostiveIntegerField > 0 {
+ if fi.fieldType&IsPositiveIntegerField > 0 {
vu := v.Uint()
exist = vu > 0
value = vu
} else if fi.fieldType&IsIntegerField > 0 {
vu := v.Int()
- exist = vu > 0
+ exist = true
value = vu
+ } else if fi.fieldType&IsRelField > 0 {
+ _, value, exist = getExistPk(fi.relModelInfo, reflect.Indirect(v))
} else {
vu := v.String()
exist = vu != ""
@@ -74,24 +76,32 @@ outFor:
case reflect.String:
v := val.String()
if fi != nil {
- if fi.fieldType == TypeDateField || fi.fieldType == TypeDateTimeField {
+ if fi.fieldType == TypeTimeField || fi.fieldType == TypeDateField || fi.fieldType == TypeDateTimeField {
var t time.Time
var err error
if len(v) >= 19 {
s := v[:19]
t, err = time.ParseInLocation(formatDateTime, s, DefaultTimeLoc)
- } else {
+ } else if len(v) >= 10 {
s := v
if len(v) > 10 {
s = v[:10]
}
t, err = time.ParseInLocation(formatDate, s, tz)
+ } else {
+ s := v
+ if len(s) > 8 {
+ s = v[:8]
+ }
+ t, err = time.ParseInLocation(formatTime, s, tz)
}
if err == nil {
if fi.fieldType == TypeDateField {
v = t.In(tz).Format(formatDate)
- } else {
+ } else if fi.fieldType == TypeDateTimeField {
v = t.In(tz).Format(formatDateTime)
+ } else {
+ v = t.In(tz).Format(formatTime)
}
}
}
@@ -137,6 +147,10 @@ outFor:
if v, ok := arg.(time.Time); ok {
if fi != nil && fi.fieldType == TypeDateField {
arg = v.In(tz).Format(formatDate)
+ } else if fi != nil && fi.fieldType == TypeDateTimeField {
+ arg = v.In(tz).Format(formatDateTime)
+ } else if fi != nil && fi.fieldType == TypeTimeField {
+ arg = v.In(tz).Format(formatTime)
} else {
arg = v.In(tz).Format(formatDateTime)
}
@@ -144,7 +158,7 @@ outFor:
typ := val.Type()
name := getFullName(typ)
var value interface{}
- if mmi, ok := modelCache.getByFN(name); ok {
+ if mmi, ok := modelCache.getByFullName(name); ok {
if _, vu, exist := getExistPk(mmi, val); exist {
value = vu
}
diff --git a/src/vendor/github.com/astaxie/beego/orm/models.go b/src/vendor/github.com/astaxie/beego/orm/models.go
index faf551be2..1d5a4dc26 100644
--- a/src/vendor/github.com/astaxie/beego/orm/models.go
+++ b/src/vendor/github.com/astaxie/beego/orm/models.go
@@ -29,39 +29,18 @@ const (
var (
modelCache = &_modelCache{
- cache: make(map[string]*modelInfo),
- cacheByFN: make(map[string]*modelInfo),
- }
- supportTag = map[string]int{
- "-": 1,
- "null": 1,
- "index": 1,
- "unique": 1,
- "pk": 1,
- "auto": 1,
- "auto_now": 1,
- "auto_now_add": 1,
- "size": 2,
- "column": 2,
- "default": 2,
- "rel": 2,
- "reverse": 2,
- "rel_table": 2,
- "rel_through": 2,
- "digits": 2,
- "decimals": 2,
- "on_delete": 2,
- "type": 2,
+ cache: make(map[string]*modelInfo),
+ cacheByFullName: make(map[string]*modelInfo),
}
)
// model info collection
type _modelCache struct {
- sync.RWMutex
- orders []string
- cache map[string]*modelInfo
- cacheByFN map[string]*modelInfo
- done bool
+	sync.RWMutex // only used outside for bootStrap
+ orders []string
+ cache map[string]*modelInfo
+ cacheByFullName map[string]*modelInfo
+ done bool
}
// get all model info
@@ -88,9 +67,9 @@ func (mc *_modelCache) get(table string) (mi *modelInfo, ok bool) {
return
}
-// get model info by field name
-func (mc *_modelCache) getByFN(name string) (mi *modelInfo, ok bool) {
- mi, ok = mc.cacheByFN[name]
+// get model info by full name
+func (mc *_modelCache) getByFullName(name string) (mi *modelInfo, ok bool) {
+ mi, ok = mc.cacheByFullName[name]
return
}
@@ -98,7 +77,7 @@ func (mc *_modelCache) getByFN(name string) (mi *modelInfo, ok bool) {
func (mc *_modelCache) set(table string, mi *modelInfo) *modelInfo {
mii := mc.cache[table]
mc.cache[table] = mi
- mc.cacheByFN[mi.fullName] = mi
+ mc.cacheByFullName[mi.fullName] = mi
if mii == nil {
mc.orders = append(mc.orders, table)
}
@@ -109,7 +88,7 @@ func (mc *_modelCache) set(table string, mi *modelInfo) *modelInfo {
func (mc *_modelCache) clean() {
mc.orders = make([]string, 0)
mc.cache = make(map[string]*modelInfo)
- mc.cacheByFN = make(map[string]*modelInfo)
+ mc.cacheByFullName = make(map[string]*modelInfo)
mc.done = false
}
diff --git a/src/vendor/github.com/astaxie/beego/orm/models_boot.go b/src/vendor/github.com/astaxie/beego/orm/models_boot.go
index 3690557b8..5327f754c 100644
--- a/src/vendor/github.com/astaxie/beego/orm/models_boot.go
+++ b/src/vendor/github.com/astaxie/beego/orm/models_boot.go
@@ -15,7 +15,6 @@
package orm
import (
- "errors"
"fmt"
"os"
"reflect"
@@ -23,24 +22,34 @@ import (
)
// register models.
-// prefix means table name prefix.
-func registerModel(prefix string, model interface{}) {
+// PrefixOrSuffix means table name prefix or suffix.
+// isPrefix whether the prefix is prefix or suffix
+func registerModel(PrefixOrSuffix string, model interface{}, isPrefix bool) {
val := reflect.ValueOf(model)
- ind := reflect.Indirect(val)
- typ := ind.Type()
+ typ := reflect.Indirect(val).Type()
if val.Kind() != reflect.Ptr {
panic(fmt.Errorf(" cannot use non-ptr model struct `%s`", getFullName(typ)))
}
+ // For this case:
+ // u := &User{}
+ // registerModel(&u)
+ if typ.Kind() == reflect.Ptr {
+ panic(fmt.Errorf(" only allow ptr model struct, it looks you use two reference to the struct `%s`", typ))
+ }
table := getTableName(val)
- if prefix != "" {
- table = prefix + table
+ if PrefixOrSuffix != "" {
+ if isPrefix {
+ table = PrefixOrSuffix + table
+ } else {
+ table = table + PrefixOrSuffix
+ }
}
-
+	// a model's full name is pkgpath + struct name
name := getFullName(typ)
- if _, ok := modelCache.getByFN(name); ok {
+ if _, ok := modelCache.getByFullName(name); ok {
fmt.Printf(" model `%s` repeat register, must be unique\n", name)
os.Exit(2)
}
@@ -50,34 +59,34 @@ func registerModel(prefix string, model interface{}) {
os.Exit(2)
}
- info := newModelInfo(val)
- if info.fields.pk == nil {
+ mi := newModelInfo(val)
+ if mi.fields.pk == nil {
outFor:
- for _, fi := range info.fields.fieldsDB {
+ for _, fi := range mi.fields.fieldsDB {
if strings.ToLower(fi.name) == "id" {
switch fi.addrValue.Elem().Kind() {
case reflect.Int, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint32, reflect.Uint64:
fi.auto = true
fi.pk = true
- info.fields.pk = fi
+ mi.fields.pk = fi
break outFor
}
}
}
- if info.fields.pk == nil {
- fmt.Printf(" `%s` need a primary key field\n", name)
+ if mi.fields.pk == nil {
+ fmt.Printf(" `%s` needs a primary key field, default is to use 'id' if not set\n", name)
os.Exit(2)
}
}
- info.table = table
- info.pkg = typ.PkgPath()
- info.model = model
- info.manual = true
+ mi.table = table
+ mi.pkg = typ.PkgPath()
+ mi.model = model
+ mi.manual = true
- modelCache.set(table, info)
+ modelCache.set(table, mi)
}
// boostrap models
@@ -85,31 +94,30 @@ func bootStrap() {
if modelCache.done {
return
}
-
var (
err error
models map[string]*modelInfo
)
-
if dataBaseCache.getDefault() == nil {
err = fmt.Errorf("must have one register DataBase alias named `default`")
goto end
}
+	// set rel and reverse model
+	// RelManyToMany sets the relTable
models = modelCache.all()
for _, mi := range models {
for _, fi := range mi.fields.columns {
if fi.rel || fi.reverse {
elm := fi.addrValue.Type().Elem()
- switch fi.fieldType {
- case RelReverseMany, RelManyToMany:
+ if fi.fieldType == RelReverseMany || fi.fieldType == RelManyToMany {
elm = elm.Elem()
}
-
+			// check that the rel or reverse model is already registered
name := getFullName(elm)
- mii, ok := modelCache.getByFN(name)
- if ok == false || mii.pkg != elm.PkgPath() {
- err = fmt.Errorf("can not found rel in field `%s`, `%s` may be miss register", fi.fullName, elm.String())
+ mii, ok := modelCache.getByFullName(name)
+ if !ok || mii.pkg != elm.PkgPath() {
+ err = fmt.Errorf("can not find rel in field `%s`, `%s` may be miss register", fi.fullName, elm.String())
goto end
}
fi.relModelInfo = mii
@@ -117,20 +125,17 @@ func bootStrap() {
switch fi.fieldType {
case RelManyToMany:
if fi.relThrough != "" {
- msg := fmt.Sprintf("field `%s` wrong rel_through value `%s`", fi.fullName, fi.relThrough)
if i := strings.LastIndex(fi.relThrough, "."); i != -1 && len(fi.relThrough) > (i+1) {
pn := fi.relThrough[:i]
- rmi, ok := modelCache.getByFN(fi.relThrough)
- if ok == false || pn != rmi.pkg {
- err = errors.New(msg + " cannot find table")
+ rmi, ok := modelCache.getByFullName(fi.relThrough)
+ if !ok || pn != rmi.pkg {
+ err = fmt.Errorf("field `%s` wrong rel_through value `%s` cannot find table", fi.fullName, fi.relThrough)
goto end
}
-
fi.relThroughModelInfo = rmi
fi.relTable = rmi.table
-
} else {
- err = errors.New(msg)
+ err = fmt.Errorf("field `%s` wrong rel_through value `%s`", fi.fullName, fi.relThrough)
goto end
}
} else {
@@ -138,7 +143,6 @@ func bootStrap() {
if fi.relTable != "" {
i.table = fi.relTable
}
-
if v := modelCache.set(i.table, i); v != nil {
err = fmt.Errorf("the rel table name `%s` already registered, cannot be use, please change one", fi.relTable)
goto end
@@ -153,6 +157,8 @@ func bootStrap() {
}
}
+ // check the rel filed while the relModelInfo also has filed point to current model
+ // if not exist, add a new field to the relModelInfo
models = modelCache.all()
for _, mi := range models {
for _, fi := range mi.fields.fieldsRel {
@@ -165,8 +171,7 @@ func bootStrap() {
break
}
}
-
- if inModel == false {
+ if !inModel {
rmi := fi.relModelInfo
ffi := new(fieldInfo)
ffi.name = mi.name
@@ -180,7 +185,7 @@ func bootStrap() {
} else {
ffi.fieldType = RelReverseMany
}
- if rmi.fields.Add(ffi) == false {
+ if !rmi.fields.Add(ffi) {
added := false
for cnt := 0; cnt < 5; cnt++ {
ffi.name = fmt.Sprintf("%s%d", mi.name, cnt)
@@ -190,7 +195,7 @@ func bootStrap() {
break
}
}
- if added == false {
+ if !added {
panic(fmt.Errorf("cannot generate auto reverse field info `%s` to `%s`", fi.fullName, ffi.fullName))
}
}
@@ -216,7 +221,6 @@ func bootStrap() {
}
}
}
-
if fi.reverseFieldInfoTwo == nil {
err = fmt.Errorf("can not find m2m field for m2m model `%s`, ensure your m2m model defined correct",
fi.relThroughModelInfo.fullName)
@@ -244,7 +248,7 @@ func bootStrap() {
break mForA
}
}
- if found == false {
+ if !found {
err = fmt.Errorf("reverse field `%s` not found in model `%s`", fi.fullName, fi.relModelInfo.fullName)
goto end
}
@@ -263,7 +267,7 @@ func bootStrap() {
break mForB
}
}
- if found == false {
+ if !found {
mForC:
for _, ffi := range fi.relModelInfo.fields.fieldsByType[RelManyToMany] {
conditions := fi.relThrough != "" && fi.relThrough == ffi.relThrough ||
@@ -283,7 +287,7 @@ func bootStrap() {
}
}
}
- if found == false {
+ if !found {
err = fmt.Errorf("reverse field for `%s` not found in model `%s`", fi.fullName, fi.relModelInfo.fullName)
goto end
}
@@ -300,17 +304,31 @@ end:
// RegisterModel register models
func RegisterModel(models ...interface{}) {
+ if modelCache.done {
+ panic(fmt.Errorf("RegisterModel must be run before BootStrap"))
+ }
RegisterModelWithPrefix("", models...)
}
// RegisterModelWithPrefix register models with a prefix
func RegisterModelWithPrefix(prefix string, models ...interface{}) {
if modelCache.done {
- panic(fmt.Errorf("RegisterModel must be run before BootStrap"))
+ panic(fmt.Errorf("RegisterModelWithPrefix must be run before BootStrap"))
}
for _, model := range models {
- registerModel(prefix, model)
+ registerModel(prefix, model, true)
+ }
+}
+
+// RegisterModelWithSuffix register models with a suffix
+func RegisterModelWithSuffix(suffix string, models ...interface{}) {
+ if modelCache.done {
+ panic(fmt.Errorf("RegisterModelWithSuffix must be run before BootStrap"))
+ }
+
+ for _, model := range models {
+ registerModel(suffix, model, false)
}
}
@@ -320,7 +338,6 @@ func BootStrap() {
if modelCache.done {
return
}
-
modelCache.Lock()
defer modelCache.Unlock()
bootStrap()
diff --git a/src/vendor/github.com/astaxie/beego/orm/models_fields.go b/src/vendor/github.com/astaxie/beego/orm/models_fields.go
index a8cf8e4f6..578206009 100644
--- a/src/vendor/github.com/astaxie/beego/orm/models_fields.go
+++ b/src/vendor/github.com/astaxie/beego/orm/models_fields.go
@@ -25,6 +25,7 @@ const (
TypeBooleanField = 1 << iota
TypeCharField
TypeTextField
+ TypeTimeField
TypeDateField
TypeDateTimeField
TypeBitField
@@ -37,6 +38,8 @@ const (
TypePositiveBigIntegerField
TypeFloatField
TypeDecimalField
+ TypeJSONField
+ TypeJsonbField
RelForeignKey
RelOneToOne
RelManyToMany
@@ -46,10 +49,10 @@ const (
// Define some logic enum
const (
- IsIntegerField = ^-TypePositiveBigIntegerField >> 4 << 5
- IsPostiveIntegerField = ^-TypePositiveBigIntegerField >> 8 << 9
- IsRelField = ^-RelReverseMany >> 14 << 15
- IsFieldType = ^-RelReverseMany<<1 + 1
+ IsIntegerField = ^-TypePositiveBigIntegerField >> 5 << 6
+ IsPositiveIntegerField = ^-TypePositiveBigIntegerField >> 9 << 10
+ IsRelField = ^-RelReverseMany >> 17 << 18
+ IsFieldType = ^-RelReverseMany<<1 + 1
)
// BooleanField A true/false field.
@@ -145,6 +148,65 @@ func (e *CharField) RawValue() interface{} {
// verify CharField implement Fielder
var _ Fielder = new(CharField)
+// TimeField A time, represented in go by a time.Time instance.
+// only time values like 10:00:00
+// Has a few extra, optional attr tag:
+//
+// auto_now:
+// Automatically set the field to now every time the object is saved. Useful for “last-modified” timestamps.
+// Note that the current date is always used; it’s not just a default value that you can override.
+//
+// auto_now_add:
+// Automatically set the field to now when the object is first created. Useful for creation of timestamps.
+// Note that the current date is always used; it’s not just a default value that you can override.
+//
+// eg: `orm:"auto_now"` or `orm:"auto_now_add"`
+type TimeField time.Time
+
+// Value return the time.Time
+func (e TimeField) Value() time.Time {
+ return time.Time(e)
+}
+
+// Set set the TimeField's value
+func (e *TimeField) Set(d time.Time) {
+ *e = TimeField(d)
+}
+
+// String convert time to string
+func (e *TimeField) String() string {
+ return e.Value().String()
+}
+
+// FieldType return enum type Date
+func (e *TimeField) FieldType() int {
+ return TypeDateField
+}
+
+// SetRaw convert the interface to time.Time. Allow string and time.Time
+func (e *TimeField) SetRaw(value interface{}) error {
+ switch d := value.(type) {
+ case time.Time:
+ e.Set(d)
+ case string:
+ v, err := timeParse(d, formatTime)
+ if err != nil {
+ e.Set(v)
+ }
+ return err
+ default:
+ return fmt.Errorf(" unknown value `%s`", value)
+ }
+ return nil
+}
+
+// RawValue return time value
+func (e *TimeField) RawValue() interface{} {
+ return e.Value()
+}
+
+var _ Fielder = new(TimeField)
+
// DateField A date, represented in go by a time.Time instance.
// only date values like 2006-01-02
// Has a few extra, optional attr tag:
@@ -627,3 +689,87 @@ func (e *TextField) RawValue() interface{} {
// verify TextField implement Fielder
var _ Fielder = new(TextField)
+
+// JSONField postgres json field.
+type JSONField string
+
+// Value return JSONField value
+func (j JSONField) Value() string {
+ return string(j)
+}
+
+// Set the JSONField value
+func (j *JSONField) Set(d string) {
+ *j = JSONField(d)
+}
+
+// String convert JSONField to string
+func (j *JSONField) String() string {
+ return j.Value()
+}
+
+// FieldType return enum type
+func (j *JSONField) FieldType() int {
+ return TypeJSONField
+}
+
+// SetRaw convert interface string to string
+func (j *JSONField) SetRaw(value interface{}) error {
+ switch d := value.(type) {
+ case string:
+ j.Set(d)
+ default:
+ return fmt.Errorf(" unknown value `%s`", value)
+ }
+ return nil
+}
+
+// RawValue return JSONField value
+func (j *JSONField) RawValue() interface{} {
+ return j.Value()
+}
+
+// verify JSONField implement Fielder
+var _ Fielder = new(JSONField)
+
+// JsonbField postgres json field.
+type JsonbField string
+
+// Value return JsonbField value
+func (j JsonbField) Value() string {
+ return string(j)
+}
+
+// Set the JsonbField value
+func (j *JsonbField) Set(d string) {
+ *j = JsonbField(d)
+}
+
+// String convert JsonbField to string
+func (j *JsonbField) String() string {
+ return j.Value()
+}
+
+// FieldType return enum type
+func (j *JsonbField) FieldType() int {
+ return TypeJsonbField
+}
+
+// SetRaw convert interface string to string
+func (j *JsonbField) SetRaw(value interface{}) error {
+ switch d := value.(type) {
+ case string:
+ j.Set(d)
+ default:
+ return fmt.Errorf(" unknown value `%s`", value)
+ }
+ return nil
+}
+
+// RawValue return JsonbField value
+func (j *JsonbField) RawValue() interface{} {
+ return j.Value()
+}
+
+// verify JsonbField implement Fielder
+var _ Fielder = new(JsonbField)
diff --git a/src/vendor/github.com/astaxie/beego/orm/models_info_f.go b/src/vendor/github.com/astaxie/beego/orm/models_info_f.go
index 996a2f408..bbb7d71fe 100644
--- a/src/vendor/github.com/astaxie/beego/orm/models_info_f.go
+++ b/src/vendor/github.com/astaxie/beego/orm/models_info_f.go
@@ -47,7 +47,7 @@ func (f *fields) Add(fi *fieldInfo) (added bool) {
} else {
return
}
- if _, ok := f.fieldsByType[fi.fieldType]; ok == false {
+ if _, ok := f.fieldsByType[fi.fieldType]; !ok {
f.fieldsByType[fi.fieldType] = make([]*fieldInfo, 0)
}
f.fieldsByType[fi.fieldType] = append(f.fieldsByType[fi.fieldType], fi)
@@ -104,7 +104,7 @@ type fieldInfo struct {
mi *modelInfo
fieldIndex []int
fieldType int
- dbcol bool
+ dbcol bool // table column fk and onetoone
inModel bool
name string
fullName string
@@ -116,12 +116,13 @@ type fieldInfo struct {
null bool
index bool
unique bool
- colDefault bool
- initial StrTo
+ colDefault bool // whether has default tag
+ initial StrTo // store the default value
size int
+ toText bool
autoNow bool
autoNowAdd bool
- rel bool
+ rel bool // if type equal to RelForeignKey, RelOneToOne, RelManyToMany then true
reverse bool
reverseField string
reverseFieldInfo *fieldInfo
@@ -133,7 +134,7 @@ type fieldInfo struct {
relModelInfo *modelInfo
digits int
decimals int
- isFielder bool
+ isFielder bool // implement Fielder interface
onDelete string
}
@@ -142,7 +143,7 @@ func newFieldInfo(mi *modelInfo, field reflect.Value, sf reflect.StructField, mN
var (
tag string
tagValue string
- initial StrTo
+ initial StrTo // store the default value
fieldType int
attrs map[string]bool
tags map[string]string
@@ -151,6 +152,10 @@ func newFieldInfo(mi *modelInfo, field reflect.Value, sf reflect.StructField, mN
fi = new(fieldInfo)
+ // if field which CanAddr is the follow type
+ // A value is addressable if it is an element of a slice,
+ // an element of an addressable array, a field of an
+ // addressable struct, or the result of dereferencing a pointer.
addrField = field
if field.CanAddr() && field.Kind() != reflect.Ptr {
addrField = field.Addr()
@@ -161,7 +166,7 @@ func newFieldInfo(mi *modelInfo, field reflect.Value, sf reflect.StructField, mN
}
}
- parseStructTag(sf.Tag.Get(defaultStructTagName), &attrs, &tags)
+ attrs, tags = parseStructTag(sf.Tag.Get(defaultStructTagName))
if _, ok := attrs["-"]; ok {
return nil, errSkipField
@@ -187,7 +192,7 @@ checkType:
}
fieldType = f.FieldType()
if fieldType&IsRelField > 0 {
- err = fmt.Errorf("unsupport rel type custom field")
+ err = fmt.Errorf("unsupport type custom field, please refer to https://github.com/astaxie/beego/blob/master/orm/models_fields.go#L24-L42")
goto end
}
default:
@@ -210,7 +215,7 @@ checkType:
}
break checkType
default:
- err = fmt.Errorf("error")
+ err = fmt.Errorf("rel only allow these value: fk, one, m2m")
goto wrongTag
}
}
@@ -230,7 +235,7 @@ checkType:
}
break checkType
default:
- err = fmt.Errorf("error")
+ err = fmt.Errorf("reverse only allow these value: one, many")
goto wrongTag
}
}
@@ -239,8 +244,15 @@ checkType:
if err != nil {
goto end
}
- if fieldType == TypeCharField && tags["type"] == "text" {
- fieldType = TypeTextField
+ if fieldType == TypeCharField {
+ switch tags["type"] {
+ case "text":
+ fieldType = TypeTextField
+ case "json":
+ fieldType = TypeJSONField
+ case "jsonb":
+ fieldType = TypeJsonbField
+ }
}
if fieldType == TypeFloatField && (digits != "" || decimals != "") {
fieldType = TypeDecimalField
@@ -248,8 +260,14 @@ checkType:
if fieldType == TypeDateTimeField && tags["type"] == "date" {
fieldType = TypeDateField
}
+ if fieldType == TypeTimeField && tags["type"] == "time" {
+ fieldType = TypeTimeField
+ }
}
+ // check the rel and reverse type
+ // rel should Ptr
+ // reverse should slice []*struct
switch fieldType {
case RelForeignKey, RelOneToOne, RelReverseOne:
if field.Kind() != reflect.Ptr {
@@ -316,12 +334,12 @@ checkType:
switch onDelete {
case odCascade, odDoNothing:
case odSetDefault:
- if initial.Exist() == false {
+ if !initial.Exist() {
err = errors.New("on_delete: set_default need set field a default value")
goto end
}
case odSetNULL:
- if fi.null == false {
+ if !fi.null {
err = errors.New("on_delete: set_null need set field null")
goto end
}
@@ -339,7 +357,7 @@ checkType:
switch fieldType {
case TypeBooleanField:
- case TypeCharField:
+ case TypeCharField, TypeJSONField, TypeJsonbField:
if size != "" {
v, e := StrTo(size).Int32()
if e != nil {
@@ -349,11 +367,12 @@ checkType:
}
} else {
fi.size = 255
+ fi.toText = true
}
case TypeTextField:
fi.index = false
fi.unique = false
- case TypeDateField, TypeDateTimeField:
+ case TypeTimeField, TypeDateField, TypeDateTimeField:
if attrs["auto_now"] {
fi.autoNow = true
} else if attrs["auto_now_add"] {
@@ -387,14 +406,12 @@ checkType:
if fi.auto || fi.pk {
if fi.auto {
-
switch addrField.Elem().Kind() {
case reflect.Int, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint32, reflect.Uint64:
default:
err = fmt.Errorf("auto primary key only support int, int32, int64, uint, uint32, uint64 but found `%s`", addrField.Elem().Kind())
goto end
}
-
fi.pk = true
}
fi.null = false
@@ -406,8 +423,8 @@ checkType:
fi.index = false
}
- if fi.auto || fi.pk || fi.unique || fieldType == TypeDateField || fieldType == TypeDateTimeField {
- // can not set default
+ // can not set default for these type
+ if fi.auto || fi.pk || fi.unique || fieldType == TypeTimeField || fieldType == TypeDateField || fieldType == TypeDateTimeField {
initial.Clear()
}
diff --git a/src/vendor/github.com/astaxie/beego/orm/models_info_m.go b/src/vendor/github.com/astaxie/beego/orm/models_info_m.go
index bbb82444d..4a3a37f94 100644
--- a/src/vendor/github.com/astaxie/beego/orm/models_info_m.go
+++ b/src/vendor/github.com/astaxie/beego/orm/models_info_m.go
@@ -29,31 +29,25 @@ type modelInfo struct {
model interface{}
fields *fields
manual bool
- addrField reflect.Value
+ addrField reflect.Value //store the original struct value
uniques []string
isThrough bool
}
// new model info
-func newModelInfo(val reflect.Value) (info *modelInfo) {
-
- info = &modelInfo{}
- info.fields = newFields()
-
+func newModelInfo(val reflect.Value) (mi *modelInfo) {
+ mi = &modelInfo{}
+ mi.fields = newFields()
ind := reflect.Indirect(val)
- typ := ind.Type()
-
- info.addrField = val
-
- info.name = typ.Name()
- info.fullName = getFullName(typ)
-
- addModelFields(info, ind, "", []int{})
-
+ mi.addrField = val
+ mi.name = ind.Type().Name()
+ mi.fullName = getFullName(ind.Type())
+ addModelFields(mi, ind, "", []int{})
return
}
-func addModelFields(info *modelInfo, ind reflect.Value, mName string, index []int) {
+// index: FieldByIndex returns the nested field corresponding to index
+func addModelFields(mi *modelInfo, ind reflect.Value, mName string, index []int) {
var (
err error
fi *fieldInfo
@@ -63,43 +57,39 @@ func addModelFields(info *modelInfo, ind reflect.Value, mName string, index []in
for i := 0; i < ind.NumField(); i++ {
field := ind.Field(i)
sf = ind.Type().Field(i)
+ // if the field is unexported skip
if sf.PkgPath != "" {
continue
}
// add anonymous struct fields
if sf.Anonymous {
- addModelFields(info, field, mName+"."+sf.Name, append(index, i))
+ addModelFields(mi, field, mName+"."+sf.Name, append(index, i))
continue
}
- fi, err = newFieldInfo(info, field, sf, mName)
-
- if err != nil {
- if err == errSkipField {
- err = nil
- continue
- }
+ fi, err = newFieldInfo(mi, field, sf, mName)
+ if err == errSkipField {
+ err = nil
+ continue
+ } else if err != nil {
break
}
-
- added := info.fields.Add(fi)
- if added == false {
+ //record current field index
+ fi.fieldIndex = append(index, i)
+ fi.mi = mi
+ fi.inModel = true
+ if !mi.fields.Add(fi) {
err = fmt.Errorf("duplicate column name: %s", fi.column)
break
}
-
if fi.pk {
- if info.fields.pk != nil {
+ if mi.fields.pk != nil {
err = fmt.Errorf("one model must have one pk field only")
break
} else {
- info.fields.pk = fi
+ mi.fields.pk = fi
}
}
-
- fi.fieldIndex = append(index, i)
- fi.mi = info
- fi.inModel = true
}
if err != nil {
@@ -110,23 +100,23 @@ func addModelFields(info *modelInfo, ind reflect.Value, mName string, index []in
// combine related model info to new model info.
// prepare for relation models query.
-func newM2MModelInfo(m1, m2 *modelInfo) (info *modelInfo) {
- info = new(modelInfo)
- info.fields = newFields()
- info.table = m1.table + "_" + m2.table + "s"
- info.name = camelString(info.table)
- info.fullName = m1.pkg + "." + info.name
+func newM2MModelInfo(m1, m2 *modelInfo) (mi *modelInfo) {
+ mi = new(modelInfo)
+ mi.fields = newFields()
+ mi.table = m1.table + "_" + m2.table + "s"
+ mi.name = camelString(mi.table)
+ mi.fullName = m1.pkg + "." + mi.name
- fa := new(fieldInfo)
- f1 := new(fieldInfo)
- f2 := new(fieldInfo)
+ fa := new(fieldInfo) // pk
+ f1 := new(fieldInfo) // m1 table RelForeignKey
+ f2 := new(fieldInfo) // m2 table RelForeignKey
fa.fieldType = TypeBigIntegerField
fa.auto = true
fa.pk = true
fa.dbcol = true
fa.name = "Id"
fa.column = "id"
- fa.fullName = info.fullName + "." + fa.name
+ fa.fullName = mi.fullName + "." + fa.name
f1.dbcol = true
f2.dbcol = true
@@ -134,8 +124,8 @@ func newM2MModelInfo(m1, m2 *modelInfo) (info *modelInfo) {
f2.fieldType = RelForeignKey
f1.name = camelString(m1.table)
f2.name = camelString(m2.table)
- f1.fullName = info.fullName + "." + f1.name
- f2.fullName = info.fullName + "." + f2.name
+ f1.fullName = mi.fullName + "." + f1.name
+ f2.fullName = mi.fullName + "." + f2.name
f1.column = m1.table + "_id"
f2.column = m2.table + "_id"
f1.rel = true
@@ -144,14 +134,14 @@ func newM2MModelInfo(m1, m2 *modelInfo) (info *modelInfo) {
f2.relTable = m2.table
f1.relModelInfo = m1
f2.relModelInfo = m2
- f1.mi = info
- f2.mi = info
+ f1.mi = mi
+ f2.mi = mi
- info.fields.Add(fa)
- info.fields.Add(f1)
- info.fields.Add(f2)
- info.fields.pk = fa
+ mi.fields.Add(fa)
+ mi.fields.Add(f1)
+ mi.fields.Add(f2)
+ mi.fields.pk = fa
- info.uniques = []string{f1.column, f2.column}
+ mi.uniques = []string{f1.column, f2.column}
return
}
diff --git a/src/vendor/github.com/astaxie/beego/orm/models_test.go b/src/vendor/github.com/astaxie/beego/orm/models_test.go
index ffb16ea02..9843a87de 100644
--- a/src/vendor/github.com/astaxie/beego/orm/models_test.go
+++ b/src/vendor/github.com/astaxie/beego/orm/models_test.go
@@ -78,40 +78,43 @@ func (e *SliceStringField) RawValue() interface{} {
var _ Fielder = new(SliceStringField)
// A json field.
-type JSONField struct {
+type JSONFieldTest struct {
Name string
Data string
}
-func (e *JSONField) String() string {
+func (e *JSONFieldTest) String() string {
data, _ := json.Marshal(e)
return string(data)
}
-func (e *JSONField) FieldType() int {
+func (e *JSONFieldTest) FieldType() int {
return TypeTextField
}
-func (e *JSONField) SetRaw(value interface{}) error {
+func (e *JSONFieldTest) SetRaw(value interface{}) error {
switch d := value.(type) {
case string:
return json.Unmarshal([]byte(d), e)
default:
- return fmt.Errorf(" unknown value `%v`", value)
+ return fmt.Errorf(" unknown value `%v`", value)
}
}
-func (e *JSONField) RawValue() interface{} {
+func (e *JSONFieldTest) RawValue() interface{} {
return e.String()
}
-var _ Fielder = new(JSONField)
+var _ Fielder = new(JSONFieldTest)
type Data struct {
ID int `orm:"column(id)"`
Boolean bool
Char string `orm:"size(50)"`
Text string `orm:"type(text)"`
+ JSON string `orm:"type(json);default({\"name\":\"json\"})"`
+ Jsonb string `orm:"type(jsonb)"`
+ Time time.Time `orm:"type(time)"`
Date time.Time `orm:"type(date)"`
DateTime time.Time `orm:"column(datetime)"`
Byte byte
@@ -136,6 +139,9 @@ type DataNull struct {
Boolean bool `orm:"null"`
Char string `orm:"null;size(50)"`
Text string `orm:"null;type(text)"`
+ JSON string `orm:"type(json);null"`
+ Jsonb string `orm:"type(jsonb);null"`
+ Time time.Time `orm:"null;type(time)"`
Date time.Time `orm:"null;type(date)"`
DateTime time.Time `orm:"null;column(datetime)"`
Byte byte `orm:"null"`
@@ -175,6 +181,9 @@ type DataNull struct {
Float32Ptr *float32 `orm:"null"`
Float64Ptr *float64 `orm:"null"`
DecimalPtr *float64 `orm:"digits(8);decimals(4);null"`
+ TimePtr *time.Time `orm:"null;type(time)"`
+ DatePtr *time.Time `orm:"null;type(date)"`
+ DateTimePtr *time.Time `orm:"null"`
}
type String string
@@ -237,7 +246,7 @@ type User struct {
ShouldSkip string `orm:"-"`
Nums int
Langs SliceStringField `orm:"size(100)"`
- Extra JSONField `orm:"type(text)"`
+ Extra JSONFieldTest `orm:"type(text)"`
unexport bool `orm:"-"`
unexportBool bool
}
@@ -375,6 +384,33 @@ func NewInLine() *InLine {
return new(InLine)
}
+type InLineOneToOne struct {
+ // Common Fields
+ ModelBase
+
+ Note string
+ InLine *InLine `orm:"rel(fk);column(inline)"`
+}
+
+func NewInLineOneToOne() *InLineOneToOne {
+ return new(InLineOneToOne)
+}
+
+type IntegerPk struct {
+ ID int64 `orm:"pk"`
+ Value string
+}
+
+type UintPk struct {
+ ID uint32 `orm:"pk"`
+ Name string
+}
+
+type PtrPk struct {
+ ID *IntegerPk `orm:"pk;rel(one)"`
+ Positive bool
+}
+
var DBARGS = struct {
Driver string
Source string
diff --git a/src/vendor/github.com/astaxie/beego/orm/models_utils.go b/src/vendor/github.com/astaxie/beego/orm/models_utils.go
index ec11d5169..44a0e76a1 100644
--- a/src/vendor/github.com/astaxie/beego/orm/models_utils.go
+++ b/src/vendor/github.com/astaxie/beego/orm/models_utils.go
@@ -22,25 +22,47 @@ import (
"time"
)
+// 1 is attr
+// 2 is tag
+var supportTag = map[string]int{
+ "-": 1,
+ "null": 1,
+ "index": 1,
+ "unique": 1,
+ "pk": 1,
+ "auto": 1,
+ "auto_now": 1,
+ "auto_now_add": 1,
+ "size": 2,
+ "column": 2,
+ "default": 2,
+ "rel": 2,
+ "reverse": 2,
+ "rel_table": 2,
+ "rel_through": 2,
+ "digits": 2,
+ "decimals": 2,
+ "on_delete": 2,
+ "type": 2,
+}
+
// get reflect.Type name with package path.
func getFullName(typ reflect.Type) string {
return typ.PkgPath() + "." + typ.Name()
}
-// get table name. method, or field name. auto snaked.
+// getTableName get struct table name.
+// If the struct implement the TableName, then get the result as tablename
+// else use the struct name which will apply snakeString.
func getTableName(val reflect.Value) string {
- ind := reflect.Indirect(val)
- fun := val.MethodByName("TableName")
- if fun.IsValid() {
+ if fun := val.MethodByName("TableName"); fun.IsValid() {
vals := fun.Call([]reflect.Value{})
- if len(vals) > 0 {
- val := vals[0]
- if val.Kind() == reflect.String {
- return val.String()
- }
+ // has return and the first val is string
+ if len(vals) > 0 && vals[0].Kind() == reflect.String {
+ return vals[0].String()
}
}
- return snakeString(ind.Type().Name())
+ return snakeString(reflect.Indirect(val).Type().Name())
}
// get table engine, mysiam or innodb.
@@ -48,11 +70,8 @@ func getTableEngine(val reflect.Value) string {
fun := val.MethodByName("TableEngine")
if fun.IsValid() {
vals := fun.Call([]reflect.Value{})
- if len(vals) > 0 {
- val := vals[0]
- if val.Kind() == reflect.String {
- return val.String()
- }
+ if len(vals) > 0 && vals[0].Kind() == reflect.String {
+ return vals[0].String()
}
}
return ""
@@ -63,12 +82,9 @@ func getTableIndex(val reflect.Value) [][]string {
fun := val.MethodByName("TableIndex")
if fun.IsValid() {
vals := fun.Call([]reflect.Value{})
- if len(vals) > 0 {
- val := vals[0]
- if val.CanInterface() {
- if d, ok := val.Interface().([][]string); ok {
- return d
- }
+ if len(vals) > 0 && vals[0].CanInterface() {
+ if d, ok := vals[0].Interface().([][]string); ok {
+ return d
}
}
}
@@ -80,12 +96,9 @@ func getTableUnique(val reflect.Value) [][]string {
fun := val.MethodByName("TableUnique")
if fun.IsValid() {
vals := fun.Call([]reflect.Value{})
- if len(vals) > 0 {
- val := vals[0]
- if val.CanInterface() {
- if d, ok := val.Interface().([][]string); ok {
- return d
- }
+ if len(vals) > 0 && vals[0].CanInterface() {
+ if d, ok := vals[0].Interface().([][]string); ok {
+ return d
}
}
}
@@ -137,6 +150,8 @@ func getFieldType(val reflect.Value) (ft int, err error) {
ft = TypeBooleanField
case reflect.TypeOf(new(string)):
ft = TypeCharField
+ case reflect.TypeOf(new(time.Time)):
+ ft = TypeDateTimeField
default:
elm := reflect.Indirect(val)
switch elm.Kind() {
@@ -187,21 +202,25 @@ func getFieldType(val reflect.Value) (ft int, err error) {
}
// parse struct tag string
-func parseStructTag(data string, attrs *map[string]bool, tags *map[string]string) {
- attr := make(map[string]bool)
- tag := make(map[string]string)
+func parseStructTag(data string) (attrs map[string]bool, tags map[string]string) {
+ attrs = make(map[string]bool)
+ tags = make(map[string]string)
for _, v := range strings.Split(data, defaultStructTagDelim) {
+ if v == "" {
+ continue
+ }
v = strings.TrimSpace(v)
- if supportTag[v] == 1 {
- attr[v] = true
+ if t := strings.ToLower(v); supportTag[t] == 1 {
+ attrs[t] = true
} else if i := strings.Index(v, "("); i > 0 && strings.Index(v, ")") == len(v)-1 {
- name := v[:i]
+ name := t[:i]
if supportTag[name] == 2 {
v = v[i+1 : len(v)-1]
- tag[name] = v
+ tags[name] = v
}
+ } else {
+ DebugLog.Println("unsupport orm tag", v)
}
}
- *attrs = attr
- *tags = tag
+ return
}
diff --git a/src/vendor/github.com/astaxie/beego/orm/orm.go b/src/vendor/github.com/astaxie/beego/orm/orm.go
index 0ffb6b869..fcf82590f 100644
--- a/src/vendor/github.com/astaxie/beego/orm/orm.go
+++ b/src/vendor/github.com/astaxie/beego/orm/orm.go
@@ -68,7 +68,7 @@ const (
// Define common vars
var (
Debug = false
- DebugLog = NewLog(os.Stderr)
+ DebugLog = NewLog(os.Stdout)
DefaultRowsLimit = 1000
DefaultRelsDepth = 2
DefaultTimeLoc = time.Local
@@ -104,10 +104,10 @@ func (o *orm) getMiInd(md interface{}, needPtr bool) (mi *modelInfo, ind reflect
panic(fmt.Errorf(" cannot use non-ptr model struct `%s`", getFullName(typ)))
}
name := getFullName(typ)
- if mi, ok := modelCache.getByFN(name); ok {
+ if mi, ok := modelCache.getByFullName(name); ok {
return mi, ind
}
- panic(fmt.Errorf(" table: `%s` not found, maybe not RegisterModel", name))
+ panic(fmt.Errorf(" table: `%s` not found, make sure it was registered with `RegisterModel()`", name))
}
// get field info from model info by given field name
@@ -122,25 +122,36 @@ func (o *orm) getFieldInfo(mi *modelInfo, name string) *fieldInfo {
// read data to model
func (o *orm) Read(md interface{}, cols ...string) error {
mi, ind := o.getMiInd(md, true)
- err := o.alias.DbBaser.Read(o.db, mi, ind, o.alias.TZ, cols)
- if err != nil {
- return err
- }
- return nil
+ return o.alias.DbBaser.Read(o.db, mi, ind, o.alias.TZ, cols, false)
+}
+
+// read data to model, like Read(), but use "SELECT FOR UPDATE" form
+func (o *orm) ReadForUpdate(md interface{}, cols ...string) error {
+ mi, ind := o.getMiInd(md, true)
+ return o.alias.DbBaser.Read(o.db, mi, ind, o.alias.TZ, cols, true)
}
// Try to read a row from the database, or insert one if it doesn't exist
func (o *orm) ReadOrCreate(md interface{}, col1 string, cols ...string) (bool, int64, error) {
cols = append([]string{col1}, cols...)
mi, ind := o.getMiInd(md, true)
- err := o.alias.DbBaser.Read(o.db, mi, ind, o.alias.TZ, cols)
+ err := o.alias.DbBaser.Read(o.db, mi, ind, o.alias.TZ, cols, false)
if err == ErrNoRows {
// Create
id, err := o.Insert(md)
return (err == nil), id, err
}
- return false, ind.FieldByIndex(mi.fields.pk.fieldIndex).Int(), err
+ id, vid := int64(0), ind.FieldByIndex(mi.fields.pk.fieldIndex)
+ if mi.fields.pk.fieldType&IsPositiveIntegerField > 0 {
+ id = int64(vid.Uint())
+ } else if mi.fields.pk.rel {
+ return o.ReadOrCreate(vid.Interface(), mi.fields.pk.relModelInfo.fields.pk.name)
+ } else {
+ id = vid.Int()
+ }
+
+ return false, id, err
}
// insert model data to database
@@ -159,7 +170,7 @@ func (o *orm) Insert(md interface{}) (int64, error) {
// set auto pk field
func (o *orm) setPk(mi *modelInfo, ind reflect.Value, id int64) {
if mi.fields.pk.auto {
- if mi.fields.pk.fieldType&IsPostiveIntegerField > 0 {
+ if mi.fields.pk.fieldType&IsPositiveIntegerField > 0 {
ind.FieldByIndex(mi.fields.pk.fieldIndex).SetUint(uint64(id))
} else {
ind.FieldByIndex(mi.fields.pk.fieldIndex).SetInt(id)
@@ -184,7 +195,7 @@ func (o *orm) InsertMulti(bulk int, mds interface{}) (int64, error) {
if bulk <= 1 {
for i := 0; i < sind.Len(); i++ {
- ind := sind.Index(i)
+ ind := reflect.Indirect(sind.Index(i))
mi, _ := o.getMiInd(ind.Interface(), false)
id, err := o.alias.DbBaser.Insert(o.db, mi, ind, o.alias.TZ)
if err != nil {
@@ -202,21 +213,31 @@ func (o *orm) InsertMulti(bulk int, mds interface{}) (int64, error) {
return cnt, nil
}
+// InsertOrUpdate data to database
+func (o *orm) InsertOrUpdate(md interface{}, colConflitAndArgs ...string) (int64, error) {
+ mi, ind := o.getMiInd(md, true)
+ id, err := o.alias.DbBaser.InsertOrUpdate(o.db, mi, ind, o.alias, colConflitAndArgs...)
+ if err != nil {
+ return id, err
+ }
+
+ o.setPk(mi, ind, id)
+
+ return id, nil
+}
+
// update model to database.
// cols set the columns those want to update.
func (o *orm) Update(md interface{}, cols ...string) (int64, error) {
mi, ind := o.getMiInd(md, true)
- num, err := o.alias.DbBaser.Update(o.db, mi, ind, o.alias.TZ, cols)
- if err != nil {
- return num, err
- }
- return num, nil
+ return o.alias.DbBaser.Update(o.db, mi, ind, o.alias.TZ, cols)
}
// delete model in database
-func (o *orm) Delete(md interface{}) (int64, error) {
+// cols shows the delete conditions values read from. default is pk
+func (o *orm) Delete(md interface{}, cols ...string) (int64, error) {
mi, ind := o.getMiInd(md, true)
- num, err := o.alias.DbBaser.Delete(o.db, mi, ind, o.alias.TZ)
+ num, err := o.alias.DbBaser.Delete(o.db, mi, ind, o.alias.TZ, cols)
if err != nil {
return num, err
}
@@ -328,7 +349,7 @@ func (o *orm) queryRelated(md interface{}, name string) (*modelInfo, *fieldInfo,
fi := o.getFieldInfo(mi, name)
_, _, exist := getExistPk(mi, ind)
- if exist == false {
+ if !exist {
panic(ErrMissPK)
}
@@ -399,7 +420,7 @@ func (o *orm) getRelQs(md interface{}, mi *modelInfo, fi *fieldInfo) *querySet {
// table name can be string or struct.
// e.g. QueryTable("user"), QueryTable(&user{}) or QueryTable((*User)(nil)),
func (o *orm) QueryTable(ptrStructOrTableName interface{}) (qs QuerySeter) {
- name := ""
+ var name string
if table, ok := ptrStructOrTableName.(string); ok {
name = snakeString(table)
if mi, ok := modelCache.get(name); ok {
@@ -407,7 +428,7 @@ func (o *orm) QueryTable(ptrStructOrTableName interface{}) (qs QuerySeter) {
}
} else {
name = getFullName(indirectType(reflect.TypeOf(ptrStructOrTableName)))
- if mi, ok := modelCache.getByFN(name); ok {
+ if mi, ok := modelCache.getByFullName(name); ok {
qs = newQuerySet(o, mi)
}
}
@@ -456,7 +477,7 @@ func (o *orm) Begin() error {
// commit transaction
func (o *orm) Commit() error {
- if o.isTx == false {
+ if !o.isTx {
return ErrTxDone
}
err := o.db.(txEnder).Commit()
@@ -471,7 +492,7 @@ func (o *orm) Commit() error {
// rollback transaction
func (o *orm) Rollback() error {
- if o.isTx == false {
+ if !o.isTx {
return ErrTxDone
}
err := o.db.(txEnder).Rollback()
diff --git a/src/vendor/github.com/astaxie/beego/orm/orm_conds.go b/src/vendor/github.com/astaxie/beego/orm/orm_conds.go
index e56d6fbbd..f6e389ec7 100644
--- a/src/vendor/github.com/astaxie/beego/orm/orm_conds.go
+++ b/src/vendor/github.com/astaxie/beego/orm/orm_conds.go
@@ -75,6 +75,19 @@ func (c *Condition) AndCond(cond *Condition) *Condition {
return c
}
+// AndNotCond combine a AND NOT condition to current condition
+func (c *Condition) AndNotCond(cond *Condition) *Condition {
+ c = c.clone()
+ if c == cond {
+ panic(fmt.Errorf(" cannot use self as sub cond"))
+ }
+
+ if cond != nil {
+ c.params = append(c.params, condValue{cond: cond, isCond: true, isNot: true})
+ }
+ return c
+}
+
// Or add OR expression to condition
func (c Condition) Or(expr string, args ...interface{}) *Condition {
if expr == "" || len(args) == 0 {
@@ -105,6 +118,19 @@ func (c *Condition) OrCond(cond *Condition) *Condition {
return c
}
+// OrNotCond combine a OR NOT condition to current condition
+func (c *Condition) OrNotCond(cond *Condition) *Condition {
+ c = c.clone()
+ if c == cond {
+ panic(fmt.Errorf(" cannot use self as sub cond"))
+ }
+
+ if cond != nil {
+ c.params = append(c.params, condValue{cond: cond, isCond: true, isNot: true, isOr: true})
+ }
+ return c
+}
+
// IsEmpty check the condition arguments are empty or not.
func (c *Condition) IsEmpty() bool {
return len(c.params) == 0
diff --git a/src/vendor/github.com/astaxie/beego/orm/orm_log.go b/src/vendor/github.com/astaxie/beego/orm/orm_log.go
index 712eb219f..26c73f9ee 100644
--- a/src/vendor/github.com/astaxie/beego/orm/orm_log.go
+++ b/src/vendor/github.com/astaxie/beego/orm/orm_log.go
@@ -31,7 +31,7 @@ type Log struct {
// NewLog set io.Writer to create a Logger.
func NewLog(out io.Writer) *Log {
d := new(Log)
- d.Logger = log.New(out, "[ORM]", 1e9)
+ d.Logger = log.New(out, "[ORM]", log.LstdFlags)
return d
}
@@ -42,7 +42,7 @@ func debugLogQueies(alias *alias, operaton, query string, t time.Time, err error
if err != nil {
flag = "FAIL"
}
- con := fmt.Sprintf(" - %s - [Queries/%s] - [%s / %11s / %7.1fms] - [%s]", t.Format(formatDateTime), alias.Name, flag, operaton, elsp, query)
+ con := fmt.Sprintf(" -[Queries/%s] - [%s / %11s / %7.1fms] - [%s]", alias.Name, flag, operaton, elsp, query)
cons := make([]string, 0, len(args))
for _, arg := range args {
cons = append(cons, fmt.Sprintf("%v", arg))
diff --git a/src/vendor/github.com/astaxie/beego/orm/orm_object.go b/src/vendor/github.com/astaxie/beego/orm/orm_object.go
index 8a5d85e28..de3181ce2 100644
--- a/src/vendor/github.com/astaxie/beego/orm/orm_object.go
+++ b/src/vendor/github.com/astaxie/beego/orm/orm_object.go
@@ -50,7 +50,7 @@ func (o *insertSet) Insert(md interface{}) (int64, error) {
}
if id > 0 {
if o.mi.fields.pk.auto {
- if o.mi.fields.pk.fieldType&IsPostiveIntegerField > 0 {
+ if o.mi.fields.pk.fieldType&IsPositiveIntegerField > 0 {
ind.FieldByIndex(o.mi.fields.pk.fieldIndex).SetUint(uint64(id))
} else {
ind.FieldByIndex(o.mi.fields.pk.fieldIndex).SetInt(id)
diff --git a/src/vendor/github.com/astaxie/beego/orm/orm_querym2m.go b/src/vendor/github.com/astaxie/beego/orm/orm_querym2m.go
index b220bda6e..6a270a0d8 100644
--- a/src/vendor/github.com/astaxie/beego/orm/orm_querym2m.go
+++ b/src/vendor/github.com/astaxie/beego/orm/orm_querym2m.go
@@ -72,7 +72,7 @@ func (o *queryM2M) Add(mds ...interface{}) (int64, error) {
}
_, v1, exist := getExistPk(o.mi, o.ind)
- if exist == false {
+ if !exist {
panic(ErrMissPK)
}
@@ -87,7 +87,7 @@ func (o *queryM2M) Add(mds ...interface{}) (int64, error) {
v2 = ind.Interface()
} else {
_, v2, exist = getExistPk(fi.relModelInfo, ind)
- if exist == false {
+ if !exist {
panic(ErrMissPK)
}
}
@@ -104,11 +104,7 @@ func (o *queryM2M) Remove(mds ...interface{}) (int64, error) {
fi := o.fi
qs := o.qs.Filter(fi.reverseFieldInfo.name, o.md)
- nums, err := qs.Filter(fi.reverseFieldInfoTwo.name+ExprSep+"in", mds).Delete()
- if err != nil {
- return nums, err
- }
- return nums, nil
+ return qs.Filter(fi.reverseFieldInfoTwo.name+ExprSep+"in", mds).Delete()
}
// check model is existed in relationship of origin model
diff --git a/src/vendor/github.com/astaxie/beego/orm/orm_queryset.go b/src/vendor/github.com/astaxie/beego/orm/orm_queryset.go
index 802a1fe08..4e33646d6 100644
--- a/src/vendor/github.com/astaxie/beego/orm/orm_queryset.go
+++ b/src/vendor/github.com/astaxie/beego/orm/orm_queryset.go
@@ -153,6 +153,11 @@ func (o querySet) SetCond(cond *Condition) QuerySeter {
return &o
}
+// get condition from QuerySeter
+func (o querySet) GetCond() *Condition {
+ return o.cond
+}
+
// return QuerySeter execution result number
func (o *querySet) Count() (int64, error) {
return o.orm.alias.DbBaser.Count(o.orm.db, o, o.mi, o.cond, o.orm.alias.TZ)
@@ -192,16 +197,18 @@ func (o *querySet) All(container interface{}, cols ...string) (int64, error) {
// query one row data and map to containers.
// cols means the columns when querying.
func (o *querySet) One(container interface{}, cols ...string) error {
+ o.limit = 1
num, err := o.orm.alias.DbBaser.ReadBatch(o.orm.db, o, o.mi, o.cond, container, o.orm.alias.TZ, cols)
if err != nil {
return err
}
- if num > 1 {
- return ErrMultiRows
- }
if num == 0 {
return ErrNoRows
}
+
+ if num > 1 {
+ return ErrMultiRows
+ }
return nil
}
diff --git a/src/vendor/github.com/astaxie/beego/orm/orm_raw.go b/src/vendor/github.com/astaxie/beego/orm/orm_raw.go
index 5f88121cb..c8e741ea0 100644
--- a/src/vendor/github.com/astaxie/beego/orm/orm_raw.go
+++ b/src/vendor/github.com/astaxie/beego/orm/orm_raw.go
@@ -286,7 +286,7 @@ func (o *rawSet) QueryRow(containers ...interface{}) error {
structMode = true
fn := getFullName(typ)
- if mi, ok := modelCache.getByFN(fn); ok {
+ if mi, ok := modelCache.getByFullName(fn); ok {
sMi = mi
}
} else {
@@ -342,19 +342,22 @@ func (o *rawSet) QueryRow(containers ...interface{}) error {
for _, col := range columns {
if fi := sMi.fields.GetByColumn(col); fi != nil {
value := reflect.ValueOf(columnsMp[col]).Elem().Interface()
- o.setFieldValue(ind.FieldByIndex(fi.fieldIndex), value)
+ field := ind.FieldByIndex(fi.fieldIndex)
+ if fi.fieldType&IsRelField > 0 {
+ mf := reflect.New(fi.relModelInfo.addrField.Elem().Type())
+ field.Set(mf)
+ field = mf.Elem().FieldByIndex(fi.relModelInfo.fields.pk.fieldIndex)
+ }
+ o.setFieldValue(field, value)
}
}
} else {
for i := 0; i < ind.NumField(); i++ {
f := ind.Field(i)
fe := ind.Type().Field(i)
-
- var attrs map[string]bool
- var tags map[string]string
- parseStructTag(fe.Tag.Get("orm"), &attrs, &tags)
+ _, tags := parseStructTag(fe.Tag.Get(defaultStructTagName))
var col string
- if col = tags["column"]; len(col) == 0 {
+ if col = tags["column"]; col == "" {
col = snakeString(fe.Name)
}
if v, ok := columnsMp[col]; ok {
@@ -416,7 +419,7 @@ func (o *rawSet) QueryRows(containers ...interface{}) (int64, error) {
structMode = true
fn := getFullName(typ)
- if mi, ok := modelCache.getByFN(fn); ok {
+ if mi, ok := modelCache.getByFullName(fn); ok {
sMi = mi
}
} else {
@@ -480,26 +483,43 @@ func (o *rawSet) QueryRows(containers ...interface{}) (int64, error) {
for _, col := range columns {
if fi := sMi.fields.GetByColumn(col); fi != nil {
value := reflect.ValueOf(columnsMp[col]).Elem().Interface()
- o.setFieldValue(ind.FieldByIndex(fi.fieldIndex), value)
+ field := ind.FieldByIndex(fi.fieldIndex)
+ if fi.fieldType&IsRelField > 0 {
+ mf := reflect.New(fi.relModelInfo.addrField.Elem().Type())
+ field.Set(mf)
+ field = mf.Elem().FieldByIndex(fi.relModelInfo.fields.pk.fieldIndex)
+ }
+ o.setFieldValue(field, value)
}
}
} else {
- for i := 0; i < ind.NumField(); i++ {
- f := ind.Field(i)
- fe := ind.Type().Field(i)
+ // define recursive function
+ var recursiveSetField func(rv reflect.Value)
+ recursiveSetField = func(rv reflect.Value) {
+ for i := 0; i < rv.NumField(); i++ {
+ f := rv.Field(i)
+ fe := rv.Type().Field(i)
- var attrs map[string]bool
- var tags map[string]string
- parseStructTag(fe.Tag.Get("orm"), &attrs, &tags)
- var col string
- if col = tags["column"]; len(col) == 0 {
- col = snakeString(fe.Name)
- }
- if v, ok := columnsMp[col]; ok {
- value := reflect.ValueOf(v).Elem().Interface()
- o.setFieldValue(f, value)
+ // check if the field is a Struct
+ // recursive the Struct type
+ if fe.Type.Kind() == reflect.Struct {
+ recursiveSetField(f)
+ }
+
+ _, tags := parseStructTag(fe.Tag.Get(defaultStructTagName))
+ var col string
+ if col = tags["column"]; col == "" {
+ col = snakeString(fe.Name)
+ }
+ if v, ok := columnsMp[col]; ok {
+ value := reflect.ValueOf(v).Elem().Interface()
+ o.setFieldValue(f, value)
+ }
}
}
+
+ // init call the recursive function
+ recursiveSetField(ind)
}
if eTyps[0].Kind() == reflect.Ptr {
@@ -665,7 +685,7 @@ func (o *rawSet) queryRowsTo(container interface{}, keyCol, valueCol string) (in
ind *reflect.Value
)
- typ := 0
+ var typ int
switch container.(type) {
case *Params:
typ = 1
diff --git a/src/vendor/github.com/astaxie/beego/orm/orm_test.go b/src/vendor/github.com/astaxie/beego/orm/orm_test.go
index ec0f0d3ab..f1f2d85ec 100644
--- a/src/vendor/github.com/astaxie/beego/orm/orm_test.go
+++ b/src/vendor/github.com/astaxie/beego/orm/orm_test.go
@@ -19,6 +19,7 @@ import (
"database/sql"
"fmt"
"io/ioutil"
+ "math"
"os"
"path/filepath"
"reflect"
@@ -33,6 +34,7 @@ var _ = os.PathSeparator
var (
testDate = formatDate + " -0700"
testDateTime = formatDateTime + " -0700"
+ testTime = formatTime + " -0700"
)
type argAny []interface{}
@@ -91,14 +93,14 @@ wrongArg:
}
func AssertIs(a interface{}, args ...interface{}) error {
- if ok, err := ValuesCompare(true, a, args...); ok == false {
+ if ok, err := ValuesCompare(true, a, args...); !ok {
return err
}
return nil
}
func AssertNot(a interface{}, args ...interface{}) error {
- if ok, err := ValuesCompare(false, a, args...); ok == false {
+ if ok, err := ValuesCompare(false, a, args...); !ok {
return err
}
return nil
@@ -133,7 +135,7 @@ func getCaller(skip int) string {
if i := strings.LastIndex(funName, "."); i > -1 {
funName = funName[i+1:]
}
- return fmt.Sprintf("%s:%d: \n%s", fn, line, strings.Join(codes, "\n"))
+ return fmt.Sprintf("%s:%s:%d: \n%s", fn, funName, line, strings.Join(codes, "\n"))
}
func throwFail(t *testing.T, err error, args ...interface{}) {
@@ -188,6 +190,10 @@ func TestSyncDb(t *testing.T) {
RegisterModel(new(Permission))
RegisterModel(new(GroupPermissions))
RegisterModel(new(InLine))
+ RegisterModel(new(InLineOneToOne))
+ RegisterModel(new(IntegerPk))
+ RegisterModel(new(UintPk))
+ RegisterModel(new(PtrPk))
err := RunSyncdb("default", true, Debug)
throwFail(t, err)
@@ -208,6 +214,10 @@ func TestRegisterModels(t *testing.T) {
RegisterModel(new(Permission))
RegisterModel(new(GroupPermissions))
RegisterModel(new(InLine))
+ RegisterModel(new(InLineOneToOne))
+ RegisterModel(new(IntegerPk))
+ RegisterModel(new(UintPk))
+ RegisterModel(new(PtrPk))
BootStrap()
@@ -219,7 +229,7 @@ func TestModelSyntax(t *testing.T) {
user := &User{}
ind := reflect.ValueOf(user).Elem()
fn := getFullName(ind.Type())
- mi, ok := modelCache.getByFN(fn)
+ mi, ok := modelCache.getByFullName(fn)
throwFail(t, AssertIs(ok, true))
mi, ok = modelCache.get("user")
@@ -233,6 +243,9 @@ var DataValues = map[string]interface{}{
"Boolean": true,
"Char": "char",
"Text": "text",
+ "JSON": `{"name":"json"}`,
+ "Jsonb": `{"name": "jsonb"}`,
+ "Time": time.Now(),
"Date": time.Now(),
"DateTime": time.Now(),
"Byte": byte(1<<8 - 1),
@@ -257,10 +270,12 @@ func TestDataTypes(t *testing.T) {
ind := reflect.Indirect(reflect.ValueOf(&d))
for name, value := range DataValues {
+ if name == "JSON" {
+ continue
+ }
e := ind.FieldByName(name)
e.Set(reflect.ValueOf(value))
}
-
id, err := dORM.Insert(&d)
throwFail(t, err)
throwFail(t, AssertIs(id, 1))
@@ -281,6 +296,9 @@ func TestDataTypes(t *testing.T) {
case "DateTime":
vu = vu.(time.Time).In(DefaultTimeLoc).Format(testDateTime)
value = value.(time.Time).In(DefaultTimeLoc).Format(testDateTime)
+ case "Time":
+ vu = vu.(time.Time).In(DefaultTimeLoc).Format(testTime)
+ value = value.(time.Time).In(DefaultTimeLoc).Format(testTime)
}
throwFail(t, AssertIs(vu == value, true), value, vu)
}
@@ -299,10 +317,18 @@ func TestNullDataTypes(t *testing.T) {
throwFail(t, err)
throwFail(t, AssertIs(id, 1))
+ data := `{"ok":1,"data":{"arr":[1,2],"msg":"gopher"}}`
+ d = DataNull{ID: 1, JSON: data}
+ num, err := dORM.Update(&d)
+ throwFail(t, err)
+ throwFail(t, AssertIs(num, 1))
+
d = DataNull{ID: 1}
err = dORM.Read(&d)
throwFail(t, err)
+ throwFail(t, AssertIs(d.JSON, data))
+
throwFail(t, AssertIs(d.NullBool.Valid, false))
throwFail(t, AssertIs(d.NullString.Valid, false))
throwFail(t, AssertIs(d.NullInt64.Valid, false))
@@ -326,6 +352,9 @@ func TestNullDataTypes(t *testing.T) {
throwFail(t, AssertIs(d.Float32Ptr, nil))
throwFail(t, AssertIs(d.Float64Ptr, nil))
throwFail(t, AssertIs(d.DecimalPtr, nil))
+ throwFail(t, AssertIs(d.TimePtr, nil))
+ throwFail(t, AssertIs(d.DatePtr, nil))
+ throwFail(t, AssertIs(d.DateTimePtr, nil))
_, err = dORM.Raw(`INSERT INTO data_null (boolean) VALUES (?)`, nil).Exec()
throwFail(t, err)
@@ -352,6 +381,9 @@ func TestNullDataTypes(t *testing.T) {
float32Ptr := float32(42.0)
float64Ptr := float64(42.0)
decimalPtr := float64(42.0)
+ timePtr := time.Now()
+ datePtr := time.Now()
+ dateTimePtr := time.Now()
d = DataNull{
DateTime: time.Now(),
@@ -377,6 +409,9 @@ func TestNullDataTypes(t *testing.T) {
Float32Ptr: &float32Ptr,
Float64Ptr: &float64Ptr,
DecimalPtr: &decimalPtr,
+ TimePtr: &timePtr,
+ DatePtr: &datePtr,
+ DateTimePtr: &dateTimePtr,
}
id, err = dORM.Insert(&d)
@@ -417,6 +452,9 @@ func TestNullDataTypes(t *testing.T) {
throwFail(t, AssertIs(*d.Float32Ptr, float32Ptr))
throwFail(t, AssertIs(*d.Float64Ptr, float64Ptr))
throwFail(t, AssertIs(*d.DecimalPtr, decimalPtr))
+ throwFail(t, AssertIs((*d.TimePtr).Format(testTime), timePtr.Format(testTime)))
+ throwFail(t, AssertIs((*d.DatePtr).Format(testDate), datePtr.Format(testDate)))
+ throwFail(t, AssertIs((*d.DateTimePtr).Format(testDateTime), dateTimePtr.Format(testDateTime)))
}
func TestDataCustomTypes(t *testing.T) {
@@ -541,6 +579,10 @@ func TestCRUD(t *testing.T) {
err = dORM.Read(&ub)
throwFail(t, err)
throwFail(t, AssertIs(ub.Name, "name"))
+
+ num, err = dORM.Delete(&ub, "name")
+ throwFail(t, err)
+ throwFail(t, AssertIs(num, 1))
}
func TestInsertTestData(t *testing.T) {
@@ -869,6 +911,16 @@ func TestSetCond(t *testing.T) {
num, err = qs.SetCond(cond2).Count()
throwFail(t, err)
throwFail(t, AssertIs(num, 2))
+
+ cond3 := cond.AndNotCond(cond.And("status__in", 1))
+ num, err = qs.SetCond(cond3).Count()
+ throwFail(t, err)
+ throwFail(t, AssertIs(num, 2))
+
+ cond4 := cond.And("user_name", "slene").OrNotCond(cond.And("user_name", "slene"))
+ num, err = qs.SetCond(cond4).Count()
+ throwFail(t, err)
+ throwFail(t, AssertIs(num, 3))
}
func TestLimit(t *testing.T) {
@@ -962,6 +1014,8 @@ func TestAll(t *testing.T) {
var users3 []*User
qs = dORM.QueryTable("user")
num, err = qs.Filter("user_name", "nothing").All(&users3)
+ throwFailNow(t, err)
+ throwFailNow(t, AssertIs(num, 0))
throwFailNow(t, AssertIs(users3 == nil, false))
}
@@ -969,12 +1023,19 @@ func TestOne(t *testing.T) {
var user User
qs := dORM.QueryTable("user")
err := qs.One(&user)
- throwFail(t, AssertIs(err, ErrMultiRows))
+ throwFail(t, err)
user = User{}
err = qs.OrderBy("Id").Limit(1).One(&user)
throwFailNow(t, err)
throwFail(t, AssertIs(user.UserName, "slene"))
+ throwFail(t, AssertNot(err, ErrMultiRows))
+
+ user = User{}
+ err = qs.OrderBy("-Id").Limit(100).One(&user)
+ throwFailNow(t, err)
+ throwFail(t, AssertIs(user.UserName, "nobody"))
+ throwFail(t, AssertNot(err, ErrMultiRows))
err = qs.Filter("user_name", "nothing").One(&user)
throwFail(t, AssertIs(err, ErrNoRows))
@@ -1079,6 +1140,7 @@ func TestRelatedSel(t *testing.T) {
}
err = qs.Filter("user_name", "nobody").RelatedSel("profile").One(&user)
+ throwFail(t, err)
throwFail(t, AssertIs(num, 1))
throwFail(t, AssertIs(user.Profile, nil))
@@ -1187,20 +1249,24 @@ func TestLoadRelated(t *testing.T) {
num, err = dORM.LoadRelated(&user, "Posts", true)
throwFailNow(t, err)
+ throwFailNow(t, AssertIs(num, 2))
throwFailNow(t, AssertIs(len(user.Posts), 2))
throwFailNow(t, AssertIs(user.Posts[0].User.UserName, "astaxie"))
num, err = dORM.LoadRelated(&user, "Posts", true, 1)
throwFailNow(t, err)
+ throwFailNow(t, AssertIs(num, 1))
throwFailNow(t, AssertIs(len(user.Posts), 1))
num, err = dORM.LoadRelated(&user, "Posts", true, 0, 0, "-Id")
throwFailNow(t, err)
+ throwFailNow(t, AssertIs(num, 2))
throwFailNow(t, AssertIs(len(user.Posts), 2))
throwFailNow(t, AssertIs(user.Posts[0].Title, "Formatting"))
num, err = dORM.LoadRelated(&user, "Posts", true, 1, 1, "Id")
throwFailNow(t, err)
+ throwFailNow(t, AssertIs(num, 1))
throwFailNow(t, AssertIs(len(user.Posts), 1))
throwFailNow(t, AssertIs(user.Posts[0].Title, "Formatting"))
@@ -1514,6 +1580,7 @@ func TestRawQueryRow(t *testing.T) {
Boolean bool
Char string
Text string
+ Time time.Time
Date time.Time
DateTime time.Time
Byte byte
@@ -1542,14 +1609,14 @@ func TestRawQueryRow(t *testing.T) {
Q := dDbBaser.TableQuote()
cols := []string{
- "id", "boolean", "char", "text", "date", "datetime", "byte", "rune", "int", "int8", "int16", "int32",
+ "id", "boolean", "char", "text", "time", "date", "datetime", "byte", "rune", "int", "int8", "int16", "int32",
"int64", "uint", "uint8", "uint16", "uint32", "uint64", "float32", "float64", "decimal",
}
sep := fmt.Sprintf("%s, %s", Q, Q)
query := fmt.Sprintf("SELECT %s%s%s FROM data WHERE id = ?", Q, strings.Join(cols, sep), Q)
var id int
values := []interface{}{
- &id, &Boolean, &Char, &Text, &Date, &DateTime, &Byte, &Rune, &Int, &Int8, &Int16, &Int32,
+ &id, &Boolean, &Char, &Text, &Time, &Date, &DateTime, &Byte, &Rune, &Int, &Int8, &Int16, &Int32,
&Int64, &Uint, &Uint8, &Uint16, &Uint32, &Uint64, &Float32, &Float64, &Decimal,
}
err := dORM.Raw(query, 1).QueryRow(values...)
@@ -1560,6 +1627,10 @@ func TestRawQueryRow(t *testing.T) {
switch col {
case "id":
throwFail(t, AssertIs(id, 1))
+ case "time":
+ v = v.(time.Time).In(DefaultTimeLoc)
+ value := dataValues[col].(time.Time).In(DefaultTimeLoc)
+ throwFail(t, AssertIs(v, value, testTime))
case "date":
v = v.(time.Time).In(DefaultTimeLoc)
value := dataValues[col].(time.Time).In(DefaultTimeLoc)
@@ -1590,6 +1661,13 @@ func TestRawQueryRow(t *testing.T) {
throwFail(t, AssertIs(pid, nil))
}
+// user_profile table
+type userProfile struct {
+ User
+ Age int
+ Money float64
+}
+
func TestQueryRows(t *testing.T) {
Q := dDbBaser.TableQuote()
@@ -1607,6 +1685,9 @@ func TestQueryRows(t *testing.T) {
e := ind.FieldByName(name)
vu := e.Interface()
switch name {
+ case "Time":
+ vu = vu.(time.Time).In(DefaultTimeLoc).Format(testTime)
+ value = value.(time.Time).In(DefaultTimeLoc).Format(testTime)
case "Date":
vu = vu.(time.Time).In(DefaultTimeLoc).Format(testDate)
value = value.(time.Time).In(DefaultTimeLoc).Format(testDate)
@@ -1631,6 +1712,9 @@ func TestQueryRows(t *testing.T) {
e := ind.FieldByName(name)
vu := e.Interface()
switch name {
+ case "Time":
+ vu = vu.(time.Time).In(DefaultTimeLoc).Format(testTime)
+ value = value.(time.Time).In(DefaultTimeLoc).Format(testTime)
case "Date":
vu = vu.(time.Time).In(DefaultTimeLoc).Format(testDate)
value = value.(time.Time).In(DefaultTimeLoc).Format(testDate)
@@ -1654,6 +1738,19 @@ func TestQueryRows(t *testing.T) {
throwFailNow(t, AssertIs(usernames[1], "astaxie"))
throwFailNow(t, AssertIs(ids[2], 4))
throwFailNow(t, AssertIs(usernames[2], "nobody"))
+
+ //test query rows by nested struct
+ var l []userProfile
+ query = fmt.Sprintf("SELECT * FROM %suser_profile%s LEFT JOIN %suser%s ON %suser_profile%s.%sid%s = %suser%s.%sid%s", Q, Q, Q, Q, Q, Q, Q, Q, Q, Q, Q, Q)
+ num, err = dORM.Raw(query).QueryRows(&l)
+ throwFailNow(t, err)
+ throwFailNow(t, AssertIs(num, 2))
+ throwFailNow(t, AssertIs(len(l), 2))
+ throwFailNow(t, AssertIs(l[0].UserName, "slene"))
+ throwFailNow(t, AssertIs(l[0].Age, 28))
+ throwFailNow(t, AssertIs(l[1].UserName, "astaxie"))
+ throwFailNow(t, AssertIs(l[1].Age, 30))
+
}
func TestRawValues(t *testing.T) {
@@ -1906,6 +2003,7 @@ func TestReadOrCreate(t *testing.T) {
created, pk, err := dORM.ReadOrCreate(u, "UserName")
throwFail(t, err)
throwFail(t, AssertIs(created, true))
+ throwFail(t, AssertIs(u.ID, pk))
throwFail(t, AssertIs(u.UserName, "Kyle"))
throwFail(t, AssertIs(u.Email, "kylemcc@gmail.com"))
throwFail(t, AssertIs(u.Password, "other_pass"))
@@ -1952,3 +2050,299 @@ func TestInLine(t *testing.T) {
throwFail(t, AssertIs(il.Created.In(DefaultTimeLoc), inline.Created.In(DefaultTimeLoc), testDate))
throwFail(t, AssertIs(il.Updated.In(DefaultTimeLoc), inline.Updated.In(DefaultTimeLoc), testDateTime))
}
+
+func TestInLineOneToOne(t *testing.T) {
+ name := "121"
+ email := "121@go.com"
+ inline := NewInLine()
+ inline.Name = name
+ inline.Email = email
+
+ id, err := dORM.Insert(inline)
+ throwFail(t, err)
+ throwFail(t, AssertIs(id, 2))
+
+ note := "one2one"
+ il121 := NewInLineOneToOne()
+ il121.Note = note
+ il121.InLine = inline
+ _, err = dORM.Insert(il121)
+ throwFail(t, err)
+ throwFail(t, AssertIs(il121.ID, 1))
+
+ il := NewInLineOneToOne()
+ err = dORM.QueryTable(il).Filter("Id", 1).RelatedSel().One(il)
+
+ throwFail(t, err)
+ throwFail(t, AssertIs(il.Note, note))
+ throwFail(t, AssertIs(il.InLine.ID, id))
+ throwFail(t, AssertIs(il.InLine.Name, name))
+ throwFail(t, AssertIs(il.InLine.Email, email))
+
+ rinline := NewInLine()
+ err = dORM.QueryTable(rinline).Filter("InLineOneToOne__Id", 1).One(rinline)
+
+ throwFail(t, err)
+ throwFail(t, AssertIs(rinline.ID, id))
+ throwFail(t, AssertIs(rinline.Name, name))
+ throwFail(t, AssertIs(rinline.Email, email))
+}
+
+func TestIntegerPk(t *testing.T) {
+ its := []IntegerPk{
+ {ID: math.MinInt64, Value: "-"},
+ {ID: 0, Value: "0"},
+ {ID: math.MaxInt64, Value: "+"},
+ }
+
+ num, err := dORM.InsertMulti(len(its), its)
+ throwFail(t, err)
+ throwFail(t, AssertIs(num, len(its)))
+
+ for _, intPk := range its {
+ out := IntegerPk{ID: intPk.ID}
+ err = dORM.Read(&out)
+ throwFail(t, err)
+ throwFail(t, AssertIs(out.Value, intPk.Value))
+ }
+
+ num, err = dORM.InsertMulti(1, []*IntegerPk{{
+ ID: 1, Value: "ok",
+ }})
+ throwFail(t, err)
+ throwFail(t, AssertIs(num, 1))
+}
+
+func TestInsertAuto(t *testing.T) {
+ u := &User{
+ UserName: "autoPre",
+ Email: "autoPre@gmail.com",
+ }
+
+ id, err := dORM.Insert(u)
+ throwFail(t, err)
+
+ id += 100
+ su := &User{
+ ID: int(id),
+ UserName: "auto",
+ Email: "auto@gmail.com",
+ }
+
+ nid, err := dORM.Insert(su)
+ throwFail(t, err)
+ throwFail(t, AssertIs(nid, id))
+
+ users := []User{
+ {ID: int(id + 100), UserName: "auto_100"},
+ {ID: int(id + 110), UserName: "auto_110"},
+ {ID: int(id + 120), UserName: "auto_120"},
+ }
+ num, err := dORM.InsertMulti(100, users)
+ throwFail(t, err)
+ throwFail(t, AssertIs(num, 3))
+
+ u = &User{
+ UserName: "auto_121",
+ }
+
+ nid, err = dORM.Insert(u)
+ throwFail(t, err)
+ throwFail(t, AssertIs(nid, id+120+1))
+}
+
+func TestUintPk(t *testing.T) {
+ name := "go"
+ u := &UintPk{
+ ID: 8,
+ Name: name,
+ }
+
+ created, _, err := dORM.ReadOrCreate(u, "ID")
+ throwFail(t, err)
+ throwFail(t, AssertIs(created, true))
+ throwFail(t, AssertIs(u.Name, name))
+
+ nu := &UintPk{ID: 8}
+ created, pk, err := dORM.ReadOrCreate(nu, "ID")
+ throwFail(t, err)
+ throwFail(t, AssertIs(created, false))
+ throwFail(t, AssertIs(nu.ID, u.ID))
+ throwFail(t, AssertIs(pk, u.ID))
+ throwFail(t, AssertIs(nu.Name, name))
+
+ dORM.Delete(u)
+}
+
+func TestPtrPk(t *testing.T) {
+ parent := &IntegerPk{ID: 10, Value: "10"}
+
+ id, _ := dORM.Insert(parent)
+ if !IsMysql {
+ // MySql does not support last_insert_id in this case: see #2382
+ throwFail(t, AssertIs(id, 10))
+ }
+
+ ptr := PtrPk{ID: parent, Positive: true}
+ num, err := dORM.InsertMulti(2, []PtrPk{ptr})
+ throwFail(t, err)
+ throwFail(t, AssertIs(num, 1))
+ throwFail(t, AssertIs(ptr.ID, parent))
+
+ nptr := &PtrPk{ID: parent}
+ created, pk, err := dORM.ReadOrCreate(nptr, "ID")
+ throwFail(t, err)
+ throwFail(t, AssertIs(created, false))
+ throwFail(t, AssertIs(pk, 10))
+ throwFail(t, AssertIs(nptr.ID, parent))
+ throwFail(t, AssertIs(nptr.Positive, true))
+
+ nptr = &PtrPk{Positive: true}
+ created, pk, err = dORM.ReadOrCreate(nptr, "Positive")
+ throwFail(t, err)
+ throwFail(t, AssertIs(created, false))
+ throwFail(t, AssertIs(pk, 10))
+ throwFail(t, AssertIs(nptr.ID, parent))
+
+ nptr.Positive = false
+ num, err = dORM.Update(nptr)
+ throwFail(t, err)
+ throwFail(t, AssertIs(num, 1))
+ throwFail(t, AssertIs(nptr.ID, parent))
+ throwFail(t, AssertIs(nptr.Positive, false))
+
+ num, err = dORM.Delete(nptr)
+ throwFail(t, err)
+ throwFail(t, AssertIs(num, 1))
+}
+
+func TestSnake(t *testing.T) {
+ cases := map[string]string{
+ "i": "i",
+ "I": "i",
+ "iD": "i_d",
+ "ID": "i_d",
+ "NO": "n_o",
+ "NOO": "n_o_o",
+ "NOOooOOoo": "n_o_ooo_o_ooo",
+ "OrderNO": "order_n_o",
+ "tagName": "tag_name",
+ "tag_Name": "tag__name",
+ "tag_name": "tag_name",
+ "_tag_name": "_tag_name",
+ "tag_666name": "tag_666name",
+ "tag_666Name": "tag_666_name",
+ }
+ for name, want := range cases {
+ got := snakeString(name)
+ throwFail(t, AssertIs(got, want))
+ }
+}
+
+func TestIgnoreCaseTag(t *testing.T) {
+ type testTagModel struct {
+ ID int `orm:"pk"`
+ NOO string `orm:"column(n)"`
+ Name01 string `orm:"NULL"`
+ Name02 string `orm:"COLUMN(Name)"`
+ Name03 string `orm:"Column(name)"`
+ }
+ modelCache.clean()
+ RegisterModel(&testTagModel{})
+ info, ok := modelCache.get("test_tag_model")
+ throwFail(t, AssertIs(ok, true))
+ throwFail(t, AssertNot(info, nil))
+ if t == nil {
+ return
+ }
+ throwFail(t, AssertIs(info.fields.GetByName("NOO").column, "n"))
+ throwFail(t, AssertIs(info.fields.GetByName("Name01").null, true))
+ throwFail(t, AssertIs(info.fields.GetByName("Name02").column, "Name"))
+ throwFail(t, AssertIs(info.fields.GetByName("Name03").column, "name"))
+}
+func TestInsertOrUpdate(t *testing.T) {
+ RegisterModel(new(User))
+ user := User{UserName: "unique_username133", Status: 1, Password: "o"}
+ user1 := User{UserName: "unique_username133", Status: 2, Password: "o"}
+ user2 := User{UserName: "unique_username133", Status: 3, Password: "oo"}
+ dORM.Insert(&user)
+ test := User{UserName: "unique_username133"}
+ fmt.Println(dORM.Driver().Name())
+ if dORM.Driver().Name() == "sqlite3" {
+ fmt.Println("sqlite3 is nonsupport")
+ return
+ }
+ //test1
+ _, err := dORM.InsertOrUpdate(&user1, "user_name")
+ if err != nil {
+ fmt.Println(err)
+ if err.Error() == "postgres version must 9.5 or higher" || err.Error() == "`sqlite3` nonsupport InsertOrUpdate in beego" {
+ } else {
+ throwFailNow(t, err)
+ }
+ } else {
+ dORM.Read(&test, "user_name")
+ throwFailNow(t, AssertIs(user1.Status, test.Status))
+ }
+ //test2
+ _, err = dORM.InsertOrUpdate(&user2, "user_name")
+ if err != nil {
+ fmt.Println(err)
+ if err.Error() == "postgres version must 9.5 or higher" || err.Error() == "`sqlite3` nonsupport InsertOrUpdate in beego" {
+ } else {
+ throwFailNow(t, err)
+ }
+ } else {
+ dORM.Read(&test, "user_name")
+ throwFailNow(t, AssertIs(user2.Status, test.Status))
+ throwFailNow(t, AssertIs(user2.Password, strings.TrimSpace(test.Password)))
+ }
+ //test3 +
+ _, err = dORM.InsertOrUpdate(&user2, "user_name", "status=status+1")
+ if err != nil {
+ fmt.Println(err)
+ if err.Error() == "postgres version must 9.5 or higher" || err.Error() == "`sqlite3` nonsupport InsertOrUpdate in beego" {
+ } else {
+ throwFailNow(t, err)
+ }
+ } else {
+ dORM.Read(&test, "user_name")
+ throwFailNow(t, AssertIs(user2.Status+1, test.Status))
+ }
+ //test4 -
+ _, err = dORM.InsertOrUpdate(&user2, "user_name", "status=status-1")
+ if err != nil {
+ fmt.Println(err)
+ if err.Error() == "postgres version must 9.5 or higher" || err.Error() == "`sqlite3` nonsupport InsertOrUpdate in beego" {
+ } else {
+ throwFailNow(t, err)
+ }
+ } else {
+ dORM.Read(&test, "user_name")
+ throwFailNow(t, AssertIs((user2.Status+1)-1, test.Status))
+ }
+ //test5 *
+ _, err = dORM.InsertOrUpdate(&user2, "user_name", "status=status*3")
+ if err != nil {
+ fmt.Println(err)
+ if err.Error() == "postgres version must 9.5 or higher" || err.Error() == "`sqlite3` nonsupport InsertOrUpdate in beego" {
+ } else {
+ throwFailNow(t, err)
+ }
+ } else {
+ dORM.Read(&test, "user_name")
+ throwFailNow(t, AssertIs(((user2.Status+1)-1)*3, test.Status))
+ }
+ //test6 /
+ _, err = dORM.InsertOrUpdate(&user2, "user_name", "Status=Status/3")
+ if err != nil {
+ fmt.Println(err)
+ if err.Error() == "postgres version must 9.5 or higher" || err.Error() == "`sqlite3` nonsupport InsertOrUpdate in beego" {
+ } else {
+ throwFailNow(t, err)
+ }
+ } else {
+ dORM.Read(&test, "user_name")
+ throwFailNow(t, AssertIs((((user2.Status+1)-1)*3)/3, test.Status))
+ }
+}
diff --git a/src/vendor/github.com/astaxie/beego/orm/qb.go b/src/vendor/github.com/astaxie/beego/orm/qb.go
index 9f778916d..e0655a178 100644
--- a/src/vendor/github.com/astaxie/beego/orm/qb.go
+++ b/src/vendor/github.com/astaxie/beego/orm/qb.go
@@ -19,6 +19,7 @@ import "errors"
// QueryBuilder is the Query builder interface
type QueryBuilder interface {
Select(fields ...string) QueryBuilder
+ ForUpdate() QueryBuilder
From(tables ...string) QueryBuilder
InnerJoin(table string) QueryBuilder
LeftJoin(table string) QueryBuilder
diff --git a/src/vendor/github.com/astaxie/beego/orm/qb_mysql.go b/src/vendor/github.com/astaxie/beego/orm/qb_mysql.go
index 886bc50e3..23bdc9eef 100644
--- a/src/vendor/github.com/astaxie/beego/orm/qb_mysql.go
+++ b/src/vendor/github.com/astaxie/beego/orm/qb_mysql.go
@@ -34,6 +34,12 @@ func (qb *MySQLQueryBuilder) Select(fields ...string) QueryBuilder {
return qb
}
+// ForUpdate add the FOR UPDATE clause
+func (qb *MySQLQueryBuilder) ForUpdate() QueryBuilder {
+ qb.Tokens = append(qb.Tokens, "FOR UPDATE")
+ return qb
+}
+
// From join the tables
func (qb *MySQLQueryBuilder) From(tables ...string) QueryBuilder {
qb.Tokens = append(qb.Tokens, "FROM", strings.Join(tables, CommaSpace))
diff --git a/src/vendor/github.com/astaxie/beego/orm/qb_tidb.go b/src/vendor/github.com/astaxie/beego/orm/qb_tidb.go
index c504049eb..87b3ae84f 100644
--- a/src/vendor/github.com/astaxie/beego/orm/qb_tidb.go
+++ b/src/vendor/github.com/astaxie/beego/orm/qb_tidb.go
@@ -31,6 +31,12 @@ func (qb *TiDBQueryBuilder) Select(fields ...string) QueryBuilder {
return qb
}
+// ForUpdate add the FOR UPDATE clause
+func (qb *TiDBQueryBuilder) ForUpdate() QueryBuilder {
+ qb.Tokens = append(qb.Tokens, "FOR UPDATE")
+ return qb
+}
+
// From join the tables
func (qb *TiDBQueryBuilder) From(tables ...string) QueryBuilder {
qb.Tokens = append(qb.Tokens, "FROM", strings.Join(tables, CommaSpace))
diff --git a/src/vendor/github.com/astaxie/beego/orm/types.go b/src/vendor/github.com/astaxie/beego/orm/types.go
index 41933dd15..3e6a9e87d 100644
--- a/src/vendor/github.com/astaxie/beego/orm/types.go
+++ b/src/vendor/github.com/astaxie/beego/orm/types.go
@@ -45,6 +45,9 @@ type Ormer interface {
// u = &User{UserName: "astaxie", Password: "pass"}
// err = Ormer.Read(u, "UserName")
Read(md interface{}, cols ...string) error
+ // Like Read(), but with "FOR UPDATE" clause, useful in transaction.
+ // Some databases are not support this feature.
+ ReadForUpdate(md interface{}, cols ...string) error
// Try to read a row from the database, or insert one if it doesn't exist
ReadOrCreate(md interface{}, col1 string, cols ...string) (bool, int64, error)
// insert model data to database
@@ -53,6 +56,11 @@ type Ormer interface {
// id, err = Ormer.Insert(user)
// user must a pointer and Insert will set user's pk field
Insert(interface{}) (int64, error)
+ // mysql:InsertOrUpdate(model) or InsertOrUpdate(model,"colu=colu+value")
+ // if colu type is integer : can use(+-*/), string : convert(colu,"value")
+ // postgres: InsertOrUpdate(model,"conflictColumnName") or InsertOrUpdate(model,"conflictColumnName","colu=colu+value")
+ // if colu type is integer : can use(+-*/), string : colu || "value"
+ InsertOrUpdate(md interface{}, colConflitAndArgs ...string) (int64, error)
// insert some models to database
InsertMulti(bulk int, mds interface{}) (int64, error)
// update model to database.
@@ -66,7 +74,7 @@ type Ormer interface {
// num, err = Ormer.Update(&user, "Langs", "Extra")
Update(md interface{}, cols ...string) (int64, error)
// delete model in database
- Delete(md interface{}) (int64, error)
+ Delete(md interface{}, cols ...string) (int64, error)
// load related models to md model.
// args are limit, offset int and order string.
//
@@ -137,6 +145,16 @@ type QuerySeter interface {
// //sql-> WHERE T0.`profile_id` IS NOT NULL AND NOT T0.`Status` IN (?) OR T1.`age` > 2000
// num, err := qs.SetCond(cond1).Count()
SetCond(*Condition) QuerySeter
+ // get condition from QuerySeter.
+ // sql's where condition
+ // cond := orm.NewCondition()
+ // cond = cond.And("profile__isnull", false).AndNot("status__in", 1)
+ // qs = qs.SetCond(cond)
+ // cond = qs.GetCond()
+ // cond := cond.Or("profile__age__gt", 2000)
+ // //sql-> WHERE T0.`profile_id` IS NOT NULL AND NOT T0.`Status` IN (?) OR T1.`age` > 2000
+ // num, err := qs.SetCond(cond).Count()
+ GetCond() *Condition
// add LIMIT value.
// args[0] means offset, e.g. LIMIT num,offset.
// if Limit <= 0 then Limit will be set to default limit ,eg 1000
@@ -389,13 +407,14 @@ type txEnder interface {
// base database struct
type dbBaser interface {
- Read(dbQuerier, *modelInfo, reflect.Value, *time.Location, []string) error
+ Read(dbQuerier, *modelInfo, reflect.Value, *time.Location, []string, bool) error
Insert(dbQuerier, *modelInfo, reflect.Value, *time.Location) (int64, error)
+ InsertOrUpdate(dbQuerier, *modelInfo, reflect.Value, *alias, ...string) (int64, error)
InsertMulti(dbQuerier, *modelInfo, reflect.Value, int, *time.Location) (int64, error)
InsertValue(dbQuerier, *modelInfo, bool, []string, []interface{}) (int64, error)
InsertStmt(stmtQuerier, *modelInfo, reflect.Value, *time.Location) (int64, error)
Update(dbQuerier, *modelInfo, reflect.Value, *time.Location, []string) (int64, error)
- Delete(dbQuerier, *modelInfo, reflect.Value, *time.Location) (int64, error)
+ Delete(dbQuerier, *modelInfo, reflect.Value, *time.Location, []string) (int64, error)
ReadBatch(dbQuerier, *querySet, *modelInfo, *Condition, interface{}, *time.Location, []string) (int64, error)
SupportUpdateJoin() bool
UpdateBatch(dbQuerier, *querySet, *modelInfo, *Condition, Params, *time.Location) (int64, error)
@@ -420,4 +439,5 @@ type dbBaser interface {
ShowColumnsQuery(string) string
IndexExists(dbQuerier, string, string) bool
collectFieldValue(*modelInfo, *fieldInfo, reflect.Value, bool, *time.Location) (interface{}, error)
+ setval(dbQuerier, *modelInfo, []string) error
}
diff --git a/src/vendor/github.com/astaxie/beego/orm/utils.go b/src/vendor/github.com/astaxie/beego/orm/utils.go
index 99437c7b0..669d47344 100644
--- a/src/vendor/github.com/astaxie/beego/orm/utils.go
+++ b/src/vendor/github.com/astaxie/beego/orm/utils.go
@@ -16,6 +16,7 @@ package orm
import (
"fmt"
+ "math/big"
"reflect"
"strconv"
"strings"
@@ -87,7 +88,15 @@ func (f StrTo) Int32() (int32, error) {
// Int64 string to int64
func (f StrTo) Int64() (int64, error) {
v, err := strconv.ParseInt(f.String(), 10, 64)
- return int64(v), err
+ if err != nil {
+ i := new(big.Int)
+		ni, ok := i.SetString(f.String(), 10) // retry as big.Int (base 10) for values outside the int64 range
+ if !ok {
+ return v, err
+ }
+ return ni.Int64(), nil
+ }
+ return v, err
}
// Uint string to uint
@@ -117,7 +126,15 @@ func (f StrTo) Uint32() (uint32, error) {
// Uint64 string to uint64
func (f StrTo) Uint64() (uint64, error) {
v, err := strconv.ParseUint(f.String(), 10, 64)
- return uint64(v), err
+ if err != nil {
+ i := new(big.Int)
+ ni, ok := i.SetString(f.String(), 10)
+ if !ok {
+ return v, err
+ }
+ return ni.Uint64(), nil
+ }
+ return v, err
}
// String string to string
@@ -181,7 +198,7 @@ func ToInt64(value interface{}) (d int64) {
return
}
-// snake string, XxYy to xx_yy
+// snake string, XxYy to xx_yy, XxYY to xx_yy
func snakeString(s string) string {
data := make([]byte, 0, len(s)*2)
j := false
@@ -202,22 +219,17 @@ func snakeString(s string) string {
// camel string, xx_yy to XxYy
func camelString(s string) string {
data := make([]byte, 0, len(s))
- j := false
- k := false
- num := len(s) - 1
+ flag, num := true, len(s)-1
for i := 0; i <= num; i++ {
d := s[i]
- if k == false && d >= 'A' && d <= 'Z' {
- k = true
- }
- if d >= 'a' && d <= 'z' && (j || k == false) {
- d = d - 32
- j = false
- k = true
- }
- if k && d == '_' && num > i && s[i+1] >= 'a' && s[i+1] <= 'z' {
- j = true
+ if d == '_' {
+ flag = true
continue
+ } else if flag {
+ if d >= 'a' && d <= 'z' {
+ d = d - 32
+ }
+ flag = false
}
data = append(data, d)
}
diff --git a/src/vendor/github.com/astaxie/beego/docs.go b/src/vendor/github.com/astaxie/beego/orm/utils_test.go
similarity index 56%
rename from src/vendor/github.com/astaxie/beego/docs.go
rename to src/vendor/github.com/astaxie/beego/orm/utils_test.go
index 725328760..8c7c50086 100644
--- a/src/vendor/github.com/astaxie/beego/docs.go
+++ b/src/vendor/github.com/astaxie/beego/orm/utils_test.go
@@ -12,28 +12,25 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package beego
+package orm
import (
- "github.com/astaxie/beego/context"
+ "testing"
)
-// GlobalDocAPI store the swagger api documents
-var GlobalDocAPI = make(map[string]interface{})
+func TestCamelString(t *testing.T) {
+ snake := []string{"pic_url", "hello_world_", "hello__World", "_HelLO_Word", "pic_url_1", "pic_url__1"}
+ camel := []string{"PicUrl", "HelloWorld", "HelloWorld", "HelLOWord", "PicUrl1", "PicUrl1"}
-func serverDocs(ctx *context.Context) {
- var obj interface{}
- if splat := ctx.Input.Param(":splat"); splat == "" {
- obj = GlobalDocAPI["Root"]
- } else {
- if v, ok := GlobalDocAPI[splat]; ok {
- obj = v
+ answer := make(map[string]string)
+ for i, v := range snake {
+ answer[v] = camel[i]
+ }
+
+ for _, v := range snake {
+ res := camelString(v)
+ if res != answer[v] {
+ t.Error("Unit Test Fail:", v, res, answer[v])
}
}
- if obj != nil {
- ctx.Output.Header("Access-Control-Allow-Origin", "*")
- ctx.Output.JSON(obj, false, false)
- return
- }
- ctx.Output.SetStatus(404)
}
diff --git a/src/vendor/github.com/astaxie/beego/parser.go b/src/vendor/github.com/astaxie/beego/parser.go
index 46d023201..f787fd5cd 100644
--- a/src/vendor/github.com/astaxie/beego/parser.go
+++ b/src/vendor/github.com/astaxie/beego/parser.go
@@ -23,10 +23,15 @@ import (
"go/token"
"io/ioutil"
"os"
- "path"
+ "path/filepath"
+ "regexp"
"sort"
+ "strconv"
"strings"
+ "unicode"
+ "github.com/astaxie/beego/context/param"
+ "github.com/astaxie/beego/logs"
"github.com/astaxie/beego/utils"
)
@@ -34,6 +39,7 @@ var globalRouterTemplate = `package routers
import (
"github.com/astaxie/beego"
+ "github.com/astaxie/beego/context/param"
)
func init() {
@@ -48,17 +54,18 @@ var (
genInfoList map[string][]ControllerComments
)
-const coomentPrefix = "commentsRouter_"
+const commentPrefix = "commentsRouter_"
func init() {
pkgLastupdate = make(map[string]int64)
}
func parserPkg(pkgRealpath, pkgpath string) error {
- rep := strings.NewReplacer("/", "_", ".", "_")
- commentFilename = coomentPrefix + rep.Replace(pkgpath) + ".go"
+ rep := strings.NewReplacer("\\", "_", "/", "_", ".", "_")
+ commentFilename, _ = filepath.Rel(AppPath, pkgRealpath)
+ commentFilename = commentPrefix + rep.Replace(commentFilename) + ".go"
if !compareFile(pkgRealpath) {
- Info(pkgRealpath + " no changed")
+ logs.Info(pkgRealpath + " no changed")
return nil
}
genInfoList = make(map[string][]ControllerComments)
@@ -79,59 +86,185 @@ func parserPkg(pkgRealpath, pkgpath string) error {
if specDecl.Recv != nil {
exp, ok := specDecl.Recv.List[0].Type.(*ast.StarExpr) // Check that the type is correct first beforing throwing to parser
if ok {
- parserComments(specDecl.Doc, specDecl.Name.String(), fmt.Sprint(exp.X), pkgpath)
+ parserComments(specDecl, fmt.Sprint(exp.X), pkgpath)
}
}
}
}
}
}
- genRouterCode()
+ genRouterCode(pkgRealpath)
savetoFile(pkgRealpath)
return nil
}
-func parserComments(comments *ast.CommentGroup, funcName, controllerName, pkgpath string) error {
- if comments != nil && comments.List != nil {
- for _, c := range comments.List {
- t := strings.TrimSpace(strings.TrimLeft(c.Text, "//"))
- if strings.HasPrefix(t, "@router") {
- elements := strings.TrimLeft(t, "@router ")
- e1 := strings.SplitN(elements, " ", 2)
- if len(e1) < 1 {
- return errors.New("you should has router infomation")
- }
- key := pkgpath + ":" + controllerName
- cc := ControllerComments{}
- cc.Method = funcName
- cc.Router = e1[0]
- if len(e1) == 2 && e1[1] != "" {
- e1 = strings.SplitN(e1[1], " ", 2)
- if len(e1) >= 1 {
- cc.AllowHTTPMethods = strings.Split(strings.Trim(e1[0], "[]"), ",")
- } else {
- cc.AllowHTTPMethods = append(cc.AllowHTTPMethods, "get")
- }
- } else {
- cc.AllowHTTPMethods = append(cc.AllowHTTPMethods, "get")
- }
- if len(e1) == 2 && e1[1] != "" {
- keyval := strings.Split(strings.Trim(e1[1], "[]"), " ")
- for _, kv := range keyval {
- kk := strings.Split(kv, ":")
- cc.Params = append(cc.Params, map[string]string{strings.Join(kk[:len(kk)-1], ":"): kk[len(kk)-1]})
- }
- }
- genInfoList[key] = append(genInfoList[key], cc)
- }
+type parsedComment struct {
+ routerPath string
+ methods []string
+ params map[string]parsedParam
+}
+
+type parsedParam struct {
+ name string
+ datatype string
+ location string
+ defValue string
+ required bool
+}
+
+func parserComments(f *ast.FuncDecl, controllerName, pkgpath string) error {
+ if f.Doc != nil {
+ parsedComment, err := parseComment(f.Doc.List)
+ if err != nil {
+ return err
}
+ if parsedComment.routerPath != "" {
+ key := pkgpath + ":" + controllerName
+ cc := ControllerComments{}
+ cc.Method = f.Name.String()
+ cc.Router = parsedComment.routerPath
+ cc.AllowHTTPMethods = parsedComment.methods
+ cc.MethodParams = buildMethodParams(f.Type.Params.List, parsedComment)
+ genInfoList[key] = append(genInfoList[key], cc)
+ }
+
}
return nil
}
-func genRouterCode() {
- os.Mkdir(path.Join(AppPath, "routers"), 0755)
- Info("generate router from comments")
+func buildMethodParams(funcParams []*ast.Field, pc *parsedComment) []*param.MethodParam {
+ result := make([]*param.MethodParam, 0, len(funcParams))
+ for _, fparam := range funcParams {
+ for _, pName := range fparam.Names {
+ methodParam := buildMethodParam(fparam, pName.Name, pc)
+ result = append(result, methodParam)
+ }
+ }
+ return result
+}
+
+func buildMethodParam(fparam *ast.Field, name string, pc *parsedComment) *param.MethodParam {
+ options := []param.MethodParamOption{}
+ if cparam, ok := pc.params[name]; ok {
+ //Build param from comment info
+ name = cparam.name
+ if cparam.required {
+ options = append(options, param.IsRequired)
+ }
+ switch cparam.location {
+ case "body":
+ options = append(options, param.InBody)
+ case "header":
+ options = append(options, param.InHeader)
+ case "path":
+ options = append(options, param.InPath)
+ }
+ if cparam.defValue != "" {
+ options = append(options, param.Default(cparam.defValue))
+ }
+ } else {
+ if paramInPath(name, pc.routerPath) {
+ options = append(options, param.InPath)
+ }
+ }
+ return param.New(name, options...)
+}
+
+func paramInPath(name, route string) bool {
+ return strings.HasSuffix(route, ":"+name) ||
+ strings.Contains(route, ":"+name+"/")
+}
+
+var routeRegex = regexp.MustCompile(`@router\s+(\S+)(?:\s+\[(\S+)\])?`)
+
+func parseComment(lines []*ast.Comment) (pc *parsedComment, err error) {
+ pc = &parsedComment{}
+ for _, c := range lines {
+ t := strings.TrimSpace(strings.TrimLeft(c.Text, "//"))
+ if strings.HasPrefix(t, "@router") {
+ matches := routeRegex.FindStringSubmatch(t)
+ if len(matches) == 3 {
+ pc.routerPath = matches[1]
+ methods := matches[2]
+ if methods == "" {
+ pc.methods = []string{"get"}
+ //pc.hasGet = true
+ } else {
+ pc.methods = strings.Split(methods, ",")
+ //pc.hasGet = strings.Contains(methods, "get")
+ }
+ } else {
+ return nil, errors.New("Router information is missing")
+ }
+ } else if strings.HasPrefix(t, "@Param") {
+ pv := getparams(strings.TrimSpace(strings.TrimLeft(t, "@Param")))
+ if len(pv) < 4 {
+ logs.Error("Invalid @Param format. Needs at least 4 parameters")
+ }
+ p := parsedParam{}
+ names := strings.SplitN(pv[0], "=>", 2)
+ p.name = names[0]
+ funcParamName := p.name
+ if len(names) > 1 {
+ funcParamName = names[1]
+ }
+ p.location = pv[1]
+ p.datatype = pv[2]
+ switch len(pv) {
+ case 5:
+ p.required, _ = strconv.ParseBool(pv[3])
+ case 6:
+ p.defValue = pv[3]
+ p.required, _ = strconv.ParseBool(pv[4])
+ }
+ if pc.params == nil {
+ pc.params = map[string]parsedParam{}
+ }
+ pc.params[funcParamName] = p
+ }
+ }
+ return
+}
+
+// direct copy from bee\g_docs.go
+// analysis params, return []string
+// @Param query form string true "The email for login"
+// [query form string true "The email for login"]
+func getparams(str string) []string {
+ var s []rune
+ var j int
+ var start bool
+ var r []string
+ var quoted int8
+ for _, c := range str {
+ if unicode.IsSpace(c) && quoted == 0 {
+ if !start {
+ continue
+ } else {
+ start = false
+ j++
+ r = append(r, string(s))
+ s = make([]rune, 0)
+ continue
+ }
+ }
+
+ start = true
+ if c == '"' {
+ quoted ^= 1
+ continue
+ }
+ s = append(s, c)
+ }
+ if len(s) > 0 {
+ r = append(r, string(s))
+ }
+ return r
+}
+
+func genRouterCode(pkgRealpath string) {
+ os.Mkdir(getRouterDir(pkgRealpath), 0755)
+ logs.Info("generate router from comments")
var (
globalinfo string
sortKey []string
@@ -142,6 +275,7 @@ func genRouterCode() {
sort.Strings(sortKey)
for _, k := range sortKey {
cList := genInfoList[k]
+ sort.Sort(ControllerCommentsSlice(cList))
for _, c := range cList {
allmethod := "nil"
if len(c.AllowHTTPMethods) > 0 {
@@ -161,18 +295,30 @@ func genRouterCode() {
}
params = strings.TrimRight(params, ",") + "}"
}
+ methodParams := "param.Make("
+ if len(c.MethodParams) > 0 {
+ lines := make([]string, 0, len(c.MethodParams))
+ for _, m := range c.MethodParams {
+ lines = append(lines, fmt.Sprint(m))
+ }
+ methodParams += "\n " +
+ strings.Join(lines, ",\n ") +
+ ",\n "
+ }
+ methodParams += ")"
globalinfo = globalinfo + `
beego.GlobalControllerRouter["` + k + `"] = append(beego.GlobalControllerRouter["` + k + `"],
beego.ControllerComments{
- "` + strings.TrimSpace(c.Method) + `",
- ` + "`" + c.Router + "`" + `,
- ` + allmethod + `,
- ` + params + `})
+ Method: "` + strings.TrimSpace(c.Method) + `",
+ ` + "Router: `" + c.Router + "`" + `,
+ AllowHTTPMethods: ` + allmethod + `,
+ MethodParams: ` + methodParams + `,
+ Params: ` + params + `})
`
}
}
if globalinfo != "" {
- f, err := os.Create(path.Join(AppPath, "routers", commentFilename))
+ f, err := os.Create(filepath.Join(getRouterDir(pkgRealpath), commentFilename))
if err != nil {
panic(err)
}
@@ -182,7 +328,7 @@ func genRouterCode() {
}
func compareFile(pkgRealpath string) bool {
- if !utils.FileExists(path.Join(AppPath, "routers", commentFilename)) {
+ if !utils.FileExists(filepath.Join(getRouterDir(pkgRealpath), commentFilename)) {
return true
}
if utils.FileExists(lastupdateFilename) {
@@ -229,3 +375,19 @@ func getpathTime(pkgRealpath string) (lastupdate int64, err error) {
}
return lastupdate, nil
}
+
+func getRouterDir(pkgRealpath string) string {
+ dir := filepath.Dir(pkgRealpath)
+ for {
+ d := filepath.Join(dir, "routers")
+ if utils.FileExists(d) {
+ return d
+ }
+
+ if r, _ := filepath.Rel(dir, AppPath); r == "." {
+ return d
+ }
+ // Parent dir.
+ dir = filepath.Dir(dir)
+ }
+}
diff --git a/src/vendor/github.com/astaxie/beego/plugins/apiauth/apiauth.go b/src/vendor/github.com/astaxie/beego/plugins/apiauth/apiauth.go
index 8af08088d..f816029c3 100644
--- a/src/vendor/github.com/astaxie/beego/plugins/apiauth/apiauth.go
+++ b/src/vendor/github.com/astaxie/beego/plugins/apiauth/apiauth.go
@@ -35,7 +35,7 @@
//
// beego.InsertFilter("*", beego.BeforeRouter,apiauth.APISecretAuth(getAppSecret, 360))
//
-// Infomation:
+// Information:
//
// In the request user should include these params in the query
//
@@ -56,6 +56,7 @@
package apiauth
import (
+ "bytes"
"crypto/hmac"
"crypto/sha256"
"encoding/base64"
@@ -119,7 +120,7 @@ func APISecretAuth(f AppIDToAppSecret, timeout int) beego.FilterFunc {
return
}
if ctx.Input.Query("signature") !=
- Signature(appsecret, ctx.Input.Method(), ctx.Request.Form, ctx.Input.URI()) {
+ Signature(appsecret, ctx.Input.Method(), ctx.Request.Form, ctx.Input.URL()) {
ctx.ResponseWriter.WriteHeader(403)
ctx.WriteString("auth failed")
}
@@ -127,54 +128,33 @@ func APISecretAuth(f AppIDToAppSecret, timeout int) beego.FilterFunc {
}
// Signature used to generate signature with the appsecret/method/params/RequestURI
-func Signature(appsecret, method string, params url.Values, RequestURI string) (result string) {
- var query string
+func Signature(appsecret, method string, params url.Values, RequestURL string) (result string) {
+ var b bytes.Buffer
+ keys := make([]string, len(params))
pa := make(map[string]string)
for k, v := range params {
pa[k] = v[0]
+ keys = append(keys, k)
}
- vs := mapSorter(pa)
- vs.Sort()
- for i := 0; i < vs.Len(); i++ {
- if vs.Keys[i] == "signature" {
+
+ sort.Strings(keys)
+
+ for _, key := range keys {
+ if key == "signature" {
continue
}
- if vs.Keys[i] != "" && vs.Vals[i] != "" {
- query = fmt.Sprintf("%v%v%v", query, vs.Keys[i], vs.Vals[i])
+
+ val := pa[key]
+ if key != "" && val != "" {
+ b.WriteString(key)
+ b.WriteString(val)
}
}
- stringToSign := fmt.Sprintf("%v\n%v\n%v\n", method, query, RequestURI)
+
+ stringToSign := fmt.Sprintf("%v\n%v\n%v\n", method, b.String(), RequestURL)
sha256 := sha256.New
hash := hmac.New(sha256, []byte(appsecret))
hash.Write([]byte(stringToSign))
return base64.StdEncoding.EncodeToString(hash.Sum(nil))
}
-
-type valSorter struct {
- Keys []string
- Vals []string
-}
-
-func mapSorter(m map[string]string) *valSorter {
- vs := &valSorter{
- Keys: make([]string, 0, len(m)),
- Vals: make([]string, 0, len(m)),
- }
- for k, v := range m {
- vs.Keys = append(vs.Keys, k)
- vs.Vals = append(vs.Vals, v)
- }
- return vs
-}
-
-func (vs *valSorter) Sort() {
- sort.Sort(vs)
-}
-
-func (vs *valSorter) Len() int { return len(vs.Keys) }
-func (vs *valSorter) Less(i, j int) bool { return vs.Keys[i] < vs.Keys[j] }
-func (vs *valSorter) Swap(i, j int) {
- vs.Vals[i], vs.Vals[j] = vs.Vals[j], vs.Vals[i]
- vs.Keys[i], vs.Keys[j] = vs.Keys[j], vs.Keys[i]
-}
diff --git a/src/vendor/github.com/astaxie/beego/plugins/apiauth/apiauth_test.go b/src/vendor/github.com/astaxie/beego/plugins/apiauth/apiauth_test.go
new file mode 100644
index 000000000..1f56cb0fa
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/plugins/apiauth/apiauth_test.go
@@ -0,0 +1,20 @@
+package apiauth
+
+import (
+ "net/url"
+ "testing"
+)
+
+func TestSignature(t *testing.T) {
+ appsecret := "beego secret"
+ method := "GET"
+ RequestURL := "http://localhost/test/url"
+ params := make(url.Values)
+ params.Add("arg1", "hello")
+ params.Add("arg2", "beego")
+
+ signature := "mFdpvLh48ca4mDVEItE9++AKKQ/IVca7O/ZyyB8hR58="
+ if Signature(appsecret, method, params, RequestURL) != signature {
+ t.Error("Signature error")
+ }
+}
diff --git a/src/vendor/github.com/astaxie/beego/plugins/authz/authz.go b/src/vendor/github.com/astaxie/beego/plugins/authz/authz.go
new file mode 100644
index 000000000..9dc0db76e
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/plugins/authz/authz.go
@@ -0,0 +1,86 @@
+// Copyright 2014 beego Author. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package authz provides handlers to enable ACL, RBAC, ABAC authorization support.
+// Simple Usage:
+// import(
+// "github.com/astaxie/beego"
+// "github.com/astaxie/beego/plugins/authz"
+// "github.com/casbin/casbin"
+// )
+//
+// func main(){
+// // mediate the access for every request
+// beego.InsertFilter("*", beego.BeforeRouter, authz.NewAuthorizer(casbin.NewEnforcer("authz_model.conf", "authz_policy.csv")))
+// beego.Run()
+// }
+//
+//
+// Advanced Usage:
+//
+// func main(){
+// e := casbin.NewEnforcer("authz_model.conf", "")
+// e.AddRoleForUser("alice", "admin")
+// e.AddPolicy(...)
+//
+// beego.InsertFilter("*", beego.BeforeRouter, authz.NewAuthorizer(e))
+// beego.Run()
+// }
+package authz
+
+import (
+ "github.com/astaxie/beego"
+ "github.com/astaxie/beego/context"
+ "github.com/casbin/casbin"
+ "net/http"
+)
+
+// NewAuthorizer returns the authorizer.
+// Use a casbin enforcer as input
+func NewAuthorizer(e *casbin.Enforcer) beego.FilterFunc {
+ return func(ctx *context.Context) {
+ a := &BasicAuthorizer{enforcer: e}
+
+ if !a.CheckPermission(ctx.Request) {
+ a.RequirePermission(ctx.ResponseWriter)
+ }
+ }
+}
+
+// BasicAuthorizer stores the casbin handler
+type BasicAuthorizer struct {
+ enforcer *casbin.Enforcer
+}
+
+// GetUserName gets the user name from the request.
+// Currently, only HTTP basic authentication is supported
+func (a *BasicAuthorizer) GetUserName(r *http.Request) string {
+ username, _, _ := r.BasicAuth()
+ return username
+}
+
+// CheckPermission checks the user/method/path combination from the request.
+// Returns true (permission granted) or false (permission forbidden)
+func (a *BasicAuthorizer) CheckPermission(r *http.Request) bool {
+ user := a.GetUserName(r)
+ method := r.Method
+ path := r.URL.Path
+ return a.enforcer.Enforce(user, path, method)
+}
+
+// RequirePermission returns the 403 Forbidden to the client
+func (a *BasicAuthorizer) RequirePermission(w http.ResponseWriter) {
+ w.WriteHeader(403)
+ w.Write([]byte("403 Forbidden\n"))
+}
diff --git a/src/vendor/github.com/astaxie/beego/plugins/authz/authz_model.conf b/src/vendor/github.com/astaxie/beego/plugins/authz/authz_model.conf
new file mode 100644
index 000000000..d1b3dbd7a
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/plugins/authz/authz_model.conf
@@ -0,0 +1,14 @@
+[request_definition]
+r = sub, obj, act
+
+[policy_definition]
+p = sub, obj, act
+
+[role_definition]
+g = _, _
+
+[policy_effect]
+e = some(where (p.eft == allow))
+
+[matchers]
+m = g(r.sub, p.sub) && keyMatch(r.obj, p.obj) && (r.act == p.act || p.act == "*")
\ No newline at end of file
diff --git a/src/vendor/github.com/astaxie/beego/plugins/authz/authz_policy.csv b/src/vendor/github.com/astaxie/beego/plugins/authz/authz_policy.csv
new file mode 100644
index 000000000..c062dd3e2
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/plugins/authz/authz_policy.csv
@@ -0,0 +1,7 @@
+p, alice, /dataset1/*, GET
+p, alice, /dataset1/resource1, POST
+p, bob, /dataset2/resource1, *
+p, bob, /dataset2/resource2, GET
+p, bob, /dataset2/folder1/*, POST
+p, dataset1_admin, /dataset1/*, *
+g, cathy, dataset1_admin
\ No newline at end of file
diff --git a/src/vendor/github.com/astaxie/beego/plugins/authz/authz_test.go b/src/vendor/github.com/astaxie/beego/plugins/authz/authz_test.go
new file mode 100644
index 000000000..49aed84ce
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/plugins/authz/authz_test.go
@@ -0,0 +1,107 @@
+// Copyright 2014 beego Author. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package authz
+
+import (
+ "github.com/astaxie/beego"
+ "github.com/astaxie/beego/context"
+ "github.com/astaxie/beego/plugins/auth"
+ "github.com/casbin/casbin"
+ "net/http"
+ "net/http/httptest"
+ "testing"
+)
+
+func testRequest(t *testing.T, handler *beego.ControllerRegister, user string, path string, method string, code int) {
+ r, _ := http.NewRequest(method, path, nil)
+ r.SetBasicAuth(user, "123")
+ w := httptest.NewRecorder()
+ handler.ServeHTTP(w, r)
+
+ if w.Code != code {
+ t.Errorf("%s, %s, %s: %d, supposed to be %d", user, path, method, w.Code, code)
+ }
+}
+
+func TestBasic(t *testing.T) {
+ handler := beego.NewControllerRegister()
+
+ handler.InsertFilter("*", beego.BeforeRouter, auth.Basic("alice", "123"))
+ handler.InsertFilter("*", beego.BeforeRouter, NewAuthorizer(casbin.NewEnforcer("authz_model.conf", "authz_policy.csv")))
+
+ handler.Any("*", func(ctx *context.Context) {
+ ctx.Output.SetStatus(200)
+ })
+
+ testRequest(t, handler, "alice", "/dataset1/resource1", "GET", 200)
+ testRequest(t, handler, "alice", "/dataset1/resource1", "POST", 200)
+ testRequest(t, handler, "alice", "/dataset1/resource2", "GET", 200)
+ testRequest(t, handler, "alice", "/dataset1/resource2", "POST", 403)
+}
+
+func TestPathWildcard(t *testing.T) {
+ handler := beego.NewControllerRegister()
+
+ handler.InsertFilter("*", beego.BeforeRouter, auth.Basic("bob", "123"))
+ handler.InsertFilter("*", beego.BeforeRouter, NewAuthorizer(casbin.NewEnforcer("authz_model.conf", "authz_policy.csv")))
+
+ handler.Any("*", func(ctx *context.Context) {
+ ctx.Output.SetStatus(200)
+ })
+
+ testRequest(t, handler, "bob", "/dataset2/resource1", "GET", 200)
+ testRequest(t, handler, "bob", "/dataset2/resource1", "POST", 200)
+ testRequest(t, handler, "bob", "/dataset2/resource1", "DELETE", 200)
+ testRequest(t, handler, "bob", "/dataset2/resource2", "GET", 200)
+ testRequest(t, handler, "bob", "/dataset2/resource2", "POST", 403)
+ testRequest(t, handler, "bob", "/dataset2/resource2", "DELETE", 403)
+
+ testRequest(t, handler, "bob", "/dataset2/folder1/item1", "GET", 403)
+ testRequest(t, handler, "bob", "/dataset2/folder1/item1", "POST", 200)
+ testRequest(t, handler, "bob", "/dataset2/folder1/item1", "DELETE", 403)
+ testRequest(t, handler, "bob", "/dataset2/folder1/item2", "GET", 403)
+ testRequest(t, handler, "bob", "/dataset2/folder1/item2", "POST", 200)
+ testRequest(t, handler, "bob", "/dataset2/folder1/item2", "DELETE", 403)
+}
+
+func TestRBAC(t *testing.T) {
+ handler := beego.NewControllerRegister()
+
+ handler.InsertFilter("*", beego.BeforeRouter, auth.Basic("cathy", "123"))
+ e := casbin.NewEnforcer("authz_model.conf", "authz_policy.csv")
+ handler.InsertFilter("*", beego.BeforeRouter, NewAuthorizer(e))
+
+ handler.Any("*", func(ctx *context.Context) {
+ ctx.Output.SetStatus(200)
+ })
+
+ // cathy can access all /dataset1/* resources via all methods because it has the dataset1_admin role.
+ testRequest(t, handler, "cathy", "/dataset1/item", "GET", 200)
+ testRequest(t, handler, "cathy", "/dataset1/item", "POST", 200)
+ testRequest(t, handler, "cathy", "/dataset1/item", "DELETE", 200)
+ testRequest(t, handler, "cathy", "/dataset2/item", "GET", 403)
+ testRequest(t, handler, "cathy", "/dataset2/item", "POST", 403)
+ testRequest(t, handler, "cathy", "/dataset2/item", "DELETE", 403)
+
+ // delete all roles on user cathy, so cathy cannot access any resources now.
+ e.DeleteRolesForUser("cathy")
+
+ testRequest(t, handler, "cathy", "/dataset1/item", "GET", 403)
+ testRequest(t, handler, "cathy", "/dataset1/item", "POST", 403)
+ testRequest(t, handler, "cathy", "/dataset1/item", "DELETE", 403)
+ testRequest(t, handler, "cathy", "/dataset2/item", "GET", 403)
+ testRequest(t, handler, "cathy", "/dataset2/item", "POST", 403)
+ testRequest(t, handler, "cathy", "/dataset2/item", "DELETE", 403)
+}
diff --git a/src/vendor/github.com/astaxie/beego/policy.go b/src/vendor/github.com/astaxie/beego/policy.go
new file mode 100644
index 000000000..ab23f927a
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/policy.go
@@ -0,0 +1,97 @@
+// Copyright 2016 beego authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package beego
+
+import (
+ "strings"
+
+ "github.com/astaxie/beego/context"
+)
+
+// PolicyFunc defines a policy function which is invoked before the controller handler is executed.
+type PolicyFunc func(*context.Context)
+
+// FindPolicy Find Router info for URL
+func (p *ControllerRegister) FindPolicy(cont *context.Context) []PolicyFunc {
+ var urlPath = cont.Input.URL()
+ if !BConfig.RouterCaseSensitive {
+ urlPath = strings.ToLower(urlPath)
+ }
+ httpMethod := cont.Input.Method()
+ isWildcard := false
+ // Find policy for current method
+ t, ok := p.policies[httpMethod]
+ // If not found - find policy for whole controller
+ if !ok {
+ t, ok = p.policies["*"]
+ isWildcard = true
+ }
+ if ok {
+ runObjects := t.Match(urlPath, cont)
+ if r, ok := runObjects.([]PolicyFunc); ok {
+ return r
+ } else if !isWildcard {
+ // If no policies found and we checked not for "*" method - try to find it
+ t, ok = p.policies["*"]
+ if ok {
+ runObjects = t.Match(urlPath, cont)
+ if r, ok = runObjects.([]PolicyFunc); ok {
+ return r
+ }
+ }
+ }
+ }
+ return nil
+}
+
+func (p *ControllerRegister) addToPolicy(method, pattern string, r ...PolicyFunc) {
+ method = strings.ToUpper(method)
+ p.enablePolicy = true
+ if !BConfig.RouterCaseSensitive {
+ pattern = strings.ToLower(pattern)
+ }
+ if t, ok := p.policies[method]; ok {
+ t.AddRouter(pattern, r)
+ } else {
+ t := NewTree()
+ t.AddRouter(pattern, r)
+ p.policies[method] = t
+ }
+}
+
+// Policy Register new policy in beego
+func Policy(pattern, method string, policy ...PolicyFunc) {
+ BeeApp.Handlers.addToPolicy(method, pattern, policy...)
+}
+
+// Find policies and execute if were found
+func (p *ControllerRegister) execPolicy(cont *context.Context, urlPath string) (started bool) {
+ if !p.enablePolicy {
+ return false
+ }
+ // Find Policy for method
+ policyList := p.FindPolicy(cont)
+ if len(policyList) > 0 {
+ // Run policies
+ for _, runPolicy := range policyList {
+ runPolicy(cont)
+ if cont.ResponseWriter.Started {
+ return true
+ }
+ }
+ return false
+ }
+ return false
+}
diff --git a/src/vendor/github.com/astaxie/beego/router.go b/src/vendor/github.com/astaxie/beego/router.go
index d0bf534f2..e5a4e80de 100644
--- a/src/vendor/github.com/astaxie/beego/router.go
+++ b/src/vendor/github.com/astaxie/beego/router.go
@@ -17,7 +17,6 @@ package beego
import (
"fmt"
"net/http"
- "os"
"path"
"path/filepath"
"reflect"
@@ -28,6 +27,8 @@ import (
"time"
beecontext "github.com/astaxie/beego/context"
+ "github.com/astaxie/beego/context/param"
+ "github.com/astaxie/beego/logs"
"github.com/astaxie/beego/toolbox"
"github.com/astaxie/beego/utils"
)
@@ -50,15 +51,22 @@ const (
var (
// HTTPMETHOD list the supported http methods.
HTTPMETHOD = map[string]string{
- "GET": "GET",
- "POST": "POST",
- "PUT": "PUT",
- "DELETE": "DELETE",
- "PATCH": "PATCH",
- "OPTIONS": "OPTIONS",
- "HEAD": "HEAD",
- "TRACE": "TRACE",
- "CONNECT": "CONNECT",
+ "GET": "GET",
+ "POST": "POST",
+ "PUT": "PUT",
+ "DELETE": "DELETE",
+ "PATCH": "PATCH",
+ "OPTIONS": "OPTIONS",
+ "HEAD": "HEAD",
+ "TRACE": "TRACE",
+ "CONNECT": "CONNECT",
+ "MKCOL": "MKCOL",
+ "COPY": "COPY",
+ "MOVE": "MOVE",
+ "PROPFIND": "PROPFIND",
+ "PROPPATCH": "PROPPATCH",
+ "LOCK": "LOCK",
+ "UNLOCK": "UNLOCK",
}
// these beego.Controller's methods shouldn't reflect to AutoRouter
exceptMethod = []string{"Init", "Prepare", "Finish", "Render", "RenderString",
@@ -101,28 +109,32 @@ func ExceptMethodAppend(action string) {
exceptMethod = append(exceptMethod, action)
}
-type controllerInfo struct {
+// ControllerInfo holds information about the controller.
+type ControllerInfo struct {
pattern string
controllerType reflect.Type
methods map[string]string
handler http.Handler
runFunction FilterFunc
routerType int
+ methodParams []*param.MethodParam
}
// ControllerRegister containers registered router rules, controller handlers and filters.
type ControllerRegister struct {
routers map[string]*Tree
+ enablePolicy bool
+ policies map[string]*Tree
enableFilter bool
- filters map[int][]*FilterRouter
+ filters [FinishRouter + 1][]*FilterRouter
pool sync.Pool
}
// NewControllerRegister returns a new ControllerRegister.
func NewControllerRegister() *ControllerRegister {
cr := &ControllerRegister{
- routers: make(map[string]*Tree),
- filters: make(map[int][]*FilterRouter),
+ routers: make(map[string]*Tree),
+ policies: make(map[string]*Tree),
}
cr.pool.New = func() interface{} {
return beecontext.NewContext()
@@ -141,6 +153,10 @@ func NewControllerRegister() *ControllerRegister {
// Add("/api",&RestController{},"get,post:ApiFunc"
// Add("/simple",&SimpleController{},"get:GetFunc;post:PostFunc")
func (p *ControllerRegister) Add(pattern string, c ControllerInterface, mappingMethods ...string) {
+ p.addWithMethodParams(pattern, c, nil, mappingMethods...)
+}
+
+func (p *ControllerRegister) addWithMethodParams(pattern string, c ControllerInterface, methodParams []*param.MethodParam, mappingMethods ...string) {
reflectVal := reflect.ValueOf(c)
t := reflect.Indirect(reflectVal).Type()
methods := make(map[string]string)
@@ -166,11 +182,12 @@ func (p *ControllerRegister) Add(pattern string, c ControllerInterface, mappingM
}
}
- route := &controllerInfo{}
+ route := &ControllerInfo{}
route.pattern = pattern
route.methods = methods
route.routerType = routerTypeBeego
route.controllerType = t
+ route.methodParams = methodParams
if len(methods) == 0 {
for _, m := range HTTPMETHOD {
p.addToRouter(m, pattern, route)
@@ -188,7 +205,7 @@ func (p *ControllerRegister) Add(pattern string, c ControllerInterface, mappingM
}
}
-func (p *ControllerRegister) addToRouter(method, pattern string, r *controllerInfo) {
+func (p *ControllerRegister) addToRouter(method, pattern string, r *ControllerInfo) {
if !BConfig.RouterCaseSensitive {
pattern = strings.ToLower(pattern)
}
@@ -209,13 +226,11 @@ func (p *ControllerRegister) Include(cList ...ControllerInterface) {
for _, c := range cList {
reflectVal := reflect.ValueOf(c)
t := reflect.Indirect(reflectVal).Type()
- gopath := os.Getenv("GOPATH")
- if gopath == "" {
+ wgopath := utils.GetGOPATHs()
+ if len(wgopath) == 0 {
panic("you are in dev mode. So please set gopath")
}
pkgpath := ""
-
- wgopath := filepath.SplitList(gopath)
for _, wg := range wgopath {
wg, _ = filepath.EvalSymlinks(filepath.Join(wg, "src", t.PkgPath()))
if utils.FileExists(wg) {
@@ -237,7 +252,7 @@ func (p *ControllerRegister) Include(cList ...ControllerInterface) {
key := t.PkgPath() + ":" + t.Name()
if comm, ok := GlobalControllerRouter[key]; ok {
for _, a := range comm {
- p.Add(a.Router, c, strings.Join(a.AllowHTTPMethods, ",")+":"+a.Method)
+ p.addWithMethodParams(a.Router, c, a.MethodParams, strings.Join(a.AllowHTTPMethods, ",")+":"+a.Method)
}
}
}
@@ -325,7 +340,7 @@ func (p *ControllerRegister) AddMethod(method, pattern string, f FilterFunc) {
if _, ok := HTTPMETHOD[method]; method != "*" && !ok {
panic("not support http method: " + method)
}
- route := &controllerInfo{}
+ route := &ControllerInfo{}
route.pattern = pattern
route.routerType = routerTypeRESTFul
route.runFunction = f
@@ -351,7 +366,7 @@ func (p *ControllerRegister) AddMethod(method, pattern string, f FilterFunc) {
// Handler add user defined Handler
func (p *ControllerRegister) Handler(pattern string, h http.Handler, options ...interface{}) {
- route := &controllerInfo{}
+ route := &ControllerInfo{}
route.pattern = pattern
route.routerType = routerTypeHandler
route.handler = h
@@ -386,7 +401,7 @@ func (p *ControllerRegister) AddAutoPrefix(prefix string, c ControllerInterface)
controllerName := strings.TrimSuffix(ct.Name(), "Controller")
for i := 0; i < rt.NumMethod(); i++ {
if !utils.InSlice(rt.Method(i).Name, exceptMethod) {
- route := &controllerInfo{}
+ route := &ControllerInfo{}
route.routerType = routerTypeBeego
route.methods = map[string]string{"*": rt.Method(i).Name}
route.controllerType = ct
@@ -406,29 +421,39 @@ func (p *ControllerRegister) AddAutoPrefix(prefix string, c ControllerInterface)
}
// InsertFilter Add a FilterFunc with pattern rule and action constant.
-// The bool params is for setting the returnOnOutput value (false allows multiple filters to execute)
+// params is for:
+// 1. setting the returnOnOutput value (false allows multiple filters to execute)
+// 2. determining whether or not params need to be reset.
func (p *ControllerRegister) InsertFilter(pattern string, pos int, filter FilterFunc, params ...bool) error {
-
- mr := new(FilterRouter)
- mr.tree = NewTree()
- mr.pattern = pattern
- mr.filterFunc = filter
- if !BConfig.RouterCaseSensitive {
- pattern = strings.ToLower(pattern)
+ mr := &FilterRouter{
+ tree: NewTree(),
+ pattern: pattern,
+ filterFunc: filter,
+ returnOnOutput: true,
}
- if len(params) == 0 {
- mr.returnOnOutput = true
- } else {
+ if !BConfig.RouterCaseSensitive {
+ mr.pattern = strings.ToLower(pattern)
+ }
+
+ paramsLen := len(params)
+ if paramsLen > 0 {
mr.returnOnOutput = params[0]
}
+ if paramsLen > 1 {
+ mr.resetParams = params[1]
+ }
mr.tree.AddRouter(pattern, true)
return p.insertFilterRouter(pos, mr)
}
// add Filter into
-func (p *ControllerRegister) insertFilterRouter(pos int, mr *FilterRouter) error {
- p.filters[pos] = append(p.filters[pos], mr)
+func (p *ControllerRegister) insertFilterRouter(pos int, mr *FilterRouter) (err error) {
+ if pos < BeforeStatic || pos > FinishRouter {
+ err = fmt.Errorf("can not find your filter position")
+ return
+ }
p.enableFilter = true
+ p.filters[pos] = append(p.filters[pos], mr)
return nil
}
@@ -437,11 +462,11 @@ func (p *ControllerRegister) insertFilterRouter(pos int, mr *FilterRouter) error
func (p *ControllerRegister) URLFor(endpoint string, values ...interface{}) string {
paths := strings.Split(endpoint, ".")
if len(paths) <= 1 {
- Warn("urlfor endpoint must like path.controller.method")
+ logs.Warn("urlfor endpoint must like path.controller.method")
return ""
}
if len(values)%2 != 0 {
- Warn("urlfor params must key-value pair")
+ logs.Warn("urlfor params must key-value pair")
return ""
}
params := make(map[string]string)
@@ -482,7 +507,7 @@ func (p *ControllerRegister) geturl(t *Tree, url, controllName, methodName strin
}
}
for _, l := range t.leaves {
- if c, ok := l.runObject.(*controllerInfo); ok {
+ if c, ok := l.runObject.(*ControllerInfo); ok {
if c.routerType == routerTypeBeego &&
strings.HasSuffix(path.Join(c.controllerType.PkgPath(), c.controllerType.Name()), controllName) {
find := false
@@ -577,21 +602,27 @@ func (p *ControllerRegister) geturl(t *Tree, url, controllName, methodName strin
return false, ""
}
-func (p *ControllerRegister) execFilter(context *beecontext.Context, pos int, urlPath string) (started bool) {
- if p.enableFilter {
- if l, ok := p.filters[pos]; ok {
- for _, filterR := range l {
- if filterR.returnOnOutput && context.ResponseWriter.Started {
- return true
- }
- if ok := filterR.ValidRouter(urlPath, context); ok {
- filterR.filterFunc(context)
- }
- if filterR.returnOnOutput && context.ResponseWriter.Started {
- return true
+func (p *ControllerRegister) execFilter(context *beecontext.Context, urlPath string, pos int) (started bool) {
+ var preFilterParams map[string]string
+ for _, filterR := range p.filters[pos] {
+ if filterR.returnOnOutput && context.ResponseWriter.Started {
+ return true
+ }
+ if filterR.resetParams {
+ preFilterParams = context.Input.Params()
+ }
+ if ok := filterR.ValidRouter(urlPath, context); ok {
+ filterR.filterFunc(context)
+ if filterR.resetParams {
+ context.Input.ResetParams()
+ for k, v := range preFilterParams {
+ context.Input.SetParam(k, v)
}
}
}
+ if filterR.returnOnOutput && context.ResponseWriter.Started {
+ return true
+ }
}
return false
}
@@ -600,16 +631,20 @@ func (p *ControllerRegister) execFilter(context *beecontext.Context, pos int, ur
func (p *ControllerRegister) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
startTime := time.Now()
var (
- runRouter reflect.Type
- findRouter bool
- runMethod string
- routerInfo *controllerInfo
+ runRouter reflect.Type
+ findRouter bool
+ runMethod string
+ methodParams []*param.MethodParam
+ routerInfo *ControllerInfo
+ isRunnable bool
)
context := p.pool.Get().(*beecontext.Context)
context.Reset(rw, r)
defer p.pool.Put(context)
- defer p.recoverPanic(context)
+ if BConfig.RecoverFunc != nil {
+ defer BConfig.RecoverFunc(context)
+ }
context.Output.EnableGzip = BConfig.EnableGzip
@@ -617,11 +652,10 @@ func (p *ControllerRegister) ServeHTTP(rw http.ResponseWriter, r *http.Request)
context.Output.Header("Server", BConfig.ServerName)
}
- var urlPath string
+ var urlPath = r.URL.Path
+
if !BConfig.RouterCaseSensitive {
- urlPath = strings.ToLower(r.URL.Path)
- } else {
- urlPath = r.URL.Path
+ urlPath = strings.ToLower(urlPath)
}
// filter wrong http method
@@ -631,17 +665,18 @@ func (p *ControllerRegister) ServeHTTP(rw http.ResponseWriter, r *http.Request)
}
// filter for static file
- if p.execFilter(context, BeforeStatic, urlPath) {
+ if len(p.filters[BeforeStatic]) > 0 && p.execFilter(context, urlPath, BeforeStatic) {
goto Admin
}
serverStaticRouter(context)
+
if context.ResponseWriter.Started {
findRouter = true
goto Admin
}
- if r.Method != "GET" && r.Method != "HEAD" {
+ if r.Method != http.MethodGet && r.Method != http.MethodHead {
if BConfig.CopyRequestBody && !context.Input.IsUpload() {
context.Input.CopyBody(BConfig.MaxMemory)
}
@@ -653,9 +688,9 @@ func (p *ControllerRegister) ServeHTTP(rw http.ResponseWriter, r *http.Request)
var err error
context.Input.CruSession, err = GlobalSessions.SessionStart(rw, r)
if err != nil {
- Error(err)
+ logs.Error(err)
exception("503", context)
- return
+ goto Admin
}
defer func() {
if context.Input.CruSession != nil {
@@ -663,26 +698,16 @@ func (p *ControllerRegister) ServeHTTP(rw http.ResponseWriter, r *http.Request)
}
}()
}
-
- if p.execFilter(context, BeforeRouter, urlPath) {
+ if len(p.filters[BeforeRouter]) > 0 && p.execFilter(context, urlPath, BeforeRouter) {
goto Admin
}
-
- if !findRouter {
- httpMethod := r.Method
- if t, ok := p.routers[httpMethod]; ok {
- runObject := t.Match(urlPath, context)
- if r, ok := runObject.(*controllerInfo); ok {
- routerInfo = r
- findRouter = true
- if splat := context.Input.Param(":splat"); splat != "" {
- for k, v := range strings.Split(splat, "/") {
- context.Input.SetParam(strconv.Itoa(k), v)
- }
- }
- }
- }
-
+ // User can define RunController and RunMethod in filter
+ if context.Input.RunController != nil && context.Input.RunMethod != "" {
+ findRouter = true
+ runMethod = context.Input.RunMethod
+ runRouter = context.Input.RunController
+ } else {
+ routerInfo, findRouter = p.FindRouter(context)
}
//if no matches to url, throw a not found exception
@@ -690,122 +715,148 @@ func (p *ControllerRegister) ServeHTTP(rw http.ResponseWriter, r *http.Request)
exception("404", context)
goto Admin
}
-
- if findRouter {
- //execute middleware filters
- if p.execFilter(context, BeforeExec, urlPath) {
- goto Admin
- }
- isRunnable := false
- if routerInfo != nil {
- if routerInfo.routerType == routerTypeRESTFul {
- if _, ok := routerInfo.methods[r.Method]; ok {
- isRunnable = true
- routerInfo.runFunction(context)
- } else {
- exception("405", context)
- goto Admin
- }
- } else if routerInfo.routerType == routerTypeHandler {
- isRunnable = true
- routerInfo.handler.ServeHTTP(rw, r)
- } else {
- runRouter = routerInfo.controllerType
- method := r.Method
- if r.Method == "POST" && context.Input.Query("_method") == "PUT" {
- method = "PUT"
- }
- if r.Method == "POST" && context.Input.Query("_method") == "DELETE" {
- method = "DELETE"
- }
- if m, ok := routerInfo.methods[method]; ok {
- runMethod = m
- } else if m, ok = routerInfo.methods["*"]; ok {
- runMethod = m
- } else {
- runMethod = method
- }
- }
- }
-
- // also defined runRouter & runMethod from filter
- if !isRunnable {
- //Invoke the request handler
- vc := reflect.New(runRouter)
- execController, ok := vc.Interface().(ControllerInterface)
- if !ok {
- panic("controller is not ControllerInterface")
- }
-
- //call the controller init function
- execController.Init(context, runRouter.Name(), runMethod, vc.Interface())
-
- //call prepare function
- execController.Prepare()
-
- //if XSRF is Enable then check cookie where there has any cookie in the request's cookie _csrf
- if BConfig.WebConfig.EnableXSRF {
- execController.XSRFToken()
- if r.Method == "POST" || r.Method == "DELETE" || r.Method == "PUT" ||
- (r.Method == "POST" && (context.Input.Query("_method") == "DELETE" || context.Input.Query("_method") == "PUT")) {
- execController.CheckXSRFCookie()
- }
- }
-
- execController.URLMapping()
-
- if !context.ResponseWriter.Started {
- //exec main logic
- switch runMethod {
- case "GET":
- execController.Get()
- case "POST":
- execController.Post()
- case "DELETE":
- execController.Delete()
- case "PUT":
- execController.Put()
- case "HEAD":
- execController.Head()
- case "PATCH":
- execController.Patch()
- case "OPTIONS":
- execController.Options()
- default:
- if !execController.HandlerFunc(runMethod) {
- var in []reflect.Value
- method := vc.MethodByName(runMethod)
- method.Call(in)
- }
- }
-
- //render template
- if !context.ResponseWriter.Started && context.Output.Status == 0 {
- if BConfig.WebConfig.AutoRender {
- if err := execController.Render(); err != nil {
- panic(err)
- }
- }
- }
- }
-
- // finish all runRouter. release resource
- execController.Finish()
- }
-
- //execute middleware filters
- if p.execFilter(context, AfterExec, urlPath) {
- goto Admin
+ if splat := context.Input.Param(":splat"); splat != "" {
+ for k, v := range strings.Split(splat, "/") {
+ context.Input.SetParam(strconv.Itoa(k), v)
}
}
- p.execFilter(context, FinishRouter, urlPath)
+ //execute middleware filters
+ if len(p.filters[BeforeExec]) > 0 && p.execFilter(context, urlPath, BeforeExec) {
+ goto Admin
+ }
+
+ //check policies
+ if p.execPolicy(context, urlPath) {
+ goto Admin
+ }
+
+ if routerInfo != nil {
+ //store router pattern into context
+ context.Input.SetData("RouterPattern", routerInfo.pattern)
+ if routerInfo.routerType == routerTypeRESTFul {
+ if _, ok := routerInfo.methods[r.Method]; ok {
+ isRunnable = true
+ routerInfo.runFunction(context)
+ } else {
+ exception("405", context)
+ goto Admin
+ }
+ } else if routerInfo.routerType == routerTypeHandler {
+ isRunnable = true
+ routerInfo.handler.ServeHTTP(rw, r)
+ } else {
+ runRouter = routerInfo.controllerType
+ methodParams = routerInfo.methodParams
+ method := r.Method
+ if r.Method == http.MethodPost && context.Input.Query("_method") == http.MethodPut {
+ method = http.MethodPut
+ }
+ if r.Method == http.MethodPost && context.Input.Query("_method") == http.MethodDelete {
+ method = http.MethodDelete
+ }
+ if m, ok := routerInfo.methods[method]; ok {
+ runMethod = m
+ } else if m, ok = routerInfo.methods["*"]; ok {
+ runMethod = m
+ } else {
+ runMethod = method
+ }
+ }
+ }
+
+ // also defined runRouter & runMethod from filter
+ if !isRunnable {
+ //Invoke the request handler
+ vc := reflect.New(runRouter)
+ execController, ok := vc.Interface().(ControllerInterface)
+ if !ok {
+ panic("controller is not ControllerInterface")
+ }
+
+ //call the controller init function
+ execController.Init(context, runRouter.Name(), runMethod, vc.Interface())
+
+ //call prepare function
+ execController.Prepare()
+
+ //if XSRF is Enable then check cookie where there has any cookie in the request's cookie _csrf
+ if BConfig.WebConfig.EnableXSRF {
+ execController.XSRFToken()
+ if r.Method == http.MethodPost || r.Method == http.MethodDelete || r.Method == http.MethodPut ||
+ (r.Method == http.MethodPost && (context.Input.Query("_method") == http.MethodDelete || context.Input.Query("_method") == http.MethodPut)) {
+ execController.CheckXSRFCookie()
+ }
+ }
+
+ execController.URLMapping()
+
+ if !context.ResponseWriter.Started {
+ //exec main logic
+ switch runMethod {
+ case http.MethodGet:
+ execController.Get()
+ case http.MethodPost:
+ execController.Post()
+ case http.MethodDelete:
+ execController.Delete()
+ case http.MethodPut:
+ execController.Put()
+ case http.MethodHead:
+ execController.Head()
+ case http.MethodPatch:
+ execController.Patch()
+ case http.MethodOptions:
+ execController.Options()
+ default:
+ if !execController.HandlerFunc(runMethod) {
+ method := vc.MethodByName(runMethod)
+ in := param.ConvertParams(methodParams, method.Type(), context)
+ out := method.Call(in)
+
+ //For backward compatibility we only handle response if we had incoming methodParams
+ if methodParams != nil {
+ p.handleParamResponse(context, execController, out)
+ }
+ }
+ }
+
+ //render template
+ if !context.ResponseWriter.Started && context.Output.Status == 0 {
+ if BConfig.WebConfig.AutoRender {
+ if err := execController.Render(); err != nil {
+ logs.Error(err)
+ }
+ }
+ }
+ }
+
+ // finish all runRouter. release resource
+ execController.Finish()
+ }
+
+ //execute middleware filters
+ if len(p.filters[AfterExec]) > 0 && p.execFilter(context, urlPath, AfterExec) {
+ goto Admin
+ }
+
+ if len(p.filters[FinishRouter]) > 0 && p.execFilter(context, urlPath, FinishRouter) {
+ goto Admin
+ }
Admin:
- timeDur := time.Since(startTime)
//admin module record QPS
if BConfig.Listen.EnableAdmin {
- if FilterMonitorFunc(r.Method, r.URL.Path, timeDur) {
+ timeDur := time.Since(startTime)
+ pattern := ""
+ if routerInfo != nil {
+ pattern = routerInfo.pattern
+ }
+ statusCode := context.ResponseWriter.Status
+ if statusCode == 0 {
+ statusCode = 200
+ }
+ if FilterMonitorFunc(r.Method, r.URL.Path, timeDur, pattern, statusCode) {
if runRouter != nil {
go toolbox.StatisticsMap.AddStatistics(r.Method, r.URL.Path, runRouter.Name(), timeDur)
} else {
@@ -815,18 +866,36 @@ Admin:
}
if BConfig.RunMode == DEV || BConfig.Log.AccessLogs {
+ timeDur := time.Since(startTime)
var devInfo string
+
+ statusCode := context.ResponseWriter.Status
+ if statusCode == 0 {
+ statusCode = 200
+ }
+
+ iswin := (runtime.GOOS == "windows")
+ statusColor := logs.ColorByStatus(iswin, statusCode)
+ methodColor := logs.ColorByMethod(iswin, r.Method)
+ resetColor := logs.ColorByMethod(iswin, "")
+
if findRouter {
if routerInfo != nil {
- devInfo = fmt.Sprintf("| % -10s | % -40s | % -16s | % -10s | % -40s |", r.Method, r.URL.Path, timeDur.String(), "match", routerInfo.pattern)
+ devInfo = fmt.Sprintf("|%15s|%s %3d %s|%13s|%8s|%s %-7s %s %-3s r:%s", context.Input.IP(), statusColor, statusCode,
+ resetColor, timeDur.String(), "match", methodColor, r.Method, resetColor, r.URL.Path,
+ routerInfo.pattern)
} else {
- devInfo = fmt.Sprintf("| % -10s | % -40s | % -16s | % -10s |", r.Method, r.URL.Path, timeDur.String(), "match")
+ devInfo = fmt.Sprintf("|%15s|%s %3d %s|%13s|%8s|%s %-7s %s %-3s", context.Input.IP(), statusColor, statusCode, resetColor,
+ timeDur.String(), "match", methodColor, r.Method, resetColor, r.URL.Path)
}
} else {
- devInfo = fmt.Sprintf("| % -10s | % -40s | % -16s | % -10s |", r.Method, r.URL.Path, timeDur.String(), "notmatch")
+ devInfo = fmt.Sprintf("|%15s|%s %3d %s|%13s|%8s|%s %-7s %s %-3s", context.Input.IP(), statusColor, statusCode, resetColor,
+ timeDur.String(), "nomatch", methodColor, r.Method, resetColor, r.URL.Path)
}
- if DefaultAccessLogFilter == nil || !DefaultAccessLogFilter.Filter(context) {
- Debug(devInfo)
+ if iswin {
+ logs.W32Debug(devInfo)
+ } else {
+ logs.Debug(devInfo)
}
}
@@ -836,36 +905,34 @@ Admin:
}
}
-func (p *ControllerRegister) recoverPanic(context *beecontext.Context) {
- if err := recover(); err != nil {
- if err == ErrAbort {
- return
- }
- if !BConfig.RecoverPanic {
- panic(err)
- } else {
- if BConfig.EnableErrorsShow {
- if _, ok := ErrorMaps[fmt.Sprint(err)]; ok {
- exception(fmt.Sprint(err), context)
- return
- }
- }
- var stack string
- Critical("the request url is ", context.Input.URL())
- Critical("Handler crashed with error", err)
- for i := 1; ; i++ {
- _, file, line, ok := runtime.Caller(i)
- if !ok {
- break
- }
- Critical(fmt.Sprintf("%s:%d", file, line))
- stack = stack + fmt.Sprintln(fmt.Sprintf("%s:%d", file, line))
- }
- if BConfig.RunMode == DEV {
- showErr(err, context, stack)
- }
+func (p *ControllerRegister) handleParamResponse(context *beecontext.Context, execController ControllerInterface, results []reflect.Value) {
+ //looping in reverse order for the case when both error and value are returned and error sets the response status code
+ for i := len(results) - 1; i >= 0; i-- {
+ result := results[i]
+ if result.Kind() != reflect.Interface || !result.IsNil() {
+ resultValue := result.Interface()
+ context.RenderMethodResult(resultValue)
}
}
+ if !context.ResponseWriter.Started && context.Output.Status == 0 {
+ context.Output.SetStatus(200)
+ }
+}
+
+// FindRouter Find Router info for URL
+func (p *ControllerRegister) FindRouter(context *beecontext.Context) (routerInfo *ControllerInfo, isFind bool) {
+ var urlPath = context.Input.URL()
+ if !BConfig.RouterCaseSensitive {
+ urlPath = strings.ToLower(urlPath)
+ }
+ httpMethod := context.Input.Method()
+ if t, ok := p.routers[httpMethod]; ok {
+ runObject := t.Match(urlPath, context)
+ if r, ok := runObject.(*ControllerInfo); ok {
+ return r, true
+ }
+ }
+ return
}
func toURL(params map[string]string) string {
diff --git a/src/vendor/github.com/astaxie/beego/router_test.go b/src/vendor/github.com/astaxie/beego/router_test.go
index f26f0c86b..720b4ca8c 100644
--- a/src/vendor/github.com/astaxie/beego/router_test.go
+++ b/src/vendor/github.com/astaxie/beego/router_test.go
@@ -21,6 +21,7 @@ import (
"testing"
"github.com/astaxie/beego/context"
+ "github.com/astaxie/beego/logs"
)
type TestController struct {
@@ -94,7 +95,7 @@ func TestUrlFor(t *testing.T) {
handler.Add("/api/list", &TestController{}, "*:List")
handler.Add("/person/:last/:first", &TestController{}, "*:Param")
if a := handler.URLFor("TestController.List"); a != "/api/list" {
- Info(a)
+ logs.Info(a)
t.Errorf("TestController.List must equal to /api/list")
}
if a := handler.URLFor("TestController.Param", ":last", "xie", ":first", "asta"); a != "/person/xie/asta" {
@@ -120,24 +121,24 @@ func TestUrlFor2(t *testing.T) {
handler.Add("/v1/:v(.+)_cms/ttt_:id(.+)_:page(.+).html", &TestController{}, "*:Param")
handler.Add("/:year:int/:month:int/:title/:entid", &TestController{})
if handler.URLFor("TestController.GetURL", ":username", "astaxie") != "/v1/astaxie/edit" {
- Info(handler.URLFor("TestController.GetURL"))
+ logs.Info(handler.URLFor("TestController.GetURL"))
t.Errorf("TestController.List must equal to /v1/astaxie/edit")
}
if handler.URLFor("TestController.List", ":v", "za", ":id", "12", ":page", "123") !=
"/v1/za/cms_12_123.html" {
- Info(handler.URLFor("TestController.List"))
+ logs.Info(handler.URLFor("TestController.List"))
t.Errorf("TestController.List must equal to /v1/za/cms_12_123.html")
}
if handler.URLFor("TestController.Param", ":v", "za", ":id", "12", ":page", "123") !=
"/v1/za_cms/ttt_12_123.html" {
- Info(handler.URLFor("TestController.Param"))
+ logs.Info(handler.URLFor("TestController.Param"))
t.Errorf("TestController.List must equal to /v1/za_cms/ttt_12_123.html")
}
if handler.URLFor("TestController.Get", ":year", "1111", ":month", "11",
":title", "aaaa", ":entid", "aaaa") !=
"/1111/11/aaaa/aaaa" {
- Info(handler.URLFor("TestController.Get"))
+ logs.Info(handler.URLFor("TestController.Get"))
t.Errorf("TestController.Get must equal to /1111/11/aaaa/aaaa")
}
}
@@ -419,6 +420,74 @@ func testRequest(method, path string) (*httptest.ResponseRecorder, *http.Request
return recorder, request
}
+// Expectation: A Filter with the correct configuration should be created given
+// specific parameters.
+func TestInsertFilter(t *testing.T) {
+ testName := "TestInsertFilter"
+
+ mux := NewControllerRegister()
+ mux.InsertFilter("*", BeforeRouter, func(*context.Context) {})
+ if !mux.filters[BeforeRouter][0].returnOnOutput {
+ t.Errorf(
+ "%s: passing no variadic params should set returnOnOutput to true",
+ testName)
+ }
+ if mux.filters[BeforeRouter][0].resetParams {
+ t.Errorf(
+ "%s: passing no variadic params should set resetParams to false",
+ testName)
+ }
+
+ mux = NewControllerRegister()
+ mux.InsertFilter("*", BeforeRouter, func(*context.Context) {}, false)
+ if mux.filters[BeforeRouter][0].returnOnOutput {
+ t.Errorf(
+ "%s: passing false as 1st variadic param should set returnOnOutput to false",
+ testName)
+ }
+
+ mux = NewControllerRegister()
+ mux.InsertFilter("*", BeforeRouter, func(*context.Context) {}, true, true)
+ if !mux.filters[BeforeRouter][0].resetParams {
+ t.Errorf(
+ "%s: passing true as 2nd variadic param should set resetParams to true",
+ testName)
+ }
+}
+
+// Expectation: the second variadic arg should cause the execution of the filter
+// to preserve the parameters from before its execution.
+func TestParamResetFilter(t *testing.T) {
+ testName := "TestParamResetFilter"
+ route := "/beego/*" // splat
+ path := "/beego/routes/routes"
+
+ mux := NewControllerRegister()
+
+ mux.InsertFilter("*", BeforeExec, beegoResetParams, true, true)
+
+ mux.Get(route, beegoHandleResetParams)
+
+ rw, r := testRequest("GET", path)
+ mux.ServeHTTP(rw, r)
+
+ // The two functions, `beegoResetParams` and `beegoHandleResetParams` add
+ // a response header of `Splat`. The expectation here is that that Header
+ // value should match what the _request's_ router set, not the filter's.
+
+ headers := rw.HeaderMap
+ if len(headers["Splat"]) != 1 {
+ t.Errorf(
+ "%s: There was an error in the test. Splat param not set in Header",
+ testName)
+ }
+ if headers["Splat"][0] != "routes/routes" {
+ t.Errorf(
+ "%s: expected `:splat` param to be [routes/routes] but it was [%s]",
+ testName, headers["Splat"][0])
+ }
+}
+
// Execution point: BeforeRouter
// expectation: only BeforeRouter function is executed, notmatch output as router doesn't handle
func TestFilterBeforeRouter(t *testing.T) {
@@ -433,10 +502,10 @@ func TestFilterBeforeRouter(t *testing.T) {
rw, r := testRequest("GET", url)
mux.ServeHTTP(rw, r)
- if strings.Contains(rw.Body.String(), "BeforeRouter1") == false {
+ if !strings.Contains(rw.Body.String(), "BeforeRouter1") {
t.Errorf(testName + " BeforeRouter did not run")
}
- if strings.Contains(rw.Body.String(), "hello") == true {
+ if strings.Contains(rw.Body.String(), "hello") {
t.Errorf(testName + " BeforeRouter did not return properly")
}
}
@@ -456,13 +525,13 @@ func TestFilterBeforeExec(t *testing.T) {
rw, r := testRequest("GET", url)
mux.ServeHTTP(rw, r)
- if strings.Contains(rw.Body.String(), "BeforeExec1") == false {
+ if !strings.Contains(rw.Body.String(), "BeforeExec1") {
t.Errorf(testName + " BeforeExec did not run")
}
- if strings.Contains(rw.Body.String(), "hello") == true {
+ if strings.Contains(rw.Body.String(), "hello") {
t.Errorf(testName + " BeforeExec did not return properly")
}
- if strings.Contains(rw.Body.String(), "BeforeRouter") == true {
+ if strings.Contains(rw.Body.String(), "BeforeRouter") {
t.Errorf(testName + " BeforeRouter ran in error")
}
}
@@ -483,16 +552,16 @@ func TestFilterAfterExec(t *testing.T) {
rw, r := testRequest("GET", url)
mux.ServeHTTP(rw, r)
- if strings.Contains(rw.Body.String(), "AfterExec1") == false {
+ if !strings.Contains(rw.Body.String(), "AfterExec1") {
t.Errorf(testName + " AfterExec did not run")
}
- if strings.Contains(rw.Body.String(), "hello") == false {
+ if !strings.Contains(rw.Body.String(), "hello") {
t.Errorf(testName + " handler did not run properly")
}
- if strings.Contains(rw.Body.String(), "BeforeRouter") == true {
+ if strings.Contains(rw.Body.String(), "BeforeRouter") {
t.Errorf(testName + " BeforeRouter ran in error")
}
- if strings.Contains(rw.Body.String(), "BeforeExec") == true {
+ if strings.Contains(rw.Body.String(), "BeforeExec") {
t.Errorf(testName + " BeforeExec ran in error")
}
}
@@ -514,19 +583,19 @@ func TestFilterFinishRouter(t *testing.T) {
rw, r := testRequest("GET", url)
mux.ServeHTTP(rw, r)
- if strings.Contains(rw.Body.String(), "FinishRouter1") == true {
+ if strings.Contains(rw.Body.String(), "FinishRouter1") {
t.Errorf(testName + " FinishRouter did not run")
}
- if strings.Contains(rw.Body.String(), "hello") == false {
+ if !strings.Contains(rw.Body.String(), "hello") {
t.Errorf(testName + " handler did not run properly")
}
- if strings.Contains(rw.Body.String(), "AfterExec1") == true {
+ if strings.Contains(rw.Body.String(), "AfterExec1") {
t.Errorf(testName + " AfterExec ran in error")
}
- if strings.Contains(rw.Body.String(), "BeforeRouter") == true {
+ if strings.Contains(rw.Body.String(), "BeforeRouter") {
t.Errorf(testName + " BeforeRouter ran in error")
}
- if strings.Contains(rw.Body.String(), "BeforeExec") == true {
+ if strings.Contains(rw.Body.String(), "BeforeExec") {
t.Errorf(testName + " BeforeExec ran in error")
}
}
@@ -546,14 +615,14 @@ func TestFilterFinishRouterMultiFirstOnly(t *testing.T) {
rw, r := testRequest("GET", url)
mux.ServeHTTP(rw, r)
- if strings.Contains(rw.Body.String(), "FinishRouter1") == false {
+ if !strings.Contains(rw.Body.String(), "FinishRouter1") {
t.Errorf(testName + " FinishRouter1 did not run")
}
- if strings.Contains(rw.Body.String(), "hello") == false {
+ if !strings.Contains(rw.Body.String(), "hello") {
t.Errorf(testName + " handler did not run properly")
}
// not expected in body
- if strings.Contains(rw.Body.String(), "FinishRouter2") == true {
+ if strings.Contains(rw.Body.String(), "FinishRouter2") {
t.Errorf(testName + " FinishRouter2 did run")
}
}
@@ -573,41 +642,56 @@ func TestFilterFinishRouterMulti(t *testing.T) {
rw, r := testRequest("GET", url)
mux.ServeHTTP(rw, r)
- if strings.Contains(rw.Body.String(), "FinishRouter1") == false {
+ if !strings.Contains(rw.Body.String(), "FinishRouter1") {
t.Errorf(testName + " FinishRouter1 did not run")
}
- if strings.Contains(rw.Body.String(), "hello") == false {
+ if !strings.Contains(rw.Body.String(), "hello") {
t.Errorf(testName + " handler did not run properly")
}
- if strings.Contains(rw.Body.String(), "FinishRouter2") == false {
+ if !strings.Contains(rw.Body.String(), "FinishRouter2") {
t.Errorf(testName + " FinishRouter2 did not run properly")
}
}
func beegoFilterNoOutput(ctx *context.Context) {
- return
}
+
func beegoBeforeRouter1(ctx *context.Context) {
ctx.WriteString("|BeforeRouter1")
}
+
func beegoBeforeRouter2(ctx *context.Context) {
ctx.WriteString("|BeforeRouter2")
}
+
func beegoBeforeExec1(ctx *context.Context) {
ctx.WriteString("|BeforeExec1")
}
+
func beegoBeforeExec2(ctx *context.Context) {
ctx.WriteString("|BeforeExec2")
}
+
func beegoAfterExec1(ctx *context.Context) {
ctx.WriteString("|AfterExec1")
}
+
func beegoAfterExec2(ctx *context.Context) {
ctx.WriteString("|AfterExec2")
}
+
func beegoFinishRouter1(ctx *context.Context) {
ctx.WriteString("|FinishRouter1")
}
+
func beegoFinishRouter2(ctx *context.Context) {
ctx.WriteString("|FinishRouter2")
}
+
+func beegoResetParams(ctx *context.Context) {
+ ctx.ResponseWriter.Header().Set("splat", ctx.Input.Param(":splat"))
+}
+
+func beegoHandleResetParams(ctx *context.Context) {
+ ctx.ResponseWriter.Header().Set("splat", ctx.Input.Param(":splat"))
+}
diff --git a/src/vendor/github.com/astaxie/beego/session/couchbase/sess_couchbase.go b/src/vendor/github.com/astaxie/beego/session/couchbase/sess_couchbase.go
index d5be11d0b..707d042c5 100644
--- a/src/vendor/github.com/astaxie/beego/session/couchbase/sess_couchbase.go
+++ b/src/vendor/github.com/astaxie/beego/session/couchbase/sess_couchbase.go
@@ -155,11 +155,16 @@ func (cp *Provider) SessionInit(maxlifetime int64, savePath string) error {
func (cp *Provider) SessionRead(sid string) (session.Store, error) {
cp.b = cp.getBucket()
- var doc []byte
+ var (
+ kv map[interface{}]interface{}
+ err error
+ doc []byte
+ )
- err := cp.b.Get(sid, &doc)
- var kv map[interface{}]interface{}
- if doc == nil {
+ err = cp.b.Get(sid, &doc)
+ if err != nil {
+ return nil, err
+ } else if doc == nil {
kv = make(map[interface{}]interface{})
} else {
kv, err = session.DecodeGob(doc)
@@ -230,7 +235,6 @@ func (cp *Provider) SessionDestroy(sid string) error {
// SessionGC Recycle
func (cp *Provider) SessionGC() {
- return
}
// SessionAll return all active session
diff --git a/src/vendor/github.com/astaxie/beego/session/ledis/ledis_session.go b/src/vendor/github.com/astaxie/beego/session/ledis/ledis_session.go
index 68f37b08c..77685d1e2 100644
--- a/src/vendor/github.com/astaxie/beego/session/ledis/ledis_session.go
+++ b/src/vendor/github.com/astaxie/beego/session/ledis/ledis_session.go
@@ -12,8 +12,10 @@ import (
"github.com/siddontang/ledisdb/ledis"
)
-var ledispder = &Provider{}
-var c *ledis.DB
+var (
+ ledispder = &Provider{}
+ c *ledis.DB
+)
// SessionStore ledis session store
type SessionStore struct {
@@ -97,27 +99,33 @@ func (lp *Provider) SessionInit(maxlifetime int64, savePath string) error {
}
cfg := new(config.Config)
cfg.DataDir = lp.savePath
- nowLedis, err := ledis.Open(cfg)
- c, err = nowLedis.Select(lp.db)
+
+ var ledisInstance *ledis.Ledis
+ ledisInstance, err = ledis.Open(cfg)
if err != nil {
- println(err)
- return nil
+ return err
}
- return nil
+ c, err = ledisInstance.Select(lp.db)
+ return err
}
// SessionRead read ledis session by sid
func (lp *Provider) SessionRead(sid string) (session.Store, error) {
- kvs, err := c.Get([]byte(sid))
- var kv map[interface{}]interface{}
+ var (
+ kv map[interface{}]interface{}
+ err error
+ )
+
+ kvs, _ := c.Get([]byte(sid))
+
if len(kvs) == 0 {
kv = make(map[interface{}]interface{})
} else {
- kv, err = session.DecodeGob(kvs)
- if err != nil {
+ if kv, err = session.DecodeGob(kvs); err != nil {
return nil, err
}
}
+
ls := &SessionStore{sid: sid, values: kv, maxlifetime: lp.maxlifetime}
return ls, nil
}
@@ -125,10 +133,7 @@ func (lp *Provider) SessionRead(sid string) (session.Store, error) {
// SessionExist check ledis session exist by sid
func (lp *Provider) SessionExist(sid string) bool {
count, _ := c.Exists([]byte(sid))
- if count == 0 {
- return false
- }
- return true
+ return !(count == 0)
}
// SessionRegenerate generate new sid for ledis session
@@ -145,18 +150,7 @@ func (lp *Provider) SessionRegenerate(oldsid, sid string) (session.Store, error)
c.Set([]byte(sid), data)
c.Expire([]byte(sid), lp.maxlifetime)
}
- kvs, err := c.Get([]byte(sid))
- var kv map[interface{}]interface{}
- if len(kvs) == 0 {
- kv = make(map[interface{}]interface{})
- } else {
- kv, err = session.DecodeGob([]byte(kvs))
- if err != nil {
- return nil, err
- }
- }
- ls := &SessionStore{sid: sid, values: kv, maxlifetime: lp.maxlifetime}
- return ls, nil
+ return lp.SessionRead(sid)
}
// SessionDestroy delete ledis session by id
@@ -167,7 +161,6 @@ func (lp *Provider) SessionDestroy(sid string) error {
// SessionGC Impelment method, no used.
func (lp *Provider) SessionGC() {
- return
}
// SessionAll return all active session
diff --git a/src/vendor/github.com/astaxie/beego/session/memcache/sess_memcache.go b/src/vendor/github.com/astaxie/beego/session/memcache/sess_memcache.go
index f1069bc93..755979c42 100644
--- a/src/vendor/github.com/astaxie/beego/session/memcache/sess_memcache.go
+++ b/src/vendor/github.com/astaxie/beego/session/memcache/sess_memcache.go
@@ -205,11 +205,7 @@ func (rp *MemProvider) SessionDestroy(sid string) error {
}
}
- err := client.Delete(sid)
- if err != nil {
- return err
- }
- return nil
+ return client.Delete(sid)
}
func (rp *MemProvider) connectInit() error {
@@ -219,7 +215,6 @@ func (rp *MemProvider) connectInit() error {
// SessionGC Impelment method, no used.
func (rp *MemProvider) SessionGC() {
- return
}
// SessionAll return all activeSession
diff --git a/src/vendor/github.com/astaxie/beego/session/mysql/sess_mysql.go b/src/vendor/github.com/astaxie/beego/session/mysql/sess_mysql.go
index 969d26c97..4c9251e72 100644
--- a/src/vendor/github.com/astaxie/beego/session/mysql/sess_mysql.go
+++ b/src/vendor/github.com/astaxie/beego/session/mysql/sess_mysql.go
@@ -115,7 +115,6 @@ func (st *SessionStore) SessionRelease(w http.ResponseWriter) {
}
st.c.Exec("UPDATE "+TableName+" set `session_data`=?, `session_expiry`=? where session_key=?",
b, time.Now().Unix(), st.sid)
-
}
// Provider mysql session provider
@@ -171,10 +170,7 @@ func (mp *Provider) SessionExist(sid string) bool {
row := c.QueryRow("select session_data from "+TableName+" where session_key=?", sid)
var sessiondata []byte
err := row.Scan(&sessiondata)
- if err == sql.ErrNoRows {
- return false
- }
- return true
+ return !(err == sql.ErrNoRows)
}
// SessionRegenerate generate new sid for mysql session
@@ -213,7 +209,6 @@ func (mp *Provider) SessionGC() {
c := mp.connectInit()
c.Exec("DELETE from "+TableName+" where session_expiry < ?", time.Now().Unix()-mp.maxlifetime)
c.Close()
- return
}
// SessionAll count values in mysql session
diff --git a/src/vendor/github.com/astaxie/beego/session/postgres/sess_postgresql.go b/src/vendor/github.com/astaxie/beego/session/postgres/sess_postgresql.go
index 73f9c13a9..ffc27defb 100644
--- a/src/vendor/github.com/astaxie/beego/session/postgres/sess_postgresql.go
+++ b/src/vendor/github.com/astaxie/beego/session/postgres/sess_postgresql.go
@@ -184,11 +184,7 @@ func (mp *Provider) SessionExist(sid string) bool {
row := c.QueryRow("select session_data from session where session_key=$1", sid)
var sessiondata []byte
err := row.Scan(&sessiondata)
-
- if err == sql.ErrNoRows {
- return false
- }
- return true
+ return !(err == sql.ErrNoRows)
}
// SessionRegenerate generate new sid for postgresql session
@@ -228,7 +224,6 @@ func (mp *Provider) SessionGC() {
c := mp.connectInit()
c.Exec("DELETE from session where EXTRACT(EPOCH FROM (current_timestamp - session_expiry)) > $1", mp.maxlifetime)
c.Close()
- return
}
// SessionAll count values in postgresql session
diff --git a/src/vendor/github.com/astaxie/beego/session/redis/sess_redis.go b/src/vendor/github.com/astaxie/beego/session/redis/sess_redis.go
index c46fa7cdf..d0424515d 100644
--- a/src/vendor/github.com/astaxie/beego/session/redis/sess_redis.go
+++ b/src/vendor/github.com/astaxie/beego/session/redis/sess_redis.go
@@ -128,7 +128,7 @@ func (rp *Provider) SessionInit(maxlifetime int64, savePath string) error {
}
if len(configs) > 1 {
poolsize, err := strconv.Atoi(configs[1])
- if err != nil || poolsize <= 0 {
+ if err != nil || poolsize < 0 {
rp.poolsize = MaxPoolSize
} else {
rp.poolsize = poolsize
@@ -155,7 +155,7 @@ func (rp *Provider) SessionInit(maxlifetime int64, savePath string) error {
return nil, err
}
if rp.password != "" {
- if _, err := c.Do("AUTH", rp.password); err != nil {
+ if _, err = c.Do("AUTH", rp.password); err != nil {
c.Close()
return nil, err
}
@@ -176,13 +176,16 @@ func (rp *Provider) SessionRead(sid string) (session.Store, error) {
c := rp.poollist.Get()
defer c.Close()
- kvs, err := redis.String(c.Do("GET", sid))
var kv map[interface{}]interface{}
+
+ kvs, err := redis.String(c.Do("GET", sid))
+ if err != nil && err != redis.ErrNil {
+ return nil, err
+ }
if len(kvs) == 0 {
kv = make(map[interface{}]interface{})
} else {
- kv, err = session.DecodeGob([]byte(kvs))
- if err != nil {
+ if kv, err = session.DecodeGob([]byte(kvs)); err != nil {
return nil, err
}
}
@@ -216,20 +219,7 @@ func (rp *Provider) SessionRegenerate(oldsid, sid string) (session.Store, error)
c.Do("RENAME", oldsid, sid)
c.Do("EXPIRE", sid, rp.maxlifetime)
}
-
- kvs, err := redis.String(c.Do("GET", sid))
- var kv map[interface{}]interface{}
- if len(kvs) == 0 {
- kv = make(map[interface{}]interface{})
- } else {
- kv, err = session.DecodeGob([]byte(kvs))
- if err != nil {
- return nil, err
- }
- }
-
- rs := &SessionStore{p: rp.poollist, sid: sid, values: kv, maxlifetime: rp.maxlifetime}
- return rs, nil
+ return rp.SessionRead(sid)
}
// SessionDestroy delete redis session by id
@@ -243,7 +233,6 @@ func (rp *Provider) SessionDestroy(sid string) error {
// SessionGC Impelment method, no used.
func (rp *Provider) SessionGC() {
- return
}
// SessionAll return all activeSession
diff --git a/src/vendor/github.com/astaxie/beego/session/sess_cookie.go b/src/vendor/github.com/astaxie/beego/session/sess_cookie.go
index 3fefa360f..145e53c9b 100644
--- a/src/vendor/github.com/astaxie/beego/session/sess_cookie.go
+++ b/src/vendor/github.com/astaxie/beego/session/sess_cookie.go
@@ -74,21 +74,16 @@ func (st *CookieSessionStore) SessionID() string {
// SessionRelease Write cookie session to http response cookie
func (st *CookieSessionStore) SessionRelease(w http.ResponseWriter) {
- str, err := encodeCookie(cookiepder.block,
- cookiepder.config.SecurityKey,
- cookiepder.config.SecurityName,
- st.values)
- if err != nil {
- return
+ encodedCookie, err := encodeCookie(cookiepder.block, cookiepder.config.SecurityKey, cookiepder.config.SecurityName, st.values)
+ if err == nil {
+ cookie := &http.Cookie{Name: cookiepder.config.CookieName,
+ Value: url.QueryEscape(encodedCookie),
+ Path: "/",
+ HttpOnly: true,
+ Secure: cookiepder.config.Secure,
+ MaxAge: cookiepder.config.Maxage}
+ http.SetCookie(w, cookie)
}
- cookie := &http.Cookie{Name: cookiepder.config.CookieName,
- Value: url.QueryEscape(str),
- Path: "/",
- HttpOnly: true,
- Secure: cookiepder.config.Secure,
- MaxAge: cookiepder.config.Maxage}
- http.SetCookie(w, cookie)
- return
}
type cookieConfig struct {
@@ -166,7 +161,6 @@ func (pder *CookieProvider) SessionDestroy(sid string) error {
// SessionGC Implement method, no used.
func (pder *CookieProvider) SessionGC() {
- return
}
// SessionAll Implement method, return 0.
diff --git a/src/vendor/github.com/astaxie/beego/session/sess_cookie_test.go b/src/vendor/github.com/astaxie/beego/session/sess_cookie_test.go
index 209e501ca..b6726005f 100644
--- a/src/vendor/github.com/astaxie/beego/session/sess_cookie_test.go
+++ b/src/vendor/github.com/astaxie/beego/session/sess_cookie_test.go
@@ -15,6 +15,7 @@
package session
import (
+ "encoding/json"
"net/http"
"net/http/httptest"
"strings"
@@ -23,7 +24,11 @@ import (
func TestCookie(t *testing.T) {
config := `{"cookieName":"gosessionid","enableSetCookie":false,"gclifetime":3600,"ProviderConfig":"{\"cookieName\":\"gosessionid\",\"securityKey\":\"beegocookiehashkey\"}"}`
- globalSessions, err := NewManager("cookie", config)
+ conf := new(ManagerConfig)
+ if err := json.Unmarshal([]byte(config), conf); err != nil {
+ t.Fatal("json decode error", err)
+ }
+ globalSessions, err := NewManager("cookie", conf)
if err != nil {
t.Fatal("init cookie session err", err)
}
@@ -56,7 +61,11 @@ func TestCookie(t *testing.T) {
func TestDestorySessionCookie(t *testing.T) {
config := `{"cookieName":"gosessionid","enableSetCookie":true,"gclifetime":3600,"ProviderConfig":"{\"cookieName\":\"gosessionid\",\"securityKey\":\"beegocookiehashkey\"}"}`
- globalSessions, err := NewManager("cookie", config)
+ conf := new(ManagerConfig)
+ if err := json.Unmarshal([]byte(config), conf); err != nil {
+ t.Fatal("json decode error", err)
+ }
+ globalSessions, err := NewManager("cookie", conf)
if err != nil {
t.Fatal("init cookie session err", err)
}
diff --git a/src/vendor/github.com/astaxie/beego/session/sess_file.go b/src/vendor/github.com/astaxie/beego/session/sess_file.go
index 9265b0304..3ca93d555 100644
--- a/src/vendor/github.com/astaxie/beego/session/sess_file.go
+++ b/src/vendor/github.com/astaxie/beego/session/sess_file.go
@@ -15,9 +15,7 @@
package session
import (
- "errors"
"fmt"
- "io"
"io/ioutil"
"net/http"
"os"
@@ -82,14 +80,23 @@ func (fs *FileSessionStore) SessionID() string {
func (fs *FileSessionStore) SessionRelease(w http.ResponseWriter) {
b, err := EncodeGob(fs.values)
if err != nil {
+ SLogger.Println(err)
return
}
_, err = os.Stat(path.Join(filepder.savePath, string(fs.sid[0]), string(fs.sid[1]), fs.sid))
var f *os.File
if err == nil {
f, err = os.OpenFile(path.Join(filepder.savePath, string(fs.sid[0]), string(fs.sid[1]), fs.sid), os.O_RDWR, 0777)
+ if err != nil {
+ SLogger.Println(err)
+ return
+ }
} else if os.IsNotExist(err) {
f, err = os.Create(path.Join(filepder.savePath, string(fs.sid[0]), string(fs.sid[1]), fs.sid))
+ if err != nil {
+ SLogger.Println(err)
+ return
+ }
} else {
return
}
@@ -123,7 +130,7 @@ func (fp *FileProvider) SessionRead(sid string) (Store, error) {
err := os.MkdirAll(path.Join(fp.savePath, string(sid[0]), string(sid[1])), 0777)
if err != nil {
- println(err.Error())
+ SLogger.Println(err.Error())
}
_, err = os.Stat(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid))
var f *os.File
@@ -134,6 +141,9 @@ func (fp *FileProvider) SessionRead(sid string) (Store, error) {
} else {
return nil, err
}
+
+ defer f.Close()
+
os.Chtimes(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid), time.Now(), time.Now())
var kv map[interface{}]interface{}
b, err := ioutil.ReadAll(f)
@@ -148,7 +158,7 @@ func (fp *FileProvider) SessionRead(sid string) (Store, error) {
return nil, err
}
}
- f.Close()
+
ss := &FileSessionStore{sid: sid, values: kv}
return ss, nil
}
@@ -160,10 +170,7 @@ func (fp *FileProvider) SessionExist(sid string) bool {
defer filepder.lock.Unlock()
_, err := os.Stat(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid))
- if err == nil {
- return true
- }
- return false
+ return err == nil
}
// SessionDestroy Remove all files in this save path
@@ -191,7 +198,7 @@ func (fp *FileProvider) SessionAll() int {
return a.visit(path, f, err)
})
if err != nil {
- fmt.Printf("filepath.Walk() returned %v\n", err)
+ SLogger.Printf("filepath.Walk() returned %v\n", err)
return 0
}
return a.total
@@ -203,49 +210,58 @@ func (fp *FileProvider) SessionRegenerate(oldsid, sid string) (Store, error) {
filepder.lock.Lock()
defer filepder.lock.Unlock()
- err := os.MkdirAll(path.Join(fp.savePath, string(oldsid[0]), string(oldsid[1])), 0777)
- if err != nil {
- println(err.Error())
- }
- err = os.MkdirAll(path.Join(fp.savePath, string(sid[0]), string(sid[1])), 0777)
- if err != nil {
- println(err.Error())
- }
- _, err = os.Stat(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid))
- var newf *os.File
+ oldPath := path.Join(fp.savePath, string(oldsid[0]), string(oldsid[1]))
+ oldSidFile := path.Join(oldPath, oldsid)
+ newPath := path.Join(fp.savePath, string(sid[0]), string(sid[1]))
+ newSidFile := path.Join(newPath, sid)
+
+	// fail if the new sid file already exists
+ _, err := os.Stat(newSidFile)
if err == nil {
- return nil, errors.New("newsid exist")
- } else if os.IsNotExist(err) {
- newf, err = os.Create(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid))
+ return nil, fmt.Errorf("newsid %s exist", newSidFile)
}
- _, err = os.Stat(path.Join(fp.savePath, string(oldsid[0]), string(oldsid[1]), oldsid))
- var f *os.File
- if err == nil {
- f, err = os.OpenFile(path.Join(fp.savePath, string(oldsid[0]), string(oldsid[1]), oldsid), os.O_RDWR, 0777)
- io.Copy(newf, f)
- } else if os.IsNotExist(err) {
- newf, err = os.Create(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid))
- } else {
- return nil, err
- }
- f.Close()
- os.Remove(path.Join(fp.savePath, string(oldsid[0]), string(oldsid[1])))
- os.Chtimes(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid), time.Now(), time.Now())
- var kv map[interface{}]interface{}
- b, err := ioutil.ReadAll(newf)
+ err = os.MkdirAll(newPath, 0777)
if err != nil {
- return nil, err
+ SLogger.Println(err.Error())
}
- if len(b) == 0 {
- kv = make(map[interface{}]interface{})
- } else {
- kv, err = DecodeGob(b)
+
+	// If the old sid file exists:
+	// 1. read and parse the file content
+	// 2. write the content to the new sid file
+	// 3. remove the old sid file and update the new sid file's atime and mtime
+	// 4. return a FileSessionStore
+ _, err = os.Stat(oldSidFile)
+ if err == nil {
+ b, err := ioutil.ReadFile(oldSidFile)
if err != nil {
return nil, err
}
+
+ var kv map[interface{}]interface{}
+ if len(b) == 0 {
+ kv = make(map[interface{}]interface{})
+ } else {
+ kv, err = DecodeGob(b)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ ioutil.WriteFile(newSidFile, b, 0777)
+ os.Remove(oldSidFile)
+ os.Chtimes(newSidFile, time.Now(), time.Now())
+ ss := &FileSessionStore{sid: sid, values: kv}
+ return ss, nil
}
- ss := &FileSessionStore{sid: sid, values: kv}
+
+	// the old sid file does not exist: just create the new sid file and return
+ newf, err := os.Create(newSidFile)
+ if err != nil {
+ return nil, err
+ }
+ newf.Close()
+ ss := &FileSessionStore{sid: sid, values: make(map[interface{}]interface{})}
return ss, nil
}
diff --git a/src/vendor/github.com/astaxie/beego/session/sess_mem_test.go b/src/vendor/github.com/astaxie/beego/session/sess_mem_test.go
index 43f5b0a90..2e8934b82 100644
--- a/src/vendor/github.com/astaxie/beego/session/sess_mem_test.go
+++ b/src/vendor/github.com/astaxie/beego/session/sess_mem_test.go
@@ -15,6 +15,7 @@
package session
import (
+ "encoding/json"
"net/http"
"net/http/httptest"
"strings"
@@ -22,7 +23,12 @@ import (
)
func TestMem(t *testing.T) {
- globalSessions, _ := NewManager("memory", `{"cookieName":"gosessionid","gclifetime":10}`)
+ config := `{"cookieName":"gosessionid","gclifetime":10, "enableSetCookie":true}`
+ conf := new(ManagerConfig)
+ if err := json.Unmarshal([]byte(config), conf); err != nil {
+ t.Fatal("json decode error", err)
+ }
+ globalSessions, _ := NewManager("memory", conf)
go globalSessions.GC()
r, _ := http.NewRequest("GET", "/", nil)
w := httptest.NewRecorder()
diff --git a/src/vendor/github.com/astaxie/beego/session/sess_test.go b/src/vendor/github.com/astaxie/beego/session/sess_test.go
index 5ba910f25..906abec2c 100644
--- a/src/vendor/github.com/astaxie/beego/session/sess_test.go
+++ b/src/vendor/github.com/astaxie/beego/session/sess_test.go
@@ -74,8 +74,7 @@ func TestCookieEncodeDecode(t *testing.T) {
if err != nil {
t.Fatal("encodeCookie:", err)
}
- dst := make(map[interface{}]interface{})
- dst, err = decodeCookie(block, hashKey, securityName, str, 3600)
+ dst, err := decodeCookie(block, hashKey, securityName, str, 3600)
if err != nil {
t.Fatal("decodeCookie", err)
}
@@ -89,7 +88,7 @@ func TestCookieEncodeDecode(t *testing.T) {
func TestParseConfig(t *testing.T) {
s := `{"cookieName":"gosessionid","gclifetime":3600}`
- cf := new(managerConfig)
+ cf := new(ManagerConfig)
cf.EnableSetCookie = true
err := json.Unmarshal([]byte(s), cf)
if err != nil {
@@ -103,7 +102,7 @@ func TestParseConfig(t *testing.T) {
}
cc := `{"cookieName":"gosessionid","enableSetCookie":false,"gclifetime":3600,"ProviderConfig":"{\"cookieName\":\"gosessionid\",\"securityKey\":\"beegocookiehashkey\"}"}`
- cf2 := new(managerConfig)
+ cf2 := new(ManagerConfig)
cf2.EnableSetCookie = true
err = json.Unmarshal([]byte(cc), cf2)
if err != nil {
@@ -115,7 +114,7 @@ func TestParseConfig(t *testing.T) {
if cf2.Gclifetime != 3600 {
t.Fatal("parseconfig get gclifetime error")
}
- if cf2.EnableSetCookie != false {
+ if cf2.EnableSetCookie {
t.Fatal("parseconfig get enableSetCookie error")
}
cconfig := new(cookieConfig)
diff --git a/src/vendor/github.com/astaxie/beego/session/session.go b/src/vendor/github.com/astaxie/beego/session/session.go
index 9fe99a174..cf647521a 100644
--- a/src/vendor/github.com/astaxie/beego/session/session.go
+++ b/src/vendor/github.com/astaxie/beego/session/session.go
@@ -30,10 +30,14 @@ package session
import (
"crypto/rand"
"encoding/hex"
- "encoding/json"
+ "errors"
"fmt"
+ "io"
+ "log"
"net/http"
+ "net/textproto"
"net/url"
+ "os"
"time"
)
@@ -61,6 +65,9 @@ type Provider interface {
var provides = make(map[string]Provider)
+// SLogger is the Logger used to report session-related information.
+var SLogger = NewSessionLog(os.Stderr)
+
// Register makes a session provide available by the provided name.
// If Register is called twice with the same name or if driver is nil,
// it panics.
@@ -74,22 +81,27 @@ func Register(name string, provide Provider) {
provides[name] = provide
}
-type managerConfig struct {
- CookieName string `json:"cookieName"`
- EnableSetCookie bool `json:"enableSetCookie,omitempty"`
- Gclifetime int64 `json:"gclifetime"`
- Maxlifetime int64 `json:"maxLifetime"`
- Secure bool `json:"secure"`
- CookieLifeTime int `json:"cookieLifeTime"`
- ProviderConfig string `json:"providerConfig"`
- Domain string `json:"domain"`
- SessionIDLength int64 `json:"sessionIDLength"`
+// ManagerConfig define the session config
+type ManagerConfig struct {
+ CookieName string `json:"cookieName"`
+ EnableSetCookie bool `json:"enableSetCookie,omitempty"`
+ Gclifetime int64 `json:"gclifetime"`
+ Maxlifetime int64 `json:"maxLifetime"`
+ DisableHTTPOnly bool `json:"disableHTTPOnly"`
+ Secure bool `json:"secure"`
+ CookieLifeTime int `json:"cookieLifeTime"`
+ ProviderConfig string `json:"providerConfig"`
+ Domain string `json:"domain"`
+ SessionIDLength int64 `json:"sessionIDLength"`
+ EnableSidInHTTPHeader bool `json:"EnableSidInHTTPHeader"`
+ SessionNameInHTTPHeader string `json:"SessionNameInHTTPHeader"`
+ EnableSidInURLQuery bool `json:"EnableSidInURLQuery"`
}
// Manager contains Provider and its configuration.
type Manager struct {
provider Provider
- config *managerConfig
+ config *ManagerConfig
}
// NewManager Create new Manager with provider name and json config string.
@@ -104,21 +116,29 @@ type Manager struct {
// 2. hashfunc default sha1
// 3. hashkey default beegosessionkey
// 4. maxage default is none
-func NewManager(provideName, config string) (*Manager, error) {
+func NewManager(provideName string, cf *ManagerConfig) (*Manager, error) {
provider, ok := provides[provideName]
if !ok {
return nil, fmt.Errorf("session: unknown provide %q (forgotten import?)", provideName)
}
- cf := new(managerConfig)
- cf.EnableSetCookie = true
- err := json.Unmarshal([]byte(config), cf)
- if err != nil {
- return nil, err
- }
+
if cf.Maxlifetime == 0 {
cf.Maxlifetime = cf.Gclifetime
}
- err = provider.SessionInit(cf.Maxlifetime, cf.ProviderConfig)
+
+ if cf.EnableSidInHTTPHeader {
+ if cf.SessionNameInHTTPHeader == "" {
+ panic(errors.New("SessionNameInHTTPHeader is empty"))
+ }
+
+ strMimeHeader := textproto.CanonicalMIMEHeaderKey(cf.SessionNameInHTTPHeader)
+ if cf.SessionNameInHTTPHeader != strMimeHeader {
+ strErrMsg := "SessionNameInHTTPHeader (" + cf.SessionNameInHTTPHeader + ") has the wrong format, it should be like this : " + strMimeHeader
+ panic(errors.New(strErrMsg))
+ }
+ }
+
+ err := provider.SessionInit(cf.Maxlifetime, cf.ProviderConfig)
if err != nil {
return nil, err
}
@@ -142,13 +162,25 @@ func NewManager(provideName, config string) (*Manager, error) {
// otherwise return an valid session id.
func (manager *Manager) getSid(r *http.Request) (string, error) {
cookie, errs := r.Cookie(manager.config.CookieName)
- if errs != nil || cookie.Value == "" || cookie.MaxAge < 0 {
- errs := r.ParseForm()
- if errs != nil {
- return "", errs
+ if errs != nil || cookie.Value == "" {
+ var sid string
+ if manager.config.EnableSidInURLQuery {
+ errs := r.ParseForm()
+ if errs != nil {
+ return "", errs
+ }
+
+ sid = r.FormValue(manager.config.CookieName)
+ }
+
+ // if not found in Cookie / param, then read it from request headers
+ if manager.config.EnableSidInHTTPHeader && sid == "" {
+ sids, isFound := r.Header[manager.config.SessionNameInHTTPHeader]
+ if isFound && len(sids) != 0 {
+ return sids[0], nil
+ }
}
- sid := r.FormValue(manager.config.CookieName)
return sid, nil
}
@@ -175,11 +207,14 @@ func (manager *Manager) SessionStart(w http.ResponseWriter, r *http.Request) (se
}
session, err = manager.provider.SessionRead(sid)
+ if err != nil {
+ return nil, err
+ }
cookie := &http.Cookie{
Name: manager.config.CookieName,
Value: url.QueryEscape(sid),
Path: "/",
- HttpOnly: true,
+ HttpOnly: !manager.config.DisableHTTPOnly,
Secure: manager.isSecure(r),
Domain: manager.config.Domain,
}
@@ -192,11 +227,21 @@ func (manager *Manager) SessionStart(w http.ResponseWriter, r *http.Request) (se
}
r.AddCookie(cookie)
+ if manager.config.EnableSidInHTTPHeader {
+ r.Header.Set(manager.config.SessionNameInHTTPHeader, sid)
+ w.Header().Set(manager.config.SessionNameInHTTPHeader, sid)
+ }
+
return
}
// SessionDestroy Destroy session by its id in http request cookie.
func (manager *Manager) SessionDestroy(w http.ResponseWriter, r *http.Request) {
+ if manager.config.EnableSidInHTTPHeader {
+ r.Header.Del(manager.config.SessionNameInHTTPHeader)
+ w.Header().Del(manager.config.SessionNameInHTTPHeader)
+ }
+
cookie, err := r.Cookie(manager.config.CookieName)
if err != nil || cookie.Value == "" {
return
@@ -208,7 +253,7 @@ func (manager *Manager) SessionDestroy(w http.ResponseWriter, r *http.Request) {
expiration := time.Now()
cookie = &http.Cookie{Name: manager.config.CookieName,
Path: "/",
- HttpOnly: true,
+ HttpOnly: !manager.config.DisableHTTPOnly,
Expires: expiration,
MaxAge: -1}
@@ -242,7 +287,7 @@ func (manager *Manager) SessionRegenerateID(w http.ResponseWriter, r *http.Reque
cookie = &http.Cookie{Name: manager.config.CookieName,
Value: url.QueryEscape(sid),
Path: "/",
- HttpOnly: true,
+ HttpOnly: !manager.config.DisableHTTPOnly,
Secure: manager.isSecure(r),
Domain: manager.config.Domain,
}
@@ -261,6 +306,12 @@ func (manager *Manager) SessionRegenerateID(w http.ResponseWriter, r *http.Reque
http.SetCookie(w, cookie)
}
r.AddCookie(cookie)
+
+ if manager.config.EnableSidInHTTPHeader {
+ r.Header.Set(manager.config.SessionNameInHTTPHeader, sid)
+ w.Header().Set(manager.config.SessionNameInHTTPHeader, sid)
+ }
+
return
}
@@ -278,7 +329,7 @@ func (manager *Manager) sessionID() (string, error) {
b := make([]byte, manager.config.SessionIDLength)
n, err := rand.Read(b)
if n != len(b) || err != nil {
- return "", fmt.Errorf("Could not successfully read from the system CSPRNG.")
+ return "", fmt.Errorf("Could not successfully read from the system CSPRNG")
}
return hex.EncodeToString(b), nil
}
@@ -296,3 +347,15 @@ func (manager *Manager) isSecure(req *http.Request) bool {
}
return true
}
+
+// Log embeds a *log.Logger used for session logging.
+type Log struct {
+ *log.Logger
+}
+
+// NewSessionLog creates a session Logger that writes to the given io.Writer.
+func NewSessionLog(out io.Writer) *Log {
+ sl := new(Log)
+ sl.Logger = log.New(out, "[SESSION]", 1e9)
+ return sl
+}
diff --git a/src/vendor/github.com/astaxie/beego/session/ssdb/sess_ssdb.go b/src/vendor/github.com/astaxie/beego/session/ssdb/sess_ssdb.go
new file mode 100644
index 000000000..de0c6360c
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/session/ssdb/sess_ssdb.go
@@ -0,0 +1,199 @@
+package ssdb
+
+import (
+ "errors"
+ "net/http"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/astaxie/beego/session"
+ "github.com/ssdb/gossdb/ssdb"
+)
+
+var ssdbProvider = &Provider{}
+
+// Provider holds ssdb client and configs
+type Provider struct {
+ client *ssdb.Client
+ host string
+ port int
+ maxLifetime int64
+}
+
+func (p *Provider) connectInit() error {
+ var err error
+ if p.host == "" || p.port == 0 {
+ return errors.New("SessionInit First")
+ }
+ p.client, err = ssdb.Connect(p.host, p.port)
+ return err
+}
+
+// SessionInit initializes the ssdb provider with the max lifetime and a "host:port" save path.
+func (p *Provider) SessionInit(maxLifetime int64, savePath string) error {
+ p.maxLifetime = maxLifetime
+ address := strings.Split(savePath, ":")
+ p.host = address[0]
+
+ var err error
+ if p.port, err = strconv.Atoi(address[1]); err != nil {
+ return err
+ }
+ return p.connectInit()
+}
+
+// SessionRead returns a session Store for the given sid, backed by the ssdb client.
+func (p *Provider) SessionRead(sid string) (session.Store, error) {
+ if p.client == nil {
+ if err := p.connectInit(); err != nil {
+ return nil, err
+ }
+ }
+ var kv map[interface{}]interface{}
+ value, err := p.client.Get(sid)
+ if err != nil {
+ return nil, err
+ }
+ if value == nil || len(value.(string)) == 0 {
+ kv = make(map[interface{}]interface{})
+ } else {
+ kv, err = session.DecodeGob([]byte(value.(string)))
+ if err != nil {
+ return nil, err
+ }
+ }
+ rs := &SessionStore{sid: sid, values: kv, maxLifetime: p.maxLifetime, client: p.client}
+ return rs, nil
+}
+
+// SessionExist reports whether a session with the given sid exists in ssdb.
+func (p *Provider) SessionExist(sid string) bool {
+ if p.client == nil {
+ if err := p.connectInit(); err != nil {
+ panic(err)
+ }
+ }
+ value, err := p.client.Get(sid)
+ if err != nil {
+ panic(err)
+ }
+ if value == nil || len(value.(string)) == 0 {
+ return false
+ }
+ return true
+}
+
+// SessionRegenerate regenerates the session under the new sid and deletes the old sid.
+func (p *Provider) SessionRegenerate(oldsid, sid string) (session.Store, error) {
+ //conn.Do("setx", key, v, ttl)
+ if p.client == nil {
+ if err := p.connectInit(); err != nil {
+ return nil, err
+ }
+ }
+ value, err := p.client.Get(oldsid)
+ if err != nil {
+ return nil, err
+ }
+ var kv map[interface{}]interface{}
+ if value == nil || len(value.(string)) == 0 {
+ kv = make(map[interface{}]interface{})
+ } else {
+ kv, err = session.DecodeGob([]byte(value.(string)))
+ if err != nil {
+ return nil, err
+ }
+ _, err = p.client.Del(oldsid)
+ if err != nil {
+ return nil, err
+ }
+ }
+ _, e := p.client.Do("setx", sid, value, p.maxLifetime)
+ if e != nil {
+ return nil, e
+ }
+ rs := &SessionStore{sid: sid, values: kv, maxLifetime: p.maxLifetime, client: p.client}
+ return rs, nil
+}
+
+// SessionDestroy deletes the session identified by sid.
+func (p *Provider) SessionDestroy(sid string) error {
+ if p.client == nil {
+ if err := p.connectInit(); err != nil {
+ return err
+ }
+ }
+ _, err := p.client.Del(sid)
+ return err
+}
+
+// SessionGC not implemented
+func (p *Provider) SessionGC() {
+}
+
+// SessionAll not implemented
+func (p *Provider) SessionAll() int {
+ return 0
+}
+
+// SessionStore holds the session information which stored in ssdb
+type SessionStore struct {
+ sid string
+ lock sync.RWMutex
+ values map[interface{}]interface{}
+ maxLifetime int64
+ client *ssdb.Client
+}
+
+// Set the key and value
+func (s *SessionStore) Set(key, value interface{}) error {
+ s.lock.Lock()
+ defer s.lock.Unlock()
+ s.values[key] = value
+ return nil
+}
+
+// Get returns the value stored under the key, or nil if it is absent.
+func (s *SessionStore) Get(key interface{}) interface{} {
+ s.lock.Lock()
+ defer s.lock.Unlock()
+ if value, ok := s.values[key]; ok {
+ return value
+ }
+ return nil
+}
+
+// Delete the key in session store
+func (s *SessionStore) Delete(key interface{}) error {
+ s.lock.Lock()
+ defer s.lock.Unlock()
+ delete(s.values, key)
+ return nil
+}
+
+// Flush deletes all keys and values.
+func (s *SessionStore) Flush() error {
+ s.lock.Lock()
+ defer s.lock.Unlock()
+ s.values = make(map[interface{}]interface{})
+ return nil
+}
+
+// SessionID returns the session id.
+func (s *SessionStore) SessionID() string {
+ return s.sid
+}
+
+// SessionRelease stores the key-value pairs into ssdb.
+func (s *SessionStore) SessionRelease(w http.ResponseWriter) {
+ b, err := session.EncodeGob(s.values)
+ if err != nil {
+ return
+ }
+ s.client.Do("setx", s.sid, string(b), s.maxLifetime)
+}
+
+func init() {
+ session.Register("ssdb", ssdbProvider)
+}
diff --git a/src/vendor/github.com/astaxie/beego/staticfile.go b/src/vendor/github.com/astaxie/beego/staticfile.go
index 0aad2c81e..bbb2a1fbf 100644
--- a/src/vendor/github.com/astaxie/beego/staticfile.go
+++ b/src/vendor/github.com/astaxie/beego/staticfile.go
@@ -27,6 +27,7 @@ import (
"time"
"github.com/astaxie/beego/context"
+ "github.com/astaxie/beego/logs"
)
var errNotStaticRequest = errors.New("request not a static file request")
@@ -48,14 +49,23 @@ func serverStaticRouter(ctx *context.Context) {
if filePath == "" || fileInfo == nil {
if BConfig.RunMode == DEV {
- Warn("Can't find/open the file:", filePath, err)
+ logs.Warn("Can't find/open the file:", filePath, err)
}
http.NotFound(ctx.ResponseWriter, ctx.Request)
return
}
if fileInfo.IsDir() {
- //serveFile will list dir
- http.ServeFile(ctx.ResponseWriter, ctx.Request, filePath)
+ requestURL := ctx.Input.URL()
+ if requestURL[len(requestURL)-1] != '/' {
+ redirectURL := requestURL + "/"
+ if ctx.Request.URL.RawQuery != "" {
+ redirectURL = redirectURL + "?" + ctx.Request.URL.RawQuery
+ }
+ ctx.Redirect(302, redirectURL)
+ } else {
+ //serveFile will list dir
+ http.ServeFile(ctx.ResponseWriter, ctx.Request, filePath)
+ }
return
}
@@ -67,7 +77,7 @@ func serverStaticRouter(ctx *context.Context) {
b, n, sch, err := openFile(filePath, fileInfo, acceptEncoding)
if err != nil {
if BConfig.RunMode == DEV {
- Warn("Can't compress the file:", filePath, err)
+ logs.Warn("Can't compress the file:", filePath, err)
}
http.NotFound(ctx.ResponseWriter, ctx.Request)
return
@@ -80,8 +90,6 @@ func serverStaticRouter(ctx *context.Context) {
}
http.ServeContent(ctx.ResponseWriter, ctx.Request, filePath, sch.modTime, sch)
- return
-
}
type serveContentHolder struct {
@@ -99,14 +107,14 @@ var (
func openFile(filePath string, fi os.FileInfo, acceptEncoding string) (bool, string, *serveContentHolder, error) {
mapKey := acceptEncoding + ":" + filePath
mapLock.RLock()
- mapFile, _ := staticFileMap[mapKey]
+ mapFile := staticFileMap[mapKey]
mapLock.RUnlock()
if isOk(mapFile, fi) {
return mapFile.encoding != "", mapFile.encoding, mapFile, nil
}
mapLock.Lock()
defer mapLock.Unlock()
- if mapFile, _ = staticFileMap[mapKey]; !isOk(mapFile, fi) {
+ if mapFile = staticFileMap[mapKey]; !isOk(mapFile, fi) {
file, err := os.Open(filePath)
if err != nil {
return false, "", nil, err
@@ -157,13 +165,10 @@ func searchFile(ctx *context.Context) (string, os.FileInfo, error) {
return filePath, fi, nil
}
}
- return "", nil, errors.New(requestPath + " file not find")
+ return "", nil, errNotStaticRequest
}
for prefix, staticDir := range BConfig.WebConfig.StaticDir {
- if len(prefix) == 0 {
- continue
- }
if !strings.Contains(requestPath, prefix) {
continue
}
@@ -189,9 +194,11 @@ func lookupFile(ctx *context.Context) (bool, string, os.FileInfo, error) {
if !fi.IsDir() {
return false, fp, fi, err
}
- ifp := filepath.Join(fp, "index.html")
- if ifi, _ := os.Stat(ifp); ifi != nil && ifi.Mode().IsRegular() {
- return false, ifp, ifi, err
+ if requestURL := ctx.Input.URL(); requestURL[len(requestURL)-1] == '/' {
+ ifp := filepath.Join(fp, "index.html")
+ if ifi, _ := os.Stat(ifp); ifi != nil && ifi.Mode().IsRegular() {
+ return false, ifp, ifi, err
+ }
}
return !BConfig.WebConfig.DirectoryIndex, fp, fi, err
}
diff --git a/src/vendor/github.com/astaxie/beego/swagger/docs_spec.go b/src/vendor/github.com/astaxie/beego/swagger/docs_spec.go
deleted file mode 100644
index 680324dc0..000000000
--- a/src/vendor/github.com/astaxie/beego/swagger/docs_spec.go
+++ /dev/null
@@ -1,160 +0,0 @@
-// Copyright 2014 beego Author. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package swagger struct definition
-package swagger
-
-// SwaggerVersion show the current swagger version
-const SwaggerVersion = "1.2"
-
-// ResourceListing list the resource
-type ResourceListing struct {
- APIVersion string `json:"apiVersion"`
- SwaggerVersion string `json:"swaggerVersion"` // e.g 1.2
- // BasePath string `json:"basePath"` obsolete in 1.1
- APIs []APIRef `json:"apis"`
- Info Information `json:"info"`
-}
-
-// APIRef description the api path and description
-type APIRef struct {
- Path string `json:"path"` // relative or absolute, must start with /
- Description string `json:"description"`
-}
-
-// Information show the API Information
-type Information struct {
- Title string `json:"title,omitempty"`
- Description string `json:"description,omitempty"`
- Contact string `json:"contact,omitempty"`
- TermsOfServiceURL string `json:"termsOfServiceUrl,omitempty"`
- License string `json:"license,omitempty"`
- LicenseURL string `json:"licenseUrl,omitempty"`
-}
-
-// APIDeclaration see https://github.com/wordnik/swagger-core/blob/scala_2.10-1.3-RC3/schemas/api-declaration-schema.json
-type APIDeclaration struct {
- APIVersion string `json:"apiVersion"`
- SwaggerVersion string `json:"swaggerVersion"`
- BasePath string `json:"basePath"`
- ResourcePath string `json:"resourcePath"` // must start with /
- Consumes []string `json:"consumes,omitempty"`
- Produces []string `json:"produces,omitempty"`
- APIs []API `json:"apis,omitempty"`
- Models map[string]Model `json:"models,omitempty"`
-}
-
-// API show tha API struct
-type API struct {
- Path string `json:"path"` // relative or absolute, must start with /
- Description string `json:"description"`
- Operations []Operation `json:"operations,omitempty"`
-}
-
-// Operation desc the Operation
-type Operation struct {
- HTTPMethod string `json:"httpMethod"`
- Nickname string `json:"nickname"`
- Type string `json:"type"` // in 1.1 = DataType
- // ResponseClass string `json:"responseClass"` obsolete in 1.2
- Summary string `json:"summary,omitempty"`
- Notes string `json:"notes,omitempty"`
- Parameters []Parameter `json:"parameters,omitempty"`
- ResponseMessages []ResponseMessage `json:"responseMessages,omitempty"` // optional
- Consumes []string `json:"consumes,omitempty"`
- Produces []string `json:"produces,omitempty"`
- Authorizations []Authorization `json:"authorizations,omitempty"`
- Protocols []Protocol `json:"protocols,omitempty"`
-}
-
-// Protocol support which Protocol
-type Protocol struct {
-}
-
-// ResponseMessage Show the
-type ResponseMessage struct {
- Code int `json:"code"`
- Message string `json:"message"`
- ResponseModel string `json:"responseModel"`
-}
-
-// Parameter desc the request parameters
-type Parameter struct {
- ParamType string `json:"paramType"` // path,query,body,header,form
- Name string `json:"name"`
- Description string `json:"description"`
- DataType string `json:"dataType"` // 1.2 needed?
- Type string `json:"type"` // integer
- Format string `json:"format"` // int64
- AllowMultiple bool `json:"allowMultiple"`
- Required bool `json:"required"`
- Minimum int `json:"minimum"`
- Maximum int `json:"maximum"`
-}
-
-// ErrorResponse desc response
-type ErrorResponse struct {
- Code int `json:"code"`
- Reason string `json:"reason"`
-}
-
-// Model define the data model
-type Model struct {
- ID string `json:"id"`
- Required []string `json:"required,omitempty"`
- Properties map[string]ModelProperty `json:"properties"`
-}
-
-// ModelProperty define the properties
-type ModelProperty struct {
- Type string `json:"type"`
- Description string `json:"description"`
- Items map[string]string `json:"items,omitempty"`
- Format string `json:"format"`
-}
-
-// Authorization see https://github.com/wordnik/swagger-core/wiki/authorizations
-type Authorization struct {
- LocalOAuth OAuth `json:"local-oauth"`
- APIKey APIKey `json:"apiKey"`
-}
-
-// OAuth see https://github.com/wordnik/swagger-core/wiki/authorizations
-type OAuth struct {
- Type string `json:"type"` // e.g. oauth2
- Scopes []string `json:"scopes"` // e.g. PUBLIC
- GrantTypes map[string]GrantType `json:"grantTypes"`
-}
-
-// GrantType see https://github.com/wordnik/swagger-core/wiki/authorizations
-type GrantType struct {
- LoginEndpoint Endpoint `json:"loginEndpoint"`
- TokenName string `json:"tokenName"` // e.g. access_code
- TokenRequestEndpoint Endpoint `json:"tokenRequestEndpoint"`
- TokenEndpoint Endpoint `json:"tokenEndpoint"`
-}
-
-// Endpoint see https://github.com/wordnik/swagger-core/wiki/authorizations
-type Endpoint struct {
- URL string `json:"url"`
- ClientIDName string `json:"clientIdName"`
- ClientSecretName string `json:"clientSecretName"`
- TokenName string `json:"tokenName"`
-}
-
-// APIKey see https://github.com/wordnik/swagger-core/wiki/authorizations
-type APIKey struct {
- Type string `json:"type"` // e.g. apiKey
- PassAs string `json:"passAs"` // e.g. header
-}
diff --git a/src/vendor/github.com/astaxie/beego/swagger/swagger.go b/src/vendor/github.com/astaxie/beego/swagger/swagger.go
new file mode 100644
index 000000000..035d5a497
--- /dev/null
+++ b/src/vendor/github.com/astaxie/beego/swagger/swagger.go
@@ -0,0 +1,172 @@
+// Copyright 2014 beego Author. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// Swagger™ is a project used to describe and document RESTful APIs.
+//
+// The Swagger specification defines a set of files required to describe such an API. These files can then be used by the Swagger-UI project to display the API and Swagger-Codegen to generate clients in various languages. Additional utilities can also take advantage of the resulting files, such as testing tools.
+// Now in version 2.0, Swagger is more enabling than ever. And it's 100% open source software.
+
+// Package swagger provides the struct definitions of the Swagger specification.
+package swagger
+
+// Swagger list the resource
+type Swagger struct {
+ SwaggerVersion string `json:"swagger,omitempty" yaml:"swagger,omitempty"`
+ Infos Information `json:"info" yaml:"info"`
+ Host string `json:"host,omitempty" yaml:"host,omitempty"`
+ BasePath string `json:"basePath,omitempty" yaml:"basePath,omitempty"`
+ Schemes []string `json:"schemes,omitempty" yaml:"schemes,omitempty"`
+ Consumes []string `json:"consumes,omitempty" yaml:"consumes,omitempty"`
+ Produces []string `json:"produces,omitempty" yaml:"produces,omitempty"`
+ Paths map[string]*Item `json:"paths" yaml:"paths"`
+ Definitions map[string]Schema `json:"definitions,omitempty" yaml:"definitions,omitempty"`
+ SecurityDefinitions map[string]Security `json:"securityDefinitions,omitempty" yaml:"securityDefinitions,omitempty"`
+ Security []map[string][]string `json:"security,omitempty" yaml:"security,omitempty"`
+ Tags []Tag `json:"tags,omitempty" yaml:"tags,omitempty"`
+ ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"`
+}
+
+// Information Provides metadata about the API. The metadata can be used by the clients if needed.
+type Information struct {
+ Title string `json:"title,omitempty" yaml:"title,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Version string `json:"version,omitempty" yaml:"version,omitempty"`
+ TermsOfService string `json:"termsOfService,omitempty" yaml:"termsOfService,omitempty"`
+
+ Contact Contact `json:"contact,omitempty" yaml:"contact,omitempty"`
+ License *License `json:"license,omitempty" yaml:"license,omitempty"`
+}
+
+// Contact information for the exposed API.
+type Contact struct {
+ Name string `json:"name,omitempty" yaml:"name,omitempty"`
+ URL string `json:"url,omitempty" yaml:"url,omitempty"`
+ EMail string `json:"email,omitempty" yaml:"email,omitempty"`
+}
+
+// License information for the exposed API.
+type License struct {
+ Name string `json:"name,omitempty" yaml:"name,omitempty"`
+ URL string `json:"url,omitempty" yaml:"url,omitempty"`
+}
+
+// Item Describes the operations available on a single path.
+type Item struct {
+ Ref string `json:"$ref,omitempty" yaml:"$ref,omitempty"`
+ Get *Operation `json:"get,omitempty" yaml:"get,omitempty"`
+ Put *Operation `json:"put,omitempty" yaml:"put,omitempty"`
+ Post *Operation `json:"post,omitempty" yaml:"post,omitempty"`
+ Delete *Operation `json:"delete,omitempty" yaml:"delete,omitempty"`
+ Options *Operation `json:"options,omitempty" yaml:"options,omitempty"`
+ Head *Operation `json:"head,omitempty" yaml:"head,omitempty"`
+ Patch *Operation `json:"patch,omitempty" yaml:"patch,omitempty"`
+}
+
+// Operation Describes a single API operation on a path.
+type Operation struct {
+ Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"`
+ Summary string `json:"summary,omitempty" yaml:"summary,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ OperationID string `json:"operationId,omitempty" yaml:"operationId,omitempty"`
+ Consumes []string `json:"consumes,omitempty" yaml:"consumes,omitempty"`
+ Produces []string `json:"produces,omitempty" yaml:"produces,omitempty"`
+ Schemes []string `json:"schemes,omitempty" yaml:"schemes,omitempty"`
+ Parameters []Parameter `json:"parameters,omitempty" yaml:"parameters,omitempty"`
+ Responses map[string]Response `json:"responses,omitempty" yaml:"responses,omitempty"`
+ Security []map[string][]string `json:"security,omitempty" yaml:"security,omitempty"`
+ Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"`
+}
+
+// Parameter Describes a single operation parameter.
+type Parameter struct {
+ In string `json:"in,omitempty" yaml:"in,omitempty"`
+ Name string `json:"name,omitempty" yaml:"name,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Required bool `json:"required,omitempty" yaml:"required,omitempty"`
+ Schema *Schema `json:"schema,omitempty" yaml:"schema,omitempty"`
+ Type string `json:"type,omitempty" yaml:"type,omitempty"`
+ Format string `json:"format,omitempty" yaml:"format,omitempty"`
+ Items *ParameterItems `json:"items,omitempty" yaml:"items,omitempty"`
+ Default interface{} `json:"default,omitempty" yaml:"default,omitempty"`
+}
+
+// ParameterItems is a limited subset of JSON-Schema's items object. It is used by parameter definitions that are not located in "body".
+// http://swagger.io/specification/#itemsObject
+type ParameterItems struct {
+ Type string `json:"type,omitempty" yaml:"type,omitempty"`
+ Format string `json:"format,omitempty" yaml:"format,omitempty"`
+ Items []*ParameterItems `json:"items,omitempty" yaml:"items,omitempty"` //Required if type is "array". Describes the type of items in the array.
+ CollectionFormat string `json:"collectionFormat,omitempty" yaml:"collectionFormat,omitempty"`
+ Default string `json:"default,omitempty" yaml:"default,omitempty"`
+}
+
+// Schema Object allows the definition of input and output data types.
+type Schema struct {
+ Ref string `json:"$ref,omitempty" yaml:"$ref,omitempty"`
+ Title string `json:"title,omitempty" yaml:"title,omitempty"`
+ Format string `json:"format,omitempty" yaml:"format,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Required []string `json:"required,omitempty" yaml:"required,omitempty"`
+ Type string `json:"type,omitempty" yaml:"type,omitempty"`
+ Items *Schema `json:"items,omitempty" yaml:"items,omitempty"`
+ Properties map[string]Propertie `json:"properties,omitempty" yaml:"properties,omitempty"`
+}
+
+// Propertie fields are taken from the JSON Schema definition but their definitions were adjusted to the Swagger Specification
+type Propertie struct {
+ Ref string `json:"$ref,omitempty" yaml:"$ref,omitempty"`
+ Title string `json:"title,omitempty" yaml:"title,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Default interface{} `json:"default,omitempty" yaml:"default,omitempty"`
+ Type string `json:"type,omitempty" yaml:"type,omitempty"`
+ Example string `json:"example,omitempty" yaml:"example,omitempty"`
+ Required []string `json:"required,omitempty" yaml:"required,omitempty"`
+ Format string `json:"format,omitempty" yaml:"format,omitempty"`
+ ReadOnly bool `json:"readOnly,omitempty" yaml:"readOnly,omitempty"`
+ Properties map[string]Propertie `json:"properties,omitempty" yaml:"properties,omitempty"`
+ Items *Propertie `json:"items,omitempty" yaml:"items,omitempty"`
+ AdditionalProperties *Propertie `json:"additionalProperties,omitempty" yaml:"additionalProperties,omitempty"`
+}
+
+// Response as they are returned from executing this operation.
+type Response struct {
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Schema *Schema `json:"schema,omitempty" yaml:"schema,omitempty"`
+ Ref string `json:"$ref,omitempty" yaml:"$ref,omitempty"`
+}
+
+// Security Allows the definition of a security scheme that can be used by the operations
+type Security struct {
+ Type string `json:"type,omitempty" yaml:"type,omitempty"` // Valid values are "basic", "apiKey" or "oauth2".
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ Name string `json:"name,omitempty" yaml:"name,omitempty"`
+ In string `json:"in,omitempty" yaml:"in,omitempty"` // Valid values are "query" or "header".
+ Flow string `json:"flow,omitempty" yaml:"flow,omitempty"` // Valid values are "implicit", "password", "application" or "accessCode".
+ AuthorizationURL string `json:"authorizationUrl,omitempty" yaml:"authorizationUrl,omitempty"`
+ TokenURL string `json:"tokenUrl,omitempty" yaml:"tokenUrl,omitempty"`
+ Scopes map[string]string `json:"scopes,omitempty" yaml:"scopes,omitempty"` // The available scopes for the OAuth2 security scheme.
+}
+
+// Tag allows adding metadata to a single tag that is used by the Operation Object
+type Tag struct {
+ Name string `json:"name,omitempty" yaml:"name,omitempty"`
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"`
+}
+
+// ExternalDocs include Additional external documentation
+type ExternalDocs struct {
+ Description string `json:"description,omitempty" yaml:"description,omitempty"`
+ URL string `json:"url,omitempty" yaml:"url,omitempty"`
+}
diff --git a/src/vendor/github.com/astaxie/beego/template.go b/src/vendor/github.com/astaxie/beego/template.go
index e6c43f871..d4859cd70 100644
--- a/src/vendor/github.com/astaxie/beego/template.go
+++ b/src/vendor/github.com/astaxie/beego/template.go
@@ -26,31 +26,53 @@ import (
"strings"
"sync"
+ "github.com/astaxie/beego/logs"
"github.com/astaxie/beego/utils"
)
var (
- beegoTplFuncMap = make(template.FuncMap)
- // beeTemplates caching map and supported template file extensions.
- beeTemplates = make(map[string]*template.Template)
- templatesLock sync.RWMutex
+ beegoTplFuncMap = make(template.FuncMap)
+ beeViewPathTemplateLocked = false
+ // beeViewPathTemplates caching map and supported template file extensions per view
+ beeViewPathTemplates = make(map[string]map[string]*template.Template)
+ templatesLock sync.RWMutex
// beeTemplateExt stores the template extension which will build
beeTemplateExt = []string{"tpl", "html"}
+ // beeTemplatePreprocessors stores associations of extension -> preprocessor handler
+ beeTemplateEngines = map[string]templatePreProcessor{}
)
-func executeTemplate(wr io.Writer, name string, data interface{}) error {
+// ExecuteTemplate applies the template with name to the specified data object,
+// writing the output to wr.
+// A template will be executed safely in parallel.
+func ExecuteTemplate(wr io.Writer, name string, data interface{}) error {
+ return ExecuteViewPathTemplate(wr, name, BConfig.WebConfig.ViewsPath, data)
+}
+
+// ExecuteViewPathTemplate applies the template with name and from specific viewPath to the specified data object,
+// writing the output to wr.
+// A template will be executed safely in parallel.
+func ExecuteViewPathTemplate(wr io.Writer, name string, viewPath string, data interface{}) error {
if BConfig.RunMode == DEV {
templatesLock.RLock()
defer templatesLock.RUnlock()
}
- if t, ok := beeTemplates[name]; ok {
- err := t.ExecuteTemplate(wr, name, data)
- if err != nil {
- Trace("template Execute err:", err)
+ if beeTemplates, ok := beeViewPathTemplates[viewPath]; ok {
+ if t, ok := beeTemplates[name]; ok {
+ var err error
+ if t.Lookup(name) != nil {
+ err = t.ExecuteTemplate(wr, name, data)
+ } else {
+ err = t.Execute(wr, data)
+ }
+ if err != nil {
+ logs.Trace("template Execute err:", err)
+ }
+ return err
}
- return err
+ panic("can't find templatefile in the path:" + viewPath + "/" + name)
}
- panic("can't find templatefile in the path:" + name)
+ panic("Unknown view path:" + viewPath)
}
func init() {
@@ -88,6 +110,8 @@ func AddFuncMap(key string, fn interface{}) error {
return nil
}
+type templatePreProcessor func(root, path string, funcs template.FuncMap) (*template.Template, error)
+
type templateFile struct {
root string
files map[string][]string
@@ -136,6 +160,24 @@ func AddTemplateExt(ext string) {
beeTemplateExt = append(beeTemplateExt, ext)
}
+// AddViewPath adds a new path to the supported view paths.
+// Can later be used by setting a controller's ViewPath to this folder.
+// Will panic if called after beego.Run().
+func AddViewPath(viewPath string) error {
+ if beeViewPathTemplateLocked {
+ if _, exist := beeViewPathTemplates[viewPath]; exist {
+ return nil //Ignore if viewpath already exists
+ }
+ panic("Can not add new view paths after beego.Run()")
+ }
+ beeViewPathTemplates[viewPath] = make(map[string]*template.Template)
+ return BuildTemplate(viewPath)
+}
+
+func lockViewPaths() {
+ beeViewPathTemplateLocked = true
+}
+
// BuildTemplate will build all template files in a directory.
// it makes beego can render any template file in view directory.
func BuildTemplate(dir string, files ...string) error {
@@ -145,6 +187,10 @@ func BuildTemplate(dir string, files ...string) error {
}
return errors.New("dir open err")
}
+ beeTemplates, ok := beeViewPathTemplates[dir]
+ if !ok {
+ panic("Unknown view path: " + dir)
+ }
self := &templateFile{
root: dir,
files: make(map[string][]string),
@@ -156,13 +202,22 @@ func BuildTemplate(dir string, files ...string) error {
fmt.Printf("filepath.Walk() returned %v\n", err)
return err
}
+ buildAllFiles := len(files) == 0
for _, v := range self.files {
for _, file := range v {
- if len(files) == 0 || utils.InSlice(file, files) {
+ if buildAllFiles || utils.InSlice(file, files) {
templatesLock.Lock()
- t, err := getTemplate(self.root, file, v...)
+ ext := filepath.Ext(file)
+ var t *template.Template
+ if len(ext) == 0 {
+ t, err = getTemplate(self.root, file, v...)
+ } else if fn, ok := beeTemplateEngines[ext[1:]]; ok {
+ t, err = fn(self.root, file, beegoTplFuncMap)
+ } else {
+ t, err = getTemplate(self.root, file, v...)
+ }
if err != nil {
- Trace("parse template err:", file, err)
+ logs.Error("parse template err:", file, err)
} else {
beeTemplates[file] = t
}
@@ -175,9 +230,12 @@ func BuildTemplate(dir string, files ...string) error {
func getTplDeep(root, file, parent string, t *template.Template) (*template.Template, [][]string, error) {
var fileAbsPath string
+ var rParent string
if filepath.HasPrefix(file, "../") {
+ rParent = filepath.Join(filepath.Dir(parent), file)
fileAbsPath = filepath.Join(root, filepath.Dir(parent), file)
} else {
+ rParent = file
fileAbsPath = filepath.Join(root, file)
}
if e := utils.FileExists(fileAbsPath); !e {
@@ -202,7 +260,7 @@ func getTplDeep(root, file, parent string, t *template.Template) (*template.Temp
if !HasTemplateExt(m[1]) {
continue
}
- t, _, err = getTplDeep(root, m[1], file, t)
+ _, _, err = getTplDeep(root, m[1], rParent, t)
if err != nil {
return nil, [][]string{}, err
}
@@ -240,8 +298,8 @@ func _getTemplate(t0 *template.Template, root string, subMods [][]string, others
var subMods1 [][]string
t, subMods1, err = getTplDeep(root, otherFile, "", t)
if err != nil {
- Trace("template parse file err:", err)
- } else if subMods1 != nil && len(subMods1) > 0 {
+ logs.Trace("template parse file err:", err)
+ } else if len(subMods1) > 0 {
t, err = _getTemplate(t, root, subMods1, others...)
}
break
@@ -249,8 +307,9 @@ func _getTemplate(t0 *template.Template, root string, subMods [][]string, others
}
//second check define
for _, otherFile := range others {
+ var data []byte
fileAbsPath := filepath.Join(root, otherFile)
- data, err := ioutil.ReadFile(fileAbsPath)
+ data, err = ioutil.ReadFile(fileAbsPath)
if err != nil {
continue
}
@@ -261,8 +320,8 @@ func _getTemplate(t0 *template.Template, root string, subMods [][]string, others
var subMods1 [][]string
t, subMods1, err = getTplDeep(root, otherFile, "", t)
if err != nil {
- Trace("template parse file err:", err)
- } else if subMods1 != nil && len(subMods1) > 0 {
+ logs.Trace("template parse file err:", err)
+ } else if len(subMods1) > 0 {
t, err = _getTemplate(t, root, subMods1, others...)
}
break
@@ -305,3 +364,10 @@ func DelStaticPath(url string) *App {
delete(BConfig.WebConfig.StaticDir, url)
return BeeApp
}
+
+// AddTemplateEngine add a new templatePreProcessor which support extension
+func AddTemplateEngine(extension string, fn templatePreProcessor) *App {
+ AddTemplateExt(extension)
+ beeTemplateEngines[extension] = fn
+ return BeeApp
+}
diff --git a/src/vendor/github.com/astaxie/beego/template_test.go b/src/vendor/github.com/astaxie/beego/template_test.go
index 4f13736c4..2153ef72c 100644
--- a/src/vendor/github.com/astaxie/beego/template_test.go
+++ b/src/vendor/github.com/astaxie/beego/template_test.go
@@ -15,6 +15,7 @@
package beego
import (
+ "bytes"
"os"
"path/filepath"
"testing"
@@ -67,9 +68,10 @@ func TestTemplate(t *testing.T) {
f.Close()
}
}
- if err := BuildTemplate(dir); err != nil {
+ if err := AddViewPath(dir); err != nil {
t.Fatal(err)
}
+ beeTemplates := beeViewPathTemplates[dir]
if len(beeTemplates) != 3 {
t.Fatalf("should be 3 but got %v", len(beeTemplates))
}
@@ -103,6 +105,12 @@ var user = `
func TestRelativeTemplate(t *testing.T) {
dir := "_beeTmp"
+
+ //Just add dir to known viewPaths
+ if err := AddViewPath(dir); err != nil {
+ t.Fatal(err)
+ }
+
files := []string{
"easyui/public/menu.tpl",
"easyui/rbac/user.tpl",
@@ -126,6 +134,7 @@ func TestRelativeTemplate(t *testing.T) {
if err := BuildTemplate(dir, files[1]); err != nil {
t.Fatal(err)
}
+ beeTemplates := beeViewPathTemplates[dir]
if err := beeTemplates["easyui/rbac/user.tpl"].ExecuteTemplate(os.Stdout, "easyui/rbac/user.tpl", nil); err != nil {
t.Fatal(err)
}
@@ -134,3 +143,116 @@ func TestRelativeTemplate(t *testing.T) {
}
os.RemoveAll(dir)
}
+
+var add = `{{ template "layout_blog.tpl" . }}
+{{ define "css" }}
+
+{{ end}}
+
+
+{{ define "content" }}
+
{{ .Title }}
+
This is SomeVar: {{ .SomeVar }}
+{{ end }}
+
+{{ define "js" }}
+
+{{ end}}`
+
+var layoutBlog = `
+
+
+ Lin Li
+
+
+
+
+ {{ block "css" . }}{{ end }}
+
+
+
+