mirror of https://github.com/astaxie/beego.git synced 2024-11-25 19:10:54 +00:00

Merge pull request #2771 from astaxie/develop

v1.9.0
astaxie 2017-07-19 00:56:48 +08:00 committed by GitHub
commit d96289a81b
26 changed files with 751 additions and 105 deletions

View File

@@ -34,7 +34,7 @@ install:
   - go get github.com/cloudflare/golz4
   - go get github.com/gogo/protobuf/proto
   - go get github.com/Knetic/govaluate
-  - go get github.com/hsluoyz/casbin
+  - go get github.com/casbin/casbin
   - go get -u honnef.co/go/tools/cmd/gosimple
   - go get -u github.com/mdempsky/unconvert
   - go get -u github.com/gordonklaus/ineffassign

View File

@@ -37,7 +37,7 @@ var beeAdminApp *adminApp
 // FilterMonitorFunc is default monitor filter when admin module is enable.
 // if this func returns, admin module records qbs for this request by condition of this function logic.
 // usage:
-// func MyFilterMonitor(method, requestPath string, t time.Duration) bool {
+// func MyFilterMonitor(method, requestPath string, t time.Duration, pattern string, statusCode int) bool {
 //	 if method == "POST" {
 //		 return false
 //	 }
@@ -50,7 +50,7 @@ var beeAdminApp *adminApp
 //	 return true
 // }
 // beego.FilterMonitorFunc = MyFilterMonitor.
-var FilterMonitorFunc func(string, string, time.Duration) bool
+var FilterMonitorFunc func(string, string, time.Duration, string, int) bool

 func init() {
 	beeAdminApp = &adminApp{
@@ -62,7 +62,7 @@ func init() {
 	beeAdminApp.Route("/healthcheck", healthcheck)
 	beeAdminApp.Route("/task", taskStatus)
 	beeAdminApp.Route("/listconf", listConf)
-	FilterMonitorFunc = func(string, string, time.Duration) bool { return true }
+	FilterMonitorFunc = func(string, string, time.Duration, string, int) bool { return true }
 }

 // AdminIndex is the default http.Handler for admin module.
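With the two extra arguments, a custom monitor filter can now key its decision on the matched route pattern and the response status code. A minimal sketch of wiring one up against the new five-argument signature (the skip-health-check policy is only an illustration, not part of this commit):

```go
package main

import (
	"time"

	"github.com/astaxie/beego"
)

func main() {
	// Record QPS for everything except health checks and 404s;
	// pattern is the matched route, statusCode the response status.
	beego.FilterMonitorFunc = func(method, requestPath string, t time.Duration, pattern string, statusCode int) bool {
		if pattern == "/healthcheck" || statusCode == 404 {
			return false
		}
		return true
	}
	beego.Run()
}
```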

View File

@@ -23,7 +23,7 @@ import (
 const (
 	// VERSION represent beego web framework version.
-	VERSION = "1.8.3"
+	VERSION = "1.9.0"

 	// DEV is for develop
 	DEV = "dev"
@@ -40,9 +40,9 @@ var (
 // AddAPPStartHook is used to register the hookfunc
 // The hookfuncs will run in beego.Run()
-// such as sessionInit, middlerware start, buildtemplate, admin start
-func AddAPPStartHook(hf hookfunc) {
-	hooks = append(hooks, hf)
+// such as initiating session , starting middleware , building template, starting admin control and so on.
+func AddAPPStartHook(hf ...hookfunc) {
+	hooks = append(hooks, hf...)
 }

 // Run beego application.
@@ -69,12 +69,14 @@ func Run(params ...string) {
 func initBeforeHTTPRun() {
 	//init hooks
-	AddAPPStartHook(registerMime)
-	AddAPPStartHook(registerDefaultErrorHandler)
-	AddAPPStartHook(registerSession)
-	AddAPPStartHook(registerTemplate)
-	AddAPPStartHook(registerAdmin)
-	AddAPPStartHook(registerGzip)
+	AddAPPStartHook(
+		registerMime,
+		registerDefaultErrorHandler,
+		registerSession,
+		registerTemplate,
+		registerAdmin,
+		registerGzip,
+	)

 	for _, hk := range hooks {
 		if err := hk(); err != nil {
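Since AddAPPStartHook is now variadic, application code can register several start hooks in a single call. A short sketch; the hook bodies are placeholders for illustration:

```go
package main

import "github.com/astaxie/beego"

func main() {
	beego.AddAPPStartHook(
		func() error {
			// e.g. warm up caches before the listener starts
			return nil
		},
		func() error {
			// e.g. register custom template functions
			return nil
		},
	)
	beego.Run()
}
```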

cache/memory.go vendored
View File

@@ -217,26 +217,31 @@ func (bc *MemoryCache) vaccuum() {
 		if bc.items == nil {
 			return
 		}
-		for name := range bc.items {
-			bc.itemExpired(name)
+		if keys := bc.expiredKeys(); len(keys) != 0 {
+			bc.clearItems(keys)
 		}
 	}
 }

-// itemExpired returns true if an item is expired.
-func (bc *MemoryCache) itemExpired(name string) bool {
-	bc.Lock()
-	defer bc.Unlock()
-
-	itm, ok := bc.items[name]
-	if !ok {
-		return true
-	}
-
-	if itm.isExpire() {
-		delete(bc.items, name)
-		return true
-	}
-
-	return false
+// expiredKeys returns key list which are expired.
+func (bc *MemoryCache) expiredKeys() (keys []string) {
+	bc.RLock()
+	defer bc.RUnlock()
+	for key, itm := range bc.items {
+		if itm.isExpire() {
+			keys = append(keys, key)
+		}
+	}
+	return
+}
+
+// clearItems removes all the items which key in keys.
+func (bc *MemoryCache) clearItems(keys []string) {
+	bc.Lock()
+	defer bc.Unlock()
+	for _, key := range keys {
+		delete(bc.items, key)
+	}
 }

 func init() {

View File

@@ -189,16 +189,16 @@ func ParseBool(val interface{}) (value bool, err error) {
 			return false, nil
 		}
 	case int8, int32, int64:
-		strV := fmt.Sprintf("%s", v)
+		strV := fmt.Sprintf("%d", v)
 		if strV == "1" {
 			return true, nil
 		} else if strV == "0" {
 			return false, nil
 		}
 	case float64:
-		if v == 1 {
+		if v == 1.0 {
 			return true, nil
-		} else if v == 0 {
+		} else if v == 0.0 {
 			return false, nil
 		}
 	}
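The verb change matters because %s applied to an integer does not print its digits, so the "1"/"0" comparison above could never match before. A quick standalone demonstration (not part of the diff):

```go
package main

import "fmt"

func main() {
	var v interface{} = int64(1)
	fmt.Printf("%s\n", v) // prints %!s(int64=1)
	fmt.Printf("%d\n", v) // prints 1
}
```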

View File

@@ -16,9 +16,11 @@ package context
 import (
 	"bytes"
+	"compress/gzip"
 	"errors"
 	"io"
 	"io/ioutil"
+	"net/http"
 	"net/url"
 	"reflect"
 	"regexp"
@@ -349,11 +351,22 @@ func (input *BeegoInput) CopyBody(MaxMemory int64) []byte {
 	if input.Context.Request.Body == nil {
 		return []byte{}
 	}
+
+	var requestbody []byte
 	safe := &io.LimitedReader{R: input.Context.Request.Body, N: MaxMemory}
-	requestbody, _ := ioutil.ReadAll(safe)
+	if input.Header("Content-Encoding") == "gzip" {
+		reader, err := gzip.NewReader(safe)
+		if err != nil {
+			return nil
+		}
+		requestbody, _ = ioutil.ReadAll(reader)
+	} else {
+		requestbody, _ = ioutil.ReadAll(safe)
+	}
+
 	input.Context.Request.Body.Close()
 	bf := bytes.NewBuffer(requestbody)
-	input.Context.Request.Body = ioutil.NopCloser(bf)
+	input.Context.Request.Body = http.MaxBytesReader(input.Context.ResponseWriter, ioutil.NopCloser(bf), MaxMemory)
 	input.RequestBody = requestbody
 	return requestbody
 }
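On the client side, a request body that this updated CopyBody will transparently decompress only needs the Content-Encoding: gzip header. A hedged sketch of such a client; the URL and payload are placeholders:

```go
package main

import (
	"bytes"
	"compress/gzip"
	"net/http"
)

func postGzipped(url string, payload []byte) (*http.Response, error) {
	var buf bytes.Buffer
	zw := gzip.NewWriter(&buf)
	if _, err := zw.Write(payload); err != nil {
		return nil, err
	}
	if err := zw.Close(); err != nil {
		return nil, err
	}
	req, err := http.NewRequest(http.MethodPost, url, &buf)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Encoding", "gzip")
	return http.DefaultClient.Do(req)
}

func main() {
	// Any beego handler that reads ctx.Input.RequestBody (with CopyRequestBody
	// enabled in the app config) receives the decompressed bytes.
	_, _ = postGzipped("http://localhost:8080/api/items", []byte(`{"name":"demo"}`))
}
```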

View File

@@ -177,7 +177,7 @@ func jsonRenderer(value interface{}) Renderer {
 func errorRenderer(err error) Renderer {
 	return rendererFunc(func(ctx *Context) {
 		ctx.Output.SetStatus(500)
-		ctx.WriteString(err.Error())
+		ctx.Output.Body([]byte(err.Error()))
 	})
 }

View File

@@ -55,6 +55,13 @@ type ControllerComments struct {
 	MethodParams []*param.MethodParam
 }

+// ControllerCommentsSlice implements the sort interface
+type ControllerCommentsSlice []ControllerComments
+
+func (p ControllerCommentsSlice) Len() int           { return len(p) }
+func (p ControllerCommentsSlice) Less(i, j int) bool { return p[i].Router < p[j].Router }
+func (p ControllerCommentsSlice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
+
 // Controller defines some basic http request handler operations, such as
 // http context, template and view, session and xsrf.
 type Controller struct {

View File

@@ -52,7 +52,7 @@ func TestErrorCode_01(t *testing.T) {
 		if w.Code != code {
 			t.Fail()
 		}
-		if !strings.Contains(string(w.Body.Bytes()), http.StatusText(code)) {
+		if !strings.Contains(w.Body.String(), http.StatusText(code)) {
 			t.Fail()
 		}
 	}
@@ -82,7 +82,7 @@ func TestErrorCode_03(t *testing.T) {
 	if w.Code != 200 {
 		t.Fail()
 	}
-	if string(w.Body.Bytes()) != parseCodeError {
+	if w.Body.String() != parseCodeError {
 		t.Fail()
 	}
 }

View File

@@ -3,14 +3,17 @@ package grace
 import (
 	"errors"
 	"net"
+	"sync"
 )

 type graceConn struct {
 	net.Conn
 	server *Server
+	m      sync.Mutex
+	closed bool
 }

-func (c graceConn) Close() (err error) {
+func (c *graceConn) Close() (err error) {
 	defer func() {
 		if r := recover(); r != nil {
 			switch x := r.(type) {
@@ -23,6 +26,14 @@ func (c graceConn) Close() (err error) {
 			}
 		}
 	}()
+
+	c.m.Lock()
+	if c.closed {
+		c.m.Unlock()
+		return
+	}
 	c.server.wg.Done()
+	c.closed = true
+	c.m.Unlock()
 	return c.Conn.Close()
 }
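The mutex plus closed flag guarantee that wg.Done runs exactly once even if Close is called twice (for instance by both a handler and the shutdown path); this is also why the listener now hands out *graceConn, since the flag only sticks on a pointer receiver. The same idea in a standalone wrapper with hypothetical names:

```go
package main

import (
	"net"
	"sync"
)

// onceCloseConn decrements a WaitGroup exactly once, no matter how many
// times Close is called on it.
type onceCloseConn struct {
	net.Conn
	wg     *sync.WaitGroup
	m      sync.Mutex
	closed bool
}

func (c *onceCloseConn) Close() error {
	c.m.Lock()
	if c.closed {
		c.m.Unlock()
		return nil
	}
	c.closed = true
	c.wg.Done()
	c.m.Unlock()
	return c.Conn.Close()
}

func main() {
	var wg sync.WaitGroup
	wg.Add(1)
	c1, c2 := net.Pipe()
	defer c2.Close()

	conn := &onceCloseConn{Conn: c1, wg: &wg}
	conn.Close()
	conn.Close() // second call is a no-op; wg.Done is not called again
	wg.Wait()
}
```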

View File

@@ -37,7 +37,7 @@ func (gl *graceListener) Accept() (c net.Conn, err error) {
 	tc.SetKeepAlive(true)
 	tc.SetKeepAlivePeriod(3 * time.Minute)
-	c = graceConn{
+	c = &graceConn{
 		Conn:   tc,
 		server: gl.server,
 	}

View File

@@ -56,6 +56,8 @@ type fileLogWriter struct {
 	Perm string `json:"perm"`

+	RotatePerm string `json:"rotateperm"`
+
 	fileNameOnly, suffix string // like "project.log", project is fileNameOnly and .log is suffix
 }

@@ -65,6 +67,7 @@ func newFileWriter() Logger {
 		Daily:      true,
 		MaxDays:    7,
 		Rotate:     true,
+		RotatePerm: "0440",
 		Level:      LevelTrace,
 		Perm:       "0660",
 	}
@@ -237,8 +240,12 @@ func (w *fileLogWriter) doRotate(logTime time.Time) error {
 	// Find the next available number
 	num := 1
 	fName := ""
+	rotatePerm, err := strconv.ParseInt(w.RotatePerm, 8, 64)
+	if err != nil {
+		return err
+	}

-	_, err := os.Lstat(w.Filename)
+	_, err = os.Lstat(w.Filename)
 	if err != nil {
 		//even if the file is not exist or other ,we should RESTART the logger
 		goto RESTART_LOGGER
@@ -271,8 +278,9 @@ func (w *fileLogWriter) doRotate(logTime time.Time) error {
 	if err != nil {
 		goto RESTART_LOGGER
 	}
-	err = os.Chmod(fName, os.FileMode(0440))
-	// re-start logger
+
+	err = os.Chmod(fName, os.FileMode(rotatePerm))
 RESTART_LOGGER:
 	startLoggerErr := w.startLogger()
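Because the writer now exposes a rotateperm option alongside perm, a logger can keep the live file group-writable while rotated copies become read-only. A sketch using the JSON keys shown in the diff; the filename and modes are arbitrary examples:

```go
package main

import "github.com/astaxie/beego/logs"

func main() {
	log := logs.NewLogger()
	// live log stays 0660, rotated copies are chmod'ed to 0440
	log.SetLogger(logs.AdapterFile,
		`{"filename":"app.log","perm":"0660","rotateperm":"0440","daily":true,"maxdays":7}`)
	log.Informational("rotation permissions configured")
}
```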

View File

@@ -190,6 +190,7 @@ func testFileRotate(t *testing.T, fn1, fn2 string) {
 		Rotate:     true,
 		Level:      LevelTrace,
 		Perm:       "0660",
+		RotatePerm: "0440",
 	}
 	fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
 	fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
@@ -213,6 +214,7 @@ func testFileDailyRotate(t *testing.T, fn1, fn2 string) {
 		Rotate:     true,
 		Level:      LevelTrace,
 		Perm:       "0660",
+		RotatePerm: "0440",
 	}
 	fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
 	fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)

View File

@@ -14,40 +14,382 @@
package migration

-// Table store the tablename and Column
-type Table struct {
-	TableName string
-	Columns   []*Column
-}
-
-// Create return the create sql
-func (t *Table) Create() string {
-	return ""
-}
-
-// Drop return the drop sql
-func (t *Table) Drop() string {
-	return ""
-}
-
-// Column define the columns name type and Default
-type Column struct {
-	Name    string
-	Type    string
-	Default interface{}
-}
-
-// Create return create sql with the provided tbname and columns
-func Create(tbname string, columns ...Column) string {
-	return ""
-}
-
-// Drop return the drop sql with the provided tbname and columns
-func Drop(tbname string, columns ...Column) string {
-	return ""
-}
-
-// TableDDL is still in think
-func TableDDL(tbname string, columns ...Column) string {
-	return ""
-}
import (
	"fmt"

	"github.com/astaxie/beego"
)

// Index struct defines the structure of Index Columns
type Index struct {
	Name string
}

// Unique struct defines a single unique key combination
type Unique struct {
	Definition string
	Columns    []*Column
}

//Column struct defines a single column of a table
type Column struct {
	Name     string
	Inc      string
	Null     string
	Default  string
	Unsign   string
	DataType string
	remove   bool
	Modify   bool
}

// Foreign struct defines a single foreign relationship
type Foreign struct {
	ForeignTable  string
	ForeignColumn string
	OnDelete      string
	OnUpdate      string
	Column
}

// RenameColumn struct allows renaming of columns
type RenameColumn struct {
	OldName     string
	OldNull     string
	OldDefault  string
	OldUnsign   string
	OldDataType string
	NewName     string
	Column
}

// CreateTable creates the table on system
func (m *Migration) CreateTable(tablename, engine, charset string, p ...func()) {
	m.TableName = tablename
	m.Engine = engine
	m.Charset = charset
	m.ModifyType = "create"
}
// AlterTable set the ModifyType to alter
func (m *Migration) AlterTable(tablename string) {
m.TableName = tablename
m.ModifyType = "alter"
}
// NewCol creates a new standard column and attaches it to m struct
func (m *Migration) NewCol(name string) *Column {
col := &Column{Name: name}
m.AddColumns(col)
return col
}
//PriCol creates a new primary column and attaches it to m struct
func (m *Migration) PriCol(name string) *Column {
col := &Column{Name: name}
m.AddColumns(col)
m.AddPrimary(col)
return col
}
//UniCol creates / appends columns to specified unique key and attaches it to m struct
func (m *Migration) UniCol(uni, name string) *Column {
col := &Column{Name: name}
m.AddColumns(col)
uniqueOriginal := &Unique{}
for _, unique := range m.Uniques {
if unique.Definition == uni {
unique.AddColumnsToUnique(col)
uniqueOriginal = unique
}
}
if uniqueOriginal.Definition == "" {
unique := &Unique{Definition: uni}
unique.AddColumnsToUnique(col)
m.AddUnique(unique)
}
return col
}
//ForeignCol creates a new foreign column and returns the instance of column
func (m *Migration) ForeignCol(colname, foreigncol, foreigntable string) (foreign *Foreign) {
foreign = &Foreign{ForeignColumn: foreigncol, ForeignTable: foreigntable}
foreign.Name = colname
m.AddForeign(foreign)
return foreign
}
//SetOnDelete sets the on delete of foreign
func (foreign *Foreign) SetOnDelete(del string) *Foreign {
foreign.OnDelete = "ON DELETE" + del
return foreign
}
//SetOnUpdate sets the on update of foreign
func (foreign *Foreign) SetOnUpdate(update string) *Foreign {
foreign.OnUpdate = "ON UPDATE" + update
return foreign
}
//Remove marks the columns to be removed.
//it allows reverse m to create the column.
func (c *Column) Remove() {
c.remove = true
}
//SetAuto enables auto_increment of column (can be used once)
func (c *Column) SetAuto(inc bool) *Column {
if inc {
c.Inc = "auto_increment"
}
return c
}
//SetNullable sets the column to be null
func (c *Column) SetNullable(null bool) *Column {
if null {
c.Null = ""
} else {
c.Null = "NOT NULL"
}
return c
}
//SetDefault sets the default value, prepend with "DEFAULT "
func (c *Column) SetDefault(def string) *Column {
c.Default = "DEFAULT " + def
return c
}
//SetUnsigned sets the column to be unsigned int
func (c *Column) SetUnsigned(unsign bool) *Column {
if unsign {
c.Unsign = "UNSIGNED"
}
return c
}
//SetDataType sets the dataType of the column
func (c *Column) SetDataType(dataType string) *Column {
c.DataType = dataType
return c
}
//SetOldNullable allows reverting to previous nullable on reverse ms
func (c *RenameColumn) SetOldNullable(null bool) *RenameColumn {
if null {
c.OldNull = ""
} else {
c.OldNull = "NOT NULL"
}
return c
}
//SetOldDefault allows reverting to previous default on reverse ms
func (c *RenameColumn) SetOldDefault(def string) *RenameColumn {
c.OldDefault = def
return c
}
//SetOldUnsigned allows reverting to previous unsgined on reverse ms
func (c *RenameColumn) SetOldUnsigned(unsign bool) *RenameColumn {
if unsign {
c.OldUnsign = "UNSIGNED"
}
return c
}
//SetOldDataType allows reverting to previous datatype on reverse ms
func (c *RenameColumn) SetOldDataType(dataType string) *RenameColumn {
c.OldDataType = dataType
return c
}
//SetPrimary adds the columns to the primary key (can only be used any number of times in only one m)
func (c *Column) SetPrimary(m *Migration) *Column {
m.Primary = append(m.Primary, c)
return c
}
//AddColumnsToUnique adds the columns to Unique Struct
func (unique *Unique) AddColumnsToUnique(columns ...*Column) *Unique {
unique.Columns = append(unique.Columns, columns...)
return unique
}
//AddColumns adds columns to m struct
func (m *Migration) AddColumns(columns ...*Column) *Migration {
m.Columns = append(m.Columns, columns...)
return m
}
//AddPrimary adds the column to primary in m struct
func (m *Migration) AddPrimary(primary *Column) *Migration {
m.Primary = append(m.Primary, primary)
return m
}
//AddUnique adds the column to unique in m struct
func (m *Migration) AddUnique(unique *Unique) *Migration {
m.Uniques = append(m.Uniques, unique)
return m
}
//AddForeign adds the column to foreign in m struct
func (m *Migration) AddForeign(foreign *Foreign) *Migration {
m.Foreigns = append(m.Foreigns, foreign)
return m
}
//AddIndex adds the column to index in m struct
func (m *Migration) AddIndex(index *Index) *Migration {
m.Indexes = append(m.Indexes, index)
return m
}
//RenameColumn allows renaming of columns
func (m *Migration) RenameColumn(from, to string) *RenameColumn {
rename := &RenameColumn{OldName: from, NewName: to}
m.Renames = append(m.Renames, rename)
return rename
}
//GetSQL returns the generated sql depending on ModifyType
func (m *Migration) GetSQL() (sql string) {
sql = ""
switch m.ModifyType {
case "create":
{
sql += fmt.Sprintf("CREATE TABLE `%s` (", m.TableName)
for index, column := range m.Columns {
sql += fmt.Sprintf("\n `%s` %s %s %s %s %s", column.Name, column.DataType, column.Unsign, column.Null, column.Inc, column.Default)
if len(m.Columns) > index+1 {
sql += ","
}
}
if len(m.Primary) > 0 {
sql += fmt.Sprintf(",\n PRIMARY KEY( ")
}
for index, column := range m.Primary {
sql += fmt.Sprintf(" `%s`", column.Name)
if len(m.Primary) > index+1 {
sql += ","
}
}
if len(m.Primary) > 0 {
sql += fmt.Sprintf(")")
}
for _, unique := range m.Uniques {
sql += fmt.Sprintf(",\n UNIQUE KEY `%s`( ", unique.Definition)
for index, column := range unique.Columns {
sql += fmt.Sprintf(" `%s`", column.Name)
if len(unique.Columns) > index+1 {
sql += ","
}
}
sql += fmt.Sprintf(")")
}
for _, foreign := range m.Foreigns {
sql += fmt.Sprintf(",\n `%s` %s %s %s %s %s", foreign.Name, foreign.DataType, foreign.Unsign, foreign.Null, foreign.Inc, foreign.Default)
sql += fmt.Sprintf(",\n KEY `%s_%s_foreign`(`%s`),", m.TableName, foreign.Column.Name, foreign.Column.Name)
sql += fmt.Sprintf("\n CONSTRAINT `%s_%s_foreign` FOREIGN KEY (`%s`) REFERENCES `%s` (`%s`) %s %s", m.TableName, foreign.Column.Name, foreign.Column.Name, foreign.ForeignTable, foreign.ForeignColumn, foreign.OnDelete, foreign.OnUpdate)
}
sql += fmt.Sprintf(")ENGINE=%s DEFAULT CHARSET=%s;", m.Engine, m.Charset)
break
}
case "alter":
{
sql += fmt.Sprintf("ALTER TABLE `%s` ", m.TableName)
for index, column := range m.Columns {
if !column.remove {
beego.BeeLogger.Info("col")
sql += fmt.Sprintf("\n ADD `%s` %s %s %s %s %s", column.Name, column.DataType, column.Unsign, column.Null, column.Inc, column.Default)
} else {
sql += fmt.Sprintf("\n DROP COLUMN `%s`", column.Name)
}
if len(m.Columns) > index {
sql += ","
}
}
for index, column := range m.Renames {
sql += fmt.Sprintf("CHANGE COLUMN `%s` `%s` %s %s %s %s %s", column.OldName, column.NewName, column.DataType, column.Unsign, column.Null, column.Inc, column.Default)
if len(m.Renames) > index+1 {
sql += ","
}
}
for index, foreign := range m.Foreigns {
sql += fmt.Sprintf("ADD `%s` %s %s %s %s %s", foreign.Name, foreign.DataType, foreign.Unsign, foreign.Null, foreign.Inc, foreign.Default)
sql += fmt.Sprintf(",\n ADD KEY `%s_%s_foreign`(`%s`)", m.TableName, foreign.Column.Name, foreign.Column.Name)
sql += fmt.Sprintf(",\n ADD CONSTRAINT `%s_%s_foreign` FOREIGN KEY (`%s`) REFERENCES `%s` (`%s`) %s %s", m.TableName, foreign.Column.Name, foreign.Column.Name, foreign.ForeignTable, foreign.ForeignColumn, foreign.OnDelete, foreign.OnUpdate)
if len(m.Foreigns) > index+1 {
sql += ","
}
}
sql += ";"
break
}
case "reverse":
{
sql += fmt.Sprintf("ALTER TABLE `%s`", m.TableName)
for index, column := range m.Columns {
if column.remove {
sql += fmt.Sprintf("\n ADD `%s` %s %s %s %s %s", column.Name, column.DataType, column.Unsign, column.Null, column.Inc, column.Default)
} else {
sql += fmt.Sprintf("\n DROP COLUMN `%s`", column.Name)
}
if len(m.Columns) > index {
sql += ","
}
}
if len(m.Primary) > 0 {
sql += fmt.Sprintf("\n DROP PRIMARY KEY,")
}
for index, unique := range m.Uniques {
sql += fmt.Sprintf("\n DROP KEY `%s`", unique.Definition)
if len(m.Uniques) > index {
sql += ","
}
}
for index, column := range m.Renames {
sql += fmt.Sprintf("\n CHANGE COLUMN `%s` `%s` %s %s %s %s", column.NewName, column.OldName, column.OldDataType, column.OldUnsign, column.OldNull, column.OldDefault)
if len(m.Renames) > index {
sql += ","
}
}
for _, foreign := range m.Foreigns {
sql += fmt.Sprintf("\n DROP KEY `%s_%s_foreign`", m.TableName, foreign.Column.Name)
sql += fmt.Sprintf(",\n DROP FOREIGN KEY `%s_%s_foreign`", m.TableName, foreign.Column.Name)
sql += fmt.Sprintf(",\n DROP COLUMN `%s`", foreign.Name)
}
sql += ";"
}
case "delete":
{
sql += fmt.Sprintf("DROP TABLE IF EXISTS `%s`;", m.TableName)
}
}
return
}

migration/doc.go Normal file
View File

@@ -0,0 +1,32 @@
// Package migration enables you to generate migrations back and forth. It generates both migrations.
//
// //Creates a table
// m.CreateTable("tablename","InnoDB","utf8");
//
// //Alter a table
// m.AlterTable("tablename")
//
// Standard Column Methods
// * SetDataType
// * SetNullable
// * SetDefault
// * SetUnsigned (use only on integer types unless produces error)
//
// //Sets a primary column, multiple calls allowed, standard column methods available
// m.PriCol("id").SetAuto(true).SetNullable(false).SetDataType("INT(10)").SetUnsigned(true)
//
// //UniCol Can be used multiple times, allows standard Column methods. Use same "index" string to add to same index
// m.UniCol("index","column")
//
// //Standard Column Initialisation, can call .Remove() after NewCol("") on alter to remove
// m.NewCol("name").SetDataType("VARCHAR(255) COLLATE utf8_unicode_ci").SetNullable(false)
// m.NewCol("value").SetDataType("DOUBLE(8,2)").SetNullable(false)
//
// //Rename Columns , only use with Alter table, doesn't works with Create, prefix standard column methods with "Old" to
// //create a true reversible migration eg: SetOldDataType("DOUBLE(12,3)")
// m.RenameColumn("from","to")...
//
// //Foreign Columns, single columns are only supported, SetOnDelete & SetOnUpdate are available, call appropriately.
// //Supports standard column methods, automatic reverse.
// m.ForeignCol("local_col","foreign_col","foreign_table")
package migration
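Put together, a migration written against this builder API might look like the sketch below. The table name, column names and registration key are invented for illustration; the Up/Down wiring simply defers to the embedded Migration type added in this commit:

```go
package main

import (
	"github.com/astaxie/beego/migration"
)

// AddPostsTable is a hypothetical DDL migration.
type AddPostsTable struct {
	migration.Migration
}

func (m *AddPostsTable) Up() {
	m.CreateTable("posts", "InnoDB", "utf8")
	m.PriCol("id").SetAuto(true).SetNullable(false).SetDataType("INT(10)").SetUnsigned(true)
	m.NewCol("title").SetDataType("VARCHAR(255)").SetNullable(false)
	m.UniCol("uq_title", "title")
	m.Migration.Up() // queues the generated CREATE TABLE statement
}

func (m *AddPostsTable) Down() {
	m.CreateTable("posts", "InnoDB", "utf8")
	m.Migration.Down() // reverses to DROP TABLE IF EXISTS `posts`
}

func init() {
	m := &AddPostsTable{}
	m.Created = "20170719_000000"
	_ = migration.Register("AddPostsTable_20170719_000000", m)
}

func main() {
	// The bee tool normally drives migrations; an empty main just keeps this
	// sketch self-contained.
}
```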

View File

@@ -52,6 +52,26 @@ type Migrationer interface {
 	GetCreated() int64
 }

+//Migration defines the migrations by either SQL or DDL
+type Migration struct {
+	sqls           []string
+	Created        string
+	TableName      string
+	Engine         string
+	Charset        string
+	ModifyType     string
+	Columns        []*Column
+	Indexes        []*Index
+	Primary        []*Column
+	Uniques        []*Unique
+	Foreigns       []*Foreign
+	Renames        []*RenameColumn
+	RemoveColumns  []*Column
+	RemoveIndexes  []*Index
+	RemoveUniques  []*Unique
+	RemoveForeigns []*Foreign
+}
+
 var (
 	migrationMap map[string]Migrationer
 )
@@ -60,20 +80,34 @@ func init() {
 	migrationMap = make(map[string]Migrationer)
 }

-// Migration the basic type which will implement the basic type
-type Migration struct {
-	sqls    []string
-	Created string
-}
-
 // Up implement in the Inheritance struct for upgrade
 func (m *Migration) Up() {
+	switch m.ModifyType {
+	case "reverse":
+		m.ModifyType = "alter"
+	case "delete":
+		m.ModifyType = "create"
+	}
+	m.sqls = append(m.sqls, m.GetSQL())
 }

 // Down implement in the Inheritance struct for down
 func (m *Migration) Down() {
+	switch m.ModifyType {
+	case "alter":
+		m.ModifyType = "reverse"
+	case "create":
+		m.ModifyType = "delete"
+	}
+	m.sqls = append(m.sqls, m.GetSQL())
+}
+
+//Migrate adds the SQL to the execution list
+func (m *Migration) Migrate(migrationType string) {
+	m.ModifyType = migrationType
+	m.sqls = append(m.sqls, m.GetSQL())
 }

 // SQL add sql want to execute

View File

@@ -94,3 +94,43 @@ func (d *dbBaseOracle) IndexExists(db dbQuerier, table string, name string) bool
 	row.Scan(&cnt)
 	return cnt > 0
 }
// execute insert sql with given struct and given values.
// insert the given values, not the field values in struct.
func (d *dbBaseOracle) InsertValue(q dbQuerier, mi *modelInfo, isMulti bool, names []string, values []interface{}) (int64, error) {
Q := d.ins.TableQuote()
marks := make([]string, len(names))
for i := range marks {
marks[i] = ":" + names[i]
}
sep := fmt.Sprintf("%s, %s", Q, Q)
qmarks := strings.Join(marks, ", ")
columns := strings.Join(names, sep)
multi := len(values) / len(names)
if isMulti {
qmarks = strings.Repeat(qmarks+"), (", multi-1) + qmarks
}
query := fmt.Sprintf("INSERT INTO %s%s%s (%s%s%s) VALUES (%s)", Q, mi.table, Q, Q, columns, Q, qmarks)
d.ins.ReplaceMarks(&query)
if isMulti || !d.ins.HasReturningID(mi, &query) {
res, err := q.Exec(query, values...)
if err == nil {
if isMulti {
return res.RowsAffected()
}
return res.LastInsertId()
}
return 0, err
}
row := q.QueryRow(query, values...)
var id int64
err := row.Scan(&id)
return id, err
}

View File

@@ -135,15 +135,16 @@ func parserComments(f *ast.FuncDecl, controllerName, pkgpath string) error {
 func buildMethodParams(funcParams []*ast.Field, pc *parsedComment) []*param.MethodParam {
 	result := make([]*param.MethodParam, 0, len(funcParams))
 	for _, fparam := range funcParams {
-		methodParam := buildMethodParam(fparam, pc)
-		result = append(result, methodParam)
+		for _, pName := range fparam.Names {
+			methodParam := buildMethodParam(fparam, pName.Name, pc)
+			result = append(result, methodParam)
+		}
 	}
 	return result
 }

-func buildMethodParam(fparam *ast.Field, pc *parsedComment) *param.MethodParam {
+func buildMethodParam(fparam *ast.Field, name string, pc *parsedComment) *param.MethodParam {
 	options := []param.MethodParamOption{}
-	name := fparam.Names[0].Name
 	if cparam, ok := pc.params[name]; ok {
 		//Build param from comment info
 		name = cparam.name
@@ -274,6 +275,7 @@ func genRouterCode(pkgRealpath string) {
 	sort.Strings(sortKey)
 	for _, k := range sortKey {
 		cList := genInfoList[k]
+		sort.Sort(ControllerCommentsSlice(cList))
 		for _, c := range cList {
 			allmethod := "nil"
 			if len(c.AllowHTTPMethods) > 0 {

View File

@@ -17,7 +17,7 @@
 // import(
 //   "github.com/astaxie/beego"
 //   "github.com/astaxie/beego/plugins/authz"
-//   "github.com/hsluoyz/casbin"
+//   "github.com/casbin/casbin"
 // )
 //
 // func main(){
@@ -42,7 +42,7 @@ package authz
 import (
 	"github.com/astaxie/beego"
 	"github.com/astaxie/beego/context"
-	"github.com/hsluoyz/casbin"
+	"github.com/casbin/casbin"
 	"net/http"
 )

View File

@@ -18,7 +18,7 @@ import (
 	"github.com/astaxie/beego"
 	"github.com/astaxie/beego/context"
 	"github.com/astaxie/beego/plugins/auth"
-	"github.com/hsluoyz/casbin"
+	"github.com/casbin/casbin"
 	"net/http"
 	"net/http/httptest"
 	"testing"

View File

@@ -704,7 +704,6 @@ func (p *ControllerRegister) ServeHTTP(rw http.ResponseWriter, r *http.Request)
 	// User can define RunController and RunMethod in filter
 	if context.Input.RunController != nil && context.Input.RunMethod != "" {
 		findRouter = true
-		isRunnable = true
 		runMethod = context.Input.RunMethod
 		runRouter = context.Input.RunController
 	} else {
@@ -849,7 +848,15 @@ Admin:
 	//admin module record QPS
 	if BConfig.Listen.EnableAdmin {
 		timeDur := time.Since(startTime)
-		if FilterMonitorFunc(r.Method, r.URL.Path, timeDur) {
+		pattern := ""
+		if routerInfo != nil {
+			pattern = routerInfo.pattern
+		}
+		statusCode := context.ResponseWriter.Status
+		if statusCode == 0 {
+			statusCode = 200
+		}
+		if FilterMonitorFunc(r.Method, r.URL.Path, timeDur, pattern, statusCode) {
 			if runRouter != nil {
 				go toolbox.StatisticsMap.AddStatistics(r.Method, r.URL.Path, runRouter.Name(), timeDur)
 			} else {

View File

@@ -64,6 +64,9 @@ Struct Tag Use:
 	func main() {
 		valid := validation.Validation{}
+		// ignore empty field valid
+		// see CanSkipFuncs
+		// valid := validation.Validation{RequiredFirst:true}
 		u := user{Name: "test", Age: 40}
 		b, err := valid.Valid(u)
 		if err != nil {

View File

@@ -25,6 +25,8 @@ import (
 const (
 	// ValidTag struct tag
 	ValidTag = "valid"
+
+	wordsize = 32 << (^uint(0) >> 32 & 1)
 )

 var (
@@ -43,6 +45,8 @@ var (
 		"Valid":   true,
 		"NoMatch": true,
 	}
+	// ErrInt64On32 show 32 bit platform not support int64
+	ErrInt64On32 = fmt.Errorf("not support int64 on 32-bit platform")
 )

 func init() {
@@ -249,16 +253,39 @@ func parseParam(t reflect.Type, s string) (i interface{}, err error) {
 	switch t.Kind() {
 	case reflect.Int:
 		i, err = strconv.Atoi(s)
+	case reflect.Int64:
+		if wordsize == 32 {
+			return nil, ErrInt64On32
+		}
+		i, err = strconv.ParseInt(s, 10, 64)
+	case reflect.Int32:
+		var v int64
+		v, err = strconv.ParseInt(s, 10, 32)
+		if err == nil {
+			i = int32(v)
+		}
+	case reflect.Int16:
+		var v int64
+		v, err = strconv.ParseInt(s, 10, 16)
+		if err == nil {
+			i = int16(v)
+		}
+	case reflect.Int8:
+		var v int64
+		v, err = strconv.ParseInt(s, 10, 8)
+		if err == nil {
+			i = int8(v)
+		}
 	case reflect.String:
 		i = s
 	case reflect.Ptr:
 		if t.Elem().String() != "regexp.Regexp" {
-			err = fmt.Errorf("does not support %s", t.Elem().String())
+			err = fmt.Errorf("not support %s", t.Elem().String())
 			return
 		}
 		i, err = regexp.Compile(s)
 	default:
-		err = fmt.Errorf("does not support %s", t.Kind().String())
+		err = fmt.Errorf("not support %s", t.Kind().String())
 	}
 	return
 }

View File

@@ -106,6 +106,11 @@ func (r *Result) Message(message string, args ...interface{}) *Result {
 // A Validation context manages data validation and error messages.
 type Validation struct {
+	// if this field set true, in struct tag valid
+	// if the struct field vale is empty
+	// it will skip those valid functions, see CanSkipFuncs
+	RequiredFirst bool
+
 	Errors    []*Error
 	ErrorsMap map[string]*Error
 }
@@ -324,7 +329,19 @@ func (v *Validation) Valid(obj interface{}) (b bool, err error) {
 		if vfs, err = getValidFuncs(objT.Field(i)); err != nil {
 			return
 		}
+
+		var hasReuired bool
 		for _, vf := range vfs {
+			if vf.Name == "Required" {
+				hasReuired = true
+			}
+
+			if !hasReuired && v.RequiredFirst && len(objV.Field(i).String()) == 0 {
+				if _, ok := CanSkipFuncs[vf.Name]; ok {
+					continue
+				}
+			}
 			if _, err = funcs.Call(vf.Name,
 				mergeParam(v, objV.Field(i).Interface(), vf.Params)...); err != nil {
 				return

View File

@@ -391,3 +391,54 @@ func TestRecursiveValid(t *testing.T) {
 		t.Error("validation should not be passed")
 	}
 }
func TestSkipValid(t *testing.T) {
type User struct {
ID int
Email string `valid:"Email"`
ReqEmail string `valid:"Required;Email"`
IP string `valid:"IP"`
ReqIP string `valid:"Required;IP"`
Mobile string `valid:"Mobile"`
ReqMobile string `valid:"Required;Mobile"`
Tel string `valid:"Tel"`
ReqTel string `valid:"Required;Tel"`
Phone string `valid:"Phone"`
ReqPhone string `valid:"Required;Phone"`
ZipCode string `valid:"ZipCode"`
ReqZipCode string `valid:"Required;ZipCode"`
}
u := User{
ReqEmail: "a@a.com",
ReqIP: "127.0.0.1",
ReqMobile: "18888888888",
ReqTel: "02088888888",
ReqPhone: "02088888888",
ReqZipCode: "510000",
}
valid := Validation{}
b, err := valid.Valid(u)
if err != nil {
t.Fatal(err)
}
if b {
t.Fatal("validation should not be passed")
}
valid = Validation{RequiredFirst: true}
b, err = valid.Valid(u)
if err != nil {
t.Fatal(err)
}
if !b {
t.Fatal("validation should be passed")
}
}

View File

@@ -23,6 +23,16 @@ import (
 	"unicode/utf8"
 )

+// CanSkipFuncs will skip valid if RequiredFirst is true and the struct field's value is empty
+var CanSkipFuncs = map[string]struct{}{
+	"Email":   {},
+	"IP":      {},
+	"Mobile":  {},
+	"Tel":     {},
+	"Phone":   {},
+	"ZipCode": {},
+}
+
 // MessageTmpls store commond validate template
 var MessageTmpls = map[string]string{
 	"Required": "Can not be empty",
@@ -166,13 +176,29 @@ type Min struct {
 }

 // IsSatisfied judge whether obj is valid
+// not support int64 on 32-bit platform
 func (m Min) IsSatisfied(obj interface{}) bool {
-	num, ok := obj.(int)
-	if ok {
-		return num >= m.Min
-	}
-	return false
+	var v int
+	switch obj.(type) {
+	case int64:
+		if wordsize == 32 {
+			return false
+		}
+		v = int(obj.(int64))
+	case int:
+		v = obj.(int)
+	case int32:
+		v = int(obj.(int32))
+	case int16:
+		v = int(obj.(int16))
+	case int8:
+		v = int(obj.(int8))
+	default:
+		return false
+	}
+
+	return v >= m.Min
 }

 // DefaultMessage return the default min error message
 func (m Min) DefaultMessage() string {
@@ -196,13 +222,29 @@ type Max struct {
 }

 // IsSatisfied judge whether obj is valid
+// not support int64 on 32-bit platform
 func (m Max) IsSatisfied(obj interface{}) bool {
-	num, ok := obj.(int)
-	if ok {
-		return num <= m.Max
-	}
-	return false
+	var v int
+	switch obj.(type) {
+	case int64:
+		if wordsize == 32 {
+			return false
+		}
+		v = int(obj.(int64))
+	case int:
+		v = obj.(int)
+	case int32:
+		v = int(obj.(int32))
+	case int16:
+		v = int(obj.(int16))
+	case int8:
+		v = int(obj.(int8))
+	default:
+		return false
+	}
+
+	return v <= m.Max
 }

 // DefaultMessage return the default max error message
 func (m Max) DefaultMessage() string {
@@ -227,6 +269,7 @@ type Range struct {
 }

 // IsSatisfied judge whether obj is valid
+// not support int64 on 32-bit platform
 func (r Range) IsSatisfied(obj interface{}) bool {
 	return r.Min.IsSatisfied(obj) && r.Max.IsSatisfied(obj)
 }
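Because Min, Max and Range now switch over all signed integer widths (int64 is rejected on 32-bit builds), validators can be applied to narrower fields directly. A small standalone check for illustration:

```go
package main

import (
	"fmt"

	"github.com/astaxie/beego/validation"
)

func main() {
	valid := validation.Validation{}
	var age int16 = 40
	// accepted now; previously only plain int passed the type assertion
	valid.Range(age, 0, 140, "age")
	fmt.Println("validation errors:", len(valid.Errors))
}
```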