mirror of https://github.com/astaxie/beego.git synced 2024-11-22 00:40:56 +00:00

Format code

Ming Deng 2020-07-22 23:00:06 +08:00
parent 9c51952db4
commit 30eb889a91
37 changed files with 133 additions and 133 deletions
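For context, the hunks below are gofmt-style formatting fixes: spacing around operators, struct-field alignment, and import ordering. A minimal sketch of checking a file against gofmt using the standard library's go/format package follows — the helper and the file path are illustrative only, not part of this commit:

package main

import (
	"bytes"
	"fmt"
	"go/format"
	"os"
)

// reportUnformatted prints a notice when a Go source file differs from its gofmt output.
func reportUnformatted(path string) error {
	src, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	formatted, err := format.Source(src) // applies standard gofmt rules
	if err != nil {
		return err
	}
	if !bytes.Equal(src, formatted) {
		fmt.Printf("%s needs formatting\n", path)
	}
	return nil
}

func main() {
	// Hypothetical path; any file touched by this commit could be checked the same way.
	if err := reportUnformatted("logs/file.go"); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}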

View File

@@ -15,11 +15,11 @@
package beego
var (
BuildVersion string
BuildVersion string
BuildGitRevision string
BuildStatus string
BuildTag string
BuildTime string
BuildStatus string
BuildTag string
BuildTime string
GoVersion string

View File

@@ -57,7 +57,7 @@ type Cache struct {
maxIdle int
//the timeout to a value less than the redis server's timeout.
timeout time.Duration
timeout time.Duration
}
// NewRedisCache create new redis cache with default collection name.

View File

@@ -296,7 +296,7 @@ func (c *ConfigContainer) getData(key string) (interface{}, error) {
case map[string]interface{}:
{
tmpData = v.(map[string]interface{})
if idx == len(keys) - 1 {
if idx == len(keys)-1 {
return tmpData, nil
}
}

View File

@@ -16,9 +16,9 @@ package logs
import (
"bytes"
"strings"
"encoding/json"
"fmt"
"strings"
"time"
)

View File

@@ -373,21 +373,21 @@ func (w *fileLogWriter) deleteOldLog() {
if info == nil {
return
}
if w.Hourly {
if !info.IsDir() && info.ModTime().Add(1 * time.Hour * time.Duration(w.MaxHours)).Before(time.Now()) {
if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
strings.HasSuffix(filepath.Base(path), w.suffix) {
os.Remove(path)
}
}
} else if w.Daily {
if !info.IsDir() && info.ModTime().Add(24 * time.Hour * time.Duration(w.MaxDays)).Before(time.Now()) {
if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
strings.HasSuffix(filepath.Base(path), w.suffix) {
os.Remove(path)
}
}
}
if w.Hourly {
if !info.IsDir() && info.ModTime().Add(1*time.Hour*time.Duration(w.MaxHours)).Before(time.Now()) {
if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
strings.HasSuffix(filepath.Base(path), w.suffix) {
os.Remove(path)
}
}
} else if w.Daily {
if !info.IsDir() && info.ModTime().Add(24*time.Hour*time.Duration(w.MaxDays)).Before(time.Now()) {
if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
strings.HasSuffix(filepath.Base(path), w.suffix) {
os.Remove(path)
}
}
}
return
})
}

View File

@@ -186,7 +186,7 @@ func TestFileDailyRotate_06(t *testing.T) { //test file mode
func TestFileHourlyRotate_01(t *testing.T) {
log := NewLogger(10000)
log.SetLogger("file", `{"filename":"test3.log","hourly":true,"maxlines":4}`)
log.SetLogger("file", `{"filename":"test3.log","hourly":true,"maxlines":4}`)
log.Debug("debug")
log.Info("info")
log.Notice("notice")
@@ -237,7 +237,7 @@ func TestFileHourlyRotate_05(t *testing.T) {
func TestFileHourlyRotate_06(t *testing.T) { //test file mode
log := NewLogger(10000)
log.SetLogger("file", `{"filename":"test3.log", "hourly":true, "maxlines":4}`)
log.SetLogger("file", `{"filename":"test3.log", "hourly":true, "maxlines":4}`)
log.Debug("debug")
log.Info("info")
log.Notice("notice")
@@ -269,19 +269,19 @@ func testFileRotate(t *testing.T, fn1, fn2 string, daily, hourly bool) {
RotatePerm: "0440",
}
if daily {
fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
fw.dailyOpenDate = fw.dailyOpenTime.Day()
}
if daily {
fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
fw.dailyOpenDate = fw.dailyOpenTime.Day()
}
if hourly {
fw.Init(fmt.Sprintf(`{"filename":"%v","maxhours":1}`, fn1))
fw.hourlyOpenTime = time.Now().Add(-1 * time.Hour)
fw.hourlyOpenDate = fw.hourlyOpenTime.Day()
}
if hourly {
fw.Init(fmt.Sprintf(`{"filename":"%v","maxhours":1}`, fn1))
fw.hourlyOpenTime = time.Now().Add(-1 * time.Hour)
fw.hourlyOpenDate = fw.hourlyOpenTime.Day()
}
fw.WriteMsg(time.Now(), "this is a msg for test", LevelDebug)
fw.WriteMsg(time.Now(), "this is a msg for test", LevelDebug)
for _, file := range []string{fn1, fn2} {
_, err := os.Stat(file)
@@ -328,8 +328,8 @@ func testFileDailyRotate(t *testing.T, fn1, fn2 string) {
func testFileHourlyRotate(t *testing.T, fn1, fn2 string) {
fw := &fileLogWriter{
Hourly: true,
MaxHours: 168,
Hourly: true,
MaxHours: 168,
Rotate: true,
Level: LevelTrace,
Perm: "0660",

View File

@@ -57,15 +57,15 @@ func registerBuildInfo() {
Subsystem: "build_info",
Help: "The building information",
ConstLabels: map[string]string{
"appname": beego.BConfig.AppName,
"appname": beego.BConfig.AppName,
"build_version": beego.BuildVersion,
"build_revision": beego.BuildGitRevision,
"build_status": beego.BuildStatus,
"build_tag": beego.BuildTag,
"build_time": strings.Replace(beego.BuildTime, "--", " ", 1),
"build_time": strings.Replace(beego.BuildTime, "--", " ", 1),
"go_version": beego.GoVersion,
"git_branch": beego.GitBranch,
"start_time": time.Now().Format("2006-01-02 15:04:05"),
"start_time": time.Now().Format("2006-01-02 15:04:05"),
},
}, []string{})

View File

@@ -197,9 +197,9 @@ func getDbCreateSQL(al *alias) (sqls []string, tableIndexes map[string][]dbIndex
if strings.Contains(column, "%COL%") {
column = strings.Replace(column, "%COL%", fi.column, -1)
}
if fi.description != "" && al.Driver!=DRSqlite {
column += " " + fmt.Sprintf("COMMENT '%s'",fi.description)
if fi.description != "" && al.Driver != DRSqlite {
column += " " + fmt.Sprintf("COMMENT '%s'", fi.description)
}
columns = append(columns, column)

View File

@@ -424,7 +424,7 @@ func GetDB(aliasNames ...string) (*sql.DB, error) {
}
type stmtDecorator struct {
wg sync.WaitGroup
wg sync.WaitGroup
stmt *sql.Stmt
}

View File

@@ -61,7 +61,7 @@ func debugLogQueies(alias *alias, operaton, query string, t time.Time, err error
con += " - " + err.Error()
}
logMap["sql"] = fmt.Sprintf("%s-`%s`", query, strings.Join(cons, "`, `"))
if LogFunc != nil{
if LogFunc != nil {
LogFunc(logMap)
}
DebugLog.Println(con)

View File

@@ -15,11 +15,11 @@
package beego
var (
BuildVersion string
BuildVersion string
BuildGitRevision string
BuildStatus string
BuildTag string
BuildTime string
BuildStatus string
BuildTag string
BuildTime string
GoVersion string

View File

@@ -57,7 +57,7 @@ type Cache struct {
maxIdle int
//the timeout to a value less than the redis server's timeout.
timeout time.Duration
timeout time.Duration
}
// NewRedisCache create new redis cache with default collection name.

View File

@@ -23,7 +23,7 @@ import (
func TestKVs(t *testing.T) {
key := "my-key"
kvs := NewKVs(KV{
Key: key,
Key: key,
Value: 12,
})

View File

@@ -296,7 +296,7 @@ func (c *ConfigContainer) getData(key string) (interface{}, error) {
case map[string]interface{}:
{
tmpData = v.(map[string]interface{})
if idx == len(keys) - 1 {
if idx == len(keys)-1 {
return tmpData, nil
}
}

View File

@@ -16,9 +16,9 @@ package logs
import (
"bytes"
"strings"
"encoding/json"
"fmt"
"strings"
"time"
)

View File

@@ -373,21 +373,21 @@ func (w *fileLogWriter) deleteOldLog() {
if info == nil {
return
}
if w.Hourly {
if !info.IsDir() && info.ModTime().Add(1 * time.Hour * time.Duration(w.MaxHours)).Before(time.Now()) {
if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
strings.HasSuffix(filepath.Base(path), w.suffix) {
os.Remove(path)
}
}
} else if w.Daily {
if !info.IsDir() && info.ModTime().Add(24 * time.Hour * time.Duration(w.MaxDays)).Before(time.Now()) {
if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
strings.HasSuffix(filepath.Base(path), w.suffix) {
os.Remove(path)
}
}
}
if w.Hourly {
if !info.IsDir() && info.ModTime().Add(1*time.Hour*time.Duration(w.MaxHours)).Before(time.Now()) {
if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
strings.HasSuffix(filepath.Base(path), w.suffix) {
os.Remove(path)
}
}
} else if w.Daily {
if !info.IsDir() && info.ModTime().Add(24*time.Hour*time.Duration(w.MaxDays)).Before(time.Now()) {
if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
strings.HasSuffix(filepath.Base(path), w.suffix) {
os.Remove(path)
}
}
}
return
})
}

View File

@@ -186,7 +186,7 @@ func TestFileDailyRotate_06(t *testing.T) { //test file mode
func TestFileHourlyRotate_01(t *testing.T) {
log := NewLogger(10000)
log.SetLogger("file", `{"filename":"test3.log","hourly":true,"maxlines":4}`)
log.SetLogger("file", `{"filename":"test3.log","hourly":true,"maxlines":4}`)
log.Debug("debug")
log.Info("info")
log.Notice("notice")
@@ -237,7 +237,7 @@ func TestFileHourlyRotate_05(t *testing.T) {
func TestFileHourlyRotate_06(t *testing.T) { //test file mode
log := NewLogger(10000)
log.SetLogger("file", `{"filename":"test3.log", "hourly":true, "maxlines":4}`)
log.SetLogger("file", `{"filename":"test3.log", "hourly":true, "maxlines":4}`)
log.Debug("debug")
log.Info("info")
log.Notice("notice")
@@ -269,19 +269,19 @@ func testFileRotate(t *testing.T, fn1, fn2 string, daily, hourly bool) {
RotatePerm: "0440",
}
if daily {
fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
fw.dailyOpenDate = fw.dailyOpenTime.Day()
}
if daily {
fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
fw.dailyOpenDate = fw.dailyOpenTime.Day()
}
if hourly {
fw.Init(fmt.Sprintf(`{"filename":"%v","maxhours":1}`, fn1))
fw.hourlyOpenTime = time.Now().Add(-1 * time.Hour)
fw.hourlyOpenDate = fw.hourlyOpenTime.Day()
}
if hourly {
fw.Init(fmt.Sprintf(`{"filename":"%v","maxhours":1}`, fn1))
fw.hourlyOpenTime = time.Now().Add(-1 * time.Hour)
fw.hourlyOpenDate = fw.hourlyOpenTime.Day()
}
fw.WriteMsg(time.Now(), "this is a msg for test", LevelDebug)
fw.WriteMsg(time.Now(), "this is a msg for test", LevelDebug)
for _, file := range []string{fn1, fn2} {
_, err := os.Stat(file)
@@ -328,8 +328,8 @@ func testFileDailyRotate(t *testing.T, fn1, fn2 string) {
func testFileHourlyRotate(t *testing.T, fn1, fn2 string) {
fw := &fileLogWriter{
Hourly: true,
MaxHours: 168,
Hourly: true,
MaxHours: 168,
Rotate: true,
Level: LevelTrace,
Perm: "0660",

View File

@@ -57,15 +57,15 @@ func registerBuildInfo() {
Subsystem: "build_info",
Help: "The building information",
ConstLabels: map[string]string{
"appname": beego.BConfig.AppName,
"appname": beego.BConfig.AppName,
"build_version": beego.BuildVersion,
"build_revision": beego.BuildGitRevision,
"build_status": beego.BuildStatus,
"build_tag": beego.BuildTag,
"build_time": strings.Replace(beego.BuildTime, "--", " ", 1),
"build_time": strings.Replace(beego.BuildTime, "--", " ", 1),
"go_version": beego.GoVersion,
"git_branch": beego.GitBranch,
"start_time": time.Now().Format("2006-01-02 15:04:05"),
"start_time": time.Now().Format("2006-01-02 15:04:05"),
},
}, []string{})

View File

@@ -197,9 +197,9 @@ func getDbCreateSQL(al *alias) (sqls []string, tableIndexes map[string][]dbIndex
if strings.Contains(column, "%COL%") {
column = strings.Replace(column, "%COL%", fi.column, -1)
}
if fi.description != "" && al.Driver!=DRSqlite {
column += " " + fmt.Sprintf("COMMENT '%s'",fi.description)
if fi.description != "" && al.Driver != DRSqlite {
column += " " + fmt.Sprintf("COMMENT '%s'", fi.description)
}
columns = append(columns, column)

View File

@@ -244,7 +244,7 @@ var _ dbQuerier = new(TxDB)
var _ txEnder = new(TxDB)
func (t *TxDB) Prepare(query string) (*sql.Stmt, error) {
return t.PrepareContext(context.Background(),query)
return t.PrepareContext(context.Background(), query)
}
func (t *TxDB) PrepareContext(ctx context.Context, query string) (*sql.Stmt, error) {
@@ -260,7 +260,7 @@ func (t *TxDB) ExecContext(ctx context.Context, query string, args ...interface{
}
func (t *TxDB) Query(query string, args ...interface{}) (*sql.Rows, error) {
return t.QueryContext(context.Background(),query,args...)
return t.QueryContext(context.Background(), query, args...)
}
func (t *TxDB) QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error) {
@@ -268,7 +268,7 @@ func (t *TxDB) QueryContext(ctx context.Context, query string, args ...interface
}
func (t *TxDB) QueryRow(query string, args ...interface{}) *sql.Row {
return t.QueryRowContext(context.Background(),query,args...)
return t.QueryRowContext(context.Background(), query, args...)
}
func (t *TxDB) QueryRowContext(ctx context.Context, query string, args ...interface{}) *sql.Row {

View File

@@ -490,11 +490,11 @@ func init() {
}
err := RegisterDataBase("default", DBARGS.Driver, DBARGS.Source, common.KV{
Key:MaxIdleConnsKey,
Value:20,
Key: MaxIdleConnsKey,
Value: 20,
})
if err != nil{
if err != nil {
panic(fmt.Sprintf("can not register database: %v", err))
}

View File

@@ -61,7 +61,7 @@ func debugLogQueies(alias *alias, operaton, query string, t time.Time, err error
con += " - " + err.Error()
}
logMap["sql"] = fmt.Sprintf("%s-`%s`", query, strings.Join(cons, "`, `"))
if LogFunc != nil{
if LogFunc != nil {
LogFunc(logMap)
}
DebugLog.Println(con)

View File

@@ -110,7 +110,7 @@ type DQL interface {
// Like Read(), but with "FOR UPDATE" clause, useful in transaction.
// Some databases are not support this feature.
ReadForUpdate( md interface{}, cols ...string) error
ReadForUpdate(md interface{}, cols ...string) error
ReadForUpdateWithCtx(ctx context.Context, md interface{}, cols ...string) error
// Try to read a row from the database, or insert one if it doesn't exist
@@ -129,14 +129,14 @@ type DQL interface {
// args[2] int offset default offset 0
// args[3] string order for example : "-Id"
// make sure the relation is defined in model struct tags.
LoadRelated( md interface{}, name string, args ...interface{}) (int64, error)
LoadRelated(md interface{}, name string, args ...interface{}) (int64, error)
LoadRelatedWithCtx(ctx context.Context, md interface{}, name string, args ...interface{}) (int64, error)
// create a models to models queryer
// for example:
// post := Post{Id: 4}
// m2m := Ormer.QueryM2M(&post, "Tags")
QueryM2M( md interface{}, name string) QueryM2Mer
QueryM2M(md interface{}, name string) QueryM2Mer
QueryM2MWithCtx(ctx context.Context, md interface{}, name string) QueryM2Mer
// return a QuerySeter for table operations.

View File

@@ -31,13 +31,14 @@
//
// more docs: http://beego.me/docs/module/session.md
package redis_cluster
import (
"github.com/astaxie/beego/session"
rediss "github.com/go-redis/redis"
"net/http"
"strconv"
"strings"
"sync"
"github.com/astaxie/beego/session"
rediss "github.com/go-redis/redis"
"time"
)
@@ -101,7 +102,7 @@ func (rs *SessionStore) SessionRelease(w http.ResponseWriter) {
return
}
c := rs.p
c.Set(rs.sid, string(b), time.Duration(rs.maxlifetime) * time.Second)
c.Set(rs.sid, string(b), time.Duration(rs.maxlifetime)*time.Second)
}
// Provider redis_cluster session provider
@@ -146,10 +147,10 @@ func (rp *Provider) SessionInit(maxlifetime int64, savePath string) error {
} else {
rp.dbNum = 0
}
rp.poollist = rediss.NewClusterClient(&rediss.ClusterOptions{
Addrs: strings.Split(rp.savePath, ";"),
Password: rp.password,
Password: rp.password,
PoolSize: rp.poolsize,
})
return rp.poollist.Ping().Err()
@@ -186,15 +187,15 @@ func (rp *Provider) SessionExist(sid string) bool {
// SessionRegenerate generate new sid for redis_cluster session
func (rp *Provider) SessionRegenerate(oldsid, sid string) (session.Store, error) {
c := rp.poollist
if existed, err := c.Exists(oldsid).Result(); err != nil || existed == 0 {
// oldsid doesn't exists, set the new sid directly
// ignore error here, since if it return error
// the existed value will be 0
c.Set(sid, "", time.Duration(rp.maxlifetime) * time.Second)
c.Set(sid, "", time.Duration(rp.maxlifetime)*time.Second)
} else {
c.Rename(oldsid, sid)
c.Expire(sid, time.Duration(rp.maxlifetime) * time.Second)
c.Expire(sid, time.Duration(rp.maxlifetime)*time.Second)
}
return rp.SessionRead(sid)
}

View File

@@ -369,8 +369,7 @@ func TestFileSessionStore_SessionRelease(t *testing.T) {
t.Error(err)
}
s.Set(i,i)
s.Set(i, i)
s.SessionRelease(nil)
}
@@ -384,4 +383,4 @@ func TestFileSessionStore_SessionRelease(t *testing.T) {
t.Error()
}
}
}
}

View File

@@ -202,7 +202,7 @@ func searchFile(ctx *context.Context) (string, os.FileInfo, error) {
if !strings.Contains(requestPath, prefix) {
continue
}
if prefix != "/" && len(requestPath) > len(prefix) && requestPath[len(prefix)] != '/' {
if prefix != "/" && len(requestPath) > len(prefix) && requestPath[len(prefix)] != '/' {
continue
}
filePath := path.Join(staticDir, requestPath[len(prefix):])

View File

@@ -362,7 +362,7 @@ func parseFormToStruct(form url.Values, objT reflect.Type, objV reflect.Value) e
value = value[:25]
t, err = time.ParseInLocation(time.RFC3339, value, time.Local)
} else if strings.HasSuffix(strings.ToUpper(value), "Z") {
t, err = time.ParseInLocation(time.RFC3339, value, time.Local)
t, err = time.ParseInLocation(time.RFC3339, value, time.Local)
} else if len(value) >= 19 {
if strings.Contains(value, "T") {
value = value[:19]

View File

@@ -113,7 +113,7 @@ type Task struct {
Next time.Time
Errlist []*taskerr // like errtime:errinfo
ErrLimit int // max length for the errlist, 0 stand for no limit
errCnt int // records the error count during the execution
errCnt int // records the error count during the execution
}
// NewTask add new task with name, time and func

View File

@@ -59,12 +59,12 @@ func TestSpec(t *testing.T) {
func TestTask_Run(t *testing.T) {
cnt := -1
task := func() error {
cnt ++
cnt++
fmt.Printf("Hello, world! %d \n", cnt)
return errors.New(fmt.Sprintf("Hello, world! %d", cnt))
}
tk := NewTask("taska", "0/30 * * * * *", task)
for i := 0; i < 200 ; i ++ {
for i := 0; i < 200; i++ {
e := tk.Run()
assert.NotNil(t, e)
}

View File

@@ -213,7 +213,7 @@ func parseFunc(vfunc, key string, label string) (v ValidFunc, err error) {
return
}
tParams, err := trim(name, key+"."+ name + "." + label, params)
tParams, err := trim(name, key+"."+name+"."+label, params)
if err != nil {
return
}

View File

@@ -31,13 +31,14 @@
//
// more docs: http://beego.me/docs/module/session.md
package redis_cluster
import (
"github.com/astaxie/beego/session"
rediss "github.com/go-redis/redis"
"net/http"
"strconv"
"strings"
"sync"
"github.com/astaxie/beego/session"
rediss "github.com/go-redis/redis"
"time"
)
@@ -101,7 +102,7 @@ func (rs *SessionStore) SessionRelease(w http.ResponseWriter) {
return
}
c := rs.p
c.Set(rs.sid, string(b), time.Duration(rs.maxlifetime) * time.Second)
c.Set(rs.sid, string(b), time.Duration(rs.maxlifetime)*time.Second)
}
// Provider redis_cluster session provider
@@ -146,10 +147,10 @@ func (rp *Provider) SessionInit(maxlifetime int64, savePath string) error {
} else {
rp.dbNum = 0
}
rp.poollist = rediss.NewClusterClient(&rediss.ClusterOptions{
Addrs: strings.Split(rp.savePath, ";"),
Password: rp.password,
Password: rp.password,
PoolSize: rp.poolsize,
})
return rp.poollist.Ping().Err()
@@ -186,15 +187,15 @@ func (rp *Provider) SessionExist(sid string) bool {
// SessionRegenerate generate new sid for redis_cluster session
func (rp *Provider) SessionRegenerate(oldsid, sid string) (session.Store, error) {
c := rp.poollist
if existed, err := c.Exists(oldsid).Result(); err != nil || existed == 0 {
// oldsid doesn't exists, set the new sid directly
// ignore error here, since if it return error
// the existed value will be 0
c.Set(sid, "", time.Duration(rp.maxlifetime) * time.Second)
c.Set(sid, "", time.Duration(rp.maxlifetime)*time.Second)
} else {
c.Rename(oldsid, sid)
c.Expire(sid, time.Duration(rp.maxlifetime) * time.Second)
c.Expire(sid, time.Duration(rp.maxlifetime)*time.Second)
}
return rp.SessionRead(sid)
}

View File

@@ -369,8 +369,7 @@ func TestFileSessionStore_SessionRelease(t *testing.T) {
t.Error(err)
}
s.Set(i,i)
s.Set(i, i)
s.SessionRelease(nil)
}
@@ -384,4 +383,4 @@ func TestFileSessionStore_SessionRelease(t *testing.T) {
t.Error()
}
}
}
}

View File

@@ -202,7 +202,7 @@ func searchFile(ctx *context.Context) (string, os.FileInfo, error) {
if !strings.Contains(requestPath, prefix) {
continue
}
if prefix != "/" && len(requestPath) > len(prefix) && requestPath[len(prefix)] != '/' {
if prefix != "/" && len(requestPath) > len(prefix) && requestPath[len(prefix)] != '/' {
continue
}
filePath := path.Join(staticDir, requestPath[len(prefix):])

View File

@@ -362,7 +362,7 @@ func parseFormToStruct(form url.Values, objT reflect.Type, objV reflect.Value) e
value = value[:25]
t, err = time.ParseInLocation(time.RFC3339, value, time.Local)
} else if strings.HasSuffix(strings.ToUpper(value), "Z") {
t, err = time.ParseInLocation(time.RFC3339, value, time.Local)
t, err = time.ParseInLocation(time.RFC3339, value, time.Local)
} else if len(value) >= 19 {
if strings.Contains(value, "T") {
value = value[:19]

View File

@@ -113,7 +113,7 @@ type Task struct {
Next time.Time
Errlist []*taskerr // like errtime:errinfo
ErrLimit int // max length for the errlist, 0 stand for no limit
errCnt int // records the error count during the execution
errCnt int // records the error count during the execution
}
// NewTask add new task with name, time and func

View File

@@ -59,12 +59,12 @@ func TestSpec(t *testing.T) {
func TestTask_Run(t *testing.T) {
cnt := -1
task := func() error {
cnt ++
cnt++
fmt.Printf("Hello, world! %d \n", cnt)
return errors.New(fmt.Sprintf("Hello, world! %d", cnt))
}
tk := NewTask("taska", "0/30 * * * * *", task)
for i := 0; i < 200 ; i ++ {
for i := 0; i < 200; i++ {
e := tk.Run()
assert.NotNil(t, e)
}

View File

@@ -213,7 +213,7 @@ func parseFunc(vfunc, key string, label string) (v ValidFunc, err error) {
return
}
tParams, err := trim(name, key+"."+ name + "." + label, params)
tParams, err := trim(name, key+"."+name+"."+label, params)
if err != nil {
return
}