Mirror of https://github.com/astaxie/beego.git (synced 2024-11-21 20:20:55 +00:00)
Format code

Commit 30eb889a91 (parent 9c51952db4)
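The diff below is a formatting-only pass: gofmt-style alignment of declarations, normalized spacing around operators and commas, and regrouped imports, applied both to the root packages and to their pkg/ copies. As a rough illustration (not part of the commit), the same kind of normalization can be reproduced with the standard library's go/format package; the sample input below is invented for the demo.

package main

import (
	"fmt"
	"go/format"
	"log"
)

func main() {
	// A misaligned var block, similar to the declarations touched below.
	src := []byte("package demo\n\nvar (\n\tBuildVersion string\n\tBuildGitRevision string\n\tBuildStatus string\n)\n")

	// format.Source applies the canonical gofmt layout (alignment, spacing).
	out, err := format.Source(src)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(string(out))
}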
@@ -15,11 +15,11 @@
 package beego

 var (
-    BuildVersion string
+    BuildVersion     string
     BuildGitRevision string
-    BuildStatus string
-    BuildTag string
-    BuildTime string
+    BuildStatus      string
+    BuildTag         string
+    BuildTime        string

     GoVersion string
cache/redis/redis.go (vendored), 2 lines changed
@@ -57,7 +57,7 @@ type Cache struct {
     maxIdle int

     //the timeout to a value less than the redis server's timeout.
-    timeout time.Duration
+    timeout time.Duration
 }

 // NewRedisCache create new redis cache with default collection name.
@@ -296,7 +296,7 @@ func (c *ConfigContainer) getData(key string) (interface{}, error) {
     case map[string]interface{}:
         {
             tmpData = v.(map[string]interface{})
-            if idx == len(keys) - 1 {
+            if idx == len(keys)-1 {
                 return tmpData, nil
             }
         }
@@ -16,9 +16,9 @@ package logs

 import (
     "bytes"
-    "strings"
     "encoding/json"
     "fmt"
+    "strings"
     "time"
 )

logs/file.go, 30 lines changed
@@ -373,21 +373,21 @@ func (w *fileLogWriter) deleteOldLog() {
         if info == nil {
             return
         }
-        if w.Hourly {
-            if !info.IsDir() && info.ModTime().Add(1 * time.Hour * time.Duration(w.MaxHours)).Before(time.Now()) {
-                if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
-                    strings.HasSuffix(filepath.Base(path), w.suffix) {
-                    os.Remove(path)
-                }
-            }
-        } else if w.Daily {
-            if !info.IsDir() && info.ModTime().Add(24 * time.Hour * time.Duration(w.MaxDays)).Before(time.Now()) {
-                if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
-                    strings.HasSuffix(filepath.Base(path), w.suffix) {
-                    os.Remove(path)
-                }
-            }
-        }
+        if w.Hourly {
+            if !info.IsDir() && info.ModTime().Add(1*time.Hour*time.Duration(w.MaxHours)).Before(time.Now()) {
+                if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
+                    strings.HasSuffix(filepath.Base(path), w.suffix) {
+                    os.Remove(path)
+                }
+            }
+        } else if w.Daily {
+            if !info.IsDir() && info.ModTime().Add(24*time.Hour*time.Duration(w.MaxDays)).Before(time.Now()) {
+                if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
+                    strings.HasSuffix(filepath.Base(path), w.suffix) {
+                    os.Remove(path)
+                }
+            }
+        }
         return
     })
 }
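Formatting aside, the substantive expressions in the block above are the retention checks: any non-directory entry whose modification time is older than MaxHours (hourly mode) or MaxDays (daily mode) is removed. A standalone sketch of that age check, with a hypothetical helper name, looks like this:

package main

import (
	"fmt"
	"time"
)

// expired reports whether a file modified at modTime is older than the
// retention window of maxDays days, mirroring the check in deleteOldLog.
func expired(modTime time.Time, maxDays int) bool {
	return modTime.Add(24 * time.Hour * time.Duration(maxDays)).Before(time.Now())
}

func main() {
	old := time.Now().Add(-8 * 24 * time.Hour)
	fmt.Println(expired(old, 7)) // true: older than a 7-day window
}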
@@ -186,7 +186,7 @@ func TestFileDailyRotate_06(t *testing.T) { //test file mode

 func TestFileHourlyRotate_01(t *testing.T) {
     log := NewLogger(10000)
-    log.SetLogger("file", `{"filename":"test3.log","hourly":true,"maxlines":4}`)
+    log.SetLogger("file", `{"filename":"test3.log","hourly":true,"maxlines":4}`)
     log.Debug("debug")
     log.Info("info")
     log.Notice("notice")
@@ -237,7 +237,7 @@ func TestFileHourlyRotate_05(t *testing.T) {

 func TestFileHourlyRotate_06(t *testing.T) { //test file mode
     log := NewLogger(10000)
-    log.SetLogger("file", `{"filename":"test3.log", "hourly":true, "maxlines":4}`)
+    log.SetLogger("file", `{"filename":"test3.log", "hourly":true, "maxlines":4}`)
     log.Debug("debug")
     log.Info("info")
     log.Notice("notice")
@@ -269,19 +269,19 @@ func testFileRotate(t *testing.T, fn1, fn2 string, daily, hourly bool) {
         RotatePerm: "0440",
     }

-    if daily {
-        fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
-        fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
-        fw.dailyOpenDate = fw.dailyOpenTime.Day()
-    }
+    if daily {
+        fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
+        fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
+        fw.dailyOpenDate = fw.dailyOpenTime.Day()
+    }

-    if hourly {
-        fw.Init(fmt.Sprintf(`{"filename":"%v","maxhours":1}`, fn1))
-        fw.hourlyOpenTime = time.Now().Add(-1 * time.Hour)
-        fw.hourlyOpenDate = fw.hourlyOpenTime.Day()
-    }
+    if hourly {
+        fw.Init(fmt.Sprintf(`{"filename":"%v","maxhours":1}`, fn1))
+        fw.hourlyOpenTime = time.Now().Add(-1 * time.Hour)
+        fw.hourlyOpenDate = fw.hourlyOpenTime.Day()
+    }

-    fw.WriteMsg(time.Now(), "this is a msg for test", LevelDebug)
+    fw.WriteMsg(time.Now(), "this is a msg for test", LevelDebug)

     for _, file := range []string{fn1, fn2} {
         _, err := os.Stat(file)
@@ -328,8 +328,8 @@ func testFileDailyRotate(t *testing.T, fn1, fn2 string) {

 func testFileHourlyRotate(t *testing.T, fn1, fn2 string) {
     fw := &fileLogWriter{
-        Hourly: true,
-        MaxHours: 168,
+        Hourly:     true,
+        MaxHours:   168,
         Rotate:     true,
         Level:      LevelTrace,
         Perm:       "0660",
@@ -57,15 +57,15 @@ func registerBuildInfo() {
         Subsystem: "build_info",
         Help:      "The building information",
         ConstLabels: map[string]string{
-            "appname": beego.BConfig.AppName,
+            "appname":        beego.BConfig.AppName,
             "build_version":  beego.BuildVersion,
             "build_revision": beego.BuildGitRevision,
             "build_status":   beego.BuildStatus,
             "build_tag":      beego.BuildTag,
-            "build_time": strings.Replace(beego.BuildTime, "--", " ", 1),
+            "build_time":     strings.Replace(beego.BuildTime, "--", " ", 1),
             "go_version":     beego.GoVersion,
             "git_branch":     beego.GitBranch,
-            "start_time": time.Now().Format("2006-01-02 15:04:05"),
+            "start_time":     time.Now().Format("2006-01-02 15:04:05"),
         },
     }, []string{})

@@ -197,9 +197,9 @@ func getDbCreateSQL(al *alias) (sqls []string, tableIndexes map[string][]dbIndex
     if strings.Contains(column, "%COL%") {
         column = strings.Replace(column, "%COL%", fi.column, -1)
     }

-    if fi.description != "" && al.Driver!=DRSqlite {
-        column += " " + fmt.Sprintf("COMMENT '%s'",fi.description)
+    if fi.description != "" && al.Driver != DRSqlite {
+        column += " " + fmt.Sprintf("COMMENT '%s'", fi.description)
     }

     columns = append(columns, column)
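The lines reformatted above append a column comment for drivers other than SQLite, so the generated fragment ends up looking like "name varchar(255) NOT NULL COMMENT 'user name'". A minimal sketch of that string building follows; the variable names and sample values are illustrative, not the orm's actual fields.

package main

import (
	"fmt"
	"strings"
)

func main() {
	column := "%COL% varchar(255) NOT NULL"
	name := "name"
	description := "user name"
	isSqlite := false

	// Substitute the placeholder column name, as getDbCreateSQL does.
	column = strings.Replace(column, "%COL%", name, -1)

	// SQLite has no COMMENT clause, so it is skipped for that driver.
	if description != "" && !isSqlite {
		column += " " + fmt.Sprintf("COMMENT '%s'", description)
	}
	fmt.Println(column) // name varchar(255) NOT NULL COMMENT 'user name'
}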
@@ -424,7 +424,7 @@ func GetDB(aliasNames ...string) (*sql.DB, error) {
 }

 type stmtDecorator struct {
-    wg sync.WaitGroup
+    wg   sync.WaitGroup
     stmt *sql.Stmt
 }

@@ -61,7 +61,7 @@ func debugLogQueies(alias *alias, operaton, query string, t time.Time, err error
     con += " - " + err.Error()
 }
 logMap["sql"] = fmt.Sprintf("%s-`%s`", query, strings.Join(cons, "`, `"))
-if LogFunc != nil{
+if LogFunc != nil {
     LogFunc(logMap)
 }
 DebugLog.Println(con)
@@ -15,11 +15,11 @@
 package beego

 var (
-    BuildVersion string
+    BuildVersion     string
     BuildGitRevision string
-    BuildStatus string
-    BuildTag string
-    BuildTime string
+    BuildStatus      string
+    BuildTag         string
+    BuildTime        string

     GoVersion string
pkg/cache/redis/redis.go (vendored), 2 lines changed
@@ -57,7 +57,7 @@ type Cache struct {
     maxIdle int

     //the timeout to a value less than the redis server's timeout.
-    timeout time.Duration
+    timeout time.Duration
 }

 // NewRedisCache create new redis cache with default collection name.
@@ -23,7 +23,7 @@ import (
 func TestKVs(t *testing.T) {
     key := "my-key"
     kvs := NewKVs(KV{
-        Key: key,
+        Key:   key,
         Value: 12,
     })

@@ -296,7 +296,7 @@ func (c *ConfigContainer) getData(key string) (interface{}, error) {
     case map[string]interface{}:
         {
             tmpData = v.(map[string]interface{})
-            if idx == len(keys) - 1 {
+            if idx == len(keys)-1 {
                 return tmpData, nil
             }
         }
@@ -16,9 +16,9 @@ package logs

 import (
     "bytes"
-    "strings"
     "encoding/json"
     "fmt"
+    "strings"
     "time"
 )

@@ -373,21 +373,21 @@ func (w *fileLogWriter) deleteOldLog() {
         if info == nil {
             return
         }
-        if w.Hourly {
-            if !info.IsDir() && info.ModTime().Add(1 * time.Hour * time.Duration(w.MaxHours)).Before(time.Now()) {
-                if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
-                    strings.HasSuffix(filepath.Base(path), w.suffix) {
-                    os.Remove(path)
-                }
-            }
-        } else if w.Daily {
-            if !info.IsDir() && info.ModTime().Add(24 * time.Hour * time.Duration(w.MaxDays)).Before(time.Now()) {
-                if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
-                    strings.HasSuffix(filepath.Base(path), w.suffix) {
-                    os.Remove(path)
-                }
-            }
-        }
+        if w.Hourly {
+            if !info.IsDir() && info.ModTime().Add(1*time.Hour*time.Duration(w.MaxHours)).Before(time.Now()) {
+                if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
+                    strings.HasSuffix(filepath.Base(path), w.suffix) {
+                    os.Remove(path)
+                }
+            }
+        } else if w.Daily {
+            if !info.IsDir() && info.ModTime().Add(24*time.Hour*time.Duration(w.MaxDays)).Before(time.Now()) {
+                if strings.HasPrefix(filepath.Base(path), filepath.Base(w.fileNameOnly)) &&
+                    strings.HasSuffix(filepath.Base(path), w.suffix) {
+                    os.Remove(path)
+                }
+            }
+        }
         return
     })
 }
@@ -186,7 +186,7 @@ func TestFileDailyRotate_06(t *testing.T) { //test file mode

 func TestFileHourlyRotate_01(t *testing.T) {
     log := NewLogger(10000)
-    log.SetLogger("file", `{"filename":"test3.log","hourly":true,"maxlines":4}`)
+    log.SetLogger("file", `{"filename":"test3.log","hourly":true,"maxlines":4}`)
     log.Debug("debug")
     log.Info("info")
     log.Notice("notice")
@@ -237,7 +237,7 @@ func TestFileHourlyRotate_05(t *testing.T) {

 func TestFileHourlyRotate_06(t *testing.T) { //test file mode
     log := NewLogger(10000)
-    log.SetLogger("file", `{"filename":"test3.log", "hourly":true, "maxlines":4}`)
+    log.SetLogger("file", `{"filename":"test3.log", "hourly":true, "maxlines":4}`)
     log.Debug("debug")
     log.Info("info")
     log.Notice("notice")
@@ -269,19 +269,19 @@ func testFileRotate(t *testing.T, fn1, fn2 string, daily, hourly bool) {
         RotatePerm: "0440",
     }

-    if daily {
-        fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
-        fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
-        fw.dailyOpenDate = fw.dailyOpenTime.Day()
-    }
+    if daily {
+        fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
+        fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
+        fw.dailyOpenDate = fw.dailyOpenTime.Day()
+    }

-    if hourly {
-        fw.Init(fmt.Sprintf(`{"filename":"%v","maxhours":1}`, fn1))
-        fw.hourlyOpenTime = time.Now().Add(-1 * time.Hour)
-        fw.hourlyOpenDate = fw.hourlyOpenTime.Day()
-    }
+    if hourly {
+        fw.Init(fmt.Sprintf(`{"filename":"%v","maxhours":1}`, fn1))
+        fw.hourlyOpenTime = time.Now().Add(-1 * time.Hour)
+        fw.hourlyOpenDate = fw.hourlyOpenTime.Day()
+    }

-    fw.WriteMsg(time.Now(), "this is a msg for test", LevelDebug)
+    fw.WriteMsg(time.Now(), "this is a msg for test", LevelDebug)

     for _, file := range []string{fn1, fn2} {
         _, err := os.Stat(file)
@@ -328,8 +328,8 @@ func testFileDailyRotate(t *testing.T, fn1, fn2 string) {

 func testFileHourlyRotate(t *testing.T, fn1, fn2 string) {
     fw := &fileLogWriter{
-        Hourly: true,
-        MaxHours: 168,
+        Hourly:     true,
+        MaxHours:   168,
         Rotate:     true,
         Level:      LevelTrace,
         Perm:       "0660",
@@ -57,15 +57,15 @@ func registerBuildInfo() {
         Subsystem: "build_info",
         Help:      "The building information",
         ConstLabels: map[string]string{
-            "appname": beego.BConfig.AppName,
+            "appname":        beego.BConfig.AppName,
             "build_version":  beego.BuildVersion,
             "build_revision": beego.BuildGitRevision,
             "build_status":   beego.BuildStatus,
             "build_tag":      beego.BuildTag,
-            "build_time": strings.Replace(beego.BuildTime, "--", " ", 1),
+            "build_time":     strings.Replace(beego.BuildTime, "--", " ", 1),
             "go_version":     beego.GoVersion,
             "git_branch":     beego.GitBranch,
-            "start_time": time.Now().Format("2006-01-02 15:04:05"),
+            "start_time":     time.Now().Format("2006-01-02 15:04:05"),
         },
     }, []string{})

@@ -197,9 +197,9 @@ func getDbCreateSQL(al *alias) (sqls []string, tableIndexes map[string][]dbIndex
     if strings.Contains(column, "%COL%") {
         column = strings.Replace(column, "%COL%", fi.column, -1)
     }

-    if fi.description != "" && al.Driver!=DRSqlite {
-        column += " " + fmt.Sprintf("COMMENT '%s'",fi.description)
+    if fi.description != "" && al.Driver != DRSqlite {
+        column += " " + fmt.Sprintf("COMMENT '%s'", fi.description)
     }

     columns = append(columns, column)
@@ -244,7 +244,7 @@ var _ dbQuerier = new(TxDB)
 var _ txEnder = new(TxDB)

 func (t *TxDB) Prepare(query string) (*sql.Stmt, error) {
-    return t.PrepareContext(context.Background(),query)
+    return t.PrepareContext(context.Background(), query)
 }

 func (t *TxDB) PrepareContext(ctx context.Context, query string) (*sql.Stmt, error) {
@@ -260,7 +260,7 @@ func (t *TxDB) ExecContext(ctx context.Context, query string, args ...interface{
 }

 func (t *TxDB) Query(query string, args ...interface{}) (*sql.Rows, error) {
-    return t.QueryContext(context.Background(),query,args...)
+    return t.QueryContext(context.Background(), query, args...)
 }

 func (t *TxDB) QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error) {
@@ -268,7 +268,7 @@ func (t *TxDB) QueryContext(ctx context.Context, query string, args ...interface
 }

 func (t *TxDB) QueryRow(query string, args ...interface{}) *sql.Row {
-    return t.QueryRowContext(context.Background(),query,args...)
+    return t.QueryRowContext(context.Background(), query, args...)
 }

 func (t *TxDB) QueryRowContext(ctx context.Context, query string, args ...interface{}) *sql.Row {
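The three methods above only gained a space after each comma; the underlying pattern is a transaction wrapper whose plain methods delegate to the context-aware variants with context.Background(). A compact sketch of that pattern over database/sql follows; the txWrapper type is a hypothetical stand-in, not beego's TxDB.

package ormsketch

import (
	"context"
	"database/sql"
)

// txWrapper mimics the delegation pattern: non-context methods call the
// context-aware ones with a background context.
type txWrapper struct {
	tx *sql.Tx
}

func (t *txWrapper) Prepare(query string) (*sql.Stmt, error) {
	return t.PrepareContext(context.Background(), query)
}

func (t *txWrapper) PrepareContext(ctx context.Context, query string) (*sql.Stmt, error) {
	return t.tx.PrepareContext(ctx, query)
}

func (t *txWrapper) Query(query string, args ...interface{}) (*sql.Rows, error) {
	return t.QueryContext(context.Background(), query, args...)
}

func (t *txWrapper) QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error) {
	return t.tx.QueryContext(ctx, query, args...)
}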
@@ -490,11 +490,11 @@ func init() {
     }

     err := RegisterDataBase("default", DBARGS.Driver, DBARGS.Source, common.KV{
-        Key:MaxIdleConnsKey,
-        Value:20,
+        Key:   MaxIdleConnsKey,
+        Value: 20,
     })

-    if err != nil{
+    if err != nil {
         panic(fmt.Sprintf("can not register database: %v", err))
     }

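The init hunk above passes a tuning option to RegisterDataBase as a common.KV pair. A minimal, self-contained sketch of that key/value option shape is below; the KV struct, register function, and sample connection string are generic stand-ins, not beego's actual definitions.

package main

import "fmt"

// KV is a stand-in for the key/value option type used above.
type KV struct {
	Key   interface{}
	Value interface{}
}

// register is a hypothetical function that accepts variadic KV options,
// mirroring how RegisterDataBase is called in the diff.
func register(name, driver, source string, opts ...KV) {
	for _, o := range opts {
		fmt.Printf("option %v = %v for %q\n", o.Key, o.Value, name)
	}
}

func main() {
	const MaxIdleConnsKey = "MaxIdleConns"
	register("default", "mysql", "user:pass@/db", KV{Key: MaxIdleConnsKey, Value: 20})
}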
@@ -61,7 +61,7 @@ func debugLogQueies(alias *alias, operaton, query string, t time.Time, err error
     con += " - " + err.Error()
 }
 logMap["sql"] = fmt.Sprintf("%s-`%s`", query, strings.Join(cons, "`, `"))
-if LogFunc != nil{
+if LogFunc != nil {
     LogFunc(logMap)
 }
 DebugLog.Println(con)
@@ -110,7 +110,7 @@ type DQL interface {

     // Like Read(), but with "FOR UPDATE" clause, useful in transaction.
     // Some databases are not support this feature.
-    ReadForUpdate( md interface{}, cols ...string) error
+    ReadForUpdate(md interface{}, cols ...string) error
     ReadForUpdateWithCtx(ctx context.Context, md interface{}, cols ...string) error

     // Try to read a row from the database, or insert one if it doesn't exist
@@ -129,14 +129,14 @@ type DQL interface {
     // args[2] int offset default offset 0
     // args[3] string order for example : "-Id"
     // make sure the relation is defined in model struct tags.
-    LoadRelated( md interface{}, name string, args ...interface{}) (int64, error)
+    LoadRelated(md interface{}, name string, args ...interface{}) (int64, error)
     LoadRelatedWithCtx(ctx context.Context, md interface{}, name string, args ...interface{}) (int64, error)

     // create a models to models queryer
     // for example:
     // post := Post{Id: 4}
     // m2m := Ormer.QueryM2M(&post, "Tags")
-    QueryM2M( md interface{}, name string) QueryM2Mer
+    QueryM2M(md interface{}, name string) QueryM2Mer
     QueryM2MWithCtx(ctx context.Context, md interface{}, name string) QueryM2Mer

     // return a QuerySeter for table operations.
@@ -31,13 +31,14 @@
 //
 // more docs: http://beego.me/docs/module/session.md
 package redis_cluster

 import (
-    "github.com/astaxie/beego/session"
-    rediss "github.com/go-redis/redis"
     "net/http"
     "strconv"
     "strings"
     "sync"
+
+    "github.com/astaxie/beego/session"
+    rediss "github.com/go-redis/redis"
     "time"
 )
@@ -101,7 +102,7 @@ func (rs *SessionStore) SessionRelease(w http.ResponseWriter) {
         return
     }
     c := rs.p
-    c.Set(rs.sid, string(b), time.Duration(rs.maxlifetime) * time.Second)
+    c.Set(rs.sid, string(b), time.Duration(rs.maxlifetime)*time.Second)
 }

 // Provider redis_cluster session provider
@@ -146,10 +147,10 @@ func (rp *Provider) SessionInit(maxlifetime int64, savePath string) error {
     } else {
         rp.dbNum = 0
     }

     rp.poollist = rediss.NewClusterClient(&rediss.ClusterOptions{
         Addrs:    strings.Split(rp.savePath, ";"),
-        Password: rp.password,
+        Password: rp.password,
         PoolSize: rp.poolsize,
     })
     return rp.poollist.Ping().Err()
@@ -186,15 +187,15 @@ func (rp *Provider) SessionExist(sid string) bool {
 // SessionRegenerate generate new sid for redis_cluster session
 func (rp *Provider) SessionRegenerate(oldsid, sid string) (session.Store, error) {
     c := rp.poollist

     if existed, err := c.Exists(oldsid).Result(); err != nil || existed == 0 {
         // oldsid doesn't exists, set the new sid directly
         // ignore error here, since if it return error
         // the existed value will be 0
-        c.Set(sid, "", time.Duration(rp.maxlifetime) * time.Second)
+        c.Set(sid, "", time.Duration(rp.maxlifetime)*time.Second)
     } else {
         c.Rename(oldsid, sid)
-        c.Expire(sid, time.Duration(rp.maxlifetime) * time.Second)
+        c.Expire(sid, time.Duration(rp.maxlifetime)*time.Second)
     }
     return rp.SessionRead(sid)
 }
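Behind the reformatted lines, SessionRegenerate either seeds the new sid or renames the old key and refreshes its TTL. A rough sketch of that flow against the go-redis client used here (the v6 API without contexts, as in the hunk) follows; error handling is trimmed and the function name is illustrative.

package sessionsketch

import (
	"time"

	redis "github.com/go-redis/redis"
)

// regenerate moves a session from oldsid to sid, mirroring the logic above.
func regenerate(c *redis.ClusterClient, oldsid, sid string, maxlifetime int64) {
	if existed, err := c.Exists(oldsid).Result(); err != nil || existed == 0 {
		// Old key is missing (or Exists failed): create the new key directly.
		c.Set(sid, "", time.Duration(maxlifetime)*time.Second)
	} else {
		// Old key exists: rename it and refresh its expiry.
		c.Rename(oldsid, sid)
		c.Expire(sid, time.Duration(maxlifetime)*time.Second)
	}
}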
@@ -369,8 +369,7 @@ func TestFileSessionStore_SessionRelease(t *testing.T) {
             t.Error(err)
         }

-
-        s.Set(i,i)
+        s.Set(i, i)
         s.SessionRelease(nil)
     }

@@ -384,4 +383,4 @@ func TestFileSessionStore_SessionRelease(t *testing.T) {
             t.Error()
         }
     }
-}
+}
@@ -202,7 +202,7 @@ func searchFile(ctx *context.Context) (string, os.FileInfo, error) {
         if !strings.Contains(requestPath, prefix) {
             continue
         }
-        if prefix != "/" && len(requestPath) > len(prefix) && requestPath[len(prefix)] != '/' {
+        if prefix != "/" && len(requestPath) > len(prefix) && requestPath[len(prefix)] != '/' {
             continue
         }
         filePath := path.Join(staticDir, requestPath[len(prefix):])
@@ -362,7 +362,7 @@ func parseFormToStruct(form url.Values, objT reflect.Type, objV reflect.Value) e
                 value = value[:25]
                 t, err = time.ParseInLocation(time.RFC3339, value, time.Local)
             } else if strings.HasSuffix(strings.ToUpper(value), "Z") {
-                t, err = time.ParseInLocation(time.RFC3339, value, time.Local)
+                t, err = time.ParseInLocation(time.RFC3339, value, time.Local)
             } else if len(value) >= 19 {
                 if strings.Contains(value, "T") {
                     value = value[:19]
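The reformatted branch above parses RFC 3339 timestamps with time.ParseInLocation so that values without an explicit zone are interpreted in local time. A small self-contained example of that call, with a made-up input value:

package main

import (
	"fmt"
	"time"
)

func main() {
	// RFC 3339 value with an explicit UTC designator, as handled by the
	// "Z" branch in parseFormToStruct.
	value := "2020-05-01T10:30:00Z"

	t, err := time.ParseInLocation(time.RFC3339, value, time.Local)
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Println(t.UTC()) // 2020-05-01 10:30:00 +0000 UTC
}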
@@ -113,7 +113,7 @@ type Task struct {
     Next     time.Time
     Errlist  []*taskerr // like errtime:errinfo
     ErrLimit int        // max length for the errlist, 0 stand for no limit
-    errCnt int // records the error count during the execution
+    errCnt   int        // records the error count during the execution
 }

 // NewTask add new task with name, time and func
@@ -59,12 +59,12 @@ func TestSpec(t *testing.T) {
 func TestTask_Run(t *testing.T) {
     cnt := -1
     task := func() error {
-        cnt ++
+        cnt++
         fmt.Printf("Hello, world! %d \n", cnt)
         return errors.New(fmt.Sprintf("Hello, world! %d", cnt))
     }
     tk := NewTask("taska", "0/30 * * * * *", task)
-    for i := 0; i < 200 ; i ++ {
+    for i := 0; i < 200; i++ {
         e := tk.Run()
         assert.NotNil(t, e)
     }
@@ -213,7 +213,7 @@ func parseFunc(vfunc, key string, label string) (v ValidFunc, err error) {
     return
 }

-tParams, err := trim(name, key+"."+ name + "." + label, params)
+tParams, err := trim(name, key+"."+name+"."+label, params)
 if err != nil {
     return
 }
@@ -31,13 +31,14 @@
 //
 // more docs: http://beego.me/docs/module/session.md
 package redis_cluster

 import (
-    "github.com/astaxie/beego/session"
-    rediss "github.com/go-redis/redis"
     "net/http"
     "strconv"
     "strings"
     "sync"
+
+    "github.com/astaxie/beego/session"
+    rediss "github.com/go-redis/redis"
     "time"
 )
@@ -101,7 +102,7 @@ func (rs *SessionStore) SessionRelease(w http.ResponseWriter) {
         return
     }
     c := rs.p
-    c.Set(rs.sid, string(b), time.Duration(rs.maxlifetime) * time.Second)
+    c.Set(rs.sid, string(b), time.Duration(rs.maxlifetime)*time.Second)
 }

 // Provider redis_cluster session provider
@@ -146,10 +147,10 @@ func (rp *Provider) SessionInit(maxlifetime int64, savePath string) error {
     } else {
         rp.dbNum = 0
     }

     rp.poollist = rediss.NewClusterClient(&rediss.ClusterOptions{
         Addrs:    strings.Split(rp.savePath, ";"),
-        Password: rp.password,
+        Password: rp.password,
         PoolSize: rp.poolsize,
     })
     return rp.poollist.Ping().Err()
@@ -186,15 +187,15 @@ func (rp *Provider) SessionExist(sid string) bool {
 // SessionRegenerate generate new sid for redis_cluster session
 func (rp *Provider) SessionRegenerate(oldsid, sid string) (session.Store, error) {
     c := rp.poollist

     if existed, err := c.Exists(oldsid).Result(); err != nil || existed == 0 {
         // oldsid doesn't exists, set the new sid directly
         // ignore error here, since if it return error
         // the existed value will be 0
-        c.Set(sid, "", time.Duration(rp.maxlifetime) * time.Second)
+        c.Set(sid, "", time.Duration(rp.maxlifetime)*time.Second)
     } else {
         c.Rename(oldsid, sid)
-        c.Expire(sid, time.Duration(rp.maxlifetime) * time.Second)
+        c.Expire(sid, time.Duration(rp.maxlifetime)*time.Second)
     }
     return rp.SessionRead(sid)
 }
@@ -369,8 +369,7 @@ func TestFileSessionStore_SessionRelease(t *testing.T) {
             t.Error(err)
         }

-
-        s.Set(i,i)
+        s.Set(i, i)
         s.SessionRelease(nil)
     }

@@ -384,4 +383,4 @@ func TestFileSessionStore_SessionRelease(t *testing.T) {
             t.Error()
         }
     }
-}
+}
@@ -202,7 +202,7 @@ func searchFile(ctx *context.Context) (string, os.FileInfo, error) {
         if !strings.Contains(requestPath, prefix) {
             continue
         }
-        if prefix != "/" && len(requestPath) > len(prefix) && requestPath[len(prefix)] != '/' {
+        if prefix != "/" && len(requestPath) > len(prefix) && requestPath[len(prefix)] != '/' {
             continue
         }
         filePath := path.Join(staticDir, requestPath[len(prefix):])
@@ -362,7 +362,7 @@ func parseFormToStruct(form url.Values, objT reflect.Type, objV reflect.Value) e
                 value = value[:25]
                 t, err = time.ParseInLocation(time.RFC3339, value, time.Local)
             } else if strings.HasSuffix(strings.ToUpper(value), "Z") {
-                t, err = time.ParseInLocation(time.RFC3339, value, time.Local)
+                t, err = time.ParseInLocation(time.RFC3339, value, time.Local)
             } else if len(value) >= 19 {
                 if strings.Contains(value, "T") {
                     value = value[:19]
@@ -113,7 +113,7 @@ type Task struct {
     Next     time.Time
     Errlist  []*taskerr // like errtime:errinfo
     ErrLimit int        // max length for the errlist, 0 stand for no limit
-    errCnt int // records the error count during the execution
+    errCnt   int        // records the error count during the execution
 }

 // NewTask add new task with name, time and func
@@ -59,12 +59,12 @@ func TestSpec(t *testing.T) {
 func TestTask_Run(t *testing.T) {
     cnt := -1
     task := func() error {
-        cnt ++
+        cnt++
         fmt.Printf("Hello, world! %d \n", cnt)
         return errors.New(fmt.Sprintf("Hello, world! %d", cnt))
     }
     tk := NewTask("taska", "0/30 * * * * *", task)
-    for i := 0; i < 200 ; i ++ {
+    for i := 0; i < 200; i++ {
         e := tk.Run()
         assert.NotNil(t, e)
     }
@@ -213,7 +213,7 @@ func parseFunc(vfunc, key string, label string) (v ValidFunc, err error) {
     return
 }

-tParams, err := trim(name, key+"."+ name + "." + label, params)
+tParams, err := trim(name, key+"."+name+"."+label, params)
 if err != nil {
     return
 }