Merge branch 'astaxie/develop' into develop

ysqi 2016-06-01 19:54:35 +08:00
commit 2ebf3cd450
7 changed files with 318 additions and 22 deletions

View File

@@ -69,10 +69,8 @@ func (ctx *Context) Reset(rw http.ResponseWriter, r *http.Request) {
}
// Redirect does redirection to localurl with http header status code.
// It sends http response header directly.
func (ctx *Context) Redirect(status int, localurl string) {
ctx.Output.Header("Location", localurl)
ctx.ResponseWriter.WriteHeader(status)
http.Redirect(ctx.ResponseWriter, ctx.Request, localurl, status)
}
// Abort stops this request.
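The rewritten Redirect hands the work to the standard library: http.Redirect sets the Location header, writes the status code, and (for GET requests) emits a short HTML body, instead of setting the header and status by hand. A minimal standalone sketch of that behavior with plain net/http, not part of this commit; the handler name and target URL are made up:

package main

import "net/http"

// redirectHandler shows what the new Redirect delegates to: http.Redirect
// sets Location, writes the status code, and adds a small "Found" HTML body.
func redirectHandler(w http.ResponseWriter, r *http.Request) {
	http.Redirect(w, r, "/login", http.StatusFound)
}

func main() {
	http.HandleFunc("/old", redirectHandler)
	http.ListenAndServe(":8080", nil)
}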

View File

@@ -22,6 +22,7 @@ import (
"io"
"os"
"path/filepath"
"strconv"
"strings"
"sync"
"time"
@@ -53,7 +54,7 @@ type fileLogWriter struct {
Level int `json:"level"`
Perm os.FileMode `json:"perm"`
Perm string `json:"perm"`
fileNameOnly, suffix string // like "project.log", project is fileNameOnly and .log is suffix
}
@@ -65,7 +66,7 @@ func newFileWriter() Logger {
MaxDays: 7,
Rotate: true,
Level: LevelTrace,
Perm: 0660,
Perm: "0660",
}
return w
}
@@ -79,7 +80,7 @@ func newFileWriter() Logger {
// "daily":true,
// "maxDays":15,
// "rotate":true,
// "perm":0600
// "perm":"0600"
// }
func (w *fileLogWriter) Init(jsonConfig string) error {
err := json.Unmarshal([]byte(jsonConfig), w)
@@ -153,7 +154,11 @@ func (w *fileLogWriter) WriteMsg(when time.Time, msg string, level int) error {
func (w *fileLogWriter) createLogFile() (*os.File, error) {
// Open the log file
fd, err := os.OpenFile(w.Filename, os.O_WRONLY|os.O_APPEND|os.O_CREATE, w.Perm)
perm, err := strconv.ParseInt(w.Perm, 8, 64)
if err != nil {
return nil, err
}
fd, err := os.OpenFile(w.Filename, os.O_WRONLY|os.O_APPEND|os.O_CREATE, os.FileMode(perm))
return fd, err
}
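Because Perm is now a string, createLogFile parses the octal text with strconv.ParseInt in base 8 before handing the result to os.OpenFile. A standalone sketch of that conversion, not part of the commit; the file name is illustrative, and the process umask can still narrow the mode of a newly created file:

package main

import (
	"fmt"
	"os"
	"strconv"
)

func main() {
	// "0660" parsed in base 8 gives the numeric mode 0660 (rw for owner and group).
	perm, err := strconv.ParseInt("0660", 8, 64)
	if err != nil {
		panic(err)
	}
	f, err := os.OpenFile("example.log", os.O_WRONLY|os.O_APPEND|os.O_CREATE, os.FileMode(perm))
	if err != nil {
		panic(err)
	}
	defer f.Close()
	fmt.Printf("opened example.log with mode %o\n", perm)
}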
@@ -167,6 +172,9 @@ func (w *fileLogWriter) initFd() error {
w.dailyOpenTime = time.Now()
w.dailyOpenDate = w.dailyOpenTime.Day()
w.maxLinesCurLines = 0
if w.Daily {
go w.dailyRotate(w.dailyOpenTime)
}
if fInfo.Size() > 0 {
count, err := w.lines()
if err != nil {
@@ -177,6 +185,22 @@ func (w *fileLogWriter) initFd() error {
return nil
}
func (w *fileLogWriter) dailyRotate(openTime time.Time) {
y, m, d := openTime.Add(24 * time.Hour).Date()
nextDay := time.Date(y, m, d, 0, 0, 0, 0, openTime.Location())
tm := time.NewTimer(time.Duration(nextDay.UnixNano() - openTime.UnixNano() + 100))
select {
case <-tm.C:
w.Lock()
if w.needRotate(0, time.Now().Day()) {
if err := w.doRotate(time.Now()); err != nil {
fmt.Fprintf(os.Stderr, "FileLogWriter(%q): %s\n", w.Filename, err)
}
}
w.Unlock()
}
}
func (w *fileLogWriter) lines() (int, error) {
fd, err := os.Open(w.Filename)
if err != nil {
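initFd now starts dailyRotate in a goroutine when Daily is enabled; the goroutine sleeps until just past the next local midnight, then re-checks needRotate under the writer lock. The scheduling arithmetic can be read in isolation; a small sketch, not part of the commit, with an illustrative helper name:

package main

import (
	"fmt"
	"time"
)

// nextMidnightDelay mirrors dailyRotate's timer computation: take the calendar
// day after openTime in the same location and wait until just after it begins.
func nextMidnightDelay(openTime time.Time) time.Duration {
	y, m, d := openTime.Add(24 * time.Hour).Date()
	nextDay := time.Date(y, m, d, 0, 0, 0, 0, openTime.Location())
	// The +100ns nudges the timer just past the day boundary, as in the diff.
	return time.Duration(nextDay.UnixNano() - openTime.UnixNano() + 100)
}

func main() {
	fmt.Println("time until next rotation check:", nextMidnightDelay(time.Now()))
}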

View File

@@ -17,12 +17,34 @@ package logs
import (
"bufio"
"fmt"
"io/ioutil"
"os"
"strconv"
"testing"
"time"
)
func TestFilePerm(t *testing.T) {
log := NewLogger(10000)
log.SetLogger("file", `{"filename":"test.log", "perm": "0600"}`)
log.Debug("debug")
log.Informational("info")
log.Notice("notice")
log.Warning("warning")
log.Error("error")
log.Alert("alert")
log.Critical("critical")
log.Emergency("emergency")
file, err := os.Stat("test.log")
if err != nil {
t.Fatal(err)
}
if file.Mode() != 0600 {
t.Fatal("unexpected log file permission")
}
os.Remove("test.log")
}
func TestFile1(t *testing.T) {
log := NewLogger(10000)
log.SetLogger("file", `{"filename":"test.log"}`)
@@ -125,13 +147,28 @@ func TestFileRotate_03(t *testing.T) {
os.Remove(fn)
}
func TestFileRotate_04(t *testing.T) {
fn1 := "rotate_day.log"
fn2 := "rotate_day." + time.Now().Add(-24*time.Hour).Format("2006-01-02") + ".log"
testFileDailyRotate(t, fn1, fn2)
}
func TestFileRotate_05(t *testing.T) {
fn1 := "rotate_day.log"
fn := "rotate_day." + time.Now().Add(-24*time.Hour).Format("2006-01-02") + ".log"
os.Create(fn)
fn2 := "rotate_day." + time.Now().Add(-24*time.Hour).Format("2006-01-02") + ".001.log"
testFileDailyRotate(t, fn1, fn2)
os.Remove(fn)
}
func testFileRotate(t *testing.T, fn1, fn2 string) {
fw := &fileLogWriter{
Daily: true,
MaxDays: 7,
Rotate: true,
Level: LevelTrace,
Perm: 0660,
Perm: "0660",
}
fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
@@ -145,6 +182,38 @@ func testFileRotate(t *testing.T, fn1, fn2 string) {
}
os.Remove(file)
}
fw.Destroy()
}
func testFileDailyRotate(t *testing.T, fn1, fn2 string) {
fw := &fileLogWriter{
Daily: true,
MaxDays: 7,
Rotate: true,
Level: LevelTrace,
Perm: "0660",
}
fw.Init(fmt.Sprintf(`{"filename":"%v","maxdays":1}`, fn1))
fw.dailyOpenTime = time.Now().Add(-24 * time.Hour)
fw.dailyOpenDate = fw.dailyOpenTime.Day()
today, _ := time.ParseInLocation("2006-01-02", time.Now().Format("2006-01-02"), fw.dailyOpenTime.Location())
today = today.Add(-1 * time.Second)
fw.dailyRotate(today)
for _, file := range []string{fn1, fn2} {
_, err := os.Stat(file)
if err != nil {
t.FailNow()
}
content, err := ioutil.ReadFile(file)
if err != nil {
t.FailNow()
}
if len(content) > 0 {
t.FailNow()
}
os.Remove(file)
}
fw.Destroy()
}
func exists(path string) (bool, error) {

View File

@@ -1475,7 +1475,11 @@ func (d *dbBase) ReadValues(q dbQuerier, qs *querySet, mi *modelInfo, cond *Cond
sels := strings.Join(cols, ", ")
query := fmt.Sprintf("SELECT %s FROM %s%s%s T0 %s%s%s%s%s", sels, Q, mi.table, Q, join, where, groupBy, orderBy, limit)
sqlSelect := "SELECT"
if qs.distinct {
sqlSelect += " DISTINCT"
}
query := fmt.Sprintf("%s %s FROM %s%s%s T0 %s%s%s%s%s", sqlSelect, sels, Q, mi.table, Q, join, where, groupBy, orderBy, limit)
d.ins.ReplaceMarks(&query)
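ReadValues now prefixes the generated SELECT with DISTINCT when the query set's distinct flag is set. A hedged usage sketch, not part of this diff, assuming the public QuerySeter exposes a Distinct() method that sets that flag; the model, driver, and DSN are placeholders:

package main

import (
	"fmt"

	"github.com/astaxie/beego/orm"
	_ "github.com/go-sql-driver/mysql" // driver choice is illustrative
)

// User is a throwaway model for illustration only.
type User struct {
	Id   int
	Name string
}

func init() {
	orm.RegisterModel(new(User))
	// Placeholder DSN; point it at a real database to run this.
	orm.RegisterDataBase("default", "mysql", "user:pass@/dbname?charset=utf8")
}

func main() {
	o := orm.NewOrm()
	var names []orm.ParamsList
	// With the distinct flag set, ReadValues emits roughly:
	//   SELECT DISTINCT T0.`name` FROM `user` T0
	num, err := o.QueryTable("user").Distinct().ValuesList(&names, "name")
	if err == nil {
		fmt.Println("distinct names:", num)
	}
}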

View File

@@ -66,7 +66,7 @@ func registerModel(prefix string, model interface{}) {
}
if info.fields.pk == nil {
fmt.Printf("<orm.RegisterModel> `%s` need a primary key field\n", name)
fmt.Printf("<orm.RegisterModel> `%s` need a primary key field, default use 'id' if not set\n", name)
os.Exit(2)
}
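The clarified message points out that the ORM falls back to a field named Id as the primary key when no field is tagged as pk. A small model sketch, not part of the commit; type and field names are made up:

package main

import "github.com/astaxie/beego/orm"

// Post relies on the default convention: an integer field named Id becomes the auto primary key.
type Post struct {
	Id    int
	Title string
}

// Tag declares its primary key explicitly with the "pk" tag.
type Tag struct {
	Code string `orm:"pk"`
	Name string
}

func init() {
	orm.RegisterModel(new(Post), new(Tag))
}

func main() {}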

session/ssdb/sess_ssdb.go (new file, 192 lines added)
View File

@@ -0,0 +1,192 @@
package ssdb
import (
"errors"
"net/http"
"strconv"
"strings"
"sync"
"github.com/astaxie/beego/session"
"github.com/ssdb/gossdb/ssdb"
)
var ssdbProvider = &SsdbProvider{}
type SsdbProvider struct {
client *ssdb.Client
host string
port int
maxLifetime int64
}
func (p *SsdbProvider) connectInit() error {
var err error
if p.host == "" || p.port == 0 {
return errors.New("SessionInit First")
}
p.client, err = ssdb.Connect(p.host, p.port)
if err != nil {
return err
}
return nil
}
func (p *SsdbProvider) SessionInit(maxLifetime int64, savePath string) error {
var e error = nil
p.maxLifetime = maxLifetime
address := strings.Split(savePath, ":")
p.host = address[0]
p.port, e = strconv.Atoi(address[1])
if e != nil {
return e
}
err := p.connectInit()
if err != nil {
return err
}
return nil
}
func (p *SsdbProvider) SessionRead(sid string) (session.Store, error) {
if p.client == nil {
if err := p.connectInit(); err != nil {
return nil, err
}
}
var kv map[interface{}]interface{}
value, err := p.client.Get(sid)
if err != nil {
return nil, err
}
if value == nil || len(value.(string)) == 0 {
kv = make(map[interface{}]interface{})
} else {
kv, err = session.DecodeGob([]byte(value.(string)))
if err != nil {
return nil, err
}
}
rs := &SessionStore{sid: sid, values: kv, maxLifetime: p.maxLifetime, client: p.client}
return rs, nil
}
func (p *SsdbProvider) SessionExist(sid string) bool {
if p.client == nil {
if err := p.connectInit(); err != nil {
panic(err)
}
}
value, err := p.client.Get(sid)
if err != nil {
panic(err)
}
if value == nil || len(value.(string)) == 0 {
return false
}
return true
}
func (p *SsdbProvider) SessionRegenerate(oldsid, sid string) (session.Store, error) {
//conn.Do("setx", key, v, ttl)
if p.client == nil {
if err := p.connectInit(); err != nil {
return nil, err
}
}
value, err := p.client.Get(oldsid)
if err != nil {
return nil, err
}
var kv map[interface{}]interface{}
if value == nil || len(value.(string)) == 0 {
kv = make(map[interface{}]interface{})
} else {
kv, err = session.DecodeGob([]byte(value.(string)))
if err != nil {
return nil, err
}
_, err = p.client.Del(oldsid)
if err != nil {
return nil, err
}
}
_, e := p.client.Do("setx", sid, value, p.maxLifetime)
if e != nil {
return nil, e
}
rs := &SessionStore{sid: sid, values: kv, maxLifetime: p.maxLifetime, client: p.client}
return rs, nil
}
func (p *SsdbProvider) SessionDestroy(sid string) error {
if p.client == nil {
if err := p.connectInit(); err != nil {
return err
}
}
_, err := p.client.Del(sid)
if err != nil {
return err
}
return nil
}
func (p *SsdbProvider) SessionGC() {
return
}
func (p *SsdbProvider) SessionAll() int {
return 0
}
type SessionStore struct {
sid string
lock sync.RWMutex
values map[interface{}]interface{}
maxLifetime int64
client *ssdb.Client
}
func (s *SessionStore) Set(key, value interface{}) error {
s.lock.Lock()
defer s.lock.Unlock()
s.values[key] = value
return nil
}
func (s *SessionStore) Get(key interface{}) interface{} {
s.lock.Lock()
defer s.lock.Unlock()
if value, ok := s.values[key]; ok {
return value
}
return nil
}
func (s *SessionStore) Delete(key interface{}) error {
s.lock.Lock()
defer s.lock.Unlock()
delete(s.values, key)
return nil
}
func (s *SessionStore) Flush() error {
s.lock.Lock()
defer s.lock.Unlock()
s.values = make(map[interface{}]interface{})
return nil
}
func (s *SessionStore) SessionID() string {
return s.sid
}
func (s *SessionStore) SessionRelease(w http.ResponseWriter) {
b, err := session.EncodeGob(s.values)
if err != nil {
return
}
s.client.Do("setx", s.sid, string(b), s.maxLifetime)
}
func init() {
session.Register("ssdb", ssdbProvider)
}
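The new provider registers itself under the name "ssdb" in init, and SessionInit expects its save path as "host:port", which it splits and dials. A hedged wiring sketch, not part of the commit, assuming the JSON-string form of session.NewManager used by this beego version; the address, port, and cookie name are illustrative:

package main

import (
	"net/http"

	"github.com/astaxie/beego/session"
	_ "github.com/astaxie/beego/session/ssdb" // blank import runs init(), registering "ssdb"
)

func main() {
	// ProviderConfig becomes the savePath handed to SessionInit: "host:port".
	cfg := `{"cookieName":"gosessionid","gclifetime":3600,"ProviderConfig":"127.0.0.1:8888"}`
	globalSessions, err := session.NewManager("ssdb", cfg)
	if err != nil {
		panic(err)
	}
	go globalSessions.GC()

	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		sess, _ := globalSessions.SessionStart(w, r)
		defer sess.SessionRelease(w)
		sess.Set("hits", 1)
	})
	http.ListenAndServe(":8080", nil)
}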

View File

@@ -33,12 +33,12 @@ import (
var (
beegoTplFuncMap = make(template.FuncMap)
// beeTemplates caching map and supported template file extensions.
beeTemplates = make(map[string]TemplateRenderer)
beeTemplates = make(map[string]*template.Template)
templatesLock sync.RWMutex
// beeTemplateExt stores the template extension which will build
beeTemplateExt = []string{"tpl", "html"}
// beeTemplatePreprocessors stores associations of extension -> preprocessor handler
beeTemplateEngines = map[string]templateHandler{}
beeTemplateEngines = map[string]templatePreProcessor{}
)
// ExecuteTemplate applies the template with name to the specified data object,
@@ -50,11 +50,22 @@ func ExecuteTemplate(wr io.Writer, name string, data interface{}) error {
defer templatesLock.RUnlock()
}
if t, ok := beeTemplates[name]; ok {
err := t.ExecuteTemplate(wr, name, data)
if err != nil {
logs.Trace("template Execute err:", err)
if t.Lookup(name) != nil {
err := t.ExecuteTemplate(wr, name, data)
if err != nil {
logs.Trace("template Execute err:", err)
}
return err
} else {
err := t.Execute(wr, data)
if err != nil {
if err != nil {
logs.Trace("template Execute err:", err)
}
return err
}
}
return err
return nil
}
panic("can't find templatefile in the path:" + name)
}
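ExecuteTemplate now asks the cached template set whether a template with the requested name exists (t.Lookup) and falls back to executing the root template when it does not. The Lookup-or-fallback pattern in isolation, not part of the commit, using html/template directly; the template name and data are illustrative:

package main

import (
	"html/template"
	"os"
)

func main() {
	t := template.Must(template.New("index.tpl").Parse(`Hello {{.}}!`))

	name := "index.tpl"
	var err error
	if t.Lookup(name) != nil {
		// A template with that name is defined in the set: execute it by name.
		err = t.ExecuteTemplate(os.Stdout, name, "world")
	} else {
		// Otherwise execute the root template, as the new else branch does.
		err = t.Execute(os.Stdout, "world")
	}
	if err != nil {
		panic(err)
	}
}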
@@ -94,10 +105,8 @@ func AddFuncMap(key string, fn interface{}) error {
return nil
}
type templateHandler func(root, path string, funcs template.FuncMap) (TemplateRenderer, error)
type TemplateRenderer interface {
ExecuteTemplate(wr io.Writer, name string, data interface{}) error
}
type templatePreProcessor func(root, path string, funcs template.FuncMap) (*template.Template, error)
type templateFile struct {
root string
files map[string][]string
@@ -172,7 +181,7 @@ func BuildTemplate(dir string, files ...string) error {
if buildAllFiles || utils.InSlice(file, files) {
templatesLock.Lock()
ext := filepath.Ext(file)
var t TemplateRenderer
var t *template.Template
if len(ext) == 0 {
t, err = getTemplate(self.root, file, v...)
} else if fn, ok := beeTemplateEngines[ext[1:]]; ok {
@@ -325,7 +334,7 @@ func DelStaticPath(url string) *App {
return BeeApp
}
func AddTemplateEngine(extension string, fn templateHandler) *App {
func AddTemplateEngine(extension string, fn templatePreProcessor) *App {
AddTemplateExt(extension)
beeTemplateEngines[extension] = fn
return BeeApp
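AddTemplateEngine now accepts a templatePreProcessor, i.e. a function that returns a plain *template.Template instead of the TemplateRenderer interface. A hedged registration sketch, not part of the commit; the ".md" extension and the trivial pre-processing step are made up:

package main

import (
	"html/template"
	"io/ioutil"
	"path/filepath"

	"github.com/astaxie/beego"
)

func main() {
	// The function literal matches templatePreProcessor:
	// func(root, path string, funcs template.FuncMap) (*template.Template, error)
	beego.AddTemplateEngine("md", func(root, path string, funcs template.FuncMap) (*template.Template, error) {
		b, err := ioutil.ReadFile(filepath.Join(root, path))
		if err != nil {
			return nil, err
		}
		// Illustration only: parse the file as an ordinary Go template.
		return template.New(path).Funcs(funcs).Parse(string(b))
	})
	beego.Run()
}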