Commit

Add article list search and export features; fix a pagination bug
qiang committed Jun 30, 2020
1 parent aee31d7 commit 33235a3
Showing 7 changed files with 38 additions and 43 deletions.
30 changes: 8 additions & 22 deletions ctrl/article/index.go
@@ -9,7 +9,6 @@ import (
     "go-admin/public/common"
     "strconv"
     "strings"
-    "time"
 )
 
 func Create(c *gin.Context) {
@@ -205,32 +204,19 @@ func Edit(c *gin.Context) {
 func Index(c *gin.Context) {
     page, _ := strconv.ParseInt(c.Query("page"), 10, 64)
     limit, _ := strconv.ParseInt(c.Query("limit"), 10, 64)
-    type filters struct {
-        Status     int
-        Title      string
-        Importance int
-        StartTime  time.Time
-        EndTime    time.Time
-    }
-    filter := filters{}
+    var filters = map[string]string{"status": "", "title": "", "importance": "", "start_time": "", "end_time": ""}
+
     dateValues := c.QueryArray("dateValue[]")
     if len(dateValues) == 2 {
-        filter.StartTime = common.StrToTimes(dateValues[0])
-        filter.EndTime = common.StrToTimes(dateValues[1])
-    }
-    status := c.Query("status")
-    if status != "" {
-        filter.Status, _ = strconv.Atoi(status)
+        filters["start_time"] = dateValues[0]
+        filters["end_time"] = dateValues[1]
     }
-    importance := c.Query("importance")
-    if importance != "" {
-        filter.Importance, _ = strconv.Atoi(importance)
-    }
-    filter.Title = c.Query("title")
 
+    filters["status"] = c.Query("status")
+    filters["importance"] = c.Query("importance")
+    filters["title"] = c.Query("title")
     paging := &common.Paging{Page: page, PageSize: limit}
     articleModel := models.SystemArticle{}
-    articleArr, err := articleModel.GetAllPage(paging)
+    articleArr, err := articleModel.GetAllPage(paging, filters)
     var articlePageArr []models.SystemArticlePage
 
     for _, v := range articleArr {
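Side note on the query-string handling introduced above: gin returns `dateValue[]` as an array and the other filters as plain strings. A minimal, self-contained sketch of that extraction, exercised with httptest against a made-up /articles route (the route path, handler body, and sample values are illustrative only, not part of this commit):

    package main

    import (
        "fmt"
        "net/http/httptest"

        "github.com/gin-gonic/gin"
    )

    func main() {
        gin.SetMode(gin.TestMode)
        r := gin.New()
        r.GET("/articles", func(c *gin.Context) {
            // Same extraction pattern as Index in ctrl/article/index.go.
            filters := map[string]string{}
            if dv := c.QueryArray("dateValue[]"); len(dv) == 2 {
                filters["start_time"], filters["end_time"] = dv[0], dv[1]
            }
            filters["status"] = c.Query("status")
            filters["importance"] = c.Query("importance")
            filters["title"] = c.Query("title")
            c.JSON(200, filters)
        })

        // Simulate the request the list page sends when filtering.
        w := httptest.NewRecorder()
        req := httptest.NewRequest("GET",
            "/articles?page=1&limit=10&status=1&title=hello&dateValue[]=2020-06-01&dateValue[]=2020-06-30", nil)
        r.ServeHTTP(w, req)
        fmt.Println(w.Body.String())
    }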
2 changes: 1 addition & 1 deletion ctrl/user/login.go
@@ -50,7 +50,7 @@ func Login(c *gin.Context) {
     session := sessions.Default(c)
     var data = make(map[string]interface{}, 0)
     v := session.Get(conf.Cfg.Token)
-    fmt.Println(v)
+
     if v == nil {
         cur := time.Now()
         // nanoseconds
5 changes: 2 additions & 3 deletions main.go
@@ -19,7 +19,6 @@ import (
     "go-admin/modules/cache"
     "go-admin/modules/response"
     "go-admin/public/common"
-    "log"
     "net/url"
 )

@@ -132,9 +131,9 @@ func Auth() gin.HandlerFunc{
             return
         }
         // access the status we are sending
-        status := c.Writer.Status()
+        //status := c.Writer.Status()
         c.Next()
-        log.Println(status) // status 200
+        //log.Println(status) // status 200
     }
 }
 var count = 0
20 changes: 17 additions & 3 deletions models/system_article.go
@@ -39,15 +39,29 @@ func (m *SystemArticle) Add() (int64, error) {
 func (m *SystemArticle) AddBatch(beans ...interface{}) (int64, error) {
     return mEngine.Insert(beans...)
 }
-func (u *SystemArticle) GetAllPage(paging *common.Paging) ([]SystemArticle, error) {
+func (u *SystemArticle) GetAllPage(paging *common.Paging, filters map[string]string) ([]SystemArticle, error) {
     var systemarticles []SystemArticle
     var err error
-    paging.Total, err = mEngine.Where("status=?", 1).Count(u)
+    session := mEngine.Where("1=1")
+    if filters["status"] != "" {
+        session.Where("status=?", filters["status"])
+    }
+    if filters["importance"] != "" {
+        session.Where("importance=?", filters["importance"])
+    }
+    if filters["title"] != "" {
+        session.Where("title like ?", "%"+filters["title"]+"%")
+    }
+    if filters["start_time"] != "" && filters["end_time"] != "" {
+        session.Where("mtime>=?", common.StrToTimes(filters["start_time"])).Where("mtime<=?", common.StrToTimes(filters["end_time"]))
+    }
+    sessionRows := *session
+    paging.Total, err = session.Count(u)
     paging.GetPages()
     if paging.Total < 1 {
         return systemarticles, err
     }
-    err = mEngine.Where("status=?", 1).Limit(int(paging.PageSize), int(paging.StartNums)).Find(&systemarticles)
+    err = sessionRows.Limit(int(paging.PageSize), int(paging.StartNums)).Find(&systemarticles)
     return systemarticles, err
 }
 func (a *SystemArticle) Update() error {
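A note on `sessionRows := *session` above: with xorm, executing `Count()` consumes the conditions built up on the session, so the code keeps a value copy that still carries the WHERE clauses for the later `Limit(...).Find(...)`. A minimal sketch of that pattern, assuming this package's initialized `mEngine` engine and the `SystemArticle` bean as in the rest of models/; the helper name `countAndPage` is hypothetical:

    package models

    // Hypothetical helper illustrating the session-copy pattern used by
    // GetAllPage above; assumes the package's initialized mEngine (*xorm.Engine).

    import "go-admin/public/common"

    func countAndPage(filters map[string]string, paging *common.Paging) ([]SystemArticle, error) {
        session := mEngine.Where("1=1")
        if filters["status"] != "" {
            session.Where("status=?", filters["status"])
        }

        // Count() resets the conditions accumulated on the session, so keep a
        // value copy that still carries the WHERE clauses for the Find() below.
        findSession := *session

        var rows []SystemArticle
        total, err := session.Count(new(SystemArticle))
        if err != nil {
            return rows, err
        }
        paging.Total = total
        paging.GetPages()

        err = findSession.Limit(int(paging.PageSize), int(paging.StartNums)).Find(&rows)
        return rows, err
    }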
12 changes: 3 additions & 9 deletions public/common/paginator.go
@@ -17,21 +17,15 @@ type Paging struct {
     //NumsCount int64 `json:"numscount" form:"numscount"` // total page index count
 }
 // get pagination info
-func (p *Paging) GetPages() *Paging {
-
+func (p *Paging) GetPages() {
     if p.Page < 1 {
         p.Page = 1
     }
     if p.PageSize < 1 {
         p.PageSize = 10
     }
     page_count := math.Ceil(float64(p.Total) / float64(p.PageSize))
-    paging := new(Paging)
-    paging.Page = p.Page
-    paging.PageSize = p.PageSize
-    paging.StartNums = p.PageSize * (p.Page - 1)
-    paging.Total = p.Total
-    paging.PageCount = int64(page_count)
-    return paging
+    p.StartNums = p.PageSize * (p.Page - 1)
+    p.PageCount = int64(page_count)
 }
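Quick worked example of the refactored `GetPages`, which now fills the receiver in place instead of returning a fresh `Paging` (matching the call sites updated elsewhere in this commit). The numbers below are illustrative only:

    package main

    import (
        "fmt"

        "go-admin/public/common"
    )

    func main() {
        // Illustrative values: 45 rows in total, 20 per page, requesting page 3.
        p := &common.Paging{Page: 3, PageSize: 20, Total: 45}
        p.GetPages()
        // PageCount = ceil(45/20) = 3, StartNums = 20*(3-1) = 40.
        fmt.Println(p.PageCount, p.StartNums) // 3 40
    }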

7 changes: 5 additions & 2 deletions test1.go
@@ -2,12 +2,15 @@ package main
 
 import (
     "fmt"
+    "go-admin/public/common"
     "strings"
 )
 
 func main() {
     var strs = "hello 你好 hello world"
     fmt.Println(Substr(strs, "好"))
+    paging := &common.Paging{Page: 1, PageSize: 10}
+    paging.Total = 100
+    paging.GetPages()
+    fmt.Println(paging)
 }
 
 func Substr(s, substr string) string {
5 changes: 2 additions & 3 deletions vue-element-admin/src/views/articles/list/index.vue
@@ -136,7 +136,6 @@ export default {
   },
   methods: {
     handleFilter() {
-      console.log(this.listQuery.dateValue)
       this.listQuery.page = 1
       this.getList()
     },
@@ -171,8 +170,8 @@ export default {
     handleDownload() {
      this.downloadLoading = true
      import('@/vendor/Export2Excel').then(excel => {
-        const tHeader = ['timestamp', 'title', 'type', 'importance', 'status']
-        const filterVal = ['timestamp', 'title', 'type', 'importance', 'status']
+        const tHeader = ['display_time', 'title', 'importance', 'status']
+        const filterVal = ['display_time', 'title', 'importance', 'status']
        const data = this.formatJson(filterVal, this.list)
        excel.export_json_to_excel({
          header: tHeader,
