Skip to content
This repository has been archived by the owner on May 29, 2021. It is now read-only.

Commit

Permalink
Refactor feed creation to allow setting most fields via API
Browse files Browse the repository at this point in the history
Allow API clients to create disabled feeds or define fields such as "ignore_http_cache".
  • Loading branch information
fguillot committed Jan 3, 2021
1 parent ab82c4b commit f0610bd
Show file tree
Hide file tree
Showing 26 changed files with 370 additions and 264 deletions.
5 changes: 2 additions & 3 deletions api/api.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,16 +7,15 @@ package api // import "miniflux.app/api"
import (
"net/http"

"miniflux.app/reader/feed"
"miniflux.app/storage"
"miniflux.app/worker"

"github.com/gorilla/mux"
)

// Serve declares API routes for the application.
func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool, feedHandler *feed.Handler) {
handler := &handler{store, pool, feedHandler}
func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool) {
handler := &handler{store, pool}

sr := router.PathPrefix("/v1").Subrouter()
middleware := newMiddleware(store)
Expand Down
33 changes: 18 additions & 15 deletions api/feed.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import (

"miniflux.app/http/request"
"miniflux.app/http/response/json"
feedHandler "miniflux.app/reader/handler"
)

func (h *handler) createFeed(w http.ResponseWriter, r *http.Request) {
Expand Down Expand Up @@ -42,20 +43,22 @@ func (h *handler) createFeed(w http.ResponseWriter, r *http.Request) {
return
}

feed, err := h.feedHandler.CreateFeed(
userID,
feedInfo.CategoryID,
feedInfo.FeedURL,
feedInfo.Crawler,
feedInfo.UserAgent,
feedInfo.Username,
feedInfo.Password,
feedInfo.ScraperRules,
feedInfo.RewriteRules,
feedInfo.BlocklistRules,
feedInfo.KeeplistRules,
feedInfo.FetchViaProxy,
)
feed, err := feedHandler.CreateFeed(h.store, &feedHandler.FeedCreationArgs{
UserID: userID,
CategoryID: feedInfo.CategoryID,
FeedURL: feedInfo.FeedURL,
UserAgent: feedInfo.UserAgent,
Username: feedInfo.Username,
Password: feedInfo.Password,
Crawler: feedInfo.Crawler,
Disabled: feedInfo.Disabled,
IgnoreHTTPCache: feedInfo.IgnoreHTTPCache,
FetchViaProxy: feedInfo.FetchViaProxy,
ScraperRules: feedInfo.ScraperRules,
RewriteRules: feedInfo.RewriteRules,
BlocklistRules: feedInfo.BlocklistRules,
KeeplistRules: feedInfo.KeeplistRules,
})
if err != nil {
json.ServerError(w, r, err)
return
Expand All @@ -73,7 +76,7 @@ func (h *handler) refreshFeed(w http.ResponseWriter, r *http.Request) {
return
}

err := h.feedHandler.RefreshFeed(userID, feedID)
err := feedHandler.RefreshFeed(h.store, userID, feedID)
if err != nil {
json.ServerError(w, r, err)
return
Expand Down
6 changes: 2 additions & 4 deletions api/handler.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,11 @@
package api // import "miniflux.app/api"

import (
"miniflux.app/reader/feed"
"miniflux.app/storage"
"miniflux.app/worker"
)

type handler struct {
store *storage.Storage
pool *worker.Pool
feedHandler *feed.Handler
store *storage.Storage
pool *worker.Pool
}
24 changes: 13 additions & 11 deletions api/payload.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,17 +48,19 @@ type feedCreationResponse struct {
}

type feedCreationRequest struct {
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
}

func decodeFeedCreationRequest(r io.ReadCloser) (*feedCreationRequest, error) {
Expand Down
4 changes: 3 additions & 1 deletion cli/cli.go
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,9 @@ func Parse() {

// Run migrations and start the daemon.
if config.Opts.RunMigrations() {
database.Migrate(db)
if err := database.Migrate(db); err != nil {
logger.Fatal(`%v`, err)
}
}

if err := database.IsSchemaUpToDate(db); err != nil {
Expand Down
6 changes: 2 additions & 4 deletions cli/daemon.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ import (
"miniflux.app/config"
"miniflux.app/logger"
"miniflux.app/metric"
"miniflux.app/reader/feed"
"miniflux.app/service/httpd"
"miniflux.app/service/scheduler"
"miniflux.app/storage"
Expand All @@ -29,16 +28,15 @@ func startDaemon(store *storage.Storage) {
signal.Notify(stop, os.Interrupt)
signal.Notify(stop, syscall.SIGTERM)

feedHandler := feed.NewFeedHandler(store)
pool := worker.NewPool(feedHandler, config.Opts.WorkerPoolSize())
pool := worker.NewPool(store, config.Opts.WorkerPoolSize())

if config.Opts.HasSchedulerService() && !config.Opts.HasMaintenanceMode() {
scheduler.Serve(store, pool)
}

var httpServer *http.Server
if config.Opts.HasHTTPService() {
httpServer = httpd.Serve(store, pool, feedHandler)
httpServer = httpd.Serve(store, pool)
}

if config.Opts.HasMetricsCollector() {
Expand Down
11 changes: 4 additions & 7 deletions client/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ func (c *Client) CreateUser(username, password string, isAdmin bool) (*User, err
}

// UpdateUser updates a user in the system.
func (c *Client) UpdateUser(userID int64, userChanges *UserModification) (*User, error) {
func (c *Client) UpdateUser(userID int64, userChanges *UserModificationRequest) (*User, error) {
body, err := c.request.Put(fmt.Sprintf("/v1/users/%d", userID), userChanges)
if err != nil {
return nil, err
Expand Down Expand Up @@ -285,11 +285,8 @@ func (c *Client) Feed(feedID int64) (*Feed, error) {
}

// CreateFeed creates a new feed.
func (c *Client) CreateFeed(url string, categoryID int64) (int64, error) {
body, err := c.request.Post("/v1/feeds", map[string]interface{}{
"feed_url": url,
"category_id": categoryID,
})
func (c *Client) CreateFeed(feedCreationRequest *FeedCreationRequest) (int64, error) {
body, err := c.request.Post("/v1/feeds", feedCreationRequest)
if err != nil {
return 0, err
}
Expand All @@ -309,7 +306,7 @@ func (c *Client) CreateFeed(url string, categoryID int64) (int64, error) {
}

// UpdateFeed updates a feed.
func (c *Client) UpdateFeed(feedID int64, feedChanges *FeedModification) (*Feed, error) {
func (c *Client) UpdateFeed(feedID int64, feedChanges *FeedModificationRequest) (*Feed, error) {
body, err := c.request.Put(fmt.Sprintf("/v1/feeds/%d", feedID), feedChanges)
if err != nil {
return nil, err
Expand Down
28 changes: 24 additions & 4 deletions client/core.go
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,8 @@ type UserCreationRequest struct {
OpenIDConnectID string `json:"openid_connect_id"`
}

// UserModification represents the request to update a user.
type UserModification struct {
// UserModificationRequest represents the request to update a user.
type UserModificationRequest struct {
Username *string `json:"username"`
Password *string `json:"password"`
IsAdmin *bool `json:"is_admin"`
Expand Down Expand Up @@ -110,6 +110,9 @@ type Feed struct {
LastModifiedHeader string `json:"last_modified_header,omitempty"`
ParsingErrorMsg string `json:"parsing_error_message,omitempty"`
ParsingErrorCount int `json:"parsing_error_count,omitempty"`
Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
Expand All @@ -121,8 +124,25 @@ type Feed struct {
Category *Category `json:"category,omitempty"`
}

// FeedModification represents changes for a feed.
type FeedModification struct {
// FeedCreationRequest represents the request to create a feed.
// It is serialized as the JSON payload sent to POST /v1/feeds; only
// FeedURL is strictly required by the endpoint — the remaining fields
// carry optional per-feed settings.
type FeedCreationRequest struct {
	FeedURL    string `json:"feed_url"`
	CategoryID int64  `json:"category_id"`
	// HTTP fetching overrides for this feed.
	UserAgent string `json:"user_agent"`
	Username  string `json:"username"`
	Password  string `json:"password"`
	// Boolean feed flags; their exact semantics are defined server-side
	// (NOTE(review): Disabled presumably creates the feed in a disabled
	// state — confirm against the server implementation).
	Crawler         bool `json:"crawler"`
	Disabled        bool `json:"disabled"`
	IgnoreHTTPCache bool `json:"ignore_http_cache"`
	FetchViaProxy   bool `json:"fetch_via_proxy"`
	// Scraping, rewriting, and entry filtering rules.
	ScraperRules   string `json:"scraper_rules"`
	RewriteRules   string `json:"rewrite_rules"`
	BlocklistRules string `json:"blocklist_rules"`
	KeeplistRules  string `json:"keeplist_rules"`
}

// FeedModificationRequest represents the request to update a feed.
type FeedModificationRequest struct {
FeedURL *string `json:"feed_url"`
SiteURL *string `json:"site_url"`
Title *string `json:"title"`
Expand Down
16 changes: 8 additions & 8 deletions database/database.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,6 @@ import (

// Postgresql driver import
_ "github.com/lib/pq"

"miniflux.app/logger"
)

// NewConnectionPool configures the database connection pool.
Expand All @@ -28,7 +26,7 @@ func NewConnectionPool(dsn string, minConnections, maxConnections int) (*sql.DB,
}

// Migrate executes database migrations.
func Migrate(db *sql.DB) {
func Migrate(db *sql.DB) error {
var currentVersion int
db.QueryRow(`SELECT version FROM schema_version`).Scan(&currentVersion)

Expand All @@ -41,28 +39,30 @@ func Migrate(db *sql.DB) {

tx, err := db.Begin()
if err != nil {
logger.Fatal("[Migration v%d] %v", newVersion, err)
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}

if err := migrations[version](tx); err != nil {
tx.Rollback()
logger.Fatal("[Migration v%d] %v", newVersion, err)
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}

if _, err := tx.Exec(`DELETE FROM schema_version`); err != nil {
tx.Rollback()
logger.Fatal("[Migration v%d] %v", newVersion, err)
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}

if _, err := tx.Exec(`INSERT INTO schema_version (version) VALUES ($1)`, newVersion); err != nil {
tx.Rollback()
logger.Fatal("[Migration v%d] %v", newVersion, err)
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}

if err := tx.Commit(); err != nil {
logger.Fatal("[Migration v%d] %v", newVersion, err)
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}
}

return nil
}

// IsSchemaUpToDate checks if the database schema is up to date.
Expand Down
13 changes: 0 additions & 13 deletions model/feed.go
Original file line number Diff line number Diff line change
Expand Up @@ -73,19 +73,6 @@ func (f *Feed) WithCategoryID(categoryID int64) {
f.Category = &Category{ID: categoryID}
}

// WithBrowsingParameters defines browsing parameters.
// It copies the given fetching and filtering options onto the feed:
// the crawler flag, HTTP user agent and credentials, scraper and rewrite
// rules, block/keep list rules, and the fetch-via-proxy flag. It performs
// no validation; values are assigned as-is.
func (f *Feed) WithBrowsingParameters(crawler bool, userAgent, username, password, scraperRules, rewriteRules, blocklistRules, keeplistRules string, fetchViaProxy bool) {
	f.Crawler = crawler
	f.UserAgent = userAgent
	f.Username = username
	f.Password = password
	f.ScraperRules = scraperRules
	f.RewriteRules = rewriteRules
	f.FetchViaProxy = fetchViaProxy
	f.BlocklistRules = blocklistRules
	f.KeeplistRules = keeplistRules
}

// WithError adds a new error message and increments the error counter.
func (f *Feed) WithError(message string) {
f.ParsingErrorCount++
Expand Down
51 changes: 0 additions & 51 deletions model/feed_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -46,57 +46,6 @@ func TestFeedCategorySetter(t *testing.T) {
}
}

// TestFeedBrowsingParams verifies that WithBrowsingParameters copies every
// browsing option onto the feed: crawler flag, user agent, credentials,
// scraper/rewrite rules, block/keep list rules, and the fetch-via-proxy flag.
func TestFeedBrowsingParams(t *testing.T) {
	feed := &Feed{}
	feed.WithBrowsingParameters(
		true,
		"Custom User Agent",
		"Username",
		"Secret",
		"Scraper Rule",
		"Rewrite Rule",
		"Block Rule",
		"Allow Rule",
		true,
	)

	if !feed.Crawler {
		t.Error(`The crawler must be activated`)
	}

	if feed.UserAgent != "Custom User Agent" {
		t.Error(`The user agent must be set`)
	}

	if feed.Username != "Username" {
		t.Error(`The username must be set`)
	}

	if feed.Password != "Secret" {
		t.Error(`The password must be set`)
	}

	// Use t.Error consistently: none of these messages contain format verbs,
	// so t.Errorf added nothing (and go vet flags verb/argument mismatches).
	if feed.ScraperRules != "Scraper Rule" {
		t.Error(`The scraper rules must be set`)
	}

	if feed.RewriteRules != "Rewrite Rule" {
		t.Error(`The rewrite rules must be set`)
	}

	if feed.BlocklistRules != "Block Rule" {
		t.Error(`The block list rules must be set`)
	}

	if feed.KeeplistRules != "Allow Rule" {
		t.Error(`The keep list rules must be set`)
	}

	// Fixed garbled message ("is no set").
	if !feed.FetchViaProxy {
		t.Error(`The fetch via proxy flag must be set`)
	}
}

func TestFeedErrorCounter(t *testing.T) {
feed := &Feed{}
feed.WithError("Some Error")
Expand Down
10 changes: 0 additions & 10 deletions reader/feed/doc.go

This file was deleted.

Loading

0 comments on commit f0610bd

Please sign in to comment.