extract changelog & article loading to Loader struct & add rss feed
Jonas Hiltl committed Aug 26, 2024
1 parent 8192e78 commit cff3cf2
Showing 17 changed files with 532 additions and 185 deletions.
8 changes: 6 additions & 2 deletions cmd/server.go
@@ -8,11 +8,12 @@ import (
"net/http"
"os"

"github.com/jonashiltl/openchangelog/internal/changelog"
"github.com/jonashiltl/openchangelog/internal/config"
"github.com/jonashiltl/openchangelog/internal/handler/rest"
"github.com/jonashiltl/openchangelog/internal/handler/rss"
"github.com/jonashiltl/openchangelog/internal/handler/web"
"github.com/jonashiltl/openchangelog/internal/store"
"github.com/jonashiltl/openchangelog/parse"
"github.com/jonashiltl/openchangelog/render"
"github.com/naveensrinivasan/httpcache"
"github.com/naveensrinivasan/httpcache/diskcache"
@@ -39,8 +40,11 @@ func main() {
os.Exit(1)
}

loader := changelog.NewLoader(cfg, st, cache)

rest.RegisterRestHandler(mux, rest.NewEnv(st))
web.RegisterWebHandler(mux, web.NewEnv(cfg, st, render.New(cfg), parse.NewParser(), cache))
web.RegisterWebHandler(mux, web.NewEnv(cfg, loader, render.New(cfg)))
rss.RegisterRSSHandler(mux, rss.NewEnv(cfg, loader))

fmt.Printf("Starting server at http://%s\n", cfg.Addr)
log.Fatal(http.ListenAndServe(cfg.Addr, mux))
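
The new rss handler package is among the 17 changed files but is not shown in this excerpt. A minimal sketch of how it might be wired, assuming the NewEnv and RegisterRSSHandler signatures used above and that a zero-valued Pagination means "no paging"; the route and serialization details are invented for illustration:

```go
// Hypothetical sketch of internal/handler/rss — not the actual file from this commit.
package rss

import (
	"net/http"

	"github.com/jonashiltl/openchangelog/internal/changelog"
	"github.com/jonashiltl/openchangelog/internal/config"
)

type Env struct {
	cfg    config.Config
	loader *changelog.Loader
}

func NewEnv(cfg config.Config, loader *changelog.Loader) *Env {
	return &Env{cfg: cfg, loader: loader}
}

func RegisterRSSHandler(mux *http.ServeMux, e *Env) {
	mux.HandleFunc("/feed", func(w http.ResponseWriter, r *http.Request) {
		// Load the configured changelog; an undefined Pagination is treated
		// by the sources as "load everything".
		loaded, err := e.loader.FromConfig(r.Context(), changelog.Pagination{})
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		parsed, err := loaded.Parse(r.Context())
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		w.Header().Set("Content-Type", "application/rss+xml")
		// Serializing parsed.Articles into RSS XML is omitted here; see the
		// item sketch further down for how article IDs could map to GUIDs.
		_ = parsed
	})
}
```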
138 changes: 138 additions & 0 deletions internal/changelog/changelog.go
@@ -0,0 +1,138 @@
package changelog

import (
"context"

"github.com/jonashiltl/openchangelog/internal/config"
"github.com/jonashiltl/openchangelog/internal/store"
"github.com/naveensrinivasan/httpcache"
)

// Groups multiple ways of loading a changelog: from the config, by its subdomain, or by workspace.
// After loading, the changelog can easily be parsed.
type Loader struct {
cfg config.Config
store store.Store
cache httpcache.Cache
parser Parser
}

func NewLoader(cfg config.Config, store store.Store, cache httpcache.Cache) *Loader {
return &Loader{
cfg: cfg,
store: store,
cache: cache,
parser: NewParser(),
}
}

func (l *Loader) FromConfig(ctx context.Context, page Pagination) (*LoadedChangelog, error) {
store := store.NewConfigStore(l.cfg)
cl, err := store.GetChangelog(ctx, "", "")
if err != nil {
return nil, err
}

res, err := l.load(ctx, cl, page)
if err != nil {
return nil, err
}

return &LoadedChangelog{
cl: cl,
res: res,
parser: l.parser,
}, nil
}

func (l *Loader) FromSubdomain(ctx context.Context, subdomain string, page Pagination) (*LoadedChangelog, error) {
cl, err := l.store.GetChangelogBySubdomain(ctx, subdomain)
if err != nil {
return nil, err
}

res, err := l.load(ctx, cl, page)
if err != nil {
return nil, err
}

return &LoadedChangelog{
cl: cl,
res: res,
parser: l.parser,
}, nil
}

func (l *Loader) FromWorkspace(ctx context.Context, wID, cID string, page Pagination) (*LoadedChangelog, error) {
parsedWID, err := store.ParseWID(wID)
if err != nil {
return nil, err
}

parsedCID, err := store.ParseCID(cID)
if err != nil {
return nil, err
}
cl, err := l.store.GetChangelog(ctx, parsedWID, parsedCID)
if err != nil {
return nil, err
}

res, err := l.load(ctx, cl, page)
if err != nil {
return nil, err
}

return &LoadedChangelog{
cl: cl,
res: res,
parser: l.parser,
}, nil
}

func (l *Loader) load(ctx context.Context, cl store.Changelog, page Pagination) (LoadResult, error) {
var source Source
if cl.LocalSource.Valid {
source = newLocalSourceFromStore(cl.LocalSource.ValueOrZero())
} else if cl.GHSource.Valid {
s, err := newGHSourceFromStore(l.cfg, cl.GHSource.ValueOrZero(), l.cache)
if err != nil {
return LoadResult{}, err
}
source = s
}

if source != nil {
res, err := source.Load(ctx, page)
if err != nil {
return LoadResult{}, err
}
return res, nil
}
return LoadResult{}, nil
}

type LoadedChangelog struct {
cl store.Changelog
res LoadResult
parser Parser
}

type ParsedChangelog struct {
CL store.Changelog
Articles []ParsedArticle
HasMore bool
}

func (c *LoadedChangelog) Parse(ctx context.Context) (ParsedChangelog, error) {
parsed, err := c.parser.Parse(ctx, c.res.Articles)
if err != nil {
return ParsedChangelog{}, err
}

return ParsedChangelog{
CL: c.cl,
Articles: parsed,
HasMore: c.res.HasMore,
}, nil
}
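
The Loader gives callers a two-step flow: resolve the changelog from config, subdomain, or workspace and load its raw articles, then parse them into renderable articles. A usage sketch; the zero-valued Pagination and everything about Meta beyond ID and PublishedAt are assumptions, since those definitions are not part of this diff:

```go
// Usage sketch of the new Loader, not code from this commit.
package main

import (
	"context"
	"fmt"

	"github.com/jonashiltl/openchangelog/internal/changelog"
	"github.com/jonashiltl/openchangelog/internal/config"
	"github.com/jonashiltl/openchangelog/internal/store"
	"github.com/naveensrinivasan/httpcache"
)

func printChangelog(ctx context.Context, cfg config.Config, st store.Store, cache httpcache.Cache) error {
	loader := changelog.NewLoader(cfg, st, cache)

	// Assumes a zero Pagination means "no paging".
	loaded, err := loader.FromSubdomain(ctx, "acme", changelog.Pagination{})
	if err != nil {
		return err
	}

	parsed, err := loaded.Parse(ctx)
	if err != nil {
		return err
	}

	for _, a := range parsed.Articles {
		// ID and PublishedAt are set by the parser in goldmark.go below.
		fmt.Println(a.Meta.ID, a.Meta.PublishedAt)
	}
	if parsed.HasMore {
		fmt.Println("more articles available on the next page")
	}
	return nil
}
```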
48 changes: 21 additions & 27 deletions internal/github.go → internal/changelog/github.go
@@ -1,4 +1,4 @@
package internal
package changelog

import (
"context"
@@ -18,22 +18,15 @@ import (
"github.com/naveensrinivasan/httpcache"
)

type GHSourceArgs struct {
// The account name of the owner of the repository
Owner string
// The repository which holds the markdown files
Repository string
// The path to the root of the directory which holds all markdown files
Path string
AppID int64
AppPrivateKey string
AppInstallationId int64
AccessToken string

Cache httpcache.Cache
type ghSource struct {
client *github.Client
Owner string
Repo string
Path string
InstallationID int64
}

func NewGHSourceFromStore(cfg config.Config, gh store.GHSource, cache httpcache.Cache) (Source, error) {
func newGHSourceFromStore(cfg config.Config, gh store.GHSource, cache httpcache.Cache) (Source, error) {
tr := http.DefaultTransport

if !cfg.HasGithubAuth() {
@@ -69,17 +62,9 @@ func NewGHSourceFromStore(cfg config.Config, gh store.GHSource, cache httpcache.
}, nil
}

type ghSource struct {
client *github.Client
Owner string
Repo string
Path string
InstallationID int64
}

func (s *ghSource) Load(ctx context.Context, page Pagination) (LoadResult, error) {
// sanitize params
if page.PageSize() < 1 {
if page.IsDefined() && page.PageSize() < 1 {
return LoadResult{}, nil
}

@@ -109,7 +94,16 @@ func (s *ghSource) loadDir(ctx context.Context, files []*github.RepositoryConten
return filepath.Ext(f.GetName()) == ".md"
})

if page.StartIdx() >= len(files) {
startIdx := page.StartIdx()
endIdx := page.EndIdx()

// If pagination is not applied, process all files
if !page.IsDefined() {
startIdx = 0
endIdx = len(files) - 1
}

if startIdx >= len(files) {
return LoadResult{
Articles: []RawArticle{},
HasMore: false,
@@ -125,7 +119,7 @@ func (s *ghSource) loadDir(ctx context.Context, files []*github.RepositoryConten
articles := make([]RawArticle, 0, page.PageSize())
mutex := &sync.Mutex{}

for i := page.StartIdx(); i <= page.EndIdx() && i < len(files); i++ {
for i := startIdx; i <= endIdx && i < len(files); i++ {
wg.Add(1)
go func(name string) {
defer wg.Done()
Expand All @@ -144,7 +138,7 @@ func (s *ghSource) loadDir(ctx context.Context, files []*github.RepositoryConten

return LoadResult{
Articles: articles,
HasMore: page.EndIdx()+1 < len(files),
HasMore: endIdx+1 < len(files),
}, nil
}
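
github.go (and local.go below) lean on the Source, RawArticle, LoadResult, and Pagination types, whose definitions are not part of this excerpt. A sketch of shapes consistent with how they are used in this commit; every detail below is an inference, not the actual openchangelog code:

```go
// Shapes inferred from the call sites in this diff — treat as assumptions.
package changelog

import (
	"context"
	"io"
)

type Source interface {
	Load(ctx context.Context, page Pagination) (LoadResult, error)
}

type RawArticle struct {
	Content io.ReadCloser // closed by the parser after reading
}

type LoadResult struct {
	Articles []RawArticle
	HasMore  bool
}

// A zero Pagination is "not defined", which the sources treat as "load all".
type Pagination struct {
	page, size int
}

func (p Pagination) IsDefined() bool { return p.size > 0 }
func (p Pagination) PageSize() int   { return p.size }
func (p Pagination) StartIdx() int   { return (p.page - 1) * p.size }
func (p Pagination) EndIdx() int     { return p.StartIdx() + p.size - 1 }
```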

17 changes: 9 additions & 8 deletions parse/goldmark.go → internal/changelog/goldmark.go
@@ -1,13 +1,13 @@
package parse
package changelog

import (
"bytes"
"context"
"fmt"
"io"
"sort"
"sync"

"github.com/jonashiltl/openchangelog/internal"
enclave "github.com/quail-ink/goldmark-enclave"
"github.com/yuin/goldmark"
"github.com/yuin/goldmark/extension"
@@ -45,14 +45,14 @@ func NewParser() Parser {
}
}

func (g *gmark) Parse(ctx context.Context, raw []internal.RawArticle) (ParseResult, error) {
func (g *gmark) Parse(ctx context.Context, raw []RawArticle) ([]ParsedArticle, error) {
var wg sync.WaitGroup
result := make([]ParsedArticle, 0, len(raw))
mutex := &sync.Mutex{}

for _, a := range raw {
wg.Add(1)
go func(a internal.RawArticle) {
go func(a RawArticle) {
defer wg.Done()
parsed, err := g.parseArticle(a)
if err != nil {
@@ -69,12 +69,10 @@ func (g *gmark) Parse(ctx context.Context, raw []internal.RawArticle) (ParseResu
return result[i].Meta.PublishedAt.After(result[j].Meta.PublishedAt)
})

return ParseResult{
Articles: result,
}, nil
return result, nil
}

func (g *gmark) parseArticle(raw internal.RawArticle) (ParsedArticle, error) {
func (g *gmark) parseArticle(raw RawArticle) (ParsedArticle, error) {
ctx := parser.NewContext()

defer raw.Content.Close()
@@ -100,6 +98,9 @@ func (g *gmark) parseArticle(raw internal.RawArticle) (ParsedArticle, error) {
if err != nil {
return ParsedArticle{}, err
}

meta.ID = fmt.Sprint(meta.PublishedAt.Unix())

return ParsedArticle{
Meta: meta,
Content: &target,
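
Besides moving into the changelog package and returning a plain []ParsedArticle, the parser now assigns each article an ID derived from its publish time, presumably so the new RSS feed has a stable per-item identifier. A sketch of how such an ID could map onto a feed item; the item struct and the Title field are assumptions, since the feed rendering code is not in this excerpt:

```go
// Hypothetical RSS item mapping — only Meta.ID and Meta.PublishedAt are
// confirmed by this diff; the Title field is assumed.
package rss

import (
	"encoding/xml"
	"time"

	"github.com/jonashiltl/openchangelog/internal/changelog"
)

type item struct {
	XMLName xml.Name `xml:"item"`
	Title   string   `xml:"title"`
	GUID    string   `xml:"guid"`
	PubDate string   `xml:"pubDate"`
}

func toItem(a changelog.ParsedArticle) item {
	return item{
		Title:   a.Meta.Title,
		GUID:    a.Meta.ID, // fmt.Sprint(meta.PublishedAt.Unix()) from parseArticle above
		PubDate: a.Meta.PublishedAt.Format(time.RFC1123Z),
	}
}
```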
33 changes: 21 additions & 12 deletions internal/local.go → internal/changelog/local.go
@@ -1,4 +1,4 @@
package internal
package changelog

import (
"context"
@@ -15,15 +15,15 @@ type localSource struct {
path string
}

func NewLocalSourceFromStore(s store.LocalSource) Source {
func newLocalSourceFromStore(s store.LocalSource) Source {
return localSource{
path: s.Path,
}
}

func (s localSource) Load(ctx context.Context, page Pagination) (LoadResult, error) {
// sanitize params
if page.PageSize() < 1 {
if page.IsDefined() && page.PageSize() < 1 {
return LoadResult{}, nil
}

@@ -39,33 +39,42 @@ func (s localSource) Load(ctx context.Context, page Pagination) (LoadResult, err
}
}

func loadDir(path string, params Pagination) (LoadResult, error) {
func loadDir(path string, page Pagination) (LoadResult, error) {
files, err := os.ReadDir(path)
if err != nil {
return LoadResult{}, err
}

if params.StartIdx() >= len(files) {
files = filter(files, func(f fs.DirEntry) bool {
return filepath.Ext(f.Name()) == ".md"
})

startIdx := page.StartIdx()
endIdx := page.EndIdx()

// If pagination is not applied, process all files
if !page.IsDefined() {
startIdx = 0
endIdx = len(files) - 1
}

if startIdx >= len(files) {
return LoadResult{
Articles: []RawArticle{},
HasMore: false,
}, nil
}

files = filter(files, func(f fs.DirEntry) bool {
return filepath.Ext(f.Name()) == ".md"
})

// sort files in descending order by filename
sort.Slice(files, func(i, j int) bool {
return files[i].Name() >= files[j].Name()
})

var wg sync.WaitGroup
results := make([]RawArticle, 0, params.PageSize())
results := make([]RawArticle, 0, page.PageSize())
mutex := &sync.Mutex{}

for i := params.StartIdx(); i <= params.EndIdx() && i < len(files); i++ {
for i := startIdx; i <= endIdx && i < len(files); i++ {
wg.Add(1)
go func(name string) {
defer wg.Done()
@@ -85,7 +94,7 @@ func loadDir(path string, params Pagination) (LoadResult, error) {

return LoadResult{
Articles: results,
HasMore: params.EndIdx()+1 < len(files),
HasMore: endIdx+1 < len(files),
}, nil
}
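
Both loadDir implementations (here and in github.go) call a filter helper whose definition is cut off by the collapsed part of the diff. A generic sketch of the behaviour the call sites imply; the actual helper may differ:

```go
// Inferred helper: keeps the elements for which keep returns true.
func filter[T any](s []T, keep func(T) bool) []T {
	out := make([]T, 0, len(s))
	for _, v := range s {
		if keep(v) {
			out = append(out, v)
		}
	}
	return out
}
```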
