Retry up to N times in case of 429 response #34

Open · wants to merge 2 commits into master
Changes from 1 commit
Support retries
flimzy committed Dec 20, 2016
commit b37bcddfca32bb45e88c0e7af8c59ce8d04ced27
47 changes: 30 additions & 17 deletions sentry.go
@@ -36,8 +36,9 @@ type SentryHook struct {
 	// consider the message correctly sent
 	Timeout time.Duration
 	StacktraceConfiguration StackTraceConfiguration
-	// If Retries is non-zero, packets will be resent in case of a 429 error,
-	// up to this many times, or until the timeout is reached.
+	// If Retries is non-zero, packets will be resent in case of a 429 error
+	// (too many requests) up to this many times, or until the timeout is
+	// reached.
 	Retries uint8
 
 	client *raven.Client
@@ -247,29 +248,41 @@ func (hook *SentryHook) Flush() {
 }
 
 func (hook *SentryHook) sendPacket(packet *raven.Packet) error {
-	_, errCh := hook.client.Capture(packet, nil)
-	cases := []reflect.SelectCase{
-		reflect.SelectCase{
-			Dir:  reflect.SelectRecv,
-			Chan: reflect.ValueOf(errCh),
-		},
-	}
+	cases := make([]reflect.SelectCase, 1, 2)
 	timeout := hook.Timeout
 	if timeout > 0 {
 		cases = append(cases, reflect.SelectCase{
 			Dir:  reflect.SelectRecv,
 			Chan: reflect.ValueOf(time.After(timeout)),
 		})
 	}
-	chosen, recv, _ := reflect.Select(cases)
-	switch chosen {
-	case 0:
-		err, _ := recv.Interface().(error)
-		return err
-	case 1:
-		return fmt.Errorf("no response from sentry server in %s", timeout)
+	var err error
+	for i := 0; i < int(hook.Retries)+1; i++ {
+		_, errCh := hook.client.Capture(packet, nil)
+		cases[0] = reflect.SelectCase{
+			Dir:  reflect.SelectRecv,
+			Chan: reflect.ValueOf(errCh),
+		}
+
+		chosen, recv, _ := reflect.Select(cases)
+		switch chosen {
+		case 0:
+			var ok bool
+			err, ok = recv.Interface().(error)
+			if !ok {
+				// Success!
+				return nil
+			}
+			if err.Error() == "raven: got http status 429" { // Too many requests
+				continue
+			}
+			return err
+		case 1:
+			return fmt.Errorf("no response from sentry server in %s", timeout)
+		}
 	}
-	return nil
+	// Retries count exceeded, return the error
+	return err
 }
 
 func (hook *SentryHook) findStacktraceAndCause(err error) (*raven.Stacktrace, error) {
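For context, a caller opts in to the new behaviour by setting the Retries field (whose doc comment this commit rewords) alongside the existing Timeout before registering the hook. The following is a minimal sketch, not part of the PR: the import paths and the DSN are assumptions; only NewSentryHook, Retries, Timeout, and Hooks.Add come from the diffs on this page.

package main

import (
	"time"

	sentry "github.com/evalphobia/logrus_sentry" // assumed import path for this package
	"github.com/sirupsen/logrus"                 // assumed import path
)

func main() {
	log := logrus.New()

	// The DSN below is a placeholder; use your real Sentry project DSN.
	hook, err := sentry.NewSentryHook("https://public:secret@sentry.example.com/1", []logrus.Level{
		logrus.ErrorLevel,
	})
	if err != nil {
		log.Fatal(err)
	}

	// New in this PR: on an HTTP 429 from Sentry, the packet is resent up to
	// Retries times, still bounded by Timeout.
	hook.Retries = 3
	hook.Timeout = 5 * time.Second

	log.Hooks.Add(hook)
	log.Error("something went wrong") // resent transparently if rate-limited
}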
45 changes: 45 additions & 0 deletions sentry_test.go
@@ -450,3 +450,48 @@ func (myStacktracerError) GetStacktrace() *raven.Stacktrace {
 		},
 	}
 }
+
+func TestRetries(t *testing.T) {
+	failures := 8
+	s := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
+		// Respond with a 429 until the failure counter is exhausted, then
+		// respond with a 200.
+		defer req.Body.Close()
+		if failures > 0 {
+			rw.WriteHeader(429) // Too many requests
+			failures--
+			return
+		}
+		rw.WriteHeader(http.StatusOK)
+	}))
+	defer s.Close()
+	fragments := strings.SplitN(s.URL, "://", 2)
+	dsn := fmt.Sprintf(
+		"%s://public:secret@%s/sentry/project-id",
+		fragments[0],
+		fragments[1],
+	)
+	logger := getTestLogger()
+
+	hook, err := NewSentryHook(dsn, []logrus.Level{
+		logrus.ErrorLevel,
+	})
+
+	if err != nil {
+		t.Fatal(err.Error())
+	}
+	logger.Hooks.Add(hook)
+	hook.Retries = 5
+	if err := hook.Fire(&logrus.Entry{}); err == nil {
+		t.Errorf("Expected failure")
+	}
+	if failures != 2 { // Ensure the failure counter was properly decremented
+		t.Errorf("Expected failure counter to be 2, got %d", failures)
+	}
+	if err := hook.Fire(&logrus.Entry{}); err != nil {
+		t.Errorf("Expected success, got: %s", err)
+	}
+	if failures != 0 { // Ensure the failure counter was properly decremented
+		t.Errorf("Expected failure counter to be 0, got %d", failures)
+	}
+}