candidates.go
package greenhouseio

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"
	"time"

	"github.com/Grayscale-Labs/greenhouse-io-go/models"
)

const (
	candidatesURL = baseURL + "/candidates?per_page=10"
)

type CandidatesRequest struct {
	client       *Client
	queryBuilder *strings.Builder
}

// Candidates returns a candidates request builder.
func (c *Client) Candidates() *CandidatesRequest {
	return &CandidatesRequest{
		client:       c,
		queryBuilder: &strings.Builder{},
	}
}

// Fetch gets a slice of candidates using the built query params.
func (r *CandidatesRequest) Fetch() ([]*models.Candidate, error) {
	//nolint:bodyclose // The fetchCandidates function will close the body.
	candidates, _, err := r.client.fetchCandidates(candidatesURL + r.queryBuilder.String())
	return candidates, err
}

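// fetchCreatedBeforeSketch is a hypothetical usage sketch, not part of the
// original file: it shows one way a caller might chain the builder methods
// defined in this file to fetch a single page of candidates created before a
// cutoff. The function name and cutoff value are illustrative assumptions.
func fetchCreatedBeforeSketch(c *Client) ([]*models.Candidate, error) {
	cutoff := time.Date(2023, time.January, 1, 0, 0, 0, 0, time.UTC)
	return c.Candidates().CreatedBefore(cutoff).Fetch()
}
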
// Stream fetches a page of candidates, sends them into the given consumer channel,
// and attempts to fetch the next page.
func (r *CandidatesRequest) Stream(consumer chan *models.Candidate, closeSignal chan error) {
	// This gets set to the "next" page URL (if it exists).
	currentURL := candidatesURL + r.queryBuilder.String()

	// Note: the error is sent on closeSignal before consumer is closed, so callers
	// that range over consumer in the same goroutine should buffer closeSignal
	// (or read it concurrently) to avoid blocking this send.
	closeStream := func(err error) {
		closeSignal <- err
		close(consumer)
		close(closeSignal)
	}

	for currentURL != "" {
		candidates, res, err := r.client.fetchCandidates(currentURL)
		if err != nil {
			closeStream(err)
			break
		}

		for _, candidate := range candidates {
			// Due to the nature of channels, this send blocks until the consumer is ready to accept another candidate.
			consumer <- candidate
		}

		nextURL := parseNextPageLink(res)
		if nextURL == "" {
			closeStream(nil)
			break
		}

		currentURL = nextURL
	}
}

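// streamSketch is a hypothetical usage sketch, not part of the original file:
// it shows one way a caller might consume Stream. The buffered closeSignal
// channel lets Stream deliver its final error while this goroutine is still
// ranging over consumer. The function name is an illustrative assumption.
func streamSketch(c *Client) error {
	consumer := make(chan *models.Candidate)
	closeSignal := make(chan error, 1)

	go c.Candidates().Stream(consumer, closeSignal)

	for candidate := range consumer {
		_ = candidate // process each candidate as it arrives
	}

	// Stream closes consumer before closeSignal, so this receive returns the
	// final error (or nil) once the loop above has drained the channel.
	return <-closeSignal
}
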
// CreatedBefore adds a created_before filter to the query params.
func (r *CandidatesRequest) CreatedBefore(timestamp time.Time) *CandidatesRequest {
	addPrefixToken(r.queryBuilder)
	r.queryBuilder.WriteString(fmt.Sprintf(
		"created_before=%s",
		timestamp.Format(time.RFC3339),
	))

	return r
}

// fetchCandidates fetches candidates from the given URL.
func (c *Client) fetchCandidates(url string) ([]*models.Candidate, *http.Response, error) {
	// Create request with given URL.
	req, err := c.generateHTTPRequest("GET", url)
	if err != nil {
		return nil, nil, fmt.Errorf("generating request: %w", err)
	}

	// Make request.
	res, err := c.httpClient.Do(req)
	if err != nil {
		return nil, nil, fmt.Errorf("making request: %w", err)
	}

	// Defer body close.
	defer res.Body.Close()

	// Error on non-OK status.
	if res.StatusCode != http.StatusOK {
		return nil, nil, fmt.Errorf("status code %v", res.StatusCode)
	}

	// Read body into slice of bytes.
	data, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, nil, fmt.Errorf("reading body: %w", err)
	}

	// Parse bytes as JSON into slice of candidates.
	var candidates []*models.Candidate
	if err := json.Unmarshal(data, &candidates); err != nil {
		return nil, nil, fmt.Errorf("unmarshaling response: %w", err)
	}

	return candidates, res, nil
}
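
// parseNextPageLinkSketch is a hypothetical illustration, not part of the
// original file: parseNextPageLink is defined elsewhere in this package, and
// this sketch is only a guess at the kind of logic it performs, namely pulling
// the rel="next" URL out of the response's Link header, which is how the
// Greenhouse Harvest API exposes pagination.
func parseNextPageLinkSketch(res *http.Response) string {
	for _, link := range strings.Split(res.Header.Get("Link"), ",") {
		parts := strings.Split(link, ";")
		if len(parts) < 2 {
			continue
		}
		if strings.Contains(parts[1], `rel="next"`) {
			// The URL portion is wrapped in angle brackets, e.g. <https://...>.
			return strings.Trim(strings.TrimSpace(parts[0]), "<>")
		}
	}

	return ""
}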