
Commit 1718110

fix: mark archived envelopes as archived (#984)

morremeyer authored Mar 1, 2024
1 parent 40b0f17 commit 1718110

Showing 6 changed files with 24 additions and 5 deletions.
3 changes: 3 additions & 0 deletions api/docs.go
@@ -4608,6 +4608,9 @@ const docTemplate = `{
         "v4.RecentEnvelope": {
             "type": "object",
             "properties": {
+                "archived": {
+                    "type": "boolean"
+                },
                 "id": {
                     "type": "string"
                 },
3 changes: 3 additions & 0 deletions api/swagger.json
@@ -4597,6 +4597,9 @@
         "v4.RecentEnvelope": {
             "type": "object",
             "properties": {
+                "archived": {
+                    "type": "boolean"
+                },
                 "id": {
                     "type": "string"
                 },
2 changes: 2 additions & 0 deletions api/swagger.yaml
@@ -1145,6 +1145,8 @@ definitions:
     type: object
   v4.RecentEnvelope:
     properties:
+      archived:
+        type: boolean
       id:
         type: string
       name:
4 changes: 2 additions & 2 deletions pkg/controllers/v4/account.go
@@ -275,7 +275,7 @@ func GetAccountRecentEnvelopes(c *gin.Context) {
     latest := models.DB.
         Model(&models.Transaction{}).
         Joins("LEFT JOIN envelopes ON envelopes.id = transactions.envelope_id AND envelopes.deleted_at IS NULL").
-        Select("envelopes.id as e_id, envelopes.name as name, datetime(envelopes.created_at) as created").
+        Select("envelopes.id as e_id, envelopes.name as name, datetime(envelopes.created_at) as created, envelopes.archived as archived").
         Where(&models.Transaction{
             DestinationAccountID: account.ID,
         }).
@@ -286,7 +286,7 @@ func GetAccountRecentEnvelopes(c *gin.Context) {
     err = models.DB.
         Table("(?)", latest).
         // Set the nil UUID as ID if the envelope ID is NULL, since count() only counts non-null values
-        Select("IIF(e_id IS NOT NULL, e_id, NULL) as id, name").
+        Select("IIF(e_id IS NOT NULL, e_id, NULL) as id, name, archived").
         Group("id").
         Order("count(IIF(e_id IS NOT NULL, e_id, '0')) DESC"). // Order with a different IIF since NULL is ignored for count
         Order("created ASC").
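For orientation, the two chained GORM queries above roughly correspond to the SQL sketched below. This is an illustrative approximation, not repository code: the SQLite dialect is assumed (datetime() and IIF() are SQLite functions), the transactions table and the destination_account_id column name are inferred from GORM's default naming for models.Transaction, and any ordering or limit applied in the parts of the function hidden by the truncated diff is omitted.

```go
package example

// recentEnvelopesSQL approximates the query built in GetAccountRecentEnvelopes.
// The inner SELECT is the `latest` subquery; the outer SELECT groups by envelope
// and, after this commit, also carries the archived flag through to the result.
const recentEnvelopesSQL = `
SELECT IIF(e_id IS NOT NULL, e_id, NULL) AS id, name, archived
FROM (
    SELECT envelopes.id                   AS e_id,
           envelopes.name                 AS name,
           datetime(envelopes.created_at) AS created,
           envelopes.archived             AS archived
    FROM transactions
    LEFT JOIN envelopes
           ON envelopes.id = transactions.envelope_id
          AND envelopes.deleted_at IS NULL
    WHERE transactions.destination_account_id = ?
    -- ordering/limit from the truncated part of the diff omitted
)
GROUP BY id
ORDER BY count(IIF(e_id IS NOT NULL, e_id, '0')) DESC,
         created ASC
`
```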
12 changes: 11 additions & 1 deletion pkg/controllers/v4/account_test.go
@@ -608,9 +608,15 @@ func (suite *TestSuiteStandard) TestAccountRecentEnvelopes() {
 
     envelopeIDs := []*uuid.UUID{}
     for i := 0; i < 3; i++ {
+        archived := false
+        if i%2 == 0 {
+            archived = true
+        }
+
         envelope := createTestEnvelope(suite.T(), v4.EnvelopeEditable{
             CategoryID: category.Data.ID,
             Name:       strconv.Itoa(i),
+            Archived:   archived,
         })
 
         envelopeIDs = append(envelopeIDs, &envelope.Data.ID)
@@ -668,10 +674,14 @@
     // The last envelope needs to be the first in the sort since it
     // has been the most common one
     suite.Assert().Equal(envelopeIDs[2], data[0].ID)
+    suite.Assert().Equal(true, data[0].Archived)
 
     // Income is the second one since it appears three times
     var nilUUIDPointer *uuid.UUID
     suite.Assert().Equal(nilUUIDPointer, data[1].ID)
+    suite.Assert().Equal(false, data[1].Archived)
 
-    // Order for envelopes with the same frequency is undefined
+    // Order for envelopes with the same frequency is undefined and therefore not tested
+    // Only one of the two is archived, but since the order is undefined we XOR them
+    suite.Assert().Equal(true, data[2].Archived != data[3].Archived)
 }
5 changes: 3 additions & 2 deletions pkg/controllers/v4/account_types.go
@@ -138,8 +138,9 @@ type RecentEnvelopesResponse struct {
 }
 
 type RecentEnvelope struct {
-    Name string     `json:"name"`
-    ID   *uuid.UUID `json:"id"`
+    ID       *uuid.UUID `json:"id"`
+    Name     string     `json:"name"`
+    Archived bool       `json:"archived"`
 }
 
 type AccountComputedRequest struct {
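To show what the reordered struct and the new field mean on the wire, here is a minimal, self-contained sketch. It is not repository code: the struct is copied from the diff above, and the envelope name and ID values are invented for illustration.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/google/uuid"
)

// RecentEnvelope mirrors pkg/controllers/v4/account_types.go after this commit;
// it is duplicated here only to show the resulting JSON.
type RecentEnvelope struct {
	ID       *uuid.UUID `json:"id"`
	Name     string     `json:"name"`
	Archived bool       `json:"archived"`
}

func main() {
	id := uuid.New() // stands in for a real envelope ID

	recent := []RecentEnvelope{
		{ID: &id, Name: "Groceries", Archived: true}, // an archived envelope is now flagged as such
		{ID: nil, Name: "", Archived: false},         // income row: null envelope ID; the test expects Archived to be false here
	}

	out, _ := json.MarshalIndent(recent, "", "  ")
	fmt.Println(string(out))
	// Each object now contains an "archived" boolean alongside "id" and "name".
}
```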
