Fix Task.tasks field type: rename Children []*Task to ChildIDs []int
The Checkvist API returns the 'tasks' field as an array of child task IDs (integers), not as full Task objects. This was causing JSON unmarshal errors. BREAKING CHANGE: Task.Children []*Task renamed to Task.ChildIDs []int
parent 536ccf6ccf
commit d92a1b90c2

5 changed files with 66 additions and 5 deletions
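For callers of the package, the breaking change means child tasks are no longer embedded in the parent `Task`; only their IDs are returned, and each child has to be fetched with a follow-up request. Below is a minimal migration sketch, assuming the `Tasks(listID).Get` client API exercised in the tests further down; the import path, the `*Client` type, and the printed fields are placeholders, not confirmed by this commit.

```go
// Hypothetical migration sketch: the import path and *Client type are
// assumptions; the Tasks(listID).Get call mirrors the tests in this commit.
package example

import (
	"context"
	"fmt"

	checkvist "example.com/checkvist" // placeholder import path
)

// printChildren replaces the old pattern of ranging over task.Children:
// the API now returns only child IDs, so each child is fetched explicitly.
func printChildren(ctx context.Context, client *checkvist.Client, listID, taskID int) error {
	task, err := client.Tasks(listID).Get(ctx, taskID)
	if err != nil {
		return err
	}
	for _, childID := range task.ChildIDs {
		child, err := client.Tasks(listID).Get(ctx, childID)
		if err != nil {
			return err
		}
		fmt.Printf("child %d: %+v\n", childID, child)
	}
	return nil
}
```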
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased]
 
+### Changed
+
+- **BREAKING**: `Task.Children []*Task` renamed to `Task.ChildIDs []int` to match API response format (API returns array of child task IDs, not full task objects)
+
 ### Fixed
 
 - **Tasks**: Fix `Close()`, `Reopen()`, and `Invalidate()` to handle API array response format
 
@@ -140,8 +140,8 @@ type Task struct {
 	UpdatedAt APITime `json:"updated_at"`
 	// CreatedAt is the timestamp when the task was created.
 	CreatedAt APITime `json:"created_at"`
-	// Children contains nested child tasks (when fetched with tree structure).
-	Children []*Task `json:"tasks,omitempty"`
+	// ChildIDs contains IDs of child tasks (returned by API as array of integers).
+	ChildIDs []int `json:"tasks,omitempty"`
 	// Notes contains the comments/notes attached to this task.
 	Notes []Note `json:"notes,omitempty"`
 }
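The renamed field also accounts for the unmarshal errors mentioned in the commit message: `encoding/json` cannot decode a JSON number into a `*Task`, so a payload like `"tasks": [201, 202, 203]` failed against `Children []*Task` but decodes directly into `[]int`. A standalone sketch with the struct trimmed to the one field under discussion (the `taskSubset` type is illustrative only, not part of the package):

```go
package example

import (
	"encoding/json"
	"fmt"
)

// taskSubset is a trimmed, illustrative copy of Task containing only the
// field whose type changed in this commit.
type taskSubset struct {
	ChildIDs []int `json:"tasks,omitempty"`
}

func decodeExample() {
	raw := []byte(`{"id": 101, "tasks": [201, 202, 203]}`)

	var t taskSubset
	if err := json.Unmarshal(raw, &t); err != nil {
		fmt.Println("unmarshal error:", err)
		return
	}
	fmt.Println(t.ChildIDs) // prints [201 202 203]
}
```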
@@ -73,6 +73,60 @@ func TestTasks_Get(t *testing.T) {
 	}
 }
 
+func TestTask_ChildIDs_Unmarshal(t *testing.T) {
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		w.Header().Set("Content-Type", "application/json")
+
+		switch r.URL.Path {
+		case "/auth/login.json":
+			json.NewEncoder(w).Encode(map[string]string{"token": "test-token"})
+		case "/checklists/1/tasks/101.json":
+			w.Write(loadFixture(t, "testdata/tasks/single.json"))
+		case "/checklists/1/tasks.json":
+			w.Write(loadFixture(t, "testdata/tasks/list.json"))
+		default:
+			t.Errorf("unexpected path: %s", r.URL.Path)
+		}
+	}))
+	defer server.Close()
+
+	client := NewClient("user@example.com", "api-key", WithBaseURL(server.URL))
+
+	t.Run("single task with child IDs", func(t *testing.T) {
+		task, err := client.Tasks(1).Get(context.Background(), 101)
+		if err != nil {
+			t.Fatalf("unexpected error: %v", err)
+		}
+		expectedChildIDs := []int{201, 202, 203}
+		if len(task.ChildIDs) != len(expectedChildIDs) {
+			t.Fatalf("expected %d child IDs, got %d", len(expectedChildIDs), len(task.ChildIDs))
+		}
+		for i, id := range expectedChildIDs {
+			if task.ChildIDs[i] != id {
+				t.Errorf("expected ChildIDs[%d] = %d, got %d", i, id, task.ChildIDs[i])
+			}
+		}
+	})
+
+	t.Run("list with mixed child IDs", func(t *testing.T) {
+		tasks, err := client.Tasks(1).List(context.Background())
+		if err != nil {
+			t.Fatalf("unexpected error: %v", err)
+		}
+		// First task has child IDs [201, 202]
+		if len(tasks[0].ChildIDs) != 2 {
+			t.Errorf("expected 2 child IDs for first task, got %d", len(tasks[0].ChildIDs))
+		}
+		if tasks[0].ChildIDs[0] != 201 || tasks[0].ChildIDs[1] != 202 {
+			t.Errorf("expected ChildIDs [201, 202], got %v", tasks[0].ChildIDs)
+		}
+		// Second task has empty child IDs
+		if len(tasks[1].ChildIDs) != 0 {
+			t.Errorf("expected 0 child IDs for second task, got %d", len(tasks[1].ChildIDs))
+		}
+	})
+}
+
 func TestTasks_Create(t *testing.T) {
 	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		w.Header().Set("Content-Type", "application/json")
testdata/tasks/list.json (vendored, 6 changed lines)

@@ -13,7 +13,8 @@
     "comments_count": 0,
     "update_line": "",
     "updated_at": "2026/01/14 10:00:00 +0000",
-    "created_at": "2026/01/10 09:00:00 +0000"
+    "created_at": "2026/01/10 09:00:00 +0000",
+    "tasks": [201, 202]
   },
   {
     "id": 102,
@@ -29,6 +30,7 @@
     "comments_count": 3,
     "update_line": "",
     "updated_at": "2026/01/14 11:00:00 +0000",
-    "created_at": "2026/01/11 10:00:00 +0000"
+    "created_at": "2026/01/11 10:00:00 +0000",
+    "tasks": []
   }
 ]
testdata/tasks/single.json (vendored, 3 changed lines)

@@ -12,5 +12,6 @@
   "comments_count": 0,
   "update_line": "",
   "updated_at": "2026/01/14 10:00:00 +0000",
-  "created_at": "2026/01/10 09:00:00 +0000"
+  "created_at": "2026/01/10 09:00:00 +0000",
+  "tasks": [201, 202, 203]
 }