diff --git a/README.md b/README.md index 1d1856f0..94237266 100644 --- a/README.md +++ b/README.md @@ -762,6 +762,7 @@ gog drive copy "Copy Name" # Upload and download gog drive upload ./path/to/file --parent +gog drive upload ./path/to/report.docx --convert gog drive download --out ./downloaded.bin gog drive download --format pdf --out ./exported.pdf gog drive download --format docx --out ./doc.docx diff --git a/docs/spec.md b/docs/spec.md index 589bfe03..f7e255d0 100644 --- a/docs/spec.md +++ b/docs/spec.md @@ -169,7 +169,7 @@ Flag aliases: - `gog drive search [--max N] [--page TOKEN]` - `gog drive get ` - `gog drive download [--out PATH]` -- `gog drive upload [--name N] [--parent ID]` +- `gog drive upload [--name N] [--parent ID] [--convert]` - `gog drive mkdir [--parent ID]` - `gog drive delete ` - `gog drive move --parent ID` diff --git a/internal/cmd/docs.go b/internal/cmd/docs.go index 1adac196..2446049a 100644 --- a/internal/cmd/docs.go +++ b/internal/cmd/docs.go @@ -10,10 +10,12 @@ import ( "os" "strings" + "github.com/alecthomas/kong" "google.golang.org/api/docs/v1" "google.golang.org/api/drive/v3" gapi "google.golang.org/api/googleapi" + "github.com/steipete/gogcli/internal/config" "github.com/steipete/gogcli/internal/googleapi" "github.com/steipete/gogcli/internal/outfmt" "github.com/steipete/gogcli/internal/ui" @@ -26,6 +28,8 @@ type DocsCmd struct { Info DocsInfoCmd `cmd:"" name:"info" help:"Get Google Doc metadata"` Create DocsCreateCmd `cmd:"" name:"create" help:"Create a Google Doc"` Copy DocsCopyCmd `cmd:"" name:"copy" help:"Copy a Google Doc"` + Write DocsWriteCmd `cmd:"" name:"write" help:"Write content to a Google Doc"` + Update DocsUpdateCmd `cmd:"" name:"update" help:"Insert text at a specific index in a Google Doc"` Cat DocsCatCmd `cmd:"" name:"cat" help:"Print a Google Doc as plain text"` } @@ -177,6 +181,214 @@ func (c *DocsCopyCmd) Run(ctx context.Context, flags *RootFlags) error { }, c.DocID, c.Title, c.Parent) } +type DocsWriteCmd struct { + DocID string `arg:"" name:"docId" help:"Doc ID"` + Text string `name:"text" help:"Text to write"` + File string `name:"file" help:"Text file path ('-' for stdin)"` + Append bool `name:"append" help:"Append instead of replacing the document body"` +} + +func (c *DocsWriteCmd) Run(ctx context.Context, kctx *kong.Context, flags *RootFlags) error { + u := ui.FromContext(ctx) + account, err := requireAccount(flags) + if err != nil { + return err + } + + id := strings.TrimSpace(c.DocID) + if id == "" { + return usage("empty docId") + } + + text, provided, err := resolveTextInput(c.Text, c.File, kctx, "text", "file") + if err != nil { + return err + } + if !provided { + return usage("required: --text or --file") + } + if text == "" { + return usage("empty text") + } + + svc, err := newDocsService(ctx, account) + if err != nil { + return err + } + + doc, err := svc.Documents.Get(id). + Fields("documentId,body/content(startIndex,endIndex)"). + Context(ctx). 
+ Do() + if err != nil { + if isDocsNotFound(err) { + return fmt.Errorf("doc not found or not a Google Doc (id=%s)", id) + } + return err + } + if doc == nil { + return errors.New("doc not found") + } + + endIndex := docsDocumentEndIndex(doc) + insertIndex := int64(1) + if c.Append { + insertIndex = docsAppendIndex(endIndex) + } + + reqs := []*docs.Request{} + if !c.Append { + deleteEnd := endIndex - 1 + if deleteEnd > 1 { + reqs = append(reqs, &docs.Request{ + DeleteContentRange: &docs.DeleteContentRangeRequest{ + Range: &docs.Range{ + StartIndex: 1, + EndIndex: deleteEnd, + }, + }, + }) + } + } + + reqs = append(reqs, &docs.Request{ + InsertText: &docs.InsertTextRequest{ + Location: &docs.Location{Index: insertIndex}, + Text: text, + }, + }) + + resp, err := svc.Documents.BatchUpdate(id, &docs.BatchUpdateDocumentRequest{Requests: reqs}). + Context(ctx). + Do() + if err != nil { + if isDocsNotFound(err) { + return fmt.Errorf("doc not found or not a Google Doc (id=%s)", id) + } + return err + } + + if outfmt.IsJSON(ctx) { + payload := map[string]any{ + "documentId": resp.DocumentId, + "requests": len(reqs), + "append": c.Append, + "index": insertIndex, + } + if resp.WriteControl != nil { + payload["writeControl"] = resp.WriteControl + } + return outfmt.WriteJSON(os.Stdout, payload) + } + + u.Out().Printf("id\t%s", resp.DocumentId) + u.Out().Printf("requests\t%d", len(reqs)) + u.Out().Printf("append\t%t", c.Append) + u.Out().Printf("index\t%d", insertIndex) + if resp.WriteControl != nil && resp.WriteControl.RequiredRevisionId != "" { + u.Out().Printf("revision\t%s", resp.WriteControl.RequiredRevisionId) + } + return nil +} + +type DocsUpdateCmd struct { + DocID string `arg:"" name:"docId" help:"Doc ID"` + Text string `name:"text" help:"Text to insert"` + File string `name:"file" help:"Text file path ('-' for stdin)"` + Index int64 `name:"index" help:"Insert index (default: end of document)"` +} + +func (c *DocsUpdateCmd) Run(ctx context.Context, kctx *kong.Context, flags *RootFlags) error { + u := ui.FromContext(ctx) + account, err := requireAccount(flags) + if err != nil { + return err + } + + id := strings.TrimSpace(c.DocID) + if id == "" { + return usage("empty docId") + } + + text, provided, err := resolveTextInput(c.Text, c.File, kctx, "text", "file") + if err != nil { + return err + } + if !provided { + return usage("required: --text or --file") + } + if text == "" { + return usage("empty text") + } + + if flagProvided(kctx, "index") && c.Index <= 0 { + return usage("invalid --index (must be >= 1)") + } + + svc, err := newDocsService(ctx, account) + if err != nil { + return err + } + + insertIndex := c.Index + if insertIndex <= 0 { + var doc *docs.Document + doc, err = svc.Documents.Get(id). + Fields("documentId,body/content(startIndex,endIndex)"). + Context(ctx). + Do() + if err != nil { + if isDocsNotFound(err) { + return fmt.Errorf("doc not found or not a Google Doc (id=%s)", id) + } + return err + } + if doc == nil { + return errors.New("doc not found") + } + insertIndex = docsAppendIndex(docsDocumentEndIndex(doc)) + } + + reqs := []*docs.Request{ + { + InsertText: &docs.InsertTextRequest{ + Location: &docs.Location{Index: insertIndex}, + Text: text, + }, + }, + } + + resp, err := svc.Documents.BatchUpdate(id, &docs.BatchUpdateDocumentRequest{Requests: reqs}). + Context(ctx). 
+ Do() + if err != nil { + if isDocsNotFound(err) { + return fmt.Errorf("doc not found or not a Google Doc (id=%s)", id) + } + return err + } + + if outfmt.IsJSON(ctx) { + payload := map[string]any{ + "documentId": resp.DocumentId, + "requests": len(reqs), + "index": insertIndex, + } + if resp.WriteControl != nil { + payload["writeControl"] = resp.WriteControl + } + return outfmt.WriteJSON(os.Stdout, payload) + } + + u.Out().Printf("id\t%s", resp.DocumentId) + u.Out().Printf("requests\t%d", len(reqs)) + u.Out().Printf("index\t%d", insertIndex) + if resp.WriteControl != nil && resp.WriteControl.RequiredRevisionId != "" { + u.Out().Printf("revision\t%s", resp.WriteControl.RequiredRevisionId) + } + return nil +} + type DocsCatCmd struct { DocID string `arg:"" name:"docId" help:"Doc ID"` MaxBytes int64 `name:"max-bytes" help:"Max bytes to read (0 = unlimited)" default:"2000000"` @@ -307,6 +519,60 @@ func appendLimited(buf *bytes.Buffer, maxBytes int64, s string) bool { return true } +func resolveTextInput(text, file string, kctx *kong.Context, textFlag, fileFlag string) (string, bool, error) { + file = strings.TrimSpace(file) + textProvided := text != "" || flagProvided(kctx, textFlag) + fileProvided := file != "" || flagProvided(kctx, fileFlag) + if textProvided && fileProvided { + return "", true, usage(fmt.Sprintf("use only one of --%s or --%s", textFlag, fileFlag)) + } + if fileProvided { + b, err := readTextInput(file) + if err != nil { + return "", true, err + } + return string(b), true, nil + } + if textProvided { + return text, true, nil + } + return text, false, nil +} + +func readTextInput(path string) ([]byte, error) { + if path == "-" { + return io.ReadAll(os.Stdin) + } + expanded, err := config.ExpandPath(path) + if err != nil { + return nil, err + } + return os.ReadFile(expanded) //nolint:gosec // user-provided path +} + +func docsDocumentEndIndex(doc *docs.Document) int64 { + if doc == nil || doc.Body == nil { + return 1 + } + end := int64(1) + for _, el := range doc.Body.Content { + if el == nil { + continue + } + if el.EndIndex > end { + end = el.EndIndex + } + } + return end +} + +func docsAppendIndex(endIndex int64) int64 { + if endIndex > 1 { + return endIndex - 1 + } + return 1 +} + func isDocsNotFound(err error) bool { var apiErr *gapi.Error if !errors.As(err, &apiErr) { diff --git a/internal/cmd/docs_validation_more_test.go b/internal/cmd/docs_validation_more_test.go index d9d7cf8b..2da8431f 100644 --- a/internal/cmd/docs_validation_more_test.go +++ b/internal/cmd/docs_validation_more_test.go @@ -9,6 +9,7 @@ import ( "strings" "testing" + "github.com/alecthomas/kong" "google.golang.org/api/docs/v1" "google.golang.org/api/option" @@ -16,6 +17,20 @@ import ( "github.com/steipete/gogcli/internal/ui" ) +func parseDocsKong(t *testing.T, cmd any, args []string) *kong.Context { + t.Helper() + + parser, err := kong.New(cmd) + if err != nil { + t.Fatalf("kong new: %v", err) + } + kctx, err := parser.Parse(args) + if err != nil { + t.Fatalf("kong parse: %v", err) + } + return kctx +} + func TestDocsInfo_ValidationAndText(t *testing.T) { u, uiErr := ui.New(ui.Options{Stdout: io.Discard, Stderr: io.Discard, Color: "never"}) if uiErr != nil { @@ -86,6 +101,28 @@ func TestDocsCreateCat_ValidationErrors(t *testing.T) { } } +func TestDocsWriteUpdate_ValidationErrors(t *testing.T) { + u, uiErr := ui.New(ui.Options{Stdout: io.Discard, Stderr: io.Discard, Color: "never"}) + if uiErr != nil { + t.Fatalf("ui.New: %v", uiErr) + } + ctx := ui.WithUI(context.Background(), u) + flags := 
&RootFlags{Account: "a@b.com"} + + if err := (&DocsWriteCmd{}).Run(ctx, nil, flags); err == nil { + t.Fatalf("expected missing docId error") + } + if err := (&DocsWriteCmd{DocID: "doc1"}).Run(ctx, nil, flags); err == nil { + t.Fatalf("expected missing text error") + } + if err := (&DocsUpdateCmd{}).Run(ctx, nil, flags); err == nil { + t.Fatalf("expected missing docId error") + } + if err := (&DocsUpdateCmd{DocID: "doc1"}).Run(ctx, nil, flags); err == nil { + t.Fatalf("expected missing text error") + } +} + func TestDocsCat_JSON_EmptyDoc(t *testing.T) { origNew := newDocsService t.Cleanup(func() { newDocsService = origNew }) @@ -129,3 +166,34 @@ func TestDocsCat_JSON_EmptyDoc(t *testing.T) { t.Fatalf("unexpected json: %q", out) } } + +func TestDocsUpdate_InvalidIndex(t *testing.T) { + u, uiErr := ui.New(ui.Options{Stdout: io.Discard, Stderr: io.Discard, Color: "never"}) + if uiErr != nil { + t.Fatalf("ui.New: %v", uiErr) + } + ctx := ui.WithUI(context.Background(), u) + flags := &RootFlags{Account: "a@b.com"} + + tests := []struct { + name string + args []string + }{ + {"zero index", []string{"doc1", "--text", "hello", "--index", "0"}}, + {"negative index", []string{"doc1", "--text", "hello", "--index=-1"}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cmd := &DocsUpdateCmd{} + kctx := parseDocsKong(t, cmd, tt.args) + err := cmd.Run(ctx, kctx, flags) + if err == nil { + t.Fatalf("expected invalid --index error for %s", tt.name) + } + if !strings.Contains(err.Error(), "invalid --index") { + t.Fatalf("expected 'invalid --index' error, got: %v", err) + } + }) + } +} diff --git a/internal/cmd/docs_write_update_test.go b/internal/cmd/docs_write_update_test.go new file mode 100644 index 00000000..587584a1 --- /dev/null +++ b/internal/cmd/docs_write_update_test.go @@ -0,0 +1,329 @@ +package cmd + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "strings" + "testing" + + "google.golang.org/api/docs/v1" + "google.golang.org/api/option" + + "github.com/steipete/gogcli/internal/outfmt" + "github.com/steipete/gogcli/internal/ui" +) + +func TestDocsWriteUpdate_JSON(t *testing.T) { + origDocs := newDocsService + t.Cleanup(func() { newDocsService = origDocs }) + + var batchRequests [][]*docs.Request + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + path := r.URL.Path + switch { + case r.Method == http.MethodPost && strings.Contains(path, ":batchUpdate"): + var req docs.BatchUpdateDocumentRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + t.Fatalf("decode request: %v", err) + } + batchRequests = append(batchRequests, req.Requests) + id := strings.TrimSuffix(strings.TrimPrefix(path, "/v1/documents/"), ":batchUpdate") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]any{"documentId": id}) + return + case r.Method == http.MethodGet && strings.HasPrefix(path, "/v1/documents/"): + id := strings.TrimPrefix(path, "/v1/documents/") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]any{ + "documentId": id, + "body": map[string]any{ + "content": []any{ + map[string]any{"startIndex": 1, "endIndex": 12}, + }, + }, + }) + return + default: + http.NotFound(w, r) + return + } + })) + defer srv.Close() + + docSvc, err := docs.NewService(context.Background(), + option.WithoutAuthentication(), + option.WithHTTPClient(srv.Client()), + option.WithEndpoint(srv.URL+"/"), + ) + if 
err != nil { + t.Fatalf("NewDocsService: %v", err) + } + newDocsService = func(context.Context, string) (*docs.Service, error) { return docSvc, nil } + + flags := &RootFlags{Account: "a@b.com"} + u, uiErr := ui.New(ui.Options{Stdout: io.Discard, Stderr: io.Discard, Color: "never"}) + if uiErr != nil { + t.Fatalf("ui.New: %v", uiErr) + } + ctx := outfmt.WithMode(ui.WithUI(context.Background(), u), outfmt.Mode{JSON: true}) + + if err := runKong(t, &DocsWriteCmd{}, []string{"doc1", "--text", "hello"}, ctx, flags); err != nil { + t.Fatalf("write: %v", err) + } + if len(batchRequests) != 1 { + t.Fatalf("expected 1 batch request, got %d", len(batchRequests)) + } + if got := batchRequests[0]; len(got) != 2 || got[0].DeleteContentRange == nil || got[1].InsertText == nil { + t.Fatalf("unexpected write requests: %#v", got) + } + if got := batchRequests[0][0].DeleteContentRange.Range; got.StartIndex != 1 || got.EndIndex != 11 { + t.Fatalf("unexpected delete range: %#v", got) + } + if got := batchRequests[0][1].InsertText; got.Location.Index != 1 || got.Text != "hello" { + t.Fatalf("unexpected insert: %#v", got) + } + + if err := runKong(t, &DocsWriteCmd{}, []string{"doc1", "--text", "world", "--append"}, ctx, flags); err != nil { + t.Fatalf("write append: %v", err) + } + if len(batchRequests) != 2 { + t.Fatalf("expected 2 batch requests, got %d", len(batchRequests)) + } + if got := batchRequests[1]; len(got) != 1 || got[0].InsertText == nil { + t.Fatalf("unexpected append requests: %#v", got) + } + if got := batchRequests[1][0].InsertText; got.Location.Index != 11 || got.Text != "world" { + t.Fatalf("unexpected append insert: %#v", got) + } + + if err := runKong(t, &DocsUpdateCmd{}, []string{"doc1", "--text", "!"}, ctx, flags); err != nil { + t.Fatalf("update: %v", err) + } + if len(batchRequests) != 3 { + t.Fatalf("expected 3 batch requests, got %d", len(batchRequests)) + } + if got := batchRequests[2]; len(got) != 1 || got[0].InsertText == nil { + t.Fatalf("unexpected update requests: %#v", got) + } + if got := batchRequests[2][0].InsertText; got.Location.Index != 11 || got.Text != "!" { + t.Fatalf("unexpected update insert: %#v", got) + } + + if err := runKong(t, &DocsUpdateCmd{}, []string{"doc1", "--text", "?", "--index", "5"}, ctx, flags); err != nil { + t.Fatalf("update index: %v", err) + } + if len(batchRequests) != 4 { + t.Fatalf("expected 4 batch requests, got %d", len(batchRequests)) + } + if got := batchRequests[3]; len(got) != 1 || got[0].InsertText == nil { + t.Fatalf("unexpected update index requests: %#v", got) + } + if got := batchRequests[3][0].InsertText; got.Location.Index != 5 || got.Text != "?" 
{ + t.Fatalf("unexpected update index insert: %#v", got) + } +} + +func TestDocsWriteUpdate_FileInput(t *testing.T) { + origDocs := newDocsService + t.Cleanup(func() { newDocsService = origDocs }) + + var batchRequests [][]*docs.Request + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + path := r.URL.Path + switch { + case r.Method == http.MethodPost && strings.Contains(path, ":batchUpdate"): + var req docs.BatchUpdateDocumentRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + t.Fatalf("decode request: %v", err) + } + batchRequests = append(batchRequests, req.Requests) + id := strings.TrimSuffix(strings.TrimPrefix(path, "/v1/documents/"), ":batchUpdate") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]any{"documentId": id}) + return + case r.Method == http.MethodGet && strings.HasPrefix(path, "/v1/documents/"): + id := strings.TrimPrefix(path, "/v1/documents/") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]any{ + "documentId": id, + "body": map[string]any{ + "content": []any{ + map[string]any{"startIndex": 1, "endIndex": 12}, + }, + }, + }) + return + default: + http.NotFound(w, r) + return + } + })) + defer srv.Close() + + docSvc, err := docs.NewService(context.Background(), + option.WithoutAuthentication(), + option.WithHTTPClient(srv.Client()), + option.WithEndpoint(srv.URL+"/"), + ) + if err != nil { + t.Fatalf("NewDocsService: %v", err) + } + newDocsService = func(context.Context, string) (*docs.Service, error) { return docSvc, nil } + + flags := &RootFlags{Account: "a@b.com"} + u, uiErr := ui.New(ui.Options{Stdout: io.Discard, Stderr: io.Discard, Color: "never"}) + if uiErr != nil { + t.Fatalf("ui.New: %v", uiErr) + } + ctx := outfmt.WithMode(ui.WithUI(context.Background(), u), outfmt.Mode{JSON: true}) + + // Create a temp file for testing --file input + tmpDir := t.TempDir() + tmpFile := filepath.Join(tmpDir, "test-input.txt") + if err := os.WriteFile(tmpFile, []byte("file content"), 0o600); err != nil { + t.Fatalf("write temp file: %v", err) + } + + // Test DocsWriteCmd with --file + if err := runKong(t, &DocsWriteCmd{}, []string{"doc1", "--file", tmpFile}, ctx, flags); err != nil { + t.Fatalf("write with file: %v", err) + } + if len(batchRequests) != 1 { + t.Fatalf("expected 1 batch request, got %d", len(batchRequests)) + } + if got := batchRequests[0]; len(got) != 2 || got[0].DeleteContentRange == nil || got[1].InsertText == nil { + t.Fatalf("unexpected write requests: %#v", got) + } + if got := batchRequests[0][1].InsertText; got.Location.Index != 1 || got.Text != "file content" { + t.Fatalf("unexpected insert from file: got Text=%q, want %q", got.Text, "file content") + } + + // Create another temp file for update test + updateFile := filepath.Join(tmpDir, "update-input.txt") + if err := os.WriteFile(updateFile, []byte("updated text"), 0o600); err != nil { + t.Fatalf("write update temp file: %v", err) + } + + // Test DocsUpdateCmd with --file + if err := runKong(t, &DocsUpdateCmd{}, []string{"doc1", "--file", updateFile}, ctx, flags); err != nil { + t.Fatalf("update with file: %v", err) + } + if len(batchRequests) != 2 { + t.Fatalf("expected 2 batch requests, got %d", len(batchRequests)) + } + if got := batchRequests[1]; len(got) != 1 || got[0].InsertText == nil { + t.Fatalf("unexpected update requests: %#v", got) + } + if got := batchRequests[1][0].InsertText; got.Location.Index != 11 || got.Text != "updated text" { 
+ t.Fatalf("unexpected update insert from file: got Text=%q at index %d, want %q at index 11", + got.Text, got.Location.Index, "updated text") + } + + // Test DocsWriteCmd with --file and --append + appendFile := filepath.Join(tmpDir, "append-input.txt") + if err := os.WriteFile(appendFile, []byte("appended"), 0o600); err != nil { + t.Fatalf("write append temp file: %v", err) + } + if err := runKong(t, &DocsWriteCmd{}, []string{"doc1", "--file", appendFile, "--append"}, ctx, flags); err != nil { + t.Fatalf("write append with file: %v", err) + } + if len(batchRequests) != 3 { + t.Fatalf("expected 3 batch requests, got %d", len(batchRequests)) + } + if got := batchRequests[2]; len(got) != 1 || got[0].InsertText == nil { + t.Fatalf("unexpected append requests: %#v", got) + } + if got := batchRequests[2][0].InsertText; got.Location.Index != 11 || got.Text != "appended" { + t.Fatalf("unexpected append insert from file: got Text=%q at index %d, want %q at index 11", + got.Text, got.Location.Index, "appended") + } + + // Test DocsUpdateCmd with --file and --index + indexFile := filepath.Join(tmpDir, "index-input.txt") + if err := os.WriteFile(indexFile, []byte("at index 5"), 0o600); err != nil { + t.Fatalf("write index temp file: %v", err) + } + if err := runKong(t, &DocsUpdateCmd{}, []string{"doc1", "--file", indexFile, "--index", "5"}, ctx, flags); err != nil { + t.Fatalf("update with file and index: %v", err) + } + if len(batchRequests) != 4 { + t.Fatalf("expected 4 batch requests, got %d", len(batchRequests)) + } + if got := batchRequests[3]; len(got) != 1 || got[0].InsertText == nil { + t.Fatalf("unexpected update index requests: %#v", got) + } + if got := batchRequests[3][0].InsertText; got.Location.Index != 5 || got.Text != "at index 5" { + t.Fatalf("unexpected update index insert from file: got Text=%q at index %d, want %q at index 5", + got.Text, got.Location.Index, "at index 5") + } +} + +func TestDocsWriteUpdate_FileInputErrors(t *testing.T) { + origDocs := newDocsService + t.Cleanup(func() { newDocsService = origDocs }) + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.NotFound(w, r) + })) + defer srv.Close() + + docSvc, err := docs.NewService(context.Background(), + option.WithoutAuthentication(), + option.WithHTTPClient(srv.Client()), + option.WithEndpoint(srv.URL+"/"), + ) + if err != nil { + t.Fatalf("NewDocsService: %v", err) + } + newDocsService = func(context.Context, string) (*docs.Service, error) { return docSvc, nil } + + flags := &RootFlags{Account: "a@b.com"} + u, uiErr := ui.New(ui.Options{Stdout: io.Discard, Stderr: io.Discard, Color: "never"}) + if uiErr != nil { + t.Fatalf("ui.New: %v", uiErr) + } + ctx := outfmt.WithMode(ui.WithUI(context.Background(), u), outfmt.Mode{JSON: true}) + + // Test with non-existent file + err = runKong(t, &DocsWriteCmd{}, []string{"doc1", "--file", "/nonexistent/path/file.txt"}, ctx, flags) + if err == nil { + t.Fatal("expected error for non-existent file, got nil") + } + if !strings.Contains(err.Error(), "no such file") { + t.Fatalf("expected 'no such file' error, got: %v", err) + } + + // Test with empty file + tmpDir := t.TempDir() + emptyFile := filepath.Join(tmpDir, "empty.txt") + if writeErr := os.WriteFile(emptyFile, []byte(""), 0o600); writeErr != nil { + t.Fatalf("write empty temp file: %v", writeErr) + } + err = runKong(t, &DocsWriteCmd{}, []string{"doc1", "--file", emptyFile}, ctx, flags) + if err == nil { + t.Fatal("expected error for empty file, got nil") + } + if 
!strings.Contains(err.Error(), "empty text") { + t.Fatalf("expected 'empty text' error, got: %v", err) + } + + // Test that --text and --file are mutually exclusive + testFile := filepath.Join(tmpDir, "test.txt") + if writeErr := os.WriteFile(testFile, []byte("content"), 0o600); writeErr != nil { + t.Fatalf("write test temp file: %v", writeErr) + } + err = runKong(t, &DocsWriteCmd{}, []string{"doc1", "--text", "hello", "--file", testFile}, ctx, flags) + if err == nil { + t.Fatal("expected error for both --text and --file, got nil") + } + if !strings.Contains(err.Error(), "use only one of --text or --file") { + t.Fatalf("expected mutual exclusion error, got: %v", err) + } +} diff --git a/internal/cmd/drive.go b/internal/cmd/drive.go index 2adac673..845e3bae 100644 --- a/internal/cmd/drive.go +++ b/internal/cmd/drive.go @@ -40,6 +40,7 @@ const ( extPptx = ".pptx" extPNG = ".png" extTXT = ".txt" + formatAuto = "auto" ) type DriveCmd struct { @@ -87,11 +88,13 @@ func (c *DriveLsCmd) Run(ctx context.Context, flags *RootFlags) error { q := buildDriveListQuery(folderID, c.Query) + // Include files from shared drives, not just personal "My Drive" resp, err := svc.Files.List(). Q(q). PageSize(c.Max). PageToken(c.Page). OrderBy("modifiedTime desc"). + Corpora("allDrives"). SupportsAllDrives(true). IncludeItemsFromAllDrives(true). Fields("nextPageToken, files(id, name, mimeType, size, modifiedTime, parents, webViewLink)"). @@ -158,6 +161,7 @@ func (c *DriveSearchCmd) Run(ctx context.Context, flags *RootFlags) error { PageSize(c.Max). PageToken(c.Page). OrderBy("modifiedTime desc"). + Corpora("allDrives"). SupportsAllDrives(true). IncludeItemsFromAllDrives(true). Fields("nextPageToken, files(id, name, mimeType, size, modifiedTime, parents, webViewLink)"). @@ -319,6 +323,7 @@ type DriveUploadCmd struct { LocalPath string `arg:"" name:"localPath" help:"Path to local file"` Name string `name:"name" help:"Override filename"` Parent string `name:"parent" help:"Destination folder ID"` + Convert bool `name:"convert" help:"Convert supported uploads to Google Workspace formats"` } func (c *DriveUploadCmd) Run(ctx context.Context, flags *RootFlags) error { @@ -358,6 +363,13 @@ func (c *DriveUploadCmd) Run(ctx context.Context, flags *RootFlags) error { if parent != "" { meta.Parents = []string{parent} } + if c.Convert { + convertMimeType, convertErr := driveUploadConvertMimeType(localPath) + if convertErr != nil { + return convertErr + } + meta.MimeType = convertMimeType + } mimeType := guessMimeType(localPath) created, err := svc.Files.Create(meta). 
@@ -899,8 +911,34 @@ func guessMimeType(path string) string { } } +func driveUploadConvertMimeType(path string) (string, error) { + ext := strings.ToLower(filepath.Ext(path)) + switch ext { + case ".doc", extDocx: + return driveMimeGoogleDoc, nil + case ".xls", extXlsx, extCSV: + return driveMimeGoogleSheet, nil + case ".ppt", extPptx: + return driveMimeGoogleSlides, nil + default: + supported := "supported: .doc, .docx, .xls, .xlsx, .csv, .ppt, .pptx" + if ext == "" { + return "", fmt.Errorf("unsupported --convert for files without extension (%s)", supported) + } + return "", fmt.Errorf("unsupported --convert for %q (%s)", ext, supported) + } +} + func downloadDriveFile(ctx context.Context, svc *drive.Service, meta *drive.File, destPath string, format string) (string, int64, error) { isGoogleDoc := strings.HasPrefix(meta.MimeType, "application/vnd.google-apps.") + normalizedFormat := strings.ToLower(strings.TrimSpace(format)) + if normalizedFormat == formatAuto { + normalizedFormat = "" + } + + if !isGoogleDoc && normalizedFormat != "" { + return "", 0, fmt.Errorf("--format %q not supported for non-Google Workspace files (mimeType=%q); file can only be downloaded as-is", format, meta.MimeType) + } var ( resp *http.Response @@ -910,11 +948,11 @@ func downloadDriveFile(ctx context.Context, svc *drive.Service, meta *drive.File if isGoogleDoc { var exportMimeType string - if strings.TrimSpace(format) == "" { + if normalizedFormat == "" { exportMimeType = driveExportMimeType(meta.MimeType) } else { var mimeErr error - exportMimeType, mimeErr = driveExportMimeTypeForFormat(meta.MimeType, format) + exportMimeType, mimeErr = driveExportMimeTypeForFormat(meta.MimeType, normalizedFormat) if mimeErr != nil { return "", 0, mimeErr } @@ -978,7 +1016,7 @@ func driveExportMimeType(googleMimeType string) string { func driveExportMimeTypeForFormat(googleMimeType string, format string) (string, error) { format = strings.ToLower(strings.TrimSpace(format)) - if format == "" { + if format == "" || format == formatAuto { return driveExportMimeType(googleMimeType), nil } diff --git a/internal/cmd/drive_commands_more_test.go b/internal/cmd/drive_commands_more_test.go index c367cfcd..2be05db2 100644 --- a/internal/cmd/drive_commands_more_test.go +++ b/internal/cmd/drive_commands_more_test.go @@ -26,6 +26,11 @@ func TestDriveCommands_MoreCoverage(t *testing.T) { switch { case r.Method == http.MethodGet && path == "/files": q := r.URL.Query().Get("q") + if strings.Contains(q, "fullText contains") { + if got := r.URL.Query().Get("corpora"); got != "allDrives" { + t.Fatalf("expected corpora=allDrives, got: %q", r.URL.RawQuery) + } + } if strings.Contains(q, "empty") { _ = json.NewEncoder(w).Encode(map[string]any{ "files": []map[string]any{}, diff --git a/internal/cmd/drive_download_test.go b/internal/cmd/drive_download_test.go index e53e0aeb..2e06ecd5 100644 --- a/internal/cmd/drive_download_test.go +++ b/internal/cmd/drive_download_test.go @@ -57,6 +57,36 @@ func TestDownloadDriveFile_NonGoogleDoc(t *testing.T) { } } +func TestDownloadDriveFile_NonGoogleDocFormatRejected(t *testing.T) { + origDownload := driveDownload + t.Cleanup(func() { driveDownload = origDownload }) + + called := false + driveDownload = func(context.Context, *drive.Service, string) (*http.Response, error) { + called = true + return &http.Response{ + Status: "200 OK", + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader("ok")), + }, nil + } + + dest := filepath.Join(t.TempDir(), "file.html") + _, _, err := 
downloadDriveFile(context.Background(), &drive.Service{}, &drive.File{Id: "id1", MimeType: "application/pdf"}, dest, "html") + if err == nil { + t.Fatalf("expected error") + } + if !strings.Contains(err.Error(), "non-Google Workspace") { + t.Fatalf("unexpected error: %v", err) + } + if called { + t.Fatalf("download should not be called on format error") + } + if _, statErr := os.Stat(dest); !os.IsNotExist(statErr) { + t.Fatalf("expected no file written, stat=%v", statErr) + } +} + func TestDownloadDriveFile_GoogleDocExport(t *testing.T) { body := "exported" srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { diff --git a/internal/cmd/drive_export_format_test.go b/internal/cmd/drive_export_format_test.go index 6957dc1d..280d251b 100644 --- a/internal/cmd/drive_export_format_test.go +++ b/internal/cmd/drive_export_format_test.go @@ -28,6 +28,12 @@ func TestDriveExportMimeTypeForFormat(t *testing.T) { format: "", wantMime: "application/pdf", }, + { + name: "doc_auto", + googleMime: "application/vnd.google-apps.document", + format: "auto", + wantMime: "application/pdf", + }, { name: "doc_pdf", googleMime: "application/vnd.google-apps.document", @@ -59,6 +65,12 @@ func TestDriveExportMimeTypeForFormat(t *testing.T) { format: "", wantMime: "text/csv", }, + { + name: "sheet_auto", + googleMime: "application/vnd.google-apps.spreadsheet", + format: "auto", + wantMime: "text/csv", + }, { name: "sheet_pdf", googleMime: "application/vnd.google-apps.spreadsheet", diff --git a/internal/cmd/drive_helpers_test.go b/internal/cmd/drive_helpers_test.go index 01c7830b..16636cb9 100644 --- a/internal/cmd/drive_helpers_test.go +++ b/internal/cmd/drive_helpers_test.go @@ -96,3 +96,32 @@ func TestGuessMimeTypeMore(t *testing.T) { } } } + +func TestDriveUploadConvertMimeType(t *testing.T) { + tests := map[string]string{ + "file.doc": driveMimeGoogleDoc, + "file.docx": driveMimeGoogleDoc, + "file.xls": driveMimeGoogleSheet, + "file.xlsx": driveMimeGoogleSheet, + "file.csv": driveMimeGoogleSheet, + "file.ppt": driveMimeGoogleSlides, + "file.pptx": driveMimeGoogleSlides, + } + + for name, expected := range tests { + got, err := driveUploadConvertMimeType(name) + if err != nil { + t.Fatalf("driveUploadConvertMimeType(%q) error: %v", name, err) + } + if got != expected { + t.Fatalf("driveUploadConvertMimeType(%q) = %q, want %q", name, got, expected) + } + } + + if _, err := driveUploadConvertMimeType("file.pdf"); err == nil { + t.Fatalf("expected error for unsupported extension") + } + if _, err := driveUploadConvertMimeType("file"); err == nil { + t.Fatalf("expected error for missing extension") + } +} diff --git a/internal/cmd/drive_ls_cmd_test.go b/internal/cmd/drive_ls_cmd_test.go index 68e5ba26..9fa0adab 100644 --- a/internal/cmd/drive_ls_cmd_test.go +++ b/internal/cmd/drive_ls_cmd_test.go @@ -24,6 +24,10 @@ func TestDriveLsCmd_TextAndJSON(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch { case r.Method == http.MethodGet && (r.URL.Path == "/drive/v3/files" || r.URL.Path == "/files"): + if errMsg := driveAllDrivesQueryError(r); errMsg != "" { + http.Error(w, errMsg, http.StatusBadRequest) + return + } w.Header().Set("Content-Type", "application/json") _ = json.NewEncoder(w).Encode(map[string]any{ "files": []map[string]any{ diff --git a/internal/cmd/drive_search_more_test.go b/internal/cmd/drive_search_more_test.go index b2fd8e64..a1ca8950 100644 --- a/internal/cmd/drive_search_more_test.go +++ 
b/internal/cmd/drive_search_more_test.go @@ -17,6 +17,20 @@ import ( "github.com/steipete/gogcli/internal/ui" ) +func driveAllDrivesQueryError(r *http.Request) string { + q := r.URL.Query() + if q.Get("corpora") != "allDrives" { + return "missing corpora=allDrives" + } + if q.Get("supportsAllDrives") != "true" { + return "missing supportsAllDrives=true" + } + if q.Get("includeItemsFromAllDrives") != "true" { + return "missing includeItemsFromAllDrives=true" + } + return "" +} + func TestDriveSearchCmd_TextAndJSON(t *testing.T) { origNew := newDriveService t.Cleanup(func() { newDriveService = origNew }) @@ -31,6 +45,10 @@ func TestDriveSearchCmd_TextAndJSON(t *testing.T) { http.NotFound(w, r) return } + if errMsg := driveAllDrivesQueryError(r); errMsg != "" { + http.Error(w, errMsg, http.StatusBadRequest) + return + } w.Header().Set("Content-Type", "application/json") _ = json.NewEncoder(w).Encode(map[string]any{ "files": []map[string]any{ @@ -100,6 +118,10 @@ func TestDriveSearchCmd_NoResultsAndEmptyQuery(t *testing.T) { http.NotFound(w, r) return } + if errMsg := driveAllDrivesQueryError(r); errMsg != "" { + http.Error(w, errMsg, http.StatusBadRequest) + return + } w.Header().Set("Content-Type", "application/json") _ = json.NewEncoder(w).Encode(map[string]any{ "files": []map[string]any{}, diff --git a/internal/cmd/drive_upload_convert_test.go b/internal/cmd/drive_upload_convert_test.go new file mode 100644 index 00000000..abe46ae3 --- /dev/null +++ b/internal/cmd/drive_upload_convert_test.go @@ -0,0 +1,125 @@ +package cmd + +import ( + "context" + "encoding/json" + "io" + "mime" + "mime/multipart" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "strings" + "sync" + "testing" + + "google.golang.org/api/drive/v3" + "google.golang.org/api/option" + + "github.com/steipete/gogcli/internal/ui" +) + +func TestDriveUploadConvertMetadata(t *testing.T) { + origNew := newDriveService + t.Cleanup(func() { newDriveService = origNew }) + + var ( + mu sync.Mutex + gotMime string + gotParsed bool + ) + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost || !strings.Contains(r.URL.Path, "/upload/drive/v3/files") { + http.NotFound(w, r) + return + } + + mediaType, params, err := mime.ParseMediaType(r.Header.Get("Content-Type")) + if err != nil { + t.Fatalf("parse content-type: %v", err) + } + if !strings.HasPrefix(mediaType, "multipart/") { + t.Fatalf("expected multipart upload, got %q", mediaType) + } + boundary := params["boundary"] + if boundary == "" { + t.Fatalf("missing multipart boundary") + } + + reader := multipart.NewReader(r.Body, boundary) + found := false + for { + part, err := reader.NextPart() + if err == io.EOF { + break + } + if err != nil { + t.Fatalf("read multipart: %v", err) + } + if strings.Contains(part.Header.Get("Content-Type"), "application/json") { + var meta drive.File + if err := json.NewDecoder(part).Decode(&meta); err != nil { + t.Fatalf("decode metadata: %v", err) + } + mu.Lock() + gotMime = meta.MimeType + gotParsed = true + mu.Unlock() + found = true + break + } + } + if !found { + t.Fatalf("metadata part not found") + } + + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]any{ + "id": "up1", + "name": "upload.docx", + "mimeType": driveMimeGoogleDoc, + }) + })) + defer srv.Close() + + svc, err := drive.NewService(context.Background(), + option.WithoutAuthentication(), + option.WithHTTPClient(srv.Client()), + option.WithEndpoint(srv.URL+"/"), + 
) + if err != nil { + t.Fatalf("NewService: %v", err) + } + newDriveService = func(context.Context, string) (*drive.Service, error) { return svc, nil } + + tmpFile := filepath.Join(t.TempDir(), "upload.docx") + if err := os.WriteFile(tmpFile, []byte("data"), 0o600); err != nil { + t.Fatalf("WriteFile: %v", err) + } + + u, uiErr := ui.New(ui.Options{Stdout: io.Discard, Stderr: io.Discard, Color: "never"}) + if uiErr != nil { + t.Fatalf("ui.New: %v", uiErr) + } + ctx := ui.WithUI(context.Background(), u) + flags := &RootFlags{Account: "a@b.com"} + + cmd := &DriveUploadCmd{LocalPath: tmpFile, Convert: true} + if err := cmd.Run(ctx, flags); err != nil { + t.Fatalf("upload: %v", err) + } + + mu.Lock() + got := gotMime + parsed := gotParsed + mu.Unlock() + + if !parsed { + t.Fatalf("expected metadata to be parsed") + } + if got != driveMimeGoogleDoc { + t.Fatalf("mimeType = %q, want %q", got, driveMimeGoogleDoc) + } +} diff --git a/internal/cmd/execute_drive_download_test.go b/internal/cmd/execute_drive_download_test.go index 513aaf73..8e5db488 100644 --- a/internal/cmd/execute_drive_download_test.go +++ b/internal/cmd/execute_drive_download_test.go @@ -159,3 +159,69 @@ func TestExecute_DriveDownload_WithOutDir_JSON(t *testing.T) { t.Fatalf("expected file at %s: %v", wantPath, statErr) } } + +func TestExecute_DriveDownload_FormatRejected_NonGoogle(t *testing.T) { + origNew := newDriveService + origDownload := driveDownload + t.Cleanup(func() { + newDriveService = origNew + driveDownload = origDownload + }) + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if !(strings.Contains(r.URL.Path, "/files/id1") && r.Method == http.MethodGet) { + http.NotFound(w, r) + return + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]any{ + "id": "id1", + "name": "Doc", + "mimeType": "text/plain", + }) + })) + defer srv.Close() + + svc, err := drive.NewService(context.Background(), + option.WithoutAuthentication(), + option.WithHTTPClient(srv.Client()), + option.WithEndpoint(srv.URL+"/"), + ) + if err != nil { + t.Fatalf("NewService: %v", err) + } + newDriveService = func(context.Context, string) (*drive.Service, error) { return svc, nil } + + called := false + driveDownload = func(context.Context, *drive.Service, string) (*http.Response, error) { + called = true + return &http.Response{ + StatusCode: http.StatusOK, + Status: "200 OK", + Body: io.NopCloser(strings.NewReader("abc")), + }, nil + } + + outPath := filepath.Join(t.TempDir(), "out.html") + var execErr error + _ = captureStderr(t, func() { + execErr = Execute([]string{ + "--account", "a@b.com", + "drive", "download", "id1", + "--format", "html", + "--out", outPath, + }) + }) + if execErr == nil { + t.Fatalf("expected error") + } + if !strings.Contains(execErr.Error(), "non-Google Workspace") { + t.Fatalf("unexpected error: %v", execErr) + } + if called { + t.Fatalf("download should not be called on format error") + } + if _, statErr := os.Stat(outPath); !os.IsNotExist(statErr) { + t.Fatalf("expected no file written, stat=%v", statErr) + } +} diff --git a/internal/cmd/gmail_thread.go b/internal/cmd/gmail_thread.go index 52ccac96..d1bc5ce9 100644 --- a/internal/cmd/gmail_thread.go +++ b/internal/cmd/gmail_thread.go @@ -14,6 +14,7 @@ import ( "path/filepath" "regexp" "strings" + "unicode/utf8" "golang.org/x/net/html/charset" "google.golang.org/api/gmail/v1" @@ -442,12 +443,18 @@ func decodePartBody(p *gmail.MessagePart) (string, error) { return "", err } + 
contentType := strings.TrimSpace(headerValue(p, "Content-Type")) + charsetLabel := contentTypeCharset(contentType) + decoded := raw if cte := strings.TrimSpace(headerValue(p, "Content-Transfer-Encoding")); cte != "" { decoded = decodeTransferEncoding(decoded, cte) + if isQuotedPrintableEncoding(cte) && shouldSkipQuotedPrintable(raw, decoded, charsetLabel) { + decoded = raw + } } - if contentType := strings.TrimSpace(headerValue(p, "Content-Type")); contentType != "" { + if contentType != "" { decoded = decodeBodyCharset(decoded, contentType) } @@ -472,11 +479,7 @@ func decodeTransferEncoding(data []byte, encoding string) []byte { } func decodeBodyCharset(data []byte, contentType string) []byte { - _, params, err := mime.ParseMediaType(contentType) - if err != nil { - return data - } - charsetLabel := strings.TrimSpace(params["charset"]) + charsetLabel := contentTypeCharset(contentType) if charsetLabel == "" || strings.EqualFold(charsetLabel, "utf-8") { return data } @@ -491,6 +494,49 @@ func decodeBodyCharset(data []byte, contentType string) []byte { return decoded } +func contentTypeCharset(contentType string) string { + if contentType == "" { + return "" + } + _, params, err := mime.ParseMediaType(contentType) + if err != nil { + return "" + } + return strings.TrimSpace(params["charset"]) +} + +func isQuotedPrintableEncoding(value string) bool { + value = strings.TrimSpace(value) + if value == "" { + return false + } + // Handle potential parameters defensively, though RFC 2045 doesn't define them. + if idx := strings.Index(value, ";"); idx != -1 { + value = value[:idx] + } + return strings.EqualFold(strings.TrimSpace(value), "quoted-printable") +} + +// shouldSkipQuotedPrintable returns true when QP decoding should be skipped. +// This handles emails where Content-Transfer-Encoding is declared as quoted-printable +// but the body is already decoded. Applying QP decoding to such content corrupts +// '=' characters (e.g., in URLs). We detect this by checking if raw is valid UTF-8 +// but decoded becomes invalid (indicating the decoder treated literal '=' as escapes). 
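+// For example (mirroring the quoted-printable cases in gmail_thread_helpers_test.go):
+// a properly encoded body like "a=3Db" still decodes to "a=b", whereas an
+// already-decoded body containing "token_hash=abc123" would have its literal '='
+// treated as an escape and corrupted into invalid bytes, so it is left as-is.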
+func shouldSkipQuotedPrintable(raw, decoded []byte, charsetLabel string) bool { + if !isUTF8Charset(charsetLabel) { + return false + } + if !utf8.Valid(raw) { + return false + } + return !utf8.Valid(decoded) +} + +func isUTF8Charset(label string) bool { + label = strings.ToLower(strings.TrimSpace(label)) + return label == "" || label == "utf-8" || label == "us-ascii" +} + func looksLikeBase64(data []byte) bool { trimmed := bytes.TrimSpace(data) if len(trimmed) == 0 { diff --git a/internal/cmd/gmail_thread_helpers_test.go b/internal/cmd/gmail_thread_helpers_test.go index 73443d6e..44638119 100644 --- a/internal/cmd/gmail_thread_helpers_test.go +++ b/internal/cmd/gmail_thread_helpers_test.go @@ -153,6 +153,41 @@ func TestFindPartBody_DecodesQuotedPrintable(t *testing.T) { } } +func TestFindPartBody_SkipsQuotedPrintableWhenAlreadyDecoded(t *testing.T) { + body := "https://example.com/auth?token_hash=abc123&type=magiclink" + encoded := base64.RawURLEncoding.EncodeToString([]byte(body)) + part := &gmail.MessagePart{ + MimeType: "text/plain", + Headers: []*gmail.MessagePartHeader{ + {Name: "Content-Transfer-Encoding", Value: "quoted-printable"}, + {Name: "Content-Type", Value: "text/plain; charset=utf-8"}, + }, + Body: &gmail.MessagePartBody{Data: encoded}, + } + got := findPartBody(part, "text/plain") + if got != body { + t.Fatalf("unexpected decoded body: %q", got) + } +} + +func TestFindPartBody_DecodesQuotedPrintableEquals(t *testing.T) { + // In QP encoding, = is encoded as =3D, so "a=b" becomes "a=3Db" + qp := "a=3Db" + encoded := base64.RawURLEncoding.EncodeToString([]byte(qp)) + part := &gmail.MessagePart{ + MimeType: "text/plain", + Headers: []*gmail.MessagePartHeader{ + {Name: "Content-Transfer-Encoding", Value: "quoted-printable"}, + {Name: "Content-Type", Value: "text/plain; charset=utf-8"}, + }, + Body: &gmail.MessagePartBody{Data: encoded}, + } + got := findPartBody(part, "text/plain") + if got != "a=b" { + t.Fatalf("unexpected decoded body: %q, want %q", got, "a=b") + } +} + func TestFindPartBody_DecodesBase64Transfer(t *testing.T) { inner := base64.StdEncoding.EncodeToString([]byte("plain body")) encoded := base64.RawURLEncoding.EncodeToString([]byte(inner)) @@ -237,3 +272,44 @@ func TestDownloadAttachment_Cached(t *testing.T) { t.Fatalf("expected cached path %q, got %q cached=%v", outPath, gotPath, cached) } } + +func TestIsQuotedPrintableEncoding(t *testing.T) { + tests := []struct { + input string + want bool + }{ + {"quoted-printable", true}, + {"QUOTED-PRINTABLE", true}, + {"Quoted-Printable", true}, + {" quoted-printable ", true}, + {"base64", false}, + {"", false}, + } + for _, tc := range tests { + got := isQuotedPrintableEncoding(tc.input) + if got != tc.want { + t.Fatalf("isQuotedPrintableEncoding(%q) = %v, want %v", tc.input, got, tc.want) + } + } +} + +func TestIsUTF8Charset(t *testing.T) { + tests := []struct { + input string + want bool + }{ + {"", true}, + {"utf-8", true}, + {"UTF-8", true}, + {"us-ascii", true}, + {"US-ASCII", true}, + {"iso-8859-1", false}, + {"windows-1252", false}, + } + for _, tc := range tests { + got := isUTF8Charset(tc.input) + if got != tc.want { + t.Fatalf("isUTF8Charset(%q) = %v, want %v", tc.input, got, tc.want) + } + } +} diff --git a/internal/cmd/root.go b/internal/cmd/root.go index 28012deb..4e15ff7f 100644 --- a/internal/cmd/root.go +++ b/internal/cmd/root.go @@ -21,6 +21,8 @@ import ( const ( colorAuto = "auto" colorNever = "never" + boolTrue = "true" + boolFalse = "false" ) type RootFlags struct { @@ -163,9 +165,9 @@ func 
envOr(key, fallback string) string { func boolString(v bool) string { if v { - return "true" + return boolTrue } - return "false" + return boolFalse } func newParser(description string) (*kong.Kong, *CLI, error) { diff --git a/internal/cmd/time_helpers.go b/internal/cmd/time_helpers.go index 4d8ec9dc..5523ecde 100644 --- a/internal/cmd/time_helpers.go +++ b/internal/cmd/time_helpers.go @@ -131,10 +131,14 @@ func ResolveTimeRangeWithDefaults(ctx context.Context, svc *calendar.Service, fl switch { case flags.To != "": + toIsDayExpr := isDayExpr(flags.To, now, loc) to, err = parseTimeExpr(flags.To, now, loc) if err != nil { return nil, fmt.Errorf("invalid --to: %w", err) } + if toIsDayExpr { + to = endOfDay(to) + } case flags.From != "" && defaults.ToFromOffset != 0: to = from.Add(defaults.ToFromOffset) default: @@ -149,6 +153,27 @@ func ResolveTimeRangeWithDefaults(ctx context.Context, svc *calendar.Service, fl }, nil } +func isDayExpr(expr string, now time.Time, loc *time.Location) bool { + expr = strings.TrimSpace(expr) + if expr == "" { + return false + } + exprLower := strings.ToLower(expr) + switch exprLower { + case "today", "tomorrow", "yesterday": + return true + case "now": + return false + } + if _, ok := parseWeekday(exprLower, now); ok { + return true + } + if _, err := time.ParseInLocation("2006-01-02", expr, loc); err == nil { + return true + } + return false +} + // parseTimeExpr parses a time expression which can be: // - RFC3339: 2026-01-05T14:00:00-08:00 // - ISO 8601 with numeric timezone: 2026-01-05T14:00:00-0800 (no colon) diff --git a/internal/cmd/time_range_more_test.go b/internal/cmd/time_range_more_test.go index ad83de24..846a0443 100644 --- a/internal/cmd/time_range_more_test.go +++ b/internal/cmd/time_range_more_test.go @@ -86,6 +86,27 @@ func TestResolveTimeRangeWithDefaultsFromTo(t *testing.T) { } } +func TestResolveTimeRangeWithDefaultsToDateOnlyEndOfDay(t *testing.T) { + svc := newCalendarServiceWithTimezone(t, "UTC") + flags := TimeRangeFlags{ + From: "2025-01-05T10:00:00Z", + To: "2025-01-05", + } + tr, err := ResolveTimeRangeWithDefaults(context.Background(), svc, flags, TimeRangeDefaults{}) + if err != nil { + t.Fatalf("ResolveTimeRangeWithDefaults: %v", err) + } + + expectedFrom := time.Date(2025, 1, 5, 10, 0, 0, 0, time.UTC) + expectedTo := time.Date(2025, 1, 5, 23, 59, 59, 999999999, time.UTC) + if !tr.From.Equal(expectedFrom) { + t.Fatalf("unexpected from: %v", tr.From) + } + if !tr.To.Equal(expectedTo) { + t.Fatalf("unexpected to: %v", tr.To) + } +} + func TestResolveTimeRangeWithDefaultsFromOffset(t *testing.T) { svc := newCalendarServiceWithTimezone(t, "UTC") flags := TimeRangeFlags{From: "2025-01-05T10:00:00Z"} @@ -140,3 +161,174 @@ func TestGetUserTimezoneInvalid(t *testing.T) { t.Fatalf("expected error") } } + +func TestResolveTimeRangeWithDefaultsToTomorrowEndOfDay(t *testing.T) { + svc := newCalendarServiceWithTimezone(t, "UTC") + flags := TimeRangeFlags{ + From: "2025-01-05T10:00:00Z", + To: "tomorrow", + } + + // Capture now BEFORE calling the function to avoid midnight boundary flakiness + now := time.Now().In(time.UTC) + + tr, err := ResolveTimeRangeWithDefaults(context.Background(), svc, flags, TimeRangeDefaults{}) + if err != nil { + t.Fatalf("ResolveTimeRangeWithDefaults: %v", err) + } + + expectedFrom := time.Date(2025, 1, 5, 10, 0, 0, 0, time.UTC) + if !tr.From.Equal(expectedFrom) { + t.Fatalf("unexpected from: %v", tr.From) + } + + // "tomorrow" is relative to now, so we calculate expected tomorrow + expectedTomorrow := now.AddDate(0, 
0, 1) + expectedTo := time.Date(expectedTomorrow.Year(), expectedTomorrow.Month(), expectedTomorrow.Day(), 23, 59, 59, 999999999, time.UTC) + + if !tr.To.Equal(expectedTo) { + t.Fatalf("expected --to tomorrow to expand to end-of-day %v, got %v", expectedTo, tr.To) + } +} + +func TestResolveTimeRangeWithDefaultsToNowNoExpansion(t *testing.T) { + svc := newCalendarServiceWithTimezone(t, "UTC") + flags := TimeRangeFlags{ + From: "2025-01-05T10:00:00Z", + To: "now", + } + + before := time.Now().In(time.UTC) + tr, err := ResolveTimeRangeWithDefaults(context.Background(), svc, flags, TimeRangeDefaults{}) + if err != nil { + t.Fatalf("ResolveTimeRangeWithDefaults: %v", err) + } + after := time.Now().In(time.UTC) + + expectedFrom := time.Date(2025, 1, 5, 10, 0, 0, 0, time.UTC) + if !tr.From.Equal(expectedFrom) { + t.Fatalf("unexpected from: %v", tr.From) + } + + // "now" should NOT be expanded to end-of-day; it should be the current time + if tr.To.Before(before) || tr.To.After(after) { + t.Fatalf("expected --to now to be current time (between %v and %v), got %v", before, after, tr.To) + } + + // Verify it's NOT end-of-day (23:59:59.999999999) + if tr.To.Hour() == 23 && tr.To.Minute() == 59 && tr.To.Second() == 59 && tr.To.Nanosecond() == 999999999 { + t.Fatalf("expected --to now NOT to expand to end-of-day, but got %v", tr.To) + } +} + +func TestResolveTimeRangeWithDefaultsToMondayEndOfDay(t *testing.T) { + svc := newCalendarServiceWithTimezone(t, "UTC") + flags := TimeRangeFlags{ + From: "2025-01-05T10:00:00Z", + To: "monday", + } + + // Capture now BEFORE calling the function to avoid midnight boundary flakiness + now := time.Now().In(time.UTC) + + tr, err := ResolveTimeRangeWithDefaults(context.Background(), svc, flags, TimeRangeDefaults{}) + if err != nil { + t.Fatalf("ResolveTimeRangeWithDefaults: %v", err) + } + + expectedFrom := time.Date(2025, 1, 5, 10, 0, 0, 0, time.UTC) + if !tr.From.Equal(expectedFrom) { + t.Fatalf("unexpected from: %v", tr.From) + } + + // "monday" is relative to now, so we calculate expected Monday + // parseWeekday returns the upcoming Monday (or today if already Monday) + currentDay := now.Weekday() + daysUntil := int(time.Monday) - int(currentDay) + if daysUntil < 0 { + daysUntil += 7 + } + expectedMonday := now.AddDate(0, 0, daysUntil) + expectedTo := time.Date(expectedMonday.Year(), expectedMonday.Month(), expectedMonday.Day(), 23, 59, 59, 999999999, time.UTC) + + if !tr.To.Equal(expectedTo) { + t.Fatalf("expected --to monday to expand to end-of-day %v, got %v", expectedTo, tr.To) + } +} + +func TestIsDayExpr(t *testing.T) { + loc := time.UTC + // Use a fixed reference time: Wednesday, January 15, 2025 + now := time.Date(2025, 1, 15, 10, 30, 0, 0, loc) + + tests := []struct { + name string + expr string + want bool + }{ + // Relative day keywords -> true + {"today", "today", true}, + {"tomorrow", "tomorrow", true}, + {"yesterday", "yesterday", true}, + {"today uppercase", "TODAY", true}, + {"today mixed case", "ToDay", true}, + + // "now" is a precise moment -> false + {"now", "now", false}, + {"now uppercase", "NOW", false}, + + // Weekday names -> true + {"monday", "monday", true}, + {"tuesday", "tuesday", true}, + {"wednesday", "wednesday", true}, + {"thursday", "thursday", true}, + {"friday", "friday", true}, + {"saturday", "saturday", true}, + {"sunday", "sunday", true}, + {"mon abbreviation", "mon", true}, + {"tue abbreviation", "tue", true}, + {"wed abbreviation", "wed", true}, + {"thu abbreviation", "thu", true}, + {"fri abbreviation", "fri", true}, + 
{"sat abbreviation", "sat", true}, + {"sun abbreviation", "sun", true}, + {"Monday uppercase", "MONDAY", true}, + {"next monday", "next monday", true}, + {"next tuesday", "next tuesday", true}, + + // ISO date (YYYY-MM-DD) -> true + {"iso date", "2025-01-05", true}, + {"iso date future", "2026-12-31", true}, + {"iso date past", "2020-01-01", true}, + + // RFC3339 timestamps -> false (precise moment, not a day) + {"rfc3339 utc", "2025-01-05T10:00:00Z", false}, + {"rfc3339 offset", "2025-01-05T10:00:00-08:00", false}, + {"rfc3339 positive offset", "2025-01-05T10:00:00+05:30", false}, + + // ISO 8601 with numeric timezone (no colon) -> false + {"iso8601 no colon", "2025-01-05T10:00:00-0800", false}, + + // Date with time but no timezone -> false (has time component) + {"datetime no tz", "2025-01-05T15:04:05", false}, + {"datetime space separator", "2025-01-05 15:04", false}, + + // Empty string -> false + {"empty string", "", false}, + {"whitespace only", " ", false}, + + // Invalid expressions -> false + {"invalid word", "notaday", false}, + {"invalid format", "01-05-2025", false}, + {"partial date", "2025-01", false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := isDayExpr(tt.expr, now, loc) + if got != tt.want { + t.Errorf("isDayExpr(%q) = %v, want %v", tt.expr, got, tt.want) + } + }) + } +}