diff --git a/github.go b/github.go
index 9d6bb40..fb8d9b2 100644
--- a/github.go
+++ b/github.go
@@ -692,28 +692,26 @@ func appendComment(cj *CritJSON, filePath string, startLine, endLine int, body,
// appendReply adds a reply to an existing comment in the CritJSON struct in memory.
// Returns an error if the comment ID is not found or is ambiguous across files.
-// Searches both file comments and review_comments (IDs starting with "r").
+// Searches both file comments and review_comments.
func appendReply(cj *CritJSON, commentID, body, author string, resolve bool, filterPath string) error {
now := time.Now().UTC().Format(time.RFC3339)
cj.UpdatedAt = now
- // Check review comments first for IDs starting with "r"
- if strings.HasPrefix(commentID, "r") {
- for i, c := range cj.ReviewComments {
- if c.ID == commentID {
- reply := Reply{
- ID: nextReplyID(commentID, c.Replies),
- Body: body,
- Author: author,
- CreatedAt: now,
- }
- cj.ReviewComments[i].Replies = append(cj.ReviewComments[i].Replies, reply)
- cj.ReviewComments[i].UpdatedAt = now
- if resolve {
- cj.ReviewComments[i].Resolved = true
- }
- return nil
+ // Check all review comments (not just those starting with "r" — web-fetched ones use "web-N").
+ for i, c := range cj.ReviewComments {
+ if c.ID == commentID {
+ reply := Reply{
+ ID: nextReplyID(commentID, c.Replies),
+ Body: body,
+ Author: author,
+ CreatedAt: now,
}
+ cj.ReviewComments[i].Replies = append(cj.ReviewComments[i].Replies, reply)
+ cj.ReviewComments[i].UpdatedAt = now
+ if resolve {
+ cj.ReviewComments[i].Resolved = true
+ }
+ return nil
}
}
diff --git a/main.go b/main.go
index 9361544..7056556 100644
--- a/main.go
+++ b/main.go
@@ -49,6 +49,8 @@ func main() {
printVersion()
case "share":
runShare(os.Args[2:])
+ case "fetch":
+ runFetch(os.Args[2:])
case "unpublish":
runUnpublish(os.Args[2:])
case "install":
@@ -227,6 +229,84 @@ func runShare(args []string) {
}
}
+func runFetch(args []string) {
+ outputDir := ""
+ for i := 0; i < len(args); i++ {
+ arg := args[i]
+ switch {
+ case arg == "--output" || arg == "-o":
+ if i+1 >= len(args) {
+ fmt.Fprintf(os.Stderr, "Error: %s requires a value\n", arg)
+ os.Exit(1)
+ }
+ i++
+ outputDir = args[i]
+ default:
+			fmt.Fprintln(os.Stderr, "Usage: crit fetch [--output <dir>]")
+ fmt.Fprintln(os.Stderr, "")
+ fmt.Fprintln(os.Stderr, "Fetches comments added on crit-web into .crit.json.")
+ fmt.Fprintln(os.Stderr, "Requires a prior `crit share` so a share URL is recorded.")
+ os.Exit(1)
+ }
+ }
+
+ critDir, err := resolveCritDir(outputDir)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error: %v\n", err)
+ os.Exit(1)
+ }
+
+ critPath := filepath.Join(critDir, ".crit.json")
+ data, err := os.ReadFile(critPath)
+ if err != nil {
+ fmt.Fprintln(os.Stderr, "Error: no .crit.json found. Run `crit share` first.")
+ os.Exit(1)
+ }
+ var cj CritJSON
+ if err := json.Unmarshal(data, &cj); err != nil {
+ fmt.Fprintf(os.Stderr, "Error: invalid .crit.json: %v\n", err)
+ os.Exit(1)
+ }
+ if cj.ShareURL == "" {
+ fmt.Fprintln(os.Stderr, "Error: no share URL in .crit.json. Run `crit share` first.")
+ os.Exit(1)
+ }
+
+ authToken := resolveAuthToken()
+ localIDs := buildLocalIDSet(cj)
+ localFingerprints := buildLocalFingerprints(cj)
+
+ webComments, err := fetchNewWebComments(cj.ShareURL, localIDs, localFingerprints, authToken)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error fetching remote comments: %v\n", err)
+ os.Exit(1)
+ }
+
+ if len(webComments) == 0 {
+ fmt.Println("No new comments.")
+ return
+ }
+
+ if err := mergeWebComments(critDir, webComments); err != nil {
+ fmt.Fprintf(os.Stderr, "Error merging comments: %v\n", err)
+ os.Exit(1)
+ }
+
+ fmt.Printf("Fetched %d new comment(s) into .crit.json\n", len(webComments))
+ for _, wc := range webComments {
+ runes := []rune(wc.Body)
+ body := wc.Body
+ if len(runes) > 60 {
+ body = string(runes[:60]) + "..."
+ }
+ if wc.Scope == "review" || wc.FilePath == "" {
+ fmt.Printf(" [review] %s\n", body)
+ } else {
+ fmt.Printf(" [%s:%d] %s\n", wc.FilePath, wc.StartLine, body)
+ }
+ }
+}
+
func runUnpublish(args []string) {
unpubOutputDir := ""
unpubSvcURL := ""
@@ -1425,6 +1505,7 @@ Usage:
  crit comment --json [--author <name>] [--output <dir>]   Read comments from stdin as JSON
crit comment --clear Remove all comments from .crit.json
crit share [file...] Share files to crit-web and print the URL
+  crit fetch [--output <dir>]                              Fetch comments from crit-web into .crit.json
crit unpublish Remove a shared review from crit-web
  crit pull [--output <dir>] [pr-number]                   Fetch GitHub PR comments to .crit.json
  crit push [--dry-run] [--event <event>] [-m <msg>] [-o <dir>] [pr-number]  Post .crit.json comments to a GitHub PR
diff --git a/share.go b/share.go
index 49efa8a..0c8227a 100644
--- a/share.go
+++ b/share.go
@@ -12,6 +12,7 @@ import (
"path"
"path/filepath"
"sort"
+ "strconv"
"strings"
"time"
)
@@ -63,12 +64,13 @@ type shareReply struct {
// shareComment represents a comment to include in the shared review.
type shareComment struct {
- File string `json:"file"`
- StartLine int `json:"start_line"`
- EndLine int `json:"end_line"`
+ File string `json:"file,omitempty"`
+ StartLine int `json:"start_line,omitempty"`
+ EndLine int `json:"end_line,omitempty"`
Body string `json:"body"`
Quote string `json:"quote,omitempty"`
Author string `json:"author_display_name,omitempty"`
+ Scope string `json:"scope,omitempty"`
ReviewRound int `json:"review_round,omitempty"`
Replies []shareReply `json:"replies,omitempty"`
ExternalID string `json:"external_id,omitempty"`
@@ -256,6 +258,23 @@ func loadCommentsForShare(dir string, filePaths []string) ([]shareComment, int)
comments = append(comments, sc)
}
}
+ for _, c := range cj.ReviewComments {
+ if c.Resolved {
+ continue
+ }
+ sc := shareComment{
+ Body: c.Body,
+ Author: c.Author,
+ Scope: "review",
+ }
+ if c.ReviewRound >= 1 {
+ sc.ReviewRound = c.ReviewRound
+ }
+ for _, r := range c.Replies {
+ sc.Replies = append(sc.Replies, shareReply{Body: r.Body, Author: r.Author})
+ }
+ comments = append(comments, sc)
+ }
return comments, round
}
@@ -309,6 +328,23 @@ func loadAllCommentsForShare(dir string, filePaths []string) ([]shareComment, in
comments = append(comments, sc)
}
}
+ // Include review-level comments so they survive upsert round-trips.
+ for _, c := range cj.ReviewComments {
+ sc := shareComment{
+ Body: c.Body,
+ Author: c.Author,
+ Resolved: c.Resolved,
+ ExternalID: c.ID,
+ Scope: "review",
+ }
+ if c.ReviewRound >= 1 {
+ sc.ReviewRound = c.ReviewRound
+ }
+ for _, r := range c.Replies {
+ sc.Replies = append(sc.Replies, shareReply{Body: r.Body, Author: r.Author})
+ }
+ comments = append(comments, sc)
+ }
return comments, round
}
@@ -323,6 +359,7 @@ type webComment struct {
ExternalID string `json:"external_id"`
AuthorDisplayName string `json:"author_display_name"`
Quote string `json:"quote"`
+ Scope string `json:"scope"`
}
// buildLocalFingerprints returns a set of body+file+line fingerprints for all
@@ -336,6 +373,10 @@ func buildLocalFingerprints(cj CritJSON) map[string]bool {
fps[key] = true
}
}
+ for _, c := range cj.ReviewComments {
+ key := fmt.Sprintf("%s||0|0", c.Body)
+ fps[key] = true
+ }
return fps
}
@@ -515,7 +556,8 @@ func buildLocalIDSet(cj CritJSON) map[string]bool {
return ids
}
-// mergeWebComments adds web-reviewer comments into .crit.json under their respective files.
+// mergeWebComments adds web-reviewer comments into .crit.json under their respective files
+// or into review_comments for review-level (scope:"review") comments.
func mergeWebComments(dir string, newComments []webComment) error {
critPath := filepath.Join(dir, ".crit.json")
data, err := os.ReadFile(critPath)
@@ -530,23 +572,47 @@ func mergeWebComments(dir string, newComments []webComment) error {
cj.Files = make(map[string]CritJSONFile)
}
+ // Find the highest existing web-N index so new IDs are globally unique
+ // even if earlier ones were deleted from .crit.json.
+ webCount := 0
+ for _, f := range cj.Files {
+ for _, c := range f.Comments {
+ if n, err := strconv.Atoi(strings.TrimPrefix(c.ID, "web-")); err == nil && strings.HasPrefix(c.ID, "web-") && n > webCount {
+ webCount = n
+ }
+ }
+ }
+ for _, c := range cj.ReviewComments {
+ if n, err := strconv.Atoi(strings.TrimPrefix(c.ID, "web-")); err == nil && strings.HasPrefix(c.ID, "web-") && n > webCount {
+ webCount = n
+ }
+ }
+
+ now := time.Now().UTC().Format(time.RFC3339)
for _, wc := range newComments {
- entry := cj.Files[wc.FilePath]
- entry.Comments = append(entry.Comments, Comment{
- ID: fmt.Sprintf("web-%d", len(entry.Comments)+1),
+ webCount++
+ c := Comment{
+ ID: fmt.Sprintf("web-%d", webCount),
StartLine: wc.StartLine,
EndLine: wc.EndLine,
Body: wc.Body,
Quote: wc.Quote,
Author: wc.AuthorDisplayName,
+ Scope: wc.Scope,
ReviewRound: wc.ReviewRound,
- CreatedAt: time.Now().UTC().Format(time.RFC3339),
- UpdatedAt: time.Now().UTC().Format(time.RFC3339),
- })
- cj.Files[wc.FilePath] = entry
+ CreatedAt: now,
+ UpdatedAt: now,
+ }
+ if wc.Scope == "review" {
+ cj.ReviewComments = append(cj.ReviewComments, c)
+ } else {
+ entry := cj.Files[wc.FilePath]
+ entry.Comments = append(entry.Comments, c)
+ cj.Files[wc.FilePath] = entry
+ }
}
- cj.UpdatedAt = time.Now().UTC().Format(time.RFC3339)
+ cj.UpdatedAt = now
out, err := json.MarshalIndent(cj, "", " ")
if err != nil {
return err