Fixed a number of sync issues I noticed
All checks were successful
Ascently - Sync Deploy / build-and-push (push) Successful in 2m30s

This commit is contained in:
2026-01-09 14:39:28 -07:00
parent afb0456692
commit d002c703d5
6 changed files with 478 additions and 954 deletions

View File

@@ -13,7 +13,7 @@ import (
"time"
)
const VERSION = "2.3.0"
const VERSION = "2.4.0"
func min(a, b int) int {
if a < b {
@@ -22,12 +22,6 @@ func min(a, b int) int {
return b
}
// DeletedItem is a sync tombstone: it records that the entity of the given
// Type with the given ID was deleted at DeletedAt, so other clients can
// drop their copy instead of resurrecting it on the next merge.
type DeletedItem struct {
// ID of the deleted entity.
ID string `json:"id"`
// Type is the entity kind; the sync code keys tombstones as "gym",
// "problem", "session" or "attempt".
Type string `json:"type"`
// DeletedAt is the deletion time as an RFC 3339 timestamp string
// (parsed with time.RFC3339 elsewhere in this file).
DeletedAt string `json:"deletedAt"`
}
type ClimbDataBackup struct {
ExportedAt string `json:"exportedAt"`
Version string `json:"version"`
@@ -36,7 +30,6 @@ type ClimbDataBackup struct {
Problems []BackupProblem `json:"problems"`
Sessions []BackupClimbSession `json:"sessions"`
Attempts []BackupAttempt `json:"attempts"`
DeletedItems []DeletedItem `json:"deletedItems"`
}
type DeltaSyncRequest struct {
@@ -45,16 +38,14 @@ type DeltaSyncRequest struct {
Problems []BackupProblem `json:"problems"`
Sessions []BackupClimbSession `json:"sessions"`
Attempts []BackupAttempt `json:"attempts"`
DeletedItems []DeletedItem `json:"deletedItems"`
}
type DeltaSyncResponse struct {
ServerTime string `json:"serverTime"`
Gyms []BackupGym `json:"gyms"`
Problems []BackupProblem `json:"problems"`
Sessions []BackupClimbSession `json:"sessions"`
Attempts []BackupAttempt `json:"attempts"`
DeletedItems []DeletedItem `json:"deletedItems"`
ServerTime string `json:"serverTime"`
Gyms []BackupGym `json:"gyms"`
Problems []BackupProblem `json:"problems"`
Sessions []BackupClimbSession `json:"sessions"`
Attempts []BackupAttempt `json:"attempts"`
}
type BackupGym struct {
@@ -65,6 +56,7 @@ type BackupGym struct {
DifficultySystems []string `json:"difficultySystems"`
CustomDifficultyGrades []string `json:"customDifficultyGrades"`
Notes *string `json:"notes,omitempty"`
IsDeleted bool `json:"isDeleted"`
CreatedAt string `json:"createdAt"`
UpdatedAt string `json:"updatedAt"`
}
@@ -82,6 +74,7 @@ type BackupProblem struct {
IsActive bool `json:"isActive"`
DateSet *string `json:"dateSet,omitempty"`
Notes *string `json:"notes,omitempty"`
IsDeleted bool `json:"isDeleted"`
CreatedAt string `json:"createdAt"`
UpdatedAt string `json:"updatedAt"`
}
@@ -101,6 +94,7 @@ type BackupClimbSession struct {
Duration *int64 `json:"duration,omitempty"`
Status string `json:"status"`
Notes *string `json:"notes,omitempty"`
IsDeleted bool `json:"isDeleted"`
CreatedAt string `json:"createdAt"`
UpdatedAt string `json:"updatedAt"`
}
@@ -115,7 +109,9 @@ type BackupAttempt struct {
Duration *int64 `json:"duration,omitempty"`
RestTime *int64 `json:"restTime,omitempty"`
Timestamp string `json:"timestamp"`
IsDeleted bool `json:"isDeleted"`
CreatedAt string `json:"createdAt"`
UpdatedAt *string `json:"updatedAt,omitempty"`
}
type SyncServer struct {
@@ -147,7 +143,6 @@ func (s *SyncServer) loadData() (*ClimbDataBackup, error) {
Problems: []BackupProblem{},
Sessions: []BackupClimbSession{},
Attempts: []BackupAttempt{},
DeletedItems: []DeletedItem{},
}, nil
}
@@ -158,7 +153,18 @@ func (s *SyncServer) loadData() (*ClimbDataBackup, error) {
}
log.Printf("Read %d bytes from data file", len(data))
log.Printf("File content preview: %s", string(data[:min(200, len(data))]))
// Basic check to see if we have JSON content
if len(data) == 0 {
return &ClimbDataBackup{
ExportedAt: time.Now().UTC().Format(time.RFC3339),
Version: "2.0",
FormatVersion: "2.0",
Gyms: []BackupGym{},
Problems: []BackupProblem{},
Sessions: []BackupClimbSession{},
Attempts: []BackupAttempt{},
}, nil
}
var backup ClimbDataBackup
if err := json.Unmarshal(data, &backup); err != nil {
@@ -250,7 +256,18 @@ func (s *SyncServer) mergeAttempts(existing []BackupAttempt, updates []BackupAtt
for _, attempt := range updates {
if existingAttempt, exists := attemptMap[attempt.ID]; exists {
if attempt.CreatedAt >= existingAttempt.CreatedAt {
// Resolve update time for comparison
updateTime := attempt.CreatedAt
if attempt.UpdatedAt != nil {
updateTime = *attempt.UpdatedAt
}
existingUpdateTime := existingAttempt.CreatedAt
if existingAttempt.UpdatedAt != nil {
existingUpdateTime = *existingAttempt.UpdatedAt
}
if updateTime >= existingUpdateTime {
attemptMap[attempt.ID] = attempt
}
} else {
@@ -265,89 +282,6 @@ func (s *SyncServer) mergeAttempts(existing []BackupAttempt, updates []BackupAtt
return result
}
// mergeDeletedItems folds client-supplied deletion tombstones into the
// server's existing set. Tombstones are keyed by "Type:ID"; when both sides
// have a record for the same key, the one with the later (string-compared
// RFC 3339) DeletedAt wins. Tombstones older than 30 days are dropped so the
// list cannot grow without bound; records whose DeletedAt cannot be parsed
// are kept conservatively.
func (s *SyncServer) mergeDeletedItems(existing []DeletedItem, updates []DeletedItem) []DeletedItem {
	merged := make(map[string]DeletedItem)
	for _, rec := range existing {
		merged[rec.Type+":"+rec.ID] = rec
	}
	for _, rec := range updates {
		k := rec.Type + ":" + rec.ID
		prev, found := merged[k]
		if !found || rec.DeletedAt >= prev.DeletedAt {
			merged[k] = rec
		}
	}
	// Clean up tombstones older than 30 days to prevent unbounded growth.
	cutoff := time.Now().UTC().Add(-30 * 24 * time.Hour)
	kept := make([]DeletedItem, 0, len(merged))
	for _, rec := range merged {
		if ts, err := time.Parse(time.RFC3339, rec.DeletedAt); err == nil && ts.Before(cutoff) {
			log.Printf("Cleaning up old deletion record: type=%s, id=%s, deletedAt=%s",
				rec.Type, rec.ID, rec.DeletedAt)
			continue
		}
		kept = append(kept, rec)
	}
	return kept
}
// applyDeletions removes every gym, problem, session and attempt from the
// in-memory backup whose ID appears in deletedItems under the matching
// entity type. Slices are rebuilt as empty (non-nil) literals so they still
// encode to JSON arrays rather than null.
func (s *SyncServer) applyDeletions(backup *ClimbDataBackup, deletedItems []DeletedItem) {
	// Index tombstones as type -> set of deleted IDs.
	tombstones := make(map[string]map[string]bool)
	for _, rec := range deletedItems {
		ids, ok := tombstones[rec.Type]
		if !ok {
			ids = make(map[string]bool)
			tombstones[rec.Type] = ids
		}
		ids[rec.ID] = true
	}
	if gone := tombstones["gym"]; gone != nil {
		kept := []BackupGym{}
		for _, g := range backup.Gyms {
			if !gone[g.ID] {
				kept = append(kept, g)
			}
		}
		backup.Gyms = kept
	}
	if gone := tombstones["problem"]; gone != nil {
		kept := []BackupProblem{}
		for _, p := range backup.Problems {
			if !gone[p.ID] {
				kept = append(kept, p)
			}
		}
		backup.Problems = kept
	}
	if gone := tombstones["session"]; gone != nil {
		kept := []BackupClimbSession{}
		for _, sess := range backup.Sessions {
			if !gone[sess.ID] {
				kept = append(kept, sess)
			}
		}
		backup.Sessions = kept
	}
	if gone := tombstones["attempt"]; gone != nil {
		kept := []BackupAttempt{}
		for _, a := range backup.Attempts {
			if !gone[a.ID] {
				kept = append(kept, a)
			}
		}
		backup.Attempts = kept
	}
}
func (s *SyncServer) saveData(backup *ClimbDataBackup) error {
backup.ExportedAt = time.Now().UTC().Format(time.RFC3339)
@@ -383,6 +317,8 @@ func (s *SyncServer) handleGet(w http.ResponseWriter, r *http.Request) {
return
}
log.Printf("Sending data to %s: gyms=%d, problems=%d, sessions=%d, attempts=%d",
r.RemoteAddr, len(backup.Gyms), len(backup.Problems), len(backup.Sessions), len(backup.Attempts))
w.Header().Set("Content-Type", "application/json")
@@ -527,11 +463,10 @@ func (s *SyncServer) handleDeltaSync(w http.ResponseWriter, r *http.Request) {
return
}
log.Printf("Delta sync from %s: lastSyncTime=%s, gyms=%d, problems=%d, sessions=%d, attempts=%d, deletedItems=%d",
log.Printf("Delta sync from %s: lastSyncTime=%s, gyms=%d, problems=%d, sessions=%d, attempts=%d",
r.RemoteAddr, deltaRequest.LastSyncTime,
len(deltaRequest.Gyms), len(deltaRequest.Problems),
len(deltaRequest.Sessions), len(deltaRequest.Attempts),
len(deltaRequest.DeletedItems))
len(deltaRequest.Sessions), len(deltaRequest.Attempts))
// Load current server data
serverBackup, err := s.loadData()
@@ -541,12 +476,9 @@ func (s *SyncServer) handleDeltaSync(w http.ResponseWriter, r *http.Request) {
return
}
// Merge and apply deletions first to prevent resurrection
serverBackup.DeletedItems = s.mergeDeletedItems(serverBackup.DeletedItems, deltaRequest.DeletedItems)
s.applyDeletions(serverBackup, serverBackup.DeletedItems)
log.Printf("Applied deletions: total=%d deletion records", len(serverBackup.DeletedItems))
// Merge client changes into server data
// Note: We no longer need separate deletion handling as IsDeleted is part of the struct
// and handled by standard merge logic (latest timestamp wins)
serverBackup.Gyms = s.mergeGyms(serverBackup.Gyms, deltaRequest.Gyms)
serverBackup.Problems = s.mergeProblems(serverBackup.Problems, deltaRequest.Problems)
serverBackup.Sessions = s.mergeSessions(serverBackup.Sessions, deltaRequest.Sessions)
@@ -566,28 +498,17 @@ func (s *SyncServer) handleDeltaSync(w http.ResponseWriter, r *http.Request) {
log.Printf("Warning: Could not parse lastSyncTime '%s', sending all data", deltaRequest.LastSyncTime)
}
// Build deleted item lookup map
deletedItemMap := make(map[string]bool)
for _, item := range serverBackup.DeletedItems {
key := item.Type + ":" + item.ID
deletedItemMap[key] = true
}
// Prepare response with items modified since client's last sync
response := DeltaSyncResponse{
ServerTime: time.Now().UTC().Format(time.RFC3339),
Gyms: []BackupGym{},
Problems: []BackupProblem{},
Sessions: []BackupClimbSession{},
Attempts: []BackupAttempt{},
DeletedItems: []DeletedItem{},
ServerTime: time.Now().UTC().Format(time.RFC3339),
Gyms: []BackupGym{},
Problems: []BackupProblem{},
Sessions: []BackupClimbSession{},
Attempts: []BackupAttempt{},
}
// Filter gyms modified after client's last sync
for _, gym := range serverBackup.Gyms {
if deletedItemMap["gym:"+gym.ID] {
continue
}
gymTime, err := time.Parse(time.RFC3339, gym.UpdatedAt)
if err == nil && gymTime.After(clientLastSync) {
response.Gyms = append(response.Gyms, gym)
@@ -596,9 +517,6 @@ func (s *SyncServer) handleDeltaSync(w http.ResponseWriter, r *http.Request) {
// Filter problems modified after client's last sync
for _, problem := range serverBackup.Problems {
if deletedItemMap["problem:"+problem.ID] {
continue
}
problemTime, err := time.Parse(time.RFC3339, problem.UpdatedAt)
if err == nil && problemTime.After(clientLastSync) {
response.Problems = append(response.Problems, problem)
@@ -607,39 +525,29 @@ func (s *SyncServer) handleDeltaSync(w http.ResponseWriter, r *http.Request) {
// Filter sessions modified after client's last sync
for _, session := range serverBackup.Sessions {
if deletedItemMap["session:"+session.ID] {
continue
}
sessionTime, err := time.Parse(time.RFC3339, session.UpdatedAt)
if err == nil && sessionTime.After(clientLastSync) {
response.Sessions = append(response.Sessions, session)
}
}
// Filter attempts created after client's last sync
// Filter attempts modified after client's last sync
for _, attempt := range serverBackup.Attempts {
if deletedItemMap["attempt:"+attempt.ID] {
continue
attemptTime := attempt.CreatedAt
if attempt.UpdatedAt != nil {
attemptTime = *attempt.UpdatedAt
}
attemptTime, err := time.Parse(time.RFC3339, attempt.CreatedAt)
if err == nil && attemptTime.After(clientLastSync) {
parsedTime, err := time.Parse(time.RFC3339, attemptTime)
if err == nil && parsedTime.After(clientLastSync) {
response.Attempts = append(response.Attempts, attempt)
}
}
// Filter deletions after client's last sync
for _, deletedItem := range serverBackup.DeletedItems {
deletedTime, err := time.Parse(time.RFC3339, deletedItem.DeletedAt)
if err == nil && deletedTime.After(clientLastSync) {
response.DeletedItems = append(response.DeletedItems, deletedItem)
}
}
log.Printf("Delta sync response to %s: gyms=%d, problems=%d, sessions=%d, attempts=%d, deletedItems=%d",
log.Printf("Delta sync response to %s: gyms=%d, problems=%d, sessions=%d, attempts=%d",
r.RemoteAddr,
len(response.Gyms), len(response.Problems),
len(response.Sessions), len(response.Attempts),
len(response.DeletedItems))
len(response.Sessions), len(response.Attempts))
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(response)