Compare commits

...

9 Commits

Author SHA1 Message Date
8c4a78ad50 2.2.0 - Final Builds
All checks were successful
Ascently - Sync Deploy / build-and-push (push) Successful in 2m32s
2025-10-18 23:02:31 -06:00
3b16475dc6 [Mobile] 2.2.0 - Calendar View 2025-10-18 16:26:22 -06:00
105d39689d [Mobile] 2.2.0 - Calendar View 2025-10-18 16:26:17 -06:00
d4023133b7 App version 2.1.1 - Branding updates (Logo change)
All checks were successful
Ascently - Docs Deploy / build-and-push (push) Successful in 3m59s
2025-10-17 09:46:19 -06:00
602b5f8938 Branding updates 2025-10-17 09:46:19 -06:00
8529f76c22 Fixed doc issue
All checks were successful
Ascently - Docs Deploy / build-and-push (push) Successful in 3m50s
2025-10-16 12:36:13 -06:00
879aae0721 Update docs with App Store link
All checks were successful
Ascently - Docs Deploy / build-and-push (push) Successful in 4m9s
2025-10-16 00:39:11 -06:00
6fc86558b2 Fixed docs
All checks were successful
Ascently - Docs Deploy / build-and-push (push) Successful in 3m59s
2025-10-15 18:25:48 -06:00
23de8a6fc6 [All Platforms] 2.1.0 - Sync Optimizations
All checks were successful
Ascently - Sync Deploy / build-and-push (push) Successful in 2m31s
Ascently - Docs Deploy / build-and-push (push) Successful in 3m30s
2025-10-15 18:17:19 -06:00
91 changed files with 3323 additions and 592 deletions

View File

@@ -16,8 +16,8 @@ android {
applicationId = "com.atridad.ascently" applicationId = "com.atridad.ascently"
minSdk = 31 minSdk = 31
targetSdk = 36 targetSdk = 36
versionCode = 41 versionCode = 45
versionName = "2.0.1" versionName = "2.2.0"
testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner"
} }

View File

@@ -32,13 +32,12 @@ data class BackupGym(
val supportedClimbTypes: List<ClimbType>, val supportedClimbTypes: List<ClimbType>,
val difficultySystems: List<DifficultySystem>, val difficultySystems: List<DifficultySystem>,
@kotlinx.serialization.SerialName("customDifficultyGrades") @kotlinx.serialization.SerialName("customDifficultyGrades")
val customDifficultyGrades: List<String> = emptyList(), val customDifficultyGrades: List<String>? = null,
val notes: String? = null, val notes: String? = null,
val createdAt: String, val createdAt: String,
val updatedAt: String val updatedAt: String
) { ) {
companion object { companion object {
/** Create BackupGym from native Android Gym model */
fun fromGym(gym: Gym): BackupGym { fun fromGym(gym: Gym): BackupGym {
return BackupGym( return BackupGym(
id = gym.id, id = gym.id,
@@ -46,7 +45,7 @@ data class BackupGym(
location = gym.location, location = gym.location,
supportedClimbTypes = gym.supportedClimbTypes, supportedClimbTypes = gym.supportedClimbTypes,
difficultySystems = gym.difficultySystems, difficultySystems = gym.difficultySystems,
customDifficultyGrades = gym.customDifficultyGrades, customDifficultyGrades = gym.customDifficultyGrades.ifEmpty { null },
notes = gym.notes, notes = gym.notes,
createdAt = gym.createdAt, createdAt = gym.createdAt,
updatedAt = gym.updatedAt updatedAt = gym.updatedAt
@@ -54,7 +53,6 @@ data class BackupGym(
} }
} }
/** Convert to native Android Gym model */
fun toGym(): Gym { fun toGym(): Gym {
return Gym( return Gym(
id = id, id = id,
@@ -62,7 +60,7 @@ data class BackupGym(
location = location, location = location,
supportedClimbTypes = supportedClimbTypes, supportedClimbTypes = supportedClimbTypes,
difficultySystems = difficultySystems, difficultySystems = difficultySystems,
customDifficultyGrades = customDifficultyGrades, customDifficultyGrades = customDifficultyGrades ?: emptyList(),
notes = notes, notes = notes,
createdAt = createdAt, createdAt = createdAt,
updatedAt = updatedAt updatedAt = updatedAt
@@ -79,7 +77,7 @@ data class BackupProblem(
val description: String? = null, val description: String? = null,
val climbType: ClimbType, val climbType: ClimbType,
val difficulty: DifficultyGrade, val difficulty: DifficultyGrade,
val tags: List<String> = emptyList(), val tags: List<String>? = null,
val location: String? = null, val location: String? = null,
val imagePaths: List<String>? = null, val imagePaths: List<String>? = null,
val isActive: Boolean = true, val isActive: Boolean = true,
@@ -89,7 +87,6 @@ data class BackupProblem(
val updatedAt: String val updatedAt: String
) { ) {
companion object { companion object {
/** Create BackupProblem from native Android Problem model */
fun fromProblem(problem: Problem): BackupProblem { fun fromProblem(problem: Problem): BackupProblem {
return BackupProblem( return BackupProblem(
id = problem.id, id = problem.id,
@@ -112,7 +109,6 @@ data class BackupProblem(
} }
} }
/** Convert to native Android Problem model */
fun toProblem(): Problem { fun toProblem(): Problem {
return Problem( return Problem(
id = id, id = id,
@@ -121,7 +117,7 @@ data class BackupProblem(
description = description, description = description,
climbType = climbType, climbType = climbType,
difficulty = difficulty, difficulty = difficulty,
tags = tags, tags = tags ?: emptyList(),
location = location, location = location,
imagePaths = imagePaths ?: emptyList(), imagePaths = imagePaths ?: emptyList(),
isActive = isActive, isActive = isActive,
@@ -132,7 +128,6 @@ data class BackupProblem(
) )
} }
/** Create a copy with updated image paths for import processing */
fun withUpdatedImagePaths(newImagePaths: List<String>): BackupProblem { fun withUpdatedImagePaths(newImagePaths: List<String>): BackupProblem {
return copy(imagePaths = newImagePaths.ifEmpty { null }) return copy(imagePaths = newImagePaths.ifEmpty { null })
} }
@@ -153,7 +148,6 @@ data class BackupClimbSession(
val updatedAt: String val updatedAt: String
) { ) {
companion object { companion object {
/** Create BackupClimbSession from native Android ClimbSession model */
fun fromClimbSession(session: ClimbSession): BackupClimbSession { fun fromClimbSession(session: ClimbSession): BackupClimbSession {
return BackupClimbSession( return BackupClimbSession(
id = session.id, id = session.id,
@@ -170,7 +164,6 @@ data class BackupClimbSession(
} }
} }
/** Convert to native Android ClimbSession model */
fun toClimbSession(): ClimbSession { fun toClimbSession(): ClimbSession {
return ClimbSession( return ClimbSession(
id = id, id = id,
@@ -203,7 +196,6 @@ data class BackupAttempt(
val updatedAt: String? = null val updatedAt: String? = null
) { ) {
companion object { companion object {
/** Create BackupAttempt from native Android Attempt model */
fun fromAttempt(attempt: Attempt): BackupAttempt { fun fromAttempt(attempt: Attempt): BackupAttempt {
return BackupAttempt( return BackupAttempt(
id = attempt.id, id = attempt.id,
@@ -221,7 +213,6 @@ data class BackupAttempt(
} }
} }
/** Convert to native Android Attempt model */
fun toAttempt(): Attempt { fun toAttempt(): Attempt {
return Attempt( return Attempt(
id = id, id = id,

View File

@@ -66,7 +66,6 @@ class HealthConnectManager(private val context: Context) {
} }
} }
/** Check if Health Connect is available on this device */
fun isHealthConnectAvailable(): Flow<Boolean> = flow { fun isHealthConnectAvailable(): Flow<Boolean> = flow {
try { try {
if (!_isCompatible.value) { if (!_isCompatible.value) {
@@ -82,10 +81,6 @@ class HealthConnectManager(private val context: Context) {
} }
} }
/**
* Enable or disable Health Connect integration and automatically request permissions if
* enabling
*/
suspend fun setEnabled(enabled: Boolean) { suspend fun setEnabled(enabled: Boolean) {
preferences.edit().putBoolean("enabled", enabled).apply() preferences.edit().putBoolean("enabled", enabled).apply()
_isEnabled.value = enabled _isEnabled.value = enabled
@@ -105,13 +100,11 @@ class HealthConnectManager(private val context: Context) {
} }
} }
/** Update the permissions granted state */
fun setPermissionsGranted(granted: Boolean) { fun setPermissionsGranted(granted: Boolean) {
preferences.edit().putBoolean("permissions", granted).apply() preferences.edit().putBoolean("permissions", granted).apply()
_hasPermissions.value = granted _hasPermissions.value = granted
} }
/** Check if all required permissions are granted */
suspend fun hasAllPermissions(): Boolean { suspend fun hasAllPermissions(): Boolean {
return try { return try {
if (!_isCompatible.value || healthConnectClient == null) { if (!_isCompatible.value || healthConnectClient == null) {
@@ -132,7 +125,6 @@ class HealthConnectManager(private val context: Context) {
} }
} }
/** Check if Health Connect is ready for use */
suspend fun isReady(): Boolean { suspend fun isReady(): Boolean {
return try { return try {
if (!_isEnabled.value || !_isCompatible.value || healthConnectClient == null) if (!_isEnabled.value || !_isCompatible.value || healthConnectClient == null)
@@ -148,12 +140,10 @@ class HealthConnectManager(private val context: Context) {
} }
} }
/** Get permission request contract */
fun getPermissionRequestContract(): ActivityResultContract<Set<String>, Set<String>> { fun getPermissionRequestContract(): ActivityResultContract<Set<String>, Set<String>> {
return PermissionController.createRequestPermissionResultContract() return PermissionController.createRequestPermissionResultContract()
} }
/** Get required permissions as strings */
fun getRequiredPermissions(): Set<String> { fun getRequiredPermissions(): Set<String> {
return try { return try {
REQUIRED_PERMISSIONS.map { it }.toSet() REQUIRED_PERMISSIONS.map { it }.toSet()
@@ -163,7 +153,6 @@ class HealthConnectManager(private val context: Context) {
} }
} }
/** Sync a completed climbing session to Health Connect (only when auto-sync is enabled) */
@SuppressLint("RestrictedApi") @SuppressLint("RestrictedApi")
suspend fun syncCompletedSession( suspend fun syncCompletedSession(
session: ClimbSession, session: ClimbSession,
@@ -271,7 +260,6 @@ class HealthConnectManager(private val context: Context) {
} }
} }
/** Auto-sync a completed session if enabled - this is the only way to sync sessions */
suspend fun autoSyncCompletedSession( suspend fun autoSyncCompletedSession(
session: ClimbSession, session: ClimbSession,
gymName: String, gymName: String,
@@ -293,7 +281,6 @@ class HealthConnectManager(private val context: Context) {
} }
} }
/** Estimate calories burned during climbing */
private fun estimateCaloriesForClimbing(durationMinutes: Long, attemptCount: Int): Double { private fun estimateCaloriesForClimbing(durationMinutes: Long, attemptCount: Int): Double {
val baseCaloriesPerMinute = 8.0 val baseCaloriesPerMinute = 8.0
val intensityMultiplier = val intensityMultiplier =
@@ -305,7 +292,6 @@ class HealthConnectManager(private val context: Context) {
return durationMinutes * baseCaloriesPerMinute * intensityMultiplier return durationMinutes * baseCaloriesPerMinute * intensityMultiplier
} }
/** Create heart rate data */
@SuppressLint("RestrictedApi") @SuppressLint("RestrictedApi")
private fun createHeartRateRecord( private fun createHeartRateRecord(
startTime: Instant, startTime: Instant,
@@ -347,9 +333,7 @@ class HealthConnectManager(private val context: Context) {
} }
} }
/** Check if ready for use */
fun isReadySync(): Boolean { fun isReadySync(): Boolean {
return _isEnabled.value && _hasPermissions.value return _isEnabled.value && _hasPermissions.value
} }
} }

View File

@@ -251,23 +251,15 @@ class ClimbRepository(database: AscentlyDatabase, private val context: Context)
} }
} }
/**
* Sets the callback for auto-sync functionality. This should be called by the SyncService to
* register itself for auto-sync triggers.
*/
fun setAutoSyncCallback(callback: (() -> Unit)?) { fun setAutoSyncCallback(callback: (() -> Unit)?) {
autoSyncCallback = callback autoSyncCallback = callback
} }
/**
* Triggers auto-sync if enabled. This is called after any data modification to keep data
* synchronized across devices automatically.
*/
private fun triggerAutoSync() { private fun triggerAutoSync() {
autoSyncCallback?.invoke() autoSyncCallback?.invoke()
} }
private fun trackDeletion(itemId: String, itemType: String) { fun trackDeletion(itemId: String, itemType: String) {
val currentDeletions = getDeletedItems().toMutableList() val currentDeletions = getDeletedItems().toMutableList()
val newDeletion = val newDeletion =
DeletedItem(id = itemId, type = itemType, deletedAt = DateFormatUtils.nowISO8601()) DeletedItem(id = itemId, type = itemType, deletedAt = DateFormatUtils.nowISO8601())

View File

@@ -0,0 +1,30 @@
package com.atridad.ascently.data.sync
import com.atridad.ascently.data.format.BackupAttempt
import com.atridad.ascently.data.format.BackupClimbSession
import com.atridad.ascently.data.format.BackupGym
import com.atridad.ascently.data.format.BackupProblem
import com.atridad.ascently.data.format.DeletedItem
import kotlinx.serialization.Serializable
/** Request structure for delta sync - sends only changes since last sync */
@Serializable
data class DeltaSyncRequest(
val lastSyncTime: String,
val gyms: List<BackupGym>,
val problems: List<BackupProblem>,
val sessions: List<BackupClimbSession>,
val attempts: List<BackupAttempt>,
val deletedItems: List<DeletedItem>
)
/** Response structure for delta sync - receives only changes from server */
@Serializable
data class DeltaSyncResponse(
val serverTime: String,
val gyms: List<BackupGym>,
val problems: List<BackupProblem>,
val sessions: List<BackupClimbSession>,
val attempts: List<BackupAttempt>,
val deletedItems: List<DeletedItem>
)

View File

@@ -19,6 +19,9 @@ import com.atridad.ascently.utils.ImageNamingUtils
import com.atridad.ascently.utils.ImageUtils import com.atridad.ascently.utils.ImageUtils
import java.io.IOException import java.io.IOException
import java.io.Serializable import java.io.Serializable
import java.text.SimpleDateFormat
import java.util.Date
import java.util.Locale
import java.util.concurrent.TimeUnit import java.util.concurrent.TimeUnit
import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Dispatchers
@@ -63,6 +66,7 @@ class SyncService(private val context: Context, private val repository: ClimbRep
prettyPrint = true prettyPrint = true
ignoreUnknownKeys = true ignoreUnknownKeys = true
explicitNulls = false explicitNulls = false
coerceInputValues = true
} }
// State // State
@@ -195,26 +199,33 @@ class SyncService(private val context: Context, private val repository: ClimbRep
serverBackup.sessions.isNotEmpty() || serverBackup.sessions.isNotEmpty() ||
serverBackup.attempts.isNotEmpty() serverBackup.attempts.isNotEmpty()
when { // If both client and server have been synced before, use delta sync
!hasLocalData && hasServerData -> { val lastSyncTimeStr = sharedPreferences.getString(Keys.LAST_SYNC_TIME, null)
Log.d(TAG, "No local data found, performing full restore from server") if (hasLocalData && hasServerData && lastSyncTimeStr != null) {
val imagePathMapping = syncImagesFromServer(serverBackup) Log.d(TAG, "Using delta sync for incremental updates")
importBackupToRepository(serverBackup, imagePathMapping) performDeltaSync(lastSyncTimeStr)
Log.d(TAG, "Full restore completed") } else {
} when {
hasLocalData && !hasServerData -> { !hasLocalData && hasServerData -> {
Log.d(TAG, "No server data found, uploading local data to server") Log.d(TAG, "No local data found, performing full restore from server")
uploadData(localBackup) val imagePathMapping = syncImagesFromServer(serverBackup)
syncImagesForBackup(localBackup) importBackupToRepository(serverBackup, imagePathMapping)
Log.d(TAG, "Initial upload completed") Log.d(TAG, "Full restore completed")
} }
hasLocalData && hasServerData -> { hasLocalData && !hasServerData -> {
Log.d(TAG, "Both local and server data exist, merging (server wins)") Log.d(TAG, "No server data found, uploading local data to server")
mergeDataSafely(serverBackup) uploadData(localBackup)
Log.d(TAG, "Merge completed") syncImagesForBackup(localBackup)
} Log.d(TAG, "Initial upload completed")
else -> { }
Log.d(TAG, "No data to sync") hasLocalData && hasServerData -> {
Log.d(TAG, "Both local and server data exist, merging (server wins)")
mergeDataSafely(serverBackup)
Log.d(TAG, "Merge completed")
}
else -> {
Log.d(TAG, "No data to sync")
}
} }
} }
@@ -230,6 +241,287 @@ class SyncService(private val context: Context, private val repository: ClimbRep
} }
} }
private suspend fun performDeltaSync(lastSyncTimeStr: String) {
Log.d(TAG, "Starting delta sync with lastSyncTime=$lastSyncTimeStr")
// Parse last sync time to filter modified items
val lastSyncDate = parseISO8601(lastSyncTimeStr) ?: Date(0)
// Collect items modified since last sync
val allGyms = repository.getAllGyms().first()
val modifiedGyms =
allGyms
.filter { gym -> parseISO8601(gym.updatedAt)?.after(lastSyncDate) == true }
.map { BackupGym.fromGym(it) }
val allProblems = repository.getAllProblems().first()
val modifiedProblems =
allProblems
.filter { problem ->
parseISO8601(problem.updatedAt)?.after(lastSyncDate) == true
}
.map { problem ->
val backupProblem = BackupProblem.fromProblem(problem)
val normalizedImagePaths =
problem.imagePaths.mapIndexed { index, _ ->
ImageNamingUtils.generateImageFilename(problem.id, index)
}
if (normalizedImagePaths.isNotEmpty()) {
backupProblem.copy(imagePaths = normalizedImagePaths)
} else {
backupProblem
}
}
val allSessions = repository.getAllSessions().first()
val modifiedSessions =
allSessions
.filter { session ->
parseISO8601(session.updatedAt)?.after(lastSyncDate) == true
}
.map { BackupClimbSession.fromClimbSession(it) }
val allAttempts = repository.getAllAttempts().first()
val modifiedAttempts =
allAttempts
.filter { attempt ->
parseISO8601(attempt.createdAt)?.after(lastSyncDate) == true
}
.map { BackupAttempt.fromAttempt(it) }
val allDeletions = repository.getDeletedItems()
val modifiedDeletions =
allDeletions.filter { item ->
parseISO8601(item.deletedAt)?.after(lastSyncDate) == true
}
Log.d(
TAG,
"Delta sync sending: gyms=${modifiedGyms.size}, problems=${modifiedProblems.size}, sessions=${modifiedSessions.size}, attempts=${modifiedAttempts.size}, deletions=${modifiedDeletions.size}"
)
// Create delta request
val deltaRequest =
DeltaSyncRequest(
lastSyncTime = lastSyncTimeStr,
gyms = modifiedGyms,
problems = modifiedProblems,
sessions = modifiedSessions,
attempts = modifiedAttempts,
deletedItems = modifiedDeletions
)
val requestBody =
json.encodeToString(DeltaSyncRequest.serializer(), deltaRequest)
.toRequestBody("application/json".toMediaType())
val request =
Request.Builder()
.url("$serverUrl/sync/delta")
.header("Authorization", "Bearer $authToken")
.post(requestBody)
.build()
val deltaResponse =
withContext(Dispatchers.IO) {
try {
httpClient.newCall(request).execute().use { response ->
if (response.isSuccessful) {
val body = response.body?.string()
if (!body.isNullOrEmpty()) {
json.decodeFromString(DeltaSyncResponse.serializer(), body)
} else {
throw SyncException.InvalidResponse("Empty response body")
}
} else {
handleHttpError(response.code)
}
}
} catch (e: IOException) {
throw SyncException.NetworkError(e.message ?: "Network error")
}
}
Log.d(
TAG,
"Delta sync received: gyms=${deltaResponse.gyms.size}, problems=${deltaResponse.problems.size}, sessions=${deltaResponse.sessions.size}, attempts=${deltaResponse.attempts.size}, deletions=${deltaResponse.deletedItems.size}"
)
// Apply server changes to local data
applyDeltaResponse(deltaResponse)
// Sync only modified problem images
syncModifiedImages(modifiedProblems)
}
private fun parseISO8601(dateString: String): Date? {
return try {
val format = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US)
format.parse(dateString)
} catch (e: Exception) {
null
}
}
private suspend fun applyDeltaResponse(response: DeltaSyncResponse) {
// Temporarily disable auto-sync to prevent recursive sync triggers
repository.setAutoSyncCallback(null)
try {
// Merge and apply deletions first to prevent resurrection
val allDeletions = repository.getDeletedItems() + response.deletedItems
val uniqueDeletions = allDeletions.distinctBy { "${it.type}:${it.id}" }
Log.d(TAG, "Applying ${uniqueDeletions.size} deletion records before merging data")
applyDeletions(uniqueDeletions)
// Build deleted item lookup set
val deletedItemSet = uniqueDeletions.map { "${it.type}:${it.id}" }.toSet()
// Download images for new/modified problems from server
val imagePathMapping = mutableMapOf<String, String>()
for (problem in response.problems) {
if (deletedItemSet.contains("problem:${problem.id}")) {
continue
}
problem.imagePaths?.forEach { imagePath ->
val serverFilename = imagePath.substringAfterLast('/')
try {
val localImagePath = downloadImage(serverFilename)
if (localImagePath != null) {
imagePathMapping[imagePath] = localImagePath
}
} catch (e: Exception) {
Log.w(TAG, "Failed to download image $imagePath: ${e.message}")
}
}
}
// Merge gyms
val existingGyms = repository.getAllGyms().first()
for (backupGym in response.gyms) {
if (deletedItemSet.contains("gym:${backupGym.id}")) {
continue
}
val existing = existingGyms.find { it.id == backupGym.id }
if (existing == null || backupGym.updatedAt >= existing.updatedAt) {
val gym = backupGym.toGym()
if (existing != null) {
repository.updateGym(gym)
} else {
repository.insertGym(gym)
}
}
}
// Merge problems
val existingProblems = repository.getAllProblems().first()
for (backupProblem in response.problems) {
if (deletedItemSet.contains("problem:${backupProblem.id}")) {
continue
}
val updatedImagePaths =
backupProblem.imagePaths?.map { oldPath ->
imagePathMapping[oldPath] ?: oldPath
}
val problemToMerge = backupProblem.copy(imagePaths = updatedImagePaths)
val problem = problemToMerge.toProblem()
val existing = existingProblems.find { it.id == backupProblem.id }
if (existing == null || backupProblem.updatedAt >= existing.updatedAt) {
if (existing != null) {
repository.updateProblem(problem)
} else {
repository.insertProblem(problem)
}
}
}
// Merge sessions
val existingSessions = repository.getAllSessions().first()
for (backupSession in response.sessions) {
if (deletedItemSet.contains("session:${backupSession.id}")) {
continue
}
val session = backupSession.toClimbSession()
val existing = existingSessions.find { it.id == backupSession.id }
if (existing == null || backupSession.updatedAt >= existing.updatedAt) {
if (existing != null) {
repository.updateSession(session)
} else {
repository.insertSession(session)
}
}
}
// Merge attempts
val existingAttempts = repository.getAllAttempts().first()
for (backupAttempt in response.attempts) {
if (deletedItemSet.contains("attempt:${backupAttempt.id}")) {
continue
}
val attempt = backupAttempt.toAttempt()
val existing = existingAttempts.find { it.id == backupAttempt.id }
if (existing == null || backupAttempt.createdAt >= existing.createdAt) {
if (existing != null) {
repository.updateAttempt(attempt)
} else {
repository.insertAttempt(attempt)
}
}
}
// Apply deletions again for safety
applyDeletions(uniqueDeletions)
// Update deletion records
repository.clearDeletedItems()
uniqueDeletions.forEach { repository.trackDeletion(it.id, it.type) }
} finally {
// Re-enable auto-sync
repository.setAutoSyncCallback { serviceScope.launch { triggerAutoSync() } }
}
}
private suspend fun applyDeletions(
deletions: List<com.atridad.ascently.data.format.DeletedItem>
) {
val existingGyms = repository.getAllGyms().first()
val existingProblems = repository.getAllProblems().first()
val existingSessions = repository.getAllSessions().first()
val existingAttempts = repository.getAllAttempts().first()
for (item in deletions) {
when (item.type) {
"gym" -> {
existingGyms.find { it.id == item.id }?.let { repository.deleteGym(it) }
}
"problem" -> {
existingProblems.find { it.id == item.id }?.let { repository.deleteProblem(it) }
}
"session" -> {
existingSessions.find { it.id == item.id }?.let { repository.deleteSession(it) }
}
"attempt" -> {
existingAttempts.find { it.id == item.id }?.let { repository.deleteAttempt(it) }
}
}
}
}
private suspend fun syncModifiedImages(modifiedProblems: List<BackupProblem>) {
if (modifiedProblems.isEmpty()) return
Log.d(TAG, "Syncing images for ${modifiedProblems.size} modified problems")
for (backupProblem in modifiedProblems) {
backupProblem.imagePaths?.forEach { imagePath ->
val filename = imagePath.substringAfterLast('/')
uploadImage(imagePath, filename)
}
}
}
private suspend fun downloadData(): ClimbDataBackup { private suspend fun downloadData(): ClimbDataBackup {
val request = val request =
Request.Builder() Request.Builder()
@@ -272,7 +564,7 @@ class SyncService(private val context: Context, private val repository: ClimbRep
Request.Builder() Request.Builder()
.url("$serverUrl/sync") .url("$serverUrl/sync")
.header("Authorization", "Bearer $authToken") .header("Authorization", "Bearer $authToken")
.post(requestBody) .put(requestBody)
.build() .build()
withContext(Dispatchers.IO) { withContext(Dispatchers.IO) {

View File

@@ -31,6 +31,7 @@ import androidx.compose.ui.window.Dialog
import com.atridad.ascently.data.model.* import com.atridad.ascently.data.model.*
import com.atridad.ascently.ui.components.FullscreenImageViewer import com.atridad.ascently.ui.components.FullscreenImageViewer
import com.atridad.ascently.ui.components.ImageDisplaySection import com.atridad.ascently.ui.components.ImageDisplaySection
import com.atridad.ascently.ui.components.ImagePicker
import com.atridad.ascently.ui.theme.CustomIcons import com.atridad.ascently.ui.theme.CustomIcons
import com.atridad.ascently.ui.viewmodel.ClimbViewModel import com.atridad.ascently.ui.viewmodel.ClimbViewModel
import com.atridad.ascently.utils.DateFormatUtils import com.atridad.ascently.utils.DateFormatUtils
@@ -1489,6 +1490,7 @@ fun EnhancedAddAttemptDialog(
// New problem creation state // New problem creation state
var newProblemName by remember { mutableStateOf("") } var newProblemName by remember { mutableStateOf("") }
var newProblemGrade by remember { mutableStateOf("") } var newProblemGrade by remember { mutableStateOf("") }
var newProblemImagePaths by remember { mutableStateOf<List<String>>(emptyList()) }
var selectedClimbType by remember { mutableStateOf(ClimbType.BOULDER) } var selectedClimbType by remember { mutableStateOf(ClimbType.BOULDER) }
var selectedDifficultySystem by remember { var selectedDifficultySystem by remember {
mutableStateOf(gym.difficultySystems.firstOrNull() ?: DifficultySystem.V_SCALE) mutableStateOf(gym.difficultySystems.firstOrNull() ?: DifficultySystem.V_SCALE)
@@ -1690,7 +1692,14 @@ fun EnhancedAddAttemptDialog(
color = MaterialTheme.colorScheme.onSurface color = MaterialTheme.colorScheme.onSurface
) )
IconButton(onClick = { showCreateProblem = false }) { IconButton(
onClick = {
showCreateProblem = false
newProblemName = ""
newProblemGrade = ""
newProblemImagePaths = emptyList()
}
) {
Icon( Icon(
Icons.AutoMirrored.Filled.ArrowBack, Icons.AutoMirrored.Filled.ArrowBack,
contentDescription = "Back", contentDescription = "Back",
@@ -1905,6 +1914,21 @@ fun EnhancedAddAttemptDialog(
} }
} }
} }
// Photos Section
Column(verticalArrangement = Arrangement.spacedBy(8.dp)) {
Text(
text = "Photos (Optional)",
style = MaterialTheme.typography.bodyLarge,
fontWeight = FontWeight.Medium,
color = MaterialTheme.colorScheme.onSurface
)
ImagePicker(
imageUris = newProblemImagePaths,
onImagesChanged = { newProblemImagePaths = it },
maxImages = 5
)
}
} }
} }
} }
@@ -2069,7 +2093,9 @@ fun EnhancedAddAttemptDialog(
null null
}, },
climbType = selectedClimbType, climbType = selectedClimbType,
difficulty = difficulty difficulty = difficulty,
imagePaths =
newProblemImagePaths
) )
onProblemCreated(newProblem) onProblemCreated(newProblem)
@@ -2087,6 +2113,12 @@ fun EnhancedAddAttemptDialog(
notes = notes.ifBlank { null } notes = notes.ifBlank { null }
) )
onAttemptAdded(attempt) onAttemptAdded(attempt)
// Reset form
newProblemName = ""
newProblemGrade = ""
newProblemImagePaths = emptyList()
showCreateProblem = false
} }
} else { } else {
// Create attempt for selected problem // Create attempt for selected problem

View File

@@ -1,16 +1,26 @@
package com.atridad.ascently.ui.screens package com.atridad.ascently.ui.screens
import android.content.Context
import androidx.compose.foundation.background
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.* import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.LazyColumn import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.foundation.lazy.grid.GridCells
import androidx.compose.foundation.lazy.grid.LazyVerticalGrid
import androidx.compose.foundation.lazy.items import androidx.compose.foundation.lazy.items
import androidx.compose.foundation.shape.CircleShape
import androidx.compose.foundation.shape.RoundedCornerShape import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.icons.Icons import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.automirrored.filled.List
import androidx.compose.material.icons.filled.CalendarMonth
import androidx.compose.material.icons.filled.CheckCircle import androidx.compose.material.icons.filled.CheckCircle
import androidx.compose.material.icons.filled.Warning import androidx.compose.material.icons.filled.Warning
import androidx.compose.material3.* import androidx.compose.material3.*
import androidx.compose.runtime.* import androidx.compose.runtime.*
import androidx.compose.ui.Alignment import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.platform.LocalContext import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.res.painterResource import androidx.compose.ui.res.painterResource
import androidx.compose.ui.text.font.FontWeight import androidx.compose.ui.text.font.FontWeight
@@ -23,6 +33,16 @@ import com.atridad.ascently.ui.components.ActiveSessionBanner
import com.atridad.ascently.ui.components.SyncIndicator import com.atridad.ascently.ui.components.SyncIndicator
import com.atridad.ascently.ui.viewmodel.ClimbViewModel import com.atridad.ascently.ui.viewmodel.ClimbViewModel
import com.atridad.ascently.utils.DateFormatUtils import com.atridad.ascently.utils.DateFormatUtils
import java.time.LocalDate
import java.time.YearMonth
import java.time.format.DateTimeFormatter
import java.time.format.TextStyle
import java.util.Locale
enum class ViewMode {
LIST,
CALENDAR
}
@OptIn(ExperimentalMaterial3Api::class) @OptIn(ExperimentalMaterial3Api::class)
@Composable @Composable
@@ -33,7 +53,15 @@ fun SessionsScreen(viewModel: ClimbViewModel, onNavigateToSessionDetail: (String
val activeSession by viewModel.activeSession.collectAsState() val activeSession by viewModel.activeSession.collectAsState()
val uiState by viewModel.uiState.collectAsState() val uiState by viewModel.uiState.collectAsState()
// Filter out active sessions from regular session list val sharedPreferences =
context.getSharedPreferences("SessionsPreferences", Context.MODE_PRIVATE)
val savedViewMode = sharedPreferences.getString("view_mode", "LIST")
var viewMode by remember {
mutableStateOf(if (savedViewMode == "CALENDAR") ViewMode.CALENDAR else ViewMode.LIST)
}
var selectedMonth by remember { mutableStateOf(YearMonth.now()) }
var selectedDate by remember { mutableStateOf<LocalDate?>(null) }
val completedSessions = sessions.filter { it.status == SessionStatus.COMPLETED } val completedSessions = sessions.filter { it.status == SessionStatus.COMPLETED }
val activeSessionGym = activeSession?.let { session -> gyms.find { it.id == session.gymId } } val activeSessionGym = activeSession?.let { session -> gyms.find { it.id == session.gymId } }
@@ -55,12 +83,30 @@ fun SessionsScreen(viewModel: ClimbViewModel, onNavigateToSessionDetail: (String
fontWeight = FontWeight.Bold, fontWeight = FontWeight.Bold,
modifier = Modifier.weight(1f) modifier = Modifier.weight(1f)
) )
IconButton(
onClick = {
viewMode =
if (viewMode == ViewMode.LIST) ViewMode.CALENDAR else ViewMode.LIST
selectedDate = null
sharedPreferences.edit().putString("view_mode", viewMode.name).apply()
}
) {
Icon(
imageVector =
if (viewMode == ViewMode.LIST) Icons.Default.CalendarMonth
else Icons.AutoMirrored.Filled.List,
contentDescription =
if (viewMode == ViewMode.LIST) "Calendar View" else "List View",
tint = MaterialTheme.colorScheme.primary
)
}
SyncIndicator(isSyncing = viewModel.syncService.isSyncing) SyncIndicator(isSyncing = viewModel.syncService.isSyncing)
} }
Spacer(modifier = Modifier.height(16.dp)) Spacer(modifier = Modifier.height(16.dp))
// Active session banner
ActiveSessionBanner( ActiveSessionBanner(
activeSession = activeSession, activeSession = activeSession,
gym = activeSessionGym, gym = activeSessionGym,
@@ -83,20 +129,40 @@ fun SessionsScreen(viewModel: ClimbViewModel, onNavigateToSessionDetail: (String
actionText = "" actionText = ""
) )
} else { } else {
LazyColumn { when (viewMode) {
items(completedSessions) { session -> ViewMode.LIST -> {
SessionCard( LazyColumn {
session = session, items(completedSessions) { session ->
gymName = gyms.find { it.id == session.gymId }?.name ?: "Unknown Gym", SessionCard(
onClick = { onNavigateToSessionDetail(session.id) } session = session,
gymName = gyms.find { it.id == session.gymId }?.name
?: "Unknown Gym",
onClick = { onNavigateToSessionDetail(session.id) }
)
Spacer(modifier = Modifier.height(8.dp))
}
}
}
ViewMode.CALENDAR -> {
CalendarView(
sessions = completedSessions,
gyms = gyms,
activeSession = activeSession,
activeSessionGym = activeSessionGym,
selectedMonth = selectedMonth,
onMonthChange = { selectedMonth = it },
selectedDate = selectedDate,
onDateSelected = { selectedDate = it },
onNavigateToSessionDetail = onNavigateToSessionDetail,
onEndSession = {
activeSession?.let { viewModel.endSession(context, it.id) }
}
) )
Spacer(modifier = Modifier.height(8.dp))
} }
} }
} }
} }
// Show UI state messages and errors
uiState.message?.let { message -> uiState.message?.let { message ->
LaunchedEffect(message) { LaunchedEffect(message) {
kotlinx.coroutines.delay(5000) kotlinx.coroutines.delay(5000)
@@ -245,6 +311,226 @@ fun EmptyStateMessage(
} }
} }
/**
 * Month calendar of completed climb sessions.
 *
 * Shows a month header with previous/next navigation and a "Today" shortcut,
 * a Sunday-first 7-column day grid where days with sessions are marked, and —
 * when a date is selected — the list of sessions on that date.
 *
 * @param sessions completed sessions to plot on the calendar.
 * @param gyms used to resolve each session's gym name for the session cards.
 * @param activeSession not referenced by this composable (kept for API stability).
 * @param activeSessionGym not referenced by this composable (kept for API stability).
 * @param selectedMonth month currently displayed.
 * @param onMonthChange invoked when the user navigates to another month.
 * @param selectedDate date whose sessions are listed below the grid, or null.
 * @param onDateSelected invoked with the tapped date, or null to clear the selection.
 * @param onNavigateToSessionDetail invoked with a session id when a card is tapped.
 * @param onEndSession not referenced by this composable (kept for API stability).
 */
@Composable
fun CalendarView(
    sessions: List<ClimbSession>,
    gyms: List<com.atridad.ascently.data.model.Gym>,
    activeSession: ClimbSession?,
    activeSessionGym: com.atridad.ascently.data.model.Gym?,
    selectedMonth: YearMonth,
    onMonthChange: (YearMonth) -> Unit,
    selectedDate: LocalDate?,
    onDateSelected: (LocalDate?) -> Unit,
    onNavigateToSessionDetail: (String) -> Unit,
    onEndSession: () -> Unit
) {
    // Bucket sessions by local calendar date. Session dates are stored either as
    // an ISO instant or as a plain ISO local date, so try the instant form first
    // and fall back to the date-only form.
    val sessionsByDate =
        remember(sessions) {
            sessions.groupBy {
                try {
                    java.time.Instant.parse(it.date)
                        .atZone(java.time.ZoneId.systemDefault())
                        .toLocalDate()
                } catch (e: Exception) {
                    LocalDate.parse(it.date, DateTimeFormatter.ISO_LOCAL_DATE)
                }
            }
        }

    Column(modifier = Modifier.fillMaxSize()) {
        // Header card: month navigation plus a "Today" shortcut.
        Card(
            modifier = Modifier.fillMaxWidth(),
            colors =
                CardDefaults.cardColors(
                    containerColor = MaterialTheme.colorScheme.surfaceVariant
                )
        ) {
            Column(
                modifier = Modifier.fillMaxWidth().padding(horizontal = 8.dp, vertical = 12.dp),
                horizontalAlignment = Alignment.CenterHorizontally
            ) {
                Row(
                    modifier = Modifier.fillMaxWidth(),
                    horizontalArrangement = Arrangement.SpaceBetween,
                    verticalAlignment = Alignment.CenterVertically
                ) {
                    // NOTE(review): both navigation labels render an empty string here;
                    // it looks like chevron glyphs were lost in transit — confirm the
                    // intended label text.
                    IconButton(onClick = { onMonthChange(selectedMonth.minusMonths(1)) }) {
                        Text("", style = MaterialTheme.typography.headlineMedium)
                    }
                    Text(
                        text =
                            "${selectedMonth.month.getDisplayName(TextStyle.FULL, Locale.getDefault())} ${selectedMonth.year}",
                        style = MaterialTheme.typography.titleMedium,
                        fontWeight = FontWeight.Bold
                    )
                    IconButton(onClick = { onMonthChange(selectedMonth.plusMonths(1)) }) {
                        Text("", style = MaterialTheme.typography.headlineMedium)
                    }
                }
                Spacer(modifier = Modifier.height(8.dp))
                // Jump back to the current month and select today's date.
                Button(
                    onClick = {
                        val today = LocalDate.now()
                        onMonthChange(YearMonth.from(today))
                        onDateSelected(today)
                    },
                    shape = RoundedCornerShape(50),
                    colors =
                        ButtonDefaults.buttonColors(
                            containerColor = MaterialTheme.colorScheme.primary
                        ),
                    contentPadding = PaddingValues(horizontal = 20.dp, vertical = 8.dp)
                ) {
                    Text(
                        text = "Today",
                        style = MaterialTheme.typography.labelLarge,
                        fontWeight = FontWeight.SemiBold
                    )
                }
            }
        }

        Spacer(modifier = Modifier.height(16.dp))

        // Weekday header, Sunday-first to match the grid below.
        Row(modifier = Modifier.fillMaxWidth()) {
            listOf("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat").forEach { day ->
                Text(
                    text = day,
                    modifier = Modifier.weight(1f),
                    textAlign = TextAlign.Center,
                    style = MaterialTheme.typography.labelSmall,
                    color = MaterialTheme.colorScheme.onSurfaceVariant,
                    fontWeight = FontWeight.Bold
                )
            }
        }
        Spacer(modifier = Modifier.height(8.dp))

        val firstDayOfMonth = selectedMonth.atDay(1)
        val daysInMonth = selectedMonth.lengthOfMonth()
        // Sunday-first column of the 1st (DayOfWeek.value is Mon=1..Sun=7, so Sunday maps to 0).
        val firstDayOfWeek = firstDayOfMonth.dayOfWeek.value % 7
        // Pad the grid up to whole weeks: integer ceiling of occupied cells over 7.
        val totalCells = ((firstDayOfWeek + daysInMonth + 6) / 7) * 7

        LazyVerticalGrid(columns = GridCells.Fixed(7), modifier = Modifier.fillMaxWidth()) {
            items(totalCells) { index ->
                val dayNumber = index - firstDayOfWeek + 1
                if (dayNumber in 1..daysInMonth) {
                    val date = selectedMonth.atDay(dayNumber)
                    val sessionsOnDate = sessionsByDate[date] ?: emptyList()
                    val isSelected = date == selectedDate
                    CalendarDay(
                        day = dayNumber,
                        hasSession = sessionsOnDate.isNotEmpty(),
                        isSelected = isSelected,
                        isToday = date == LocalDate.now(),
                        onClick = {
                            // Tapping an already-selected day clears the selection.
                            if (sessionsOnDate.isNotEmpty()) {
                                onDateSelected(if (isSelected) null else date)
                            }
                        }
                    )
                } else {
                    // Filler cell before the 1st / after the last day of the month.
                    Spacer(modifier = Modifier.aspectRatio(1f))
                }
            }
        }

        // Detail list for the tapped date.
        if (selectedDate != null) {
            val sessionsOnSelectedDate = sessionsByDate[selectedDate] ?: emptyList()
            Spacer(modifier = Modifier.height(16.dp))
            Text(
                text =
                    "Sessions on ${selectedDate.format(DateTimeFormatter.ofPattern("MMMM d, yyyy"))}",
                style = MaterialTheme.typography.titleSmall,
                fontWeight = FontWeight.Bold,
                modifier = Modifier.padding(vertical = 8.dp)
            )
            LazyColumn(modifier = Modifier.fillMaxWidth()) {
                items(sessionsOnSelectedDate) { session ->
                    SessionCard(
                        session = session,
                        gymName = gyms.find { it.id == session.gymId }?.name ?: "Unknown Gym",
                        onClick = { onNavigateToSessionDetail(session.id) }
                    )
                    Spacer(modifier = Modifier.height(8.dp))
                }
            }
        }
    }
}
/**
 * One day cell in the month grid: the day number plus a small dot marker when
 * the day has sessions. The background highlights the selected day (primary
 * container) or today (secondary container); the cell is clickable only when
 * it has at least one session.
 *
 * @param day day-of-month number shown in the cell.
 * @param hasSession true when at least one session exists on this day.
 * @param isSelected true when this day is the currently selected date.
 * @param isToday true when this day is today's date.
 * @param onClick invoked on tap; only enabled when [hasSession] is true.
 */
@Composable
fun CalendarDay(
    day: Int,
    hasSession: Boolean,
    isSelected: Boolean,
    isToday: Boolean,
    onClick: () -> Unit
) {
    Box(
        modifier =
            Modifier.aspectRatio(1f)
                .padding(2.dp)
                .clip(CircleShape)
                .background(
                    // Selection highlight wins over the "today" highlight.
                    when {
                        isSelected -> MaterialTheme.colorScheme.primaryContainer
                        isToday -> MaterialTheme.colorScheme.secondaryContainer
                        else -> Color.Transparent
                    }
                )
                .clickable(enabled = hasSession, onClick = onClick),
        contentAlignment = Alignment.Center
    ) {
        Column(
            horizontalAlignment = Alignment.CenterHorizontally,
            verticalArrangement = Arrangement.Center
        ) {
            Text(
                text = day.toString(),
                style = MaterialTheme.typography.bodyMedium,
                // Text color tracks the background chosen above; session-less
                // days are de-emphasized.
                color =
                    when {
                        isSelected -> MaterialTheme.colorScheme.onPrimaryContainer
                        isToday -> MaterialTheme.colorScheme.onSecondaryContainer
                        !hasSession -> MaterialTheme.colorScheme.onSurfaceVariant
                        else -> MaterialTheme.colorScheme.onSurface
                    },
                fontWeight = if (hasSession || isToday) FontWeight.Bold else FontWeight.Normal
            )
            // Small dot marking that sessions exist on this day.
            if (hasSession) {
                Box(
                    modifier =
                        Modifier.size(6.dp)
                            .clip(CircleShape)
                            .background(
                                if (isSelected) MaterialTheme.colorScheme.primary
                                else
                                    MaterialTheme.colorScheme.primary.copy(
                                        alpha = 0.7f
                                    )
                            )
                )
            }
        }
    }
}
private fun formatDate(dateString: String): String { private fun formatDate(dateString: String): String {
return DateFormatUtils.formatDateForDisplay(dateString) return DateFormatUtils.formatDateForDisplay(dateString)
} }

View File

@@ -5,10 +5,6 @@ import android.content.SharedPreferences
import android.util.Log import android.util.Log
import androidx.core.content.edit import androidx.core.content.edit
/**
* Handles migration of data from OpenClimb to Ascently This includes SharedPreferences, database
* names, and other local storage
*/
class MigrationManager(private val context: Context) { class MigrationManager(private val context: Context) {
companion object { companion object {

View File

@@ -4,27 +4,6 @@
android:height="108dp" android:height="108dp"
android:viewportWidth="108" android:viewportWidth="108"
android:viewportHeight="108"> android:viewportHeight="108">
<path android:fillColor="#FFC107" android:pathData="M24.000,78.545 L41.851,38.380 L59.702,78.545 Z" />
<group <path android:fillColor="#F44336" android:pathData="M39.372,78.545 L61.686,29.455 L84.000,78.545 Z" />
android:scaleX="0.7"
android:scaleY="0.7"
android:translateX="16.2"
android:translateY="20">
<!-- Left mountain (yellow/amber) -->
<path
android:fillColor="#FFC107"
android:strokeColor="#1C1C1C"
android:strokeWidth="3"
android:strokeLineJoin="round"
android:pathData="M15,70 L35,25 L55,70 Z" />
<!-- Right mountain (red) -->
<path
android:fillColor="#F44336"
android:strokeColor="#1C1C1C"
android:strokeWidth="3"
android:strokeLineJoin="round"
android:pathData="M40,70 L65,15 L90,70 Z" />
</group>
</vector> </vector>

View File

@@ -4,29 +4,6 @@
android:height="24dp" android:height="24dp"
android:viewportWidth="24" android:viewportWidth="24"
android:viewportHeight="24"> android:viewportHeight="24">
<path android:fillColor="#FFC107" android:pathData="M2.000,20.182 L7.950,6.793 L13.901,20.182 Z" />
<!-- Left mountain (yellow/amber) --> <path android:fillColor="#F44336" android:pathData="M7.124,20.182 L14.562,3.818 L22.000,20.182 Z" />
<path </vector>
android:fillColor="#FFC107"
android:pathData="M3,18 L8,9 L13,18 Z" />
<!-- Right mountain (red) -->
<path
android:fillColor="#F44336"
android:pathData="M11,18 L16,7 L21,18 Z" />
<!-- Black outlines -->
<path
android:fillColor="@android:color/transparent"
android:strokeColor="#1C1C1C"
android:strokeWidth="1"
android:strokeLineJoin="round"
android:pathData="M3,18 L8,9 L13,18" />
<path
android:fillColor="@android:color/transparent"
android:strokeColor="#1C1C1C"
android:strokeWidth="1"
android:strokeLineJoin="round"
android:pathData="M11,18 L16,7 L21,18" />
</vector>

View File

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#FFFFFF"
android:pathData="M0,0 L108,0 L108,108 L0,108 Z" />
<path
android:fillColor="#FFC107"
android:pathData="M24,74 L42,34 L60,74 Z" />
<path
android:fillColor="#F44336"
android:pathData="M41,74 L59,24 L84,74 Z" />
</vector>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.4 KiB

After

Width:  |  Height:  |  Size: 550 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.8 KiB

After

Width:  |  Height:  |  Size: 730 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 982 B

After

Width:  |  Height:  |  Size: 388 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.7 KiB

After

Width:  |  Height:  |  Size: 514 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.9 KiB

After

Width:  |  Height:  |  Size: 628 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.8 KiB

After

Width:  |  Height:  |  Size: 854 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.8 KiB

After

Width:  |  Height:  |  Size: 970 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.8 KiB

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.8 KiB

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.6 KiB

After

Width:  |  Height:  |  Size: 1.6 KiB

View File

@@ -4,7 +4,7 @@
<style name="Theme.Ascently.Splash" parent="Theme.Ascently"> <style name="Theme.Ascently.Splash" parent="Theme.Ascently">
<item name="android:windowSplashScreenBackground">@color/splash_background</item> <item name="android:windowSplashScreenBackground">@color/splash_background</item>
<item name="android:windowSplashScreenAnimatedIcon">@drawable/ic_mountains</item> <item name="android:windowSplashScreenAnimatedIcon">@drawable/ic_splash</item>
<item name="android:windowSplashScreenAnimationDuration">200</item> <item name="android:windowSplashScreenAnimationDuration">200</item>
</style> </style>
</resources> </resources>

View File

@@ -457,10 +457,6 @@ class SyncMergeLogicTest {
@Test @Test
fun `test active sessions excluded from sync`() { fun `test active sessions excluded from sync`() {
// Test scenario: Active sessions should not be included in sync data
// This tests the new behavior where active sessions are excluded from sync
// until they are completed
val allLocalSessions = val allLocalSessions =
listOf( listOf(
BackupClimbSession( BackupClimbSession(

3
branding/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
*.tmp
.DS_Store
*.log

394
branding/generate.py Executable file
View File

@@ -0,0 +1,394 @@
#!/usr/bin/env python3
import xml.etree.ElementTree as ET
from pathlib import Path
from typing import Callable, TypedDict
from PIL import Image, ImageDraw
class Polygon(TypedDict):
    """A filled polygon parsed from an SVG: vertex coordinates plus fill color."""

    coords: list[tuple[float, float]]  # (x, y) vertex positions
    fill: str  # fill color as written in the SVG, e.g. "#FFC107"


SCRIPT_DIR = Path(__file__).parent  # directory containing this script (branding/)
PROJECT_ROOT = SCRIPT_DIR.parent  # repository root
SOURCE_DIR = SCRIPT_DIR / "source"  # hand-authored source SVGs
LOGOS_DIR = SCRIPT_DIR / "logos"  # output directory for standalone logo PNGs
def parse_svg_polygons(svg_path: Path) -> list[Polygon]:
    """Read every <polygon> element from an SVG file.

    Polygons in the SVG namespace are preferred; when none are found, the
    search falls back to un-namespaced <polygon> elements. Each result holds
    the parsed (x, y) vertices and the element's fill color (black default).
    """
    root = ET.parse(svg_path).getroot()
    namespaced = root.findall(".//svg:polygon", {"svg": "http://www.w3.org/2000/svg"})
    elements = namespaced or root.findall(".//polygon")

    parsed: list[Polygon] = []
    for element in elements:
        vertices: list[tuple[float, float]] = []
        for token in element.get("points", "").strip().split():
            x_str, y_str = token.split(",")
            vertices.append((float(x_str), float(y_str)))
        parsed.append({"coords": vertices, "fill": element.get("fill", "#000000")})
    return parsed
def get_bbox(polygons: list[Polygon]) -> dict[str, float]:
    """Axis-aligned bounding box over the vertices of all polygons.

    Returns min/max per axis plus overall width and height. Raises ValueError
    when the polygons contain no vertices at all (min/max of empty sequence).
    """
    xs = [x for poly in polygons for x, _ in poly["coords"]]
    ys = [y for poly in polygons for _, y in poly["coords"]]
    min_x, max_x = min(xs), max(xs)
    min_y, max_y = min(ys), max(ys)
    return {
        "min_x": min_x,
        "max_x": max_x,
        "min_y": min_y,
        "max_y": max_y,
        "width": max_x - min_x,
        "height": max_y - min_y,
    }
def scale_and_center(
    polygons: list[Polygon], viewbox_size: float, target_width: float
) -> list[Polygon]:
    """Uniformly scale polygons so their combined width is target_width, then
    translate them so their bounding box is centered in a square viewbox of
    side viewbox_size. Input polygons are not mutated."""
    factor = target_width / get_bbox(polygons)["width"]
    scaled: list[Polygon] = [
        {
            "coords": [(x * factor, y * factor) for x, y in poly["coords"]],
            "fill": poly["fill"],
        }
        for poly in polygons
    ]

    # Shift so the scaled bounding box's center lands on the viewbox center.
    box = get_bbox(scaled)
    dx = viewbox_size / 2 - (box["min_x"] + box["max_x"]) / 2
    dy = viewbox_size / 2 - (box["min_y"] + box["max_y"]) / 2
    return [
        {
            "coords": [(x + dx, y + dy) for x, y in poly["coords"]],
            "fill": poly["fill"],
        }
        for poly in scaled
    ]
def format_svg_points(coords: list[tuple[float, float]]) -> str:
    """Render coordinate pairs as an SVG `points` attribute value (3 decimals)."""
    parts = [f"{x:.3f},{y:.3f}" for x, y in coords]
    return " ".join(parts)
def format_android_path(coords: list[tuple[float, float]]) -> str:
    """Convert polygon vertices to an Android VectorDrawable pathData string.

    Emits a closed path: "M<first> L<next> ... Z" with coordinates rendered to
    three decimals. The previous implementation assumed exactly three vertices
    (extra points were silently dropped, fewer raised IndexError) and built the
    pairs via a redundant join/split round-trip; this version handles any
    polygon with at least one vertex and produces byte-identical output for
    triangles.

    Raises:
        IndexError: if coords is empty (a path needs a starting point).
    """
    pairs = [f"{x:.3f},{y:.3f}" for x, y in coords]
    segments = "".join(f" L{pair}" for pair in pairs[1:])
    return f"M{pairs[0]}{segments} Z"
def generate_svg(polygons: list[Polygon], width: int, height: int) -> str:
    """Serialize polygons as a minimal standalone SVG document string."""
    header = (
        f'<svg width="{width}" height="{height}" viewBox="0 0 {width} {height}" '
        'xmlns="http://www.w3.org/2000/svg">'
    )
    body = [
        f' <polygon points="{format_svg_points(poly["coords"])}" fill="{poly["fill"]}"/>'
        for poly in polygons
    ]
    return "\n".join([header, *body, "</svg>"])
def generate_android_vector(
    polygons: list[Polygon], width: int, height: int, viewbox: int
) -> str:
    """Serialize polygons as an Android VectorDrawable XML document string.

    width/height are the intrinsic dp dimensions; viewbox is the (square)
    viewport size the path coordinates are expressed in.
    """
    doc = [
        '<?xml version="1.0" encoding="utf-8"?>',
        '<vector xmlns:android="http://schemas.android.com/apk/res/android"',
        f' android:width="{width}dp"',
        f' android:height="{height}dp"',
        f' android:viewportWidth="{viewbox}"',
        f' android:viewportHeight="{viewbox}">',
    ]
    doc.extend(
        f' <path android:fillColor="{poly["fill"]}" android:pathData="{format_android_path(poly["coords"])}" />'
        for poly in polygons
    )
    doc.append("</vector>")
    return "\n".join(doc)
def rasterize_svg(
    svg_path: Path,
    output_path: Path,
    size: int,
    bg_color: tuple[int, int, int, int] | None = None,
    circular: bool = False,
) -> None:
    """Rasterize the <polygon> elements of an SVG into a square image file.

    Only polygons are rendered — either top-level or inside <g> wrappers whose
    `transform` carries translate/scale operations; all other SVG content is
    ignored. Coordinates are scaled from the viewBox to the pixel grid.

    Args:
        svg_path: SVG file to read.
        output_path: where the image is written (format inferred from suffix).
        size: output width and height in pixels.
        bg_color: RGBA background; transparent white when None.
        circular: when True, apply a circular alpha mask to the result.
    """
    from xml.dom import minidom

    doc = minidom.parse(str(svg_path))
    img = Image.new(
        "RGBA", (size, size), (255, 255, 255, 0) if bg_color is None else bg_color
    )
    draw = ImageDraw.Draw(img)

    # Derive the pixel scale from the viewBox; assume 1:1 when it is absent.
    svg_elem = doc.getElementsByTagName("svg")[0]
    viewbox = svg_elem.getAttribute("viewBox").split()
    if viewbox:
        vb_width = float(viewbox[2])
        vb_height = float(viewbox[3])
        scale_x = size / vb_width
        scale_y = size / vb_height
    else:
        scale_x = scale_y = 1

    def parse_transform(
        transform_str: str,
    ) -> Callable[[float, float], tuple[float, float]]:
        """Compile an SVG transform list (translate/scale only) into a point mapper."""
        import re

        if not transform_str:
            return lambda x, y: (x, y)

        transforms: list[tuple[str, list[float]]] = []
        for match in re.finditer(r"(\w+)\(([^)]+)\)", transform_str):
            func, args_str = match.groups()
            args = [float(x) for x in args_str.replace(",", " ").split()]
            transforms.append((func, args))

        def apply_transforms(x: float, y: float) -> tuple[float, float]:
            for func, args in transforms:
                if func == "translate":
                    x += args[0]
                    # Per the SVG spec, translate(tx) with one argument means
                    # ty = 0 (the old code incorrectly reused tx for y).
                    y += args[1] if len(args) > 1 else 0.0
                elif func == "scale":
                    x *= args[0]
                    # scale(s) with one argument is uniform on both axes.
                    y *= args[1] if len(args) > 1 else args[0]
            return x, y

        return apply_transforms

    # Polygons inside <g> groups, with the group's transform applied first.
    for g in doc.getElementsByTagName("g"):
        transform = parse_transform(g.getAttribute("transform"))
        for poly in g.getElementsByTagName("polygon"):
            points_str = poly.getAttribute("points").strip()
            fill = poly.getAttribute("fill")
            if not fill:
                fill = "#000000"
            coords: list[tuple[float, float]] = []
            for pair in points_str.split():
                x, y = pair.split(",")
                x, y = float(x), float(y)
                x, y = transform(x, y)
                coords.append((x * scale_x, y * scale_y))
            draw.polygon(coords, fill=fill)

    # Top-level polygons; skip the ones already drawn through their group.
    for poly in doc.getElementsByTagName("polygon"):
        if poly.parentNode and getattr(poly.parentNode, "tagName", None) == "g":
            continue
        points_str = poly.getAttribute("points").strip()
        fill = poly.getAttribute("fill")
        if not fill:
            fill = "#000000"
        coords = []
        for pair in points_str.split():
            x, y = pair.split(",")
            coords.append((float(x) * scale_x, float(y) * scale_y))
        draw.polygon(coords, fill=fill)

    if circular:
        # Punch a circular alpha mask so the corners become fully transparent.
        mask = Image.new("L", (size, size), 0)
        mask_draw = ImageDraw.Draw(mask)
        mask_draw.ellipse((0, 0, size, size), fill=255)
        img.putalpha(mask)

    img.save(output_path)
def main() -> None:
    """Regenerate every branding asset (iOS, Android, docs, standalone logos)
    from the hand-authored SVGs in branding/source/."""
    print("Generating branding assets...")
    logo_svg = SOURCE_DIR / "logo.svg"
    icon_light = SOURCE_DIR / "icon-light.svg"
    icon_dark = SOURCE_DIR / "icon-dark.svg"
    icon_tinted = SOURCE_DIR / "icon-tinted.svg"
    polygons = parse_svg_polygons(logo_svg)

    print(" iOS...")
    ios_assets = PROJECT_ROOT / "ios/Ascently/Assets.xcassets/AppIcon.appiconset"
    # Copy the SVG icon templates verbatim into the asset catalog.
    for src, dst in [
        (icon_light, ios_assets / "app_icon_light_template.svg"),
        (icon_dark, ios_assets / "app_icon_dark_template.svg"),
        (icon_tinted, ios_assets / "app_icon_tinted_template.svg"),
    ]:
        with open(src) as f:
            content = f.read()
        with open(dst, "w") as f:
            f.write(content)
    # 1024px light icon: logo scaled to 70% width on a white background.
    img_light = Image.new("RGB", (1024, 1024), (255, 255, 255))
    draw_light = ImageDraw.Draw(img_light)
    scaled = scale_and_center(polygons, 1024, int(1024 * 0.7))
    for poly in scaled:
        coords = [(x, y) for x, y in poly["coords"]]
        draw_light.polygon(coords, fill=poly["fill"])
    img_light.save(ios_assets / "app_icon_1024.png")
    # Dark variant: same geometry on a near-black (26, 26, 26) background.
    img_dark = Image.new("RGB", (1024, 1024), (26, 26, 26))
    draw_dark = ImageDraw.Draw(img_dark)
    for poly in scaled:
        coords = [(x, y) for x, y in poly["coords"]]
        draw_dark.polygon(coords, fill=poly["fill"])
    img_dark.save(ios_assets / "app_icon_1024_dark.png")
    # Tinted variant.
    # NOTE(review): `i` is unused, and the polygons are drawn black on a black
    # background, producing a solid black tile — presumably a placeholder for
    # the iOS tinted-icon treatment; confirm this is intended.
    img_tinted = Image.new("RGB", (1024, 1024), (0, 0, 0))
    draw_tinted = ImageDraw.Draw(img_tinted)
    for i, poly in enumerate(scaled):
        coords = [(x, y) for x, y in poly["coords"]]
        draw_tinted.polygon(coords, fill=(0, 0, 0))
    img_tinted.save(ios_assets / "app_icon_1024_tinted.png")

    print(" Android...")
    # Adaptive launcher foreground: 108-unit viewport, logo 60 units wide.
    polys_108 = scale_and_center(polygons, 108, 60)
    android_xml = generate_android_vector(polys_108, 108, 108, 108)
    (
        PROJECT_ROOT / "android/app/src/main/res/drawable/ic_launcher_foreground.xml"
    ).write_text(android_xml)
    # Small in-app mountains glyph: 24-unit viewport, logo 20 units wide.
    polys_24 = scale_and_center(polygons, 24, 20)
    mountains_xml = generate_android_vector(polys_24, 24, 24, 24)
    (PROJECT_ROOT / "android/app/src/main/res/drawable/ic_mountains.xml").write_text(
        mountains_xml
    )
    # Legacy mipmap launcher icons, square and round, per density bucket.
    for density, size in [
        ("mdpi", 48),
        ("hdpi", 72),
        ("xhdpi", 96),
        ("xxhdpi", 144),
        ("xxxhdpi", 192),
    ]:
        mipmap_dir = PROJECT_ROOT / f"android/app/src/main/res/mipmap-{density}"
        img = Image.new("RGBA", (size, size), (255, 255, 255, 255))
        draw = ImageDraw.Draw(img)
        scaled = scale_and_center(polygons, size, int(size * 0.6))
        for poly in scaled:
            coords = [(x, y) for x, y in poly["coords"]]
            draw.polygon(coords, fill=poly["fill"])
        img.save(mipmap_dir / "ic_launcher.webp")
        # Round variant: same art with a circular alpha mask applied.
        img_round = Image.new("RGBA", (size, size), (255, 255, 255, 255))
        draw_round = ImageDraw.Draw(img_round)
        for poly in scaled:
            coords = [(x, y) for x, y in poly["coords"]]
            draw_round.polygon(coords, fill=poly["fill"])
        mask = Image.new("L", (size, size), 0)
        mask_draw = ImageDraw.Draw(mask)
        mask_draw.ellipse((0, 0, size, size), fill=255)
        img_round.putalpha(mask)
        img_round.save(mipmap_dir / "ic_launcher_round.webp")

    print(" Docs...")
    # 32px logo for the docs theme; the same file is written for light and dark.
    polys_32 = scale_and_center(polygons, 32, 26)
    logo_svg_32 = generate_svg(polys_32, 32, 32)
    (PROJECT_ROOT / "docs/src/assets/logo.svg").write_text(logo_svg_32)
    (PROJECT_ROOT / "docs/src/assets/logo-dark.svg").write_text(logo_svg_32)
    polys_256 = scale_and_center(polygons, 256, 208)
    logo_svg_256 = generate_svg(polys_256, 256, 256)
    (PROJECT_ROOT / "docs/src/assets/logo-highres.svg").write_text(logo_svg_256)
    # Favicons: a 32px PNG rasterized from the generated SVG, plus a
    # multi-resolution .ico built from 16/32/48px renders.
    logo_32_path = PROJECT_ROOT / "docs/src/assets/logo.svg"
    rasterize_svg(logo_32_path, PROJECT_ROOT / "docs/public/favicon.png", 32)
    sizes = [16, 32, 48]
    imgs = []
    for size in sizes:
        img = Image.new("RGBA", (size, size), (255, 255, 255, 0))
        draw = ImageDraw.Draw(img)
        scaled = scale_and_center(polygons, size, int(size * 0.8))
        for poly in scaled:
            coords = [(x, y) for x, y in poly["coords"]]
            draw.polygon(coords, fill=poly["fill"])
        imgs.append(img)
    imgs[0].save(
        PROJECT_ROOT / "docs/public/favicon.ico",
        format="ICO",
        sizes=[(s, s) for s in sizes],
        append_images=imgs[1:],
    )

    print(" Logos...")
    # Standalone logo PNGs at several sizes: transparent, white, and dark bg.
    LOGOS_DIR.mkdir(exist_ok=True)
    sizes = [64, 128, 256, 512, 1024, 2048]
    for size in sizes:
        img = Image.new("RGBA", (size, size), (255, 255, 255, 0))
        draw = ImageDraw.Draw(img)
        scaled = scale_and_center(polygons, size, int(size * 0.8))
        for poly in scaled:
            coords = [(x, y) for x, y in poly["coords"]]
            draw.polygon(coords, fill=poly["fill"])
        img.save(LOGOS_DIR / f"logo-{size}.png")
    for size in sizes:
        img = Image.new("RGBA", (size, size), (255, 255, 255, 255))
        draw = ImageDraw.Draw(img)
        scaled = scale_and_center(polygons, size, int(size * 0.8))
        for poly in scaled:
            coords = [(x, y) for x, y in poly["coords"]]
            draw.polygon(coords, fill=poly["fill"])
        img.save(LOGOS_DIR / f"logo-{size}-white.png")
    for size in sizes:
        img = Image.new("RGBA", (size, size), (26, 26, 26, 255))
        draw = ImageDraw.Draw(img)
        scaled = scale_and_center(polygons, size, int(size * 0.8))
        for poly in scaled:
            coords = [(x, y) for x, y in poly["coords"]]
            draw.polygon(coords, fill=poly["fill"])
        img.save(LOGOS_DIR / f"logo-{size}-dark.png")
    print("Done.")


if __name__ == "__main__":
    main()

12
branding/generate.sh Executable file
View File

@@ -0,0 +1,12 @@
#!/usr/bin/env bash
# Regenerates all branding assets by delegating to generate.py in this
# directory. Exits on the first failing command.
set -e

# Resolve the directory containing this script so it can be run from anywhere.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# The generator is a Python 3 script; fail fast with a clear message if absent.
if ! command -v python3 &> /dev/null; then
    echo "Error: Python 3 required"
    exit 1
fi

python3 "$SCRIPT_DIR/generate.py"

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 804 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 798 B

BIN
branding/logos/logo-128.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 795 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

BIN
branding/logos/logo-256.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.6 KiB

BIN
branding/logos/logo-512.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 411 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 413 B

BIN
branding/logos/logo-64.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 413 B

View File

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="1024" height="1024" viewBox="0 0 1024 1024" xmlns="http://www.w3.org/2000/svg">
<rect width="1024" height="1024" fill="#1A1A1A" rx="180" ry="180"/>
<g transform="translate(512, 512) scale(4.75) translate(-54, -42.5)">
<polygon points="8,75 35,14.25 62,75" fill="#FFC107"/>
<polygon points="31.25,75 65,0.75 98.75,75" fill="#F44336"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 411 B

View File

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="1024" height="1024" viewBox="0 0 1024 1024" xmlns="http://www.w3.org/2000/svg">
<rect width="1024" height="1024" fill="#FFFFFF" rx="180" ry="180"/>
<g transform="translate(512, 512) scale(4.75) translate(-54, -42.5)">
<polygon points="8,75 35,14.25 62,75" fill="#FFC107"/>
<polygon points="31.25,75 65,0.75 98.75,75" fill="#F44336"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 411 B

View File

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="1024" height="1024" viewBox="0 0 1024 1024" xmlns="http://www.w3.org/2000/svg">
<rect width="1024" height="1024" fill="transparent" rx="180" ry="180"/>
<g transform="translate(512, 512) scale(4.75) translate(-54, -42.5)">
<polygon points="8,75 35,14.25 62,75" fill="#000000" opacity="0.8"/>
<polygon points="31.25,75 65,0.75 98.75,75" fill="#000000" opacity="0.9"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 443 B

5
branding/source/logo.svg Normal file
View File

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="108" height="108" viewBox="0 0 108 108" xmlns="http://www.w3.org/2000/svg">
<polygon points="8,75 35,14.25 62,75" fill="#FFC107"/>
<polygon points="31.25,75 65,0.75 98.75,75" fill="#F44336"/>
</svg>

After

Width:  |  Height:  |  Size: 254 B

View File

@@ -40,12 +40,9 @@ export default defineConfig({
items: [ items: [
{ label: "Overview", slug: "sync/overview" }, { label: "Overview", slug: "sync/overview" },
{ label: "Quick Start", slug: "sync/quick-start" }, { label: "Quick Start", slug: "sync/quick-start" },
{ label: "API Reference", slug: "sync/api-reference" },
], ],
}, },
{
label: "Reference",
autogenerate: { directory: "reference" },
},
{ {
label: "Privacy", label: "Privacy",
link: "/privacy/", link: "/privacy/",

View File

@@ -26,7 +26,7 @@
}, },
"dependencies": { "dependencies": {
"@astrojs/node": "^9.5.0", "@astrojs/node": "^9.5.0",
"@astrojs/starlight": "^0.36.0", "@astrojs/starlight": "^0.36.1",
"astro": "^5.14.5", "astro": "^5.14.5",
"sharp": "^0.34.4" "sharp": "^0.34.4"
} }

258
docs/pnpm-lock.yaml generated
View File

@@ -12,8 +12,8 @@ importers:
specifier: ^9.5.0 specifier: ^9.5.0
version: 9.5.0(astro@5.14.5(@types/node@24.7.2)(rollup@4.52.4)(typescript@5.9.3)) version: 9.5.0(astro@5.14.5(@types/node@24.7.2)(rollup@4.52.4)(typescript@5.9.3))
'@astrojs/starlight': '@astrojs/starlight':
specifier: ^0.36.0 specifier: ^0.36.1
version: 0.36.0(astro@5.14.5(@types/node@24.7.2)(rollup@4.52.4)(typescript@5.9.3)) version: 0.36.1(astro@5.14.5(@types/node@24.7.2)(rollup@4.52.4)(typescript@5.9.3))
astro: astro:
specifier: ^5.14.5 specifier: ^5.14.5
version: 5.14.5(@types/node@24.7.2)(rollup@4.52.4)(typescript@5.9.3) version: 5.14.5(@types/node@24.7.2)(rollup@4.52.4)(typescript@5.9.3)
@@ -50,8 +50,8 @@ packages:
'@astrojs/sitemap@3.6.0': '@astrojs/sitemap@3.6.0':
resolution: {integrity: sha512-4aHkvcOZBWJigRmMIAJwRQXBS+ayoP5z40OklTXYXhUDhwusz+DyDl+nSshY6y9DvkVEavwNcFO8FD81iGhXjg==} resolution: {integrity: sha512-4aHkvcOZBWJigRmMIAJwRQXBS+ayoP5z40OklTXYXhUDhwusz+DyDl+nSshY6y9DvkVEavwNcFO8FD81iGhXjg==}
'@astrojs/starlight@0.36.0': '@astrojs/starlight@0.36.1':
resolution: {integrity: sha512-aVJVBfvFuE2avsMDhmRzn6I5GjDhUwIQFlu3qH9a1C0fNsPYDw2asxHQODAD7EfGiKGvvHCJgHb+9jbJ8lCfNQ==} resolution: {integrity: sha512-Fmt8mIsAIZN18Y4YQDI6p521GsYGe4hYxh9jWmz0pHBXnS5J7Na3TSXNya4eyIymCcKkuiKFbs7b/knsdGVYPg==}
peerDependencies: peerDependencies:
astro: ^5.5.0 astro: ^5.5.0
@@ -91,158 +91,158 @@ packages:
'@emnapi/runtime@1.5.0': '@emnapi/runtime@1.5.0':
resolution: {integrity: sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==} resolution: {integrity: sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==}
'@esbuild/aix-ppc64@0.25.10': '@esbuild/aix-ppc64@0.25.11':
resolution: {integrity: sha512-0NFWnA+7l41irNuaSVlLfgNT12caWJVLzp5eAVhZ0z1qpxbockccEt3s+149rE64VUI3Ml2zt8Nv5JVc4QXTsw==} resolution: {integrity: sha512-Xt1dOL13m8u0WE8iplx9Ibbm+hFAO0GsU2P34UNoDGvZYkY8ifSiy6Zuc1lYxfG7svWE2fzqCUmFp5HCn51gJg==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [ppc64] cpu: [ppc64]
os: [aix] os: [aix]
'@esbuild/android-arm64@0.25.10': '@esbuild/android-arm64@0.25.11':
resolution: {integrity: sha512-LSQa7eDahypv/VO6WKohZGPSJDq5OVOo3UoFR1E4t4Gj1W7zEQMUhI+lo81H+DtB+kP+tDgBp+M4oNCwp6kffg==} resolution: {integrity: sha512-9slpyFBc4FPPz48+f6jyiXOx/Y4v34TUeDDXJpZqAWQn/08lKGeD8aDp9TMn9jDz2CiEuHwfhRmGBvpnd/PWIQ==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [android] os: [android]
'@esbuild/android-arm@0.25.10': '@esbuild/android-arm@0.25.11':
resolution: {integrity: sha512-dQAxF1dW1C3zpeCDc5KqIYuZ1tgAdRXNoZP7vkBIRtKZPYe2xVr/d3SkirklCHudW1B45tGiUlz2pUWDfbDD4w==} resolution: {integrity: sha512-uoa7dU+Dt3HYsethkJ1k6Z9YdcHjTrSb5NUy66ZfZaSV8hEYGD5ZHbEMXnqLFlbBflLsl89Zke7CAdDJ4JI+Gg==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [arm] cpu: [arm]
os: [android] os: [android]
'@esbuild/android-x64@0.25.10': '@esbuild/android-x64@0.25.11':
resolution: {integrity: sha512-MiC9CWdPrfhibcXwr39p9ha1x0lZJ9KaVfvzA0Wxwz9ETX4v5CHfF09bx935nHlhi+MxhA63dKRRQLiVgSUtEg==} resolution: {integrity: sha512-Sgiab4xBjPU1QoPEIqS3Xx+R2lezu0LKIEcYe6pftr56PqPygbB7+szVnzoShbx64MUupqoE0KyRlN7gezbl8g==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [android] os: [android]
'@esbuild/darwin-arm64@0.25.10': '@esbuild/darwin-arm64@0.25.11':
resolution: {integrity: sha512-JC74bdXcQEpW9KkV326WpZZjLguSZ3DfS8wrrvPMHgQOIEIG/sPXEN/V8IssoJhbefLRcRqw6RQH2NnpdprtMA==} resolution: {integrity: sha512-VekY0PBCukppoQrycFxUqkCojnTQhdec0vevUL/EDOCnXd9LKWqD/bHwMPzigIJXPhC59Vd1WFIL57SKs2mg4w==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [darwin] os: [darwin]
'@esbuild/darwin-x64@0.25.10': '@esbuild/darwin-x64@0.25.11':
resolution: {integrity: sha512-tguWg1olF6DGqzws97pKZ8G2L7Ig1vjDmGTwcTuYHbuU6TTjJe5FXbgs5C1BBzHbJ2bo1m3WkQDbWO2PvamRcg==} resolution: {integrity: sha512-+hfp3yfBalNEpTGp9loYgbknjR695HkqtY3d3/JjSRUyPg/xd6q+mQqIb5qdywnDxRZykIHs3axEqU6l1+oWEQ==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [darwin] os: [darwin]
'@esbuild/freebsd-arm64@0.25.10': '@esbuild/freebsd-arm64@0.25.11':
resolution: {integrity: sha512-3ZioSQSg1HT2N05YxeJWYR+Libe3bREVSdWhEEgExWaDtyFbbXWb49QgPvFH8u03vUPX10JhJPcz7s9t9+boWg==} resolution: {integrity: sha512-CmKjrnayyTJF2eVuO//uSjl/K3KsMIeYeyN7FyDBjsR3lnSJHaXlVoAK8DZa7lXWChbuOk7NjAc7ygAwrnPBhA==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [freebsd] os: [freebsd]
'@esbuild/freebsd-x64@0.25.10': '@esbuild/freebsd-x64@0.25.11':
resolution: {integrity: sha512-LLgJfHJk014Aa4anGDbh8bmI5Lk+QidDmGzuC2D+vP7mv/GeSN+H39zOf7pN5N8p059FcOfs2bVlrRr4SK9WxA==} resolution: {integrity: sha512-Dyq+5oscTJvMaYPvW3x3FLpi2+gSZTCE/1ffdwuM6G1ARang/mb3jvjxs0mw6n3Lsw84ocfo9CrNMqc5lTfGOw==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [freebsd] os: [freebsd]
'@esbuild/linux-arm64@0.25.10': '@esbuild/linux-arm64@0.25.11':
resolution: {integrity: sha512-5luJWN6YKBsawd5f9i4+c+geYiVEw20FVW5x0v1kEMWNq8UctFjDiMATBxLvmmHA4bf7F6hTRaJgtghFr9iziQ==} resolution: {integrity: sha512-Qr8AzcplUhGvdyUF08A1kHU3Vr2O88xxP0Tm8GcdVOUm25XYcMPp2YqSVHbLuXzYQMf9Bh/iKx7YPqECs6ffLA==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [linux] os: [linux]
'@esbuild/linux-arm@0.25.10': '@esbuild/linux-arm@0.25.11':
resolution: {integrity: sha512-oR31GtBTFYCqEBALI9r6WxoU/ZofZl962pouZRTEYECvNF/dtXKku8YXcJkhgK/beU+zedXfIzHijSRapJY3vg==} resolution: {integrity: sha512-TBMv6B4kCfrGJ8cUPo7vd6NECZH/8hPpBHHlYI3qzoYFvWu2AdTvZNuU/7hsbKWqu/COU7NIK12dHAAqBLLXgw==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [arm] cpu: [arm]
os: [linux] os: [linux]
'@esbuild/linux-ia32@0.25.10': '@esbuild/linux-ia32@0.25.11':
resolution: {integrity: sha512-NrSCx2Kim3EnnWgS4Txn0QGt0Xipoumb6z6sUtl5bOEZIVKhzfyp/Lyw4C1DIYvzeW/5mWYPBFJU3a/8Yr75DQ==} resolution: {integrity: sha512-TmnJg8BMGPehs5JKrCLqyWTVAvielc615jbkOirATQvWWB1NMXY77oLMzsUjRLa0+ngecEmDGqt5jiDC6bfvOw==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [ia32] cpu: [ia32]
os: [linux] os: [linux]
'@esbuild/linux-loong64@0.25.10': '@esbuild/linux-loong64@0.25.11':
resolution: {integrity: sha512-xoSphrd4AZda8+rUDDfD9J6FUMjrkTz8itpTITM4/xgerAZZcFW7Dv+sun7333IfKxGG8gAq+3NbfEMJfiY+Eg==} resolution: {integrity: sha512-DIGXL2+gvDaXlaq8xruNXUJdT5tF+SBbJQKbWy/0J7OhU8gOHOzKmGIlfTTl6nHaCOoipxQbuJi7O++ldrxgMw==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [loong64] cpu: [loong64]
os: [linux] os: [linux]
'@esbuild/linux-mips64el@0.25.10': '@esbuild/linux-mips64el@0.25.11':
resolution: {integrity: sha512-ab6eiuCwoMmYDyTnyptoKkVS3k8fy/1Uvq7Dj5czXI6DF2GqD2ToInBI0SHOp5/X1BdZ26RKc5+qjQNGRBelRA==} resolution: {integrity: sha512-Osx1nALUJu4pU43o9OyjSCXokFkFbyzjXb6VhGIJZQ5JZi8ylCQ9/LFagolPsHtgw6himDSyb5ETSfmp4rpiKQ==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [mips64el] cpu: [mips64el]
os: [linux] os: [linux]
'@esbuild/linux-ppc64@0.25.10': '@esbuild/linux-ppc64@0.25.11':
resolution: {integrity: sha512-NLinzzOgZQsGpsTkEbdJTCanwA5/wozN9dSgEl12haXJBzMTpssebuXR42bthOF3z7zXFWH1AmvWunUCkBE4EA==} resolution: {integrity: sha512-nbLFgsQQEsBa8XSgSTSlrnBSrpoWh7ioFDUmwo158gIm5NNP+17IYmNWzaIzWmgCxq56vfr34xGkOcZ7jX6CPw==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [ppc64] cpu: [ppc64]
os: [linux] os: [linux]
'@esbuild/linux-riscv64@0.25.10': '@esbuild/linux-riscv64@0.25.11':
resolution: {integrity: sha512-FE557XdZDrtX8NMIeA8LBJX3dC2M8VGXwfrQWU7LB5SLOajfJIxmSdyL/gU1m64Zs9CBKvm4UAuBp5aJ8OgnrA==} resolution: {integrity: sha512-HfyAmqZi9uBAbgKYP1yGuI7tSREXwIb438q0nqvlpxAOs3XnZ8RsisRfmVsgV486NdjD7Mw2UrFSw51lzUk1ww==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [riscv64] cpu: [riscv64]
os: [linux] os: [linux]
'@esbuild/linux-s390x@0.25.10': '@esbuild/linux-s390x@0.25.11':
resolution: {integrity: sha512-3BBSbgzuB9ajLoVZk0mGu+EHlBwkusRmeNYdqmznmMc9zGASFjSsxgkNsqmXugpPk00gJ0JNKh/97nxmjctdew==} resolution: {integrity: sha512-HjLqVgSSYnVXRisyfmzsH6mXqyvj0SA7pG5g+9W7ESgwA70AXYNpfKBqh1KbTxmQVaYxpzA/SvlB9oclGPbApw==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [s390x] cpu: [s390x]
os: [linux] os: [linux]
'@esbuild/linux-x64@0.25.10': '@esbuild/linux-x64@0.25.11':
resolution: {integrity: sha512-QSX81KhFoZGwenVyPoberggdW1nrQZSvfVDAIUXr3WqLRZGZqWk/P4T8p2SP+de2Sr5HPcvjhcJzEiulKgnxtA==} resolution: {integrity: sha512-HSFAT4+WYjIhrHxKBwGmOOSpphjYkcswF449j6EjsjbinTZbp8PJtjsVK1XFJStdzXdy/jaddAep2FGY+wyFAQ==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [linux] os: [linux]
'@esbuild/netbsd-arm64@0.25.10': '@esbuild/netbsd-arm64@0.25.11':
resolution: {integrity: sha512-AKQM3gfYfSW8XRk8DdMCzaLUFB15dTrZfnX8WXQoOUpUBQ+NaAFCP1kPS/ykbbGYz7rxn0WS48/81l9hFl3u4A==} resolution: {integrity: sha512-hr9Oxj1Fa4r04dNpWr3P8QKVVsjQhqrMSUzZzf+LZcYjZNqhA3IAfPQdEh1FLVUJSiu6sgAwp3OmwBfbFgG2Xg==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [netbsd] os: [netbsd]
'@esbuild/netbsd-x64@0.25.10': '@esbuild/netbsd-x64@0.25.11':
resolution: {integrity: sha512-7RTytDPGU6fek/hWuN9qQpeGPBZFfB4zZgcz2VK2Z5VpdUxEI8JKYsg3JfO0n/Z1E/6l05n0unDCNc4HnhQGig==} resolution: {integrity: sha512-u7tKA+qbzBydyj0vgpu+5h5AeudxOAGncb8N6C9Kh1N4n7wU1Xw1JDApsRjpShRpXRQlJLb9wY28ELpwdPcZ7A==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [netbsd] os: [netbsd]
'@esbuild/openbsd-arm64@0.25.10': '@esbuild/openbsd-arm64@0.25.11':
resolution: {integrity: sha512-5Se0VM9Wtq797YFn+dLimf2Zx6McttsH2olUBsDml+lm0GOCRVebRWUvDtkY4BWYv/3NgzS8b/UM3jQNh5hYyw==} resolution: {integrity: sha512-Qq6YHhayieor3DxFOoYM1q0q1uMFYb7cSpLD2qzDSvK1NAvqFi8Xgivv0cFC6J+hWVw2teCYltyy9/m/14ryHg==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [openbsd] os: [openbsd]
'@esbuild/openbsd-x64@0.25.10': '@esbuild/openbsd-x64@0.25.11':
resolution: {integrity: sha512-XkA4frq1TLj4bEMB+2HnI0+4RnjbuGZfet2gs/LNs5Hc7D89ZQBHQ0gL2ND6Lzu1+QVkjp3x1gIcPKzRNP8bXw==} resolution: {integrity: sha512-CN+7c++kkbrckTOz5hrehxWN7uIhFFlmS/hqziSFVWpAzpWrQoAG4chH+nN3Be+Kzv/uuo7zhX716x3Sn2Jduw==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [openbsd] os: [openbsd]
'@esbuild/openharmony-arm64@0.25.10': '@esbuild/openharmony-arm64@0.25.11':
resolution: {integrity: sha512-AVTSBhTX8Y/Fz6OmIVBip9tJzZEUcY8WLh7I59+upa5/GPhh2/aM6bvOMQySspnCCHvFi79kMtdJS1w0DXAeag==} resolution: {integrity: sha512-rOREuNIQgaiR+9QuNkbkxubbp8MSO9rONmwP5nKncnWJ9v5jQ4JxFnLu4zDSRPf3x4u+2VN4pM4RdyIzDty/wQ==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [openharmony] os: [openharmony]
'@esbuild/sunos-x64@0.25.10': '@esbuild/sunos-x64@0.25.11':
resolution: {integrity: sha512-fswk3XT0Uf2pGJmOpDB7yknqhVkJQkAQOcW/ccVOtfx05LkbWOaRAtn5SaqXypeKQra1QaEa841PgrSL9ubSPQ==} resolution: {integrity: sha512-nq2xdYaWxyg9DcIyXkZhcYulC6pQ2FuCgem3LI92IwMgIZ69KHeY8T4Y88pcwoLIjbed8n36CyKoYRDygNSGhA==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [sunos] os: [sunos]
'@esbuild/win32-arm64@0.25.10': '@esbuild/win32-arm64@0.25.11':
resolution: {integrity: sha512-ah+9b59KDTSfpaCg6VdJoOQvKjI33nTaQr4UluQwW7aEwZQsbMCfTmfEO4VyewOxx4RaDT/xCy9ra2GPWmO7Kw==} resolution: {integrity: sha512-3XxECOWJq1qMZ3MN8srCJ/QfoLpL+VaxD/WfNRm1O3B4+AZ/BnLVgFbUV3eiRYDMXetciH16dwPbbHqwe1uU0Q==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [arm64] cpu: [arm64]
os: [win32] os: [win32]
'@esbuild/win32-ia32@0.25.10': '@esbuild/win32-ia32@0.25.11':
resolution: {integrity: sha512-QHPDbKkrGO8/cz9LKVnJU22HOi4pxZnZhhA2HYHez5Pz4JeffhDjf85E57Oyco163GnzNCVkZK0b/n4Y0UHcSw==} resolution: {integrity: sha512-3ukss6gb9XZ8TlRyJlgLn17ecsK4NSQTmdIXRASVsiS2sQ6zPPZklNJT5GR5tE/MUarymmy8kCEf5xPCNCqVOA==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [ia32] cpu: [ia32]
os: [win32] os: [win32]
'@esbuild/win32-x64@0.25.10': '@esbuild/win32-x64@0.25.11':
resolution: {integrity: sha512-9KpxSVFCu0iK1owoez6aC/s/EdUQLDN3adTxGCqxMVhrPDj6bt5dbrHDXUuq+Bs2vATFBBrQS5vdQ/Ed2P+nbw==} resolution: {integrity: sha512-D7Hpz6A2L4hzsRpPaCYkQnGOotdUpDzSGRIv9I+1ITdHROSFUWW95ZPZWQmGka1Fg7W3zFJowyn9WGwMJ0+KPA==}
engines: {node: '>=18'} engines: {node: '>=18'}
cpu: [x64] cpu: [x64]
os: [win32] os: [win32]
@@ -811,8 +811,8 @@ packages:
resolution: {integrity: sha512-KxektNH63SrbfUyDiwXqRb1rLwKt33AmMv+5Nhsw1kqZ13SJBRTgZHtGbE+hH3a1mVW1cz+4pqSWVPAtLVXTzQ==} resolution: {integrity: sha512-KxektNH63SrbfUyDiwXqRb1rLwKt33AmMv+5Nhsw1kqZ13SJBRTgZHtGbE+hH3a1mVW1cz+4pqSWVPAtLVXTzQ==}
engines: {node: '>=18'} engines: {node: '>=18'}
devalue@5.3.2: devalue@5.4.1:
resolution: {integrity: sha512-UDsjUbpQn9kvm68slnrs+mfxwFkIflOhkanmyabZ8zOYk8SMEIbJ3TK+88g70hSIeytu4y18f0z/hYHMTrXIWw==} resolution: {integrity: sha512-YtoaOfsqjbZQKGIMRYDWKjUmSB4VJ/RElB+bXZawQAQYAo4xu08GKTMVlsZDTF6R2MbAgjcAQRPI5eIyRAT2OQ==}
devlop@1.1.0: devlop@1.1.0:
resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==} resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==}
@@ -861,8 +861,8 @@ packages:
esast-util-from-js@2.0.1: esast-util-from-js@2.0.1:
resolution: {integrity: sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw==} resolution: {integrity: sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw==}
esbuild@0.25.10: esbuild@0.25.11:
resolution: {integrity: sha512-9RiGKvCwaqxO2owP61uQ4BgNborAQskMR6QusfWzQqv7AZOg5oGehdY2pRJMTKuwxd1IDBP4rSbI5lHzU7SMsQ==} resolution: {integrity: sha512-KohQwyzrKTQmhXDW1PjCv3Tyspn9n5GcY2RTDqeORIdIJY8yKIF7sTSopFmn/wpMPW4rdPXI0UE5LJLuq3bx0Q==}
engines: {node: '>=18'} engines: {node: '>=18'}
hasBin: true hasBin: true
@@ -1344,8 +1344,8 @@ packages:
resolution: {integrity: sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg==} resolution: {integrity: sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg==}
engines: {node: '>=14.16'} engines: {node: '>=14.16'}
package-manager-detector@1.4.0: package-manager-detector@1.4.1:
resolution: {integrity: sha512-rRZ+pR1Usc+ND9M2NkmCvE/LYJS+8ORVV9X0KuNSY/gFsp7RBHJM/ADh9LYq4Vvfq6QkKrW6/weuh8SMEtN5gw==} resolution: {integrity: sha512-dSMiVLBEA4XaNJ0PRb4N5cV/SEP4BWrWZKBmfF+OUm2pQTiZ6DDkKeWaltwu3JRhLoy59ayIkJ00cx9K9CaYTg==}
pagefind@1.4.0: pagefind@1.4.0:
resolution: {integrity: sha512-z2kY1mQlL4J8q5EIsQkLzQjilovKzfNVhX8De6oyE6uHpfFtyBaqUpcl/XzJC/4fjD8vBDyh1zolimIcVrCn9g==} resolution: {integrity: sha512-z2kY1mQlL4J8q5EIsQkLzQjilovKzfNVhX8De6oyE6uHpfFtyBaqUpcl/XzJC/4fjD8vBDyh1zolimIcVrCn9g==}
@@ -1753,8 +1753,8 @@ packages:
vfile@6.0.3: vfile@6.0.3:
resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==} resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==}
vite@6.3.7: vite@6.4.0:
resolution: {integrity: sha512-mQYaKepA0NGMBsz8Xktt3tJUG5ELE2iT7IJ+ssXI6nxVdE2sFc/d/6w/JByqMLvWg8hNKHpPgzjgOkrhpKFnrA==} resolution: {integrity: sha512-oLnWs9Hak/LOlKjeSpOwD6JMks8BeICEdYMJBf6P4Lac/pO9tKiv/XhXnAM7nNfSkZahjlCZu9sS50zL8fSnsw==}
engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0}
hasBin: true hasBin: true
peerDependencies: peerDependencies:
@@ -1922,7 +1922,7 @@ snapshots:
stream-replace-string: 2.0.0 stream-replace-string: 2.0.0
zod: 3.25.76 zod: 3.25.76
'@astrojs/starlight@0.36.0(astro@5.14.5(@types/node@24.7.2)(rollup@4.52.4)(typescript@5.9.3))': '@astrojs/starlight@0.36.1(astro@5.14.5(@types/node@24.7.2)(rollup@4.52.4)(typescript@5.9.3))':
dependencies: dependencies:
'@astrojs/markdown-remark': 6.3.8 '@astrojs/markdown-remark': 6.3.8
'@astrojs/mdx': 4.3.7(astro@5.14.5(@types/node@24.7.2)(rollup@4.52.4)(typescript@5.9.3)) '@astrojs/mdx': 4.3.7(astro@5.14.5(@types/node@24.7.2)(rollup@4.52.4)(typescript@5.9.3))
@@ -1993,82 +1993,82 @@ snapshots:
tslib: 2.8.1 tslib: 2.8.1
optional: true optional: true
'@esbuild/aix-ppc64@0.25.10': '@esbuild/aix-ppc64@0.25.11':
optional: true optional: true
'@esbuild/android-arm64@0.25.10': '@esbuild/android-arm64@0.25.11':
optional: true optional: true
'@esbuild/android-arm@0.25.10': '@esbuild/android-arm@0.25.11':
optional: true optional: true
'@esbuild/android-x64@0.25.10': '@esbuild/android-x64@0.25.11':
optional: true optional: true
'@esbuild/darwin-arm64@0.25.10': '@esbuild/darwin-arm64@0.25.11':
optional: true optional: true
'@esbuild/darwin-x64@0.25.10': '@esbuild/darwin-x64@0.25.11':
optional: true optional: true
'@esbuild/freebsd-arm64@0.25.10': '@esbuild/freebsd-arm64@0.25.11':
optional: true optional: true
'@esbuild/freebsd-x64@0.25.10': '@esbuild/freebsd-x64@0.25.11':
optional: true optional: true
'@esbuild/linux-arm64@0.25.10': '@esbuild/linux-arm64@0.25.11':
optional: true optional: true
'@esbuild/linux-arm@0.25.10': '@esbuild/linux-arm@0.25.11':
optional: true optional: true
'@esbuild/linux-ia32@0.25.10': '@esbuild/linux-ia32@0.25.11':
optional: true optional: true
'@esbuild/linux-loong64@0.25.10': '@esbuild/linux-loong64@0.25.11':
optional: true optional: true
'@esbuild/linux-mips64el@0.25.10': '@esbuild/linux-mips64el@0.25.11':
optional: true optional: true
'@esbuild/linux-ppc64@0.25.10': '@esbuild/linux-ppc64@0.25.11':
optional: true optional: true
'@esbuild/linux-riscv64@0.25.10': '@esbuild/linux-riscv64@0.25.11':
optional: true optional: true
'@esbuild/linux-s390x@0.25.10': '@esbuild/linux-s390x@0.25.11':
optional: true optional: true
'@esbuild/linux-x64@0.25.10': '@esbuild/linux-x64@0.25.11':
optional: true optional: true
'@esbuild/netbsd-arm64@0.25.10': '@esbuild/netbsd-arm64@0.25.11':
optional: true optional: true
'@esbuild/netbsd-x64@0.25.10': '@esbuild/netbsd-x64@0.25.11':
optional: true optional: true
'@esbuild/openbsd-arm64@0.25.10': '@esbuild/openbsd-arm64@0.25.11':
optional: true optional: true
'@esbuild/openbsd-x64@0.25.10': '@esbuild/openbsd-x64@0.25.11':
optional: true optional: true
'@esbuild/openharmony-arm64@0.25.10': '@esbuild/openharmony-arm64@0.25.11':
optional: true optional: true
'@esbuild/sunos-x64@0.25.10': '@esbuild/sunos-x64@0.25.11':
optional: true optional: true
'@esbuild/win32-arm64@0.25.10': '@esbuild/win32-arm64@0.25.11':
optional: true optional: true
'@esbuild/win32-ia32@0.25.10': '@esbuild/win32-ia32@0.25.11':
optional: true optional: true
'@esbuild/win32-x64@0.25.10': '@esbuild/win32-x64@0.25.11':
optional: true optional: true
'@expressive-code/core@0.41.3': '@expressive-code/core@0.41.3':
@@ -2453,12 +2453,12 @@ snapshots:
cssesc: 3.0.0 cssesc: 3.0.0
debug: 4.4.3 debug: 4.4.3
deterministic-object-hash: 2.0.2 deterministic-object-hash: 2.0.2
devalue: 5.3.2 devalue: 5.4.1
diff: 5.2.0 diff: 5.2.0
dlv: 1.1.3 dlv: 1.1.3
dset: 3.1.4 dset: 3.1.4
es-module-lexer: 1.7.0 es-module-lexer: 1.7.0
esbuild: 0.25.10 esbuild: 0.25.11
estree-walker: 3.0.3 estree-walker: 3.0.3
flattie: 1.1.1 flattie: 1.1.1
fontace: 0.3.1 fontace: 0.3.1
@@ -2474,7 +2474,7 @@ snapshots:
neotraverse: 0.6.18 neotraverse: 0.6.18
p-limit: 6.2.0 p-limit: 6.2.0
p-queue: 8.1.1 p-queue: 8.1.1
package-manager-detector: 1.4.0 package-manager-detector: 1.4.1
picomatch: 4.0.3 picomatch: 4.0.3
prompts: 2.4.2 prompts: 2.4.2
rehype: 13.0.2 rehype: 13.0.2
@@ -2489,8 +2489,8 @@ snapshots:
unist-util-visit: 5.0.0 unist-util-visit: 5.0.0
unstorage: 1.17.1 unstorage: 1.17.1
vfile: 6.0.3 vfile: 6.0.3
vite: 6.3.7(@types/node@24.7.2) vite: 6.4.0(@types/node@24.7.2)
vitefu: 1.1.1(vite@6.3.7(@types/node@24.7.2)) vitefu: 1.1.1(vite@6.4.0(@types/node@24.7.2))
xxhash-wasm: 1.1.0 xxhash-wasm: 1.1.0
yargs-parser: 21.1.1 yargs-parser: 21.1.1
yocto-spinner: 0.2.3 yocto-spinner: 0.2.3
@@ -2638,7 +2638,7 @@ snapshots:
dependencies: dependencies:
base-64: 1.0.0 base-64: 1.0.0
devalue@5.3.2: {} devalue@5.4.1: {}
devlop@1.1.0: devlop@1.1.0:
dependencies: dependencies:
@@ -2680,34 +2680,34 @@ snapshots:
esast-util-from-estree: 2.0.0 esast-util-from-estree: 2.0.0
vfile-message: 4.0.3 vfile-message: 4.0.3
esbuild@0.25.10: esbuild@0.25.11:
optionalDependencies: optionalDependencies:
'@esbuild/aix-ppc64': 0.25.10 '@esbuild/aix-ppc64': 0.25.11
'@esbuild/android-arm': 0.25.10 '@esbuild/android-arm': 0.25.11
'@esbuild/android-arm64': 0.25.10 '@esbuild/android-arm64': 0.25.11
'@esbuild/android-x64': 0.25.10 '@esbuild/android-x64': 0.25.11
'@esbuild/darwin-arm64': 0.25.10 '@esbuild/darwin-arm64': 0.25.11
'@esbuild/darwin-x64': 0.25.10 '@esbuild/darwin-x64': 0.25.11
'@esbuild/freebsd-arm64': 0.25.10 '@esbuild/freebsd-arm64': 0.25.11
'@esbuild/freebsd-x64': 0.25.10 '@esbuild/freebsd-x64': 0.25.11
'@esbuild/linux-arm': 0.25.10 '@esbuild/linux-arm': 0.25.11
'@esbuild/linux-arm64': 0.25.10 '@esbuild/linux-arm64': 0.25.11
'@esbuild/linux-ia32': 0.25.10 '@esbuild/linux-ia32': 0.25.11
'@esbuild/linux-loong64': 0.25.10 '@esbuild/linux-loong64': 0.25.11
'@esbuild/linux-mips64el': 0.25.10 '@esbuild/linux-mips64el': 0.25.11
'@esbuild/linux-ppc64': 0.25.10 '@esbuild/linux-ppc64': 0.25.11
'@esbuild/linux-riscv64': 0.25.10 '@esbuild/linux-riscv64': 0.25.11
'@esbuild/linux-s390x': 0.25.10 '@esbuild/linux-s390x': 0.25.11
'@esbuild/linux-x64': 0.25.10 '@esbuild/linux-x64': 0.25.11
'@esbuild/netbsd-arm64': 0.25.10 '@esbuild/netbsd-arm64': 0.25.11
'@esbuild/netbsd-x64': 0.25.10 '@esbuild/netbsd-x64': 0.25.11
'@esbuild/openbsd-arm64': 0.25.10 '@esbuild/openbsd-arm64': 0.25.11
'@esbuild/openbsd-x64': 0.25.10 '@esbuild/openbsd-x64': 0.25.11
'@esbuild/openharmony-arm64': 0.25.10 '@esbuild/openharmony-arm64': 0.25.11
'@esbuild/sunos-x64': 0.25.10 '@esbuild/sunos-x64': 0.25.11
'@esbuild/win32-arm64': 0.25.10 '@esbuild/win32-arm64': 0.25.11
'@esbuild/win32-ia32': 0.25.10 '@esbuild/win32-ia32': 0.25.11
'@esbuild/win32-x64': 0.25.10 '@esbuild/win32-x64': 0.25.11
escape-html@1.0.3: {} escape-html@1.0.3: {}
@@ -3595,7 +3595,7 @@ snapshots:
p-timeout@6.1.4: {} p-timeout@6.1.4: {}
package-manager-detector@1.4.0: {} package-manager-detector@1.4.1: {}
pagefind@1.4.0: pagefind@1.4.0:
optionalDependencies: optionalDependencies:
@@ -4116,9 +4116,9 @@ snapshots:
'@types/unist': 3.0.3 '@types/unist': 3.0.3
vfile-message: 4.0.3 vfile-message: 4.0.3
vite@6.3.7(@types/node@24.7.2): vite@6.4.0(@types/node@24.7.2):
dependencies: dependencies:
esbuild: 0.25.10 esbuild: 0.25.11
fdir: 6.5.0(picomatch@4.0.3) fdir: 6.5.0(picomatch@4.0.3)
picomatch: 4.0.3 picomatch: 4.0.3
postcss: 8.5.6 postcss: 8.5.6
@@ -4128,9 +4128,9 @@ snapshots:
'@types/node': 24.7.2 '@types/node': 24.7.2
fsevents: 2.3.3 fsevents: 2.3.3
vitefu@1.1.1(vite@6.3.7(@types/node@24.7.2)): vitefu@1.1.1(vite@6.4.0(@types/node@24.7.2)):
optionalDependencies: optionalDependencies:
vite: 6.3.7(@types/node@24.7.2) vite: 6.4.0(@types/node@24.7.2)
web-namespaces@2.0.1: {} web-namespaces@2.0.1: {}

View File

@@ -1,3 +0,0 @@
onlyBuiltDependencies:
- esbuild
- sharp

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.2 KiB

After

Width:  |  Height:  |  Size: 166 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 731 B

After

Width:  |  Height:  |  Size: 229 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 96 KiB

View File

@@ -1,15 +1,4 @@
<svg width="32" height="32" viewBox="0 0 32 32" xmlns="http://www.w3.org/2000/svg"> <svg width="32" height="32" viewBox="0 0 32 32" xmlns="http://www.w3.org/2000/svg">
<!-- Left mountain (amber/yellow) --> <polygon points="3.000,26.636 10.736,9.231 18.471,26.636" fill="#FFC107"/>
<polygon points="6,24 12,8 18,24" <polygon points="9.661,26.636 19.331,5.364 29.000,26.636" fill="#F44336"/>
fill="#FFC107" </svg>
stroke="#FFFFFF"
stroke-width="1"
stroke-linejoin="round"/>
<!-- Right mountain (red) -->
<polygon points="14,24 22,4 30,24"
fill="#F44336"
stroke="#FFFFFF"
stroke-width="1"
stroke-linejoin="round"/>
</svg>

Before

Width:  |  Height:  |  Size: 475 B

After

Width:  |  Height:  |  Size: 244 B

View File

@@ -1,15 +1,4 @@
<svg width="256" height="256" viewBox="0 0 256 256" xmlns="http://www.w3.org/2000/svg"> <svg width="256" height="256" viewBox="0 0 256 256" xmlns="http://www.w3.org/2000/svg">
<!-- Left mountain (amber/yellow) --> <polygon points="24.000,213.091 85.884,73.851 147.769,213.091" fill="#FFC107"/>
<polygon points="48,192 96,64 144,192" <polygon points="77.289,213.091 154.645,42.909 232.000,213.091" fill="#F44336"/>
fill="#FFC107" </svg>
stroke="#1C1C1C"
stroke-width="4"
stroke-linejoin="round"/>
<!-- Right mountain (red) -->
<polygon points="112,192 176,32 240,192"
fill="#F44336"
stroke="#1C1C1C"
stroke-width="4"
stroke-linejoin="round"/>
</svg>

Before

Width:  |  Height:  |  Size: 490 B

After

Width:  |  Height:  |  Size: 259 B

View File

@@ -1,15 +1,4 @@
<svg width="32" height="32" viewBox="0 0 32 32" xmlns="http://www.w3.org/2000/svg"> <svg width="32" height="32" viewBox="0 0 32 32" xmlns="http://www.w3.org/2000/svg">
<!-- Left mountain (amber/yellow) --> <polygon points="3.000,26.636 10.736,9.231 18.471,26.636" fill="#FFC107"/>
<polygon points="6,24 12,8 18,24" <polygon points="9.661,26.636 19.331,5.364 29.000,26.636" fill="#F44336"/>
fill="#FFC107" </svg>
stroke="#1C1C1C"
stroke-width="1"
stroke-linejoin="round"/>
<!-- Right mountain (red) -->
<polygon points="14,24 22,4 30,24"
fill="#F44336"
stroke="#1C1C1C"
stroke-width="1"
stroke-linejoin="round"/>
</svg>

Before

Width:  |  Height:  |  Size: 475 B

After

Width:  |  Height:  |  Size: 244 B

View File

@@ -15,11 +15,11 @@ Use Obtainium for automatic updates:
## iOS ## iOS
### TestFlight Beta
Join the TestFlight beta: [https://testflight.apple.com/join/E2DYRGH8](https://testflight.apple.com/join/E2DYRGH8)
### App Store ### App Store
App Store release coming soon. Download from the app store [here](https://apps.apple.com/ca/app/ascently/id6753959144)
### TestFlight Beta
Join the TestFlight beta [here](https://testflight.apple.com/join/E2DYRGH8)
## Requirements ## Requirements

View File

@@ -40,21 +40,6 @@ Ascently is an **offline-first FOSS** app designed to help climbers track their
</Card> </Card>
</CardGrid> </CardGrid>
## Requirements
- **Android:** Version 12+
- **iOS:** Version 17+
## Download
**Android:**
- Download the latest APK from the [Releases page](https://git.atri.dad/atridad/Ascently/releases)
- Use [Obtainium](https://apps.obtainium.imranr.dev/redirect?r=obtainium://app/%7B%22id%22%3A%22com.atridad.ascently%22%2C%22url%22%3A%22https%3A%2F%2Fgit.atri.dad%2Fatridad%2FAscently%2Freleases%22%2C%22author%22%3A%22git.atri.dad%22%2C%22name%22%3A%22Ascently%22%2C%22preferredApkIndex%22%3A0%2C%22additionalSettings%22%3A%22%7B%5C%22intermediateLink%5C%22%3A%5B%5D%2C%5C%22customLinkFilterRegex%5C%22%3A%5C%22%5C%22%2C%5C%22filterByLinkText%5C%22%3Afalse%2C%5C%22skipSort%5C%22%3Afalse%2C%5C%22reverseSort%5C%22%3Afalse%2C%5C%22sortByLastLinkSegment%5C%22%3Afalse%2C%5C%22versionExtractWholePage%5C%22%3Afalse%2C%5C%22requestHeader%5C%22%3A%5B%7B%5C%22requestHeader%5C%22%3A%5C%22User-Agent%3A%20Mozilla%2F5.0%20(Linux%3B%20Android%2010%3B%20K)%20AppleWebKit%2F537.36%20(KHTML%2C%20like%20Gecko)%20Chrome%2F114.0.0.0%20Mobile%20Safari%2F537.36%5C%22%7D%5D%2C%5C%22defaultPseudoVersioningMethod%5C%22%3A%5C%22partialAPKHash%5C%22%2C%5C%22trackOnly%5C%22%3Afalse%2C%5C%22versionExtractionRegEx%5C%22%3A%5C%22%5C%22%2C%5C%22matchGroupToUse%5C%22%3A%5C%22%5C%22%2C%5C%22versionDetection%5C%22%3Afalse%2C%5C%22useVersionCodeAsOSVersion%5C%22%3Afalse%2C%5C%22apkFilterRegEx%5C%22%3A%5C%22%5C%22%2C%5C%22invertAPKFilter%5C%22%3Afalse%2C%5C%22autoApkFilterByArch%5C%22%3Atrue%2C%5C%22appName%5C%22%3A%5C%22Ascently%5C%22%2C%5C%22appAuthor%5C%22%3A%5C%22%5C%22%2C%5C%22shizukuPretendToBeGooglePlay%5C%22%3Afalse%2C%5C%22allowInsecure%5C%22%3Afalse%2C%5C%22exemptFromBackgroundUpdates%5C%22%3Afalse%2C%5C%22skipUpdateNotifications%5C%22%3Afalse%2C%5C%22about%5C%22%3A%5C%22%5C%22%2C%5C%22refreshBeforeDownload%5C%22%3Afalse%7D%22%2C%22overrideSource%22%3Anull%7D) for automatic updates
**iOS:**
- Join the [TestFlight Beta](https://testflight.apple.com/join/E2DYRGH8)
- App Store release coming soon
--- ---
*Built with ❤️ by Atridad Lahiji* *Built with ❤️ by Atridad Lahiji*

View File

@@ -1,51 +0,0 @@
---
title: Sync Server API
description: API endpoints for the Ascently sync server
---
The sync server provides a minimal REST API for data synchronization.
## Authentication
All endpoints require an `Authorization: Bearer <your-auth-token>` header.
## Endpoints
### Data Sync
**GET /sync**
- Download `ascently.json` file
- Returns: JSON data file or 404 if no data exists
**POST /sync**
- Upload `ascently.json` file
- Body: JSON data
- Returns: Success confirmation
### Images
**GET /images/{imageName}**
- Download an image file
- Returns: Image file or 404 if not found
**POST /images/{imageName}**
- Upload an image file
- Body: Image data
- Returns: Success confirmation
## Example Usage
```bash
# Download data
curl -H "Authorization: Bearer your-token" \
http://localhost:8080/sync
# Upload data
curl -X POST \
-H "Authorization: Bearer your-token" \
-H "Content-Type: application/json" \
-d @ascently.json \
http://localhost:8080/sync
```
See `main.go` in the sync directory for implementation details.

View File

@@ -0,0 +1,152 @@
---
title: API Reference
description: Complete API documentation for the Ascently sync server
---
Complete reference for all sync server endpoints.
## Authentication
All endpoints require a bearer token in the `Authorization` header:
```
Authorization: Bearer your-auth-token
```
Unauthorized requests return `401 Unauthorized`.
## Endpoints
### Health Check
**`GET /health`**
Check if the server is running.
**Response:**
```json
{
"status": "ok",
"version": "2.0.0"
}
```
### Full Sync - Download
**`GET /sync`**
Download the entire dataset from the server.
**Response:**
```json
{
"exportedAt": "2024-01-15T10:30:00.000Z",
"version": "2.0",
"formatVersion": "2.0",
"gyms": [...],
"problems": [...],
"sessions": [...],
"attempts": [...],
"deletedItems": [...]
}
```
Returns `200 OK` with the backup data, or `404 Not Found` if no data exists.
### Full Sync - Upload
**`POST /sync`**
Upload your entire dataset to the server. This overwrites all server data.
**Request Body:**
```json
{
"exportedAt": "2024-01-15T10:30:00.000Z",
"version": "2.0",
"formatVersion": "2.0",
"gyms": [...],
"problems": [...],
"sessions": [...],
"attempts": [...],
"deletedItems": [...]
}
```
**Response:**
```
200 OK
```
### Delta Sync
**`POST /sync/delta`**
Sync only changed data since your last sync. Much faster than full sync.
**Request Body:**
```json
{
"lastSyncTime": "2024-01-15T10:00:00.000Z",
"gyms": [...],
"problems": [...],
"sessions": [...],
"attempts": [...],
"deletedItems": [...]
}
```
Include only items modified after `lastSyncTime`. The server merges your changes with its data using last-write-wins based on `updatedAt` timestamps.
**Response:**
```json
{
"serverTime": "2024-01-15T10:30:00.000Z",
"gyms": [...],
"problems": [...],
"sessions": [...],
"attempts": [...],
"deletedItems": [...]
}
```
Returns only server items modified after your `lastSyncTime`. Save `serverTime` as your new `lastSyncTime` for the next delta sync.
### Image Upload
**`POST /images/upload?filename={name}`**
Upload an image file.
**Query Parameters:**
- `filename`: Image filename (e.g., `problem_abc123_0.jpg`)
**Request Body:**
Binary image data (JPEG, PNG, GIF, or WebP)
**Response:**
```
200 OK
```
### Image Download
**`GET /images/download?filename={name}`**
Download an image file.
**Query Parameters:**
- `filename`: Image filename
**Response:**
Binary image data with appropriate `Content-Type` header.
Returns `404 Not Found` if the image doesn't exist.
## Notes
- All timestamps are ISO 8601 format with milliseconds
- Active sessions (status `active`) are excluded from sync
- Images are stored separately and referenced by filename
- The server stores everything in a single `ascently.json` file
- No versioning or history - last write wins

View File

@@ -3,28 +3,49 @@ title: Self-Hosted Sync Overview
description: Learn about Ascently's optional sync server for cross-device data synchronization description: Learn about Ascently's optional sync server for cross-device data synchronization
--- ---
Run your own sync server to keep your data in sync across devices. The server is lightweight and easy to set up with Docker.
## How It Works ## How It Works
The server stores your data in a single `ascently.json` file and images in a directory. It's simple: last write wins. Authentication is a static bearer token you set.
## API ## Features
All endpoints require an `Authorization: Bearer <your-auth-token>` header. - **Delta sync**: Only syncs changed data
- **Image sync**: Automatically syncs problem images
- **Conflict resolution**: Last-write-wins based on timestamps
- **Cross-platform**: Works with iOS and Android clients
- **Privacy**: Your data, your server, no analytics
- `GET /sync`: Download `ascently.json` ## API Endpoints
- `POST /sync`: Upload `ascently.json`
- `GET /images/{imageName}`: Download an image - `GET /health` - Health check
- `POST /images/{imageName}`: Upload an image - `GET /sync` - Download full dataset
- `POST /sync` - Upload full dataset
- `POST /sync/delta` - Sync only changes (recommended)
- `POST /images/upload?filename={name}` - Upload image
- `GET /images/download?filename={name}` - Download image
All endpoints require `Authorization: Bearer <your-token>` header.
See the [API Reference](/sync/api-reference/) for complete documentation.
## Getting Started ## Getting Started
The easiest way to get started is with the [Quick Start guide](/sync/quick-start/) using Docker Compose. Check out the [Quick Start guide](/sync/quick-start/) to get your server running with Docker Compose.
You'll need: You'll need:
- Docker and Docker Compose - Docker and Docker Compose
- A secure authentication token - A secure authentication token
- A place to store your data - A place to store your data
The server will be available at `http://localhost:8080` by default. Configure your clients with your server URL and auth token to start syncing. The server will be available at `http://localhost:8080` by default. Configure your Ascently apps with your server URL and auth token to start syncing.
## How Sync Works
1. **First sync**: Client uploads or downloads full dataset
2. **Subsequent syncs**: Client uses delta sync to only transfer changed data
3. **Conflicts**: Resolved automatically using timestamps (newer wins)
4. **Images**: Synced automatically with problem data
Active sessions are excluded from sync until completed.

View File

@@ -3,7 +3,7 @@ title: Quick Start
description: Get your Ascently sync server running with Docker Compose description: Get your Ascently sync server running with Docker Compose
--- ---
Get your sync server running in minutes with Docker Compose.
## Prerequisites ## Prerequisites
@@ -12,50 +12,158 @@ Get your Ascently sync server up and running using Docker Compose.
## Setup ## Setup
1. Create a `.env` file with your configuration: 1. Create a `docker-compose.yml` file:
```env ```yaml
IMAGE=git.atri.dad/atridad/ascently-sync:latest version: '3.8'
APP_PORT=8080
AUTH_TOKEN=your-super-secret-token services:
DATA_FILE=/data/ascently.json ascently-sync:
IMAGES_DIR=/data/images image: git.atri.dad/atridad/ascently-sync:latest
ROOT_DIR=./ascently-data ports:
- "8080:8080"
environment:
- AUTH_TOKEN=${AUTH_TOKEN}
- DATA_FILE=/data/ascently.json
- IMAGES_DIR=/data/images
volumes:
- ./ascently-data:/data
restart: unless-stopped
``` ```
Set `AUTH_TOKEN` to a long, random string. `ROOT_DIR` is where the server will store its data on your machine. 2. Create a `.env` file in the same directory:
2. Use the provided `docker-compose.yml` in the `sync/` directory: ```env
AUTH_TOKEN=your-super-secret-token-here
```
Replace `your-super-secret-token-here` with a secure random token (see below).
3. Start the server:
```bash ```bash
cd sync/
docker-compose up -d docker-compose up -d
``` ```
The server will be available at `http://localhost:8080`. The server will be available at `http://localhost:8080`.
## Configure Your Clients ## Generate a Secure Token
Configure your Ascently apps with: Use this command to generate a secure authentication token:
- **Server URL**: `http://your-server-ip:8080` (or your domain)
- **Auth Token**: The token from your `.env` file
Enable sync and perform your first sync to start synchronizing data across devices.
## Generating a Secure Token
Generate a secure authentication token:
```bash ```bash
# On Linux/macOS
openssl rand -base64 32 openssl rand -base64 32
``` ```
Keep this token secure and don't share it publicly. Copy the output and paste it into your `.env` file as the `AUTH_TOKEN`.
## Accessing Remotely Keep this token secret and don't commit it to version control.
For remote access, you'll need to: ## Configure Your Apps
- Set up port forwarding on your router (port 8080)
- Use your public IP address or set up a domain name Open Ascently on your iOS or Android device:
- Consider using HTTPS with a reverse proxy for security
1. Go to **Settings**
2. Scroll to **Sync Configuration**
3. Enter your **Server URL**: `http://your-server-ip:8080`
4. Enter your **Auth Token**: (the token from your `.env` file)
5. Tap **Test Connection** to verify it works
6. Enable **Auto Sync**
7. Tap **Sync Now** to perform your first sync
Repeat this on all your devices to keep them in sync.
## Verify It's Working
Check the server logs:
```bash
docker-compose logs -f ascently-sync
```
You should see logs like:
```
Delta sync from 192.168.1.100: lastSyncTime=2024-01-15T10:00:00.000Z, gyms=1, problems=5, sessions=2, attempts=10, deletedItems=0
```
## Remote Access
To access your server remotely:
### Option 1: Port Forwarding
1. Forward port 8080 on your router to your server
2. Find your public IP address
3. Use `http://your-public-ip:8080` as the server URL
### Option 2: Domain Name (Recommended)
1. Get a domain name and point it to your server
2. Set up a reverse proxy (nginx, Caddy, Traefik)
3. Enable HTTPS with Let's Encrypt
4. Use `https://sync.yourdomain.com` as the server URL
Example nginx config with HTTPS:
```nginx
server {
listen 443 ssl http2;
server_name sync.yourdomain.com;
ssl_certificate /etc/letsencrypt/live/sync.yourdomain.com/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/sync.yourdomain.com/privkey.pem;
location / {
proxy_pass http://localhost:8080;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
}
}
```
## Updating
Pull the latest image and restart:
```bash
docker-compose pull
docker-compose up -d
```
Your data is stored in `./ascently-data` and persists across updates.
## Troubleshooting
### Connection Failed
- Check the server is running: `docker-compose ps`
- Verify the auth token matches on server and client
- Check firewall settings and port forwarding
- Test locally first with `http://localhost:8080`
### Sync Errors
- Check server logs: `docker-compose logs ascently-sync`
- Verify your device has internet connection
- Try disabling and re-enabling sync
- Perform a manual sync from Settings
### Data Location
All data is stored in `./ascently-data/`:
```
ascently-data/
├── ascently.json # Your climb data
└── images/ # Problem images
```
You can back this up or move it to another server.
## Next Steps
- Read the [API Reference](/sync/api-reference/) for advanced usage
- Set up automated backups of your `ascently-data` directory
- Configure HTTPS for secure remote access
- Monitor server logs for sync activity

View File

@@ -465,7 +465,7 @@
CODE_SIGN_ENTITLEMENTS = Ascently/Ascently.entitlements; CODE_SIGN_ENTITLEMENTS = Ascently/Ascently.entitlements;
CODE_SIGN_IDENTITY = "Apple Development"; CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic; CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 26; CURRENT_PROJECT_VERSION = 30;
DEVELOPMENT_TEAM = 4BC9Y2LL4B; DEVELOPMENT_TEAM = 4BC9Y2LL4B;
DRIVERKIT_DEPLOYMENT_TARGET = 24.6; DRIVERKIT_DEPLOYMENT_TARGET = 24.6;
ENABLE_PREVIEWS = YES; ENABLE_PREVIEWS = YES;
@@ -487,7 +487,7 @@
"@executable_path/Frameworks", "@executable_path/Frameworks",
); );
MACOSX_DEPLOYMENT_TARGET = 15.6; MACOSX_DEPLOYMENT_TARGET = 15.6;
MARKETING_VERSION = 2.0.0; MARKETING_VERSION = 2.2.0;
PRODUCT_BUNDLE_IDENTIFIER = com.atridad.Ascently; PRODUCT_BUNDLE_IDENTIFIER = com.atridad.Ascently;
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = ""; PROVISIONING_PROFILE_SPECIFIER = "";
@@ -513,7 +513,7 @@
CODE_SIGN_ENTITLEMENTS = Ascently/Ascently.entitlements; CODE_SIGN_ENTITLEMENTS = Ascently/Ascently.entitlements;
CODE_SIGN_IDENTITY = "Apple Development"; CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic; CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 26; CURRENT_PROJECT_VERSION = 30;
DEVELOPMENT_TEAM = 4BC9Y2LL4B; DEVELOPMENT_TEAM = 4BC9Y2LL4B;
DRIVERKIT_DEPLOYMENT_TARGET = 24.6; DRIVERKIT_DEPLOYMENT_TARGET = 24.6;
ENABLE_PREVIEWS = YES; ENABLE_PREVIEWS = YES;
@@ -535,7 +535,7 @@
"@executable_path/Frameworks", "@executable_path/Frameworks",
); );
MACOSX_DEPLOYMENT_TARGET = 15.6; MACOSX_DEPLOYMENT_TARGET = 15.6;
MARKETING_VERSION = 2.0.0; MARKETING_VERSION = 2.2.0;
PRODUCT_BUNDLE_IDENTIFIER = com.atridad.Ascently; PRODUCT_BUNDLE_IDENTIFIER = com.atridad.Ascently;
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = ""; PROVISIONING_PROFILE_SPECIFIER = "";
@@ -602,7 +602,7 @@
ASSETCATALOG_COMPILER_WIDGET_BACKGROUND_COLOR_NAME = WidgetBackground; ASSETCATALOG_COMPILER_WIDGET_BACKGROUND_COLOR_NAME = WidgetBackground;
CODE_SIGN_ENTITLEMENTS = SessionStatusLiveExtension.entitlements; CODE_SIGN_ENTITLEMENTS = SessionStatusLiveExtension.entitlements;
CODE_SIGN_STYLE = Automatic; CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 26; CURRENT_PROJECT_VERSION = 30;
DEVELOPMENT_TEAM = 4BC9Y2LL4B; DEVELOPMENT_TEAM = 4BC9Y2LL4B;
GENERATE_INFOPLIST_FILE = YES; GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = SessionStatusLive/Info.plist; INFOPLIST_FILE = SessionStatusLive/Info.plist;
@@ -613,7 +613,7 @@
"@executable_path/Frameworks", "@executable_path/Frameworks",
"@executable_path/../../Frameworks", "@executable_path/../../Frameworks",
); );
MARKETING_VERSION = 2.0.0; MARKETING_VERSION = 2.2.0;
PRODUCT_BUNDLE_IDENTIFIER = com.atridad.Ascently.SessionStatusLive; PRODUCT_BUNDLE_IDENTIFIER = com.atridad.Ascently.SessionStatusLive;
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES; SKIP_INSTALL = YES;
@@ -632,7 +632,7 @@
ASSETCATALOG_COMPILER_WIDGET_BACKGROUND_COLOR_NAME = WidgetBackground; ASSETCATALOG_COMPILER_WIDGET_BACKGROUND_COLOR_NAME = WidgetBackground;
CODE_SIGN_ENTITLEMENTS = SessionStatusLiveExtension.entitlements; CODE_SIGN_ENTITLEMENTS = SessionStatusLiveExtension.entitlements;
CODE_SIGN_STYLE = Automatic; CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 26; CURRENT_PROJECT_VERSION = 30;
DEVELOPMENT_TEAM = 4BC9Y2LL4B; DEVELOPMENT_TEAM = 4BC9Y2LL4B;
GENERATE_INFOPLIST_FILE = YES; GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = SessionStatusLive/Info.plist; INFOPLIST_FILE = SessionStatusLive/Info.plist;
@@ -643,7 +643,7 @@
"@executable_path/Frameworks", "@executable_path/Frameworks",
"@executable_path/../../Frameworks", "@executable_path/../../Frameworks",
); );
MARKETING_VERSION = 2.0.0; MARKETING_VERSION = 2.2.0;
PRODUCT_BUNDLE_IDENTIFIER = com.atridad.Ascently.SessionStatusLive; PRODUCT_BUNDLE_IDENTIFIER = com.atridad.Ascently.SessionStatusLive;
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES; SKIP_INSTALL = YES;

Binary file not shown.

Before

Width:  |  Height:  |  Size: 20 KiB

After

Width:  |  Height:  |  Size: 7.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 35 KiB

After

Width:  |  Height:  |  Size: 7.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 8.4 KiB

After

Width:  |  Height:  |  Size: 3.1 KiB

View File

@@ -1,22 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<svg width="1024" height="1024" viewBox="0 0 1024 1024" xmlns="http://schemas.android.com/2000/svg"> <svg width="1024" height="1024" viewBox="0 0 1024 1024" xmlns="http://www.w3.org/2000/svg">
<!-- Dark background with rounded corners for iOS -->
<rect width="1024" height="1024" fill="#1A1A1A" rx="180" ry="180"/> <rect width="1024" height="1024" fill="#1A1A1A" rx="180" ry="180"/>
<!-- Transform to match Android layout exactly -->
<g transform="translate(512, 512) scale(4.75) translate(-54, -42.5)"> <g transform="translate(512, 512) scale(4.75) translate(-54, -42.5)">
<!-- Left mountain (yellow/amber) - matches Android coordinates with white border --> <polygon points="8,75 35,14.25 62,75" fill="#FFC107"/>
<polygon points="15,70 35,25 55,70" <polygon points="31.25,75 65,0.75 98.75,75" fill="#F44336"/>
fill="#FFC107"
stroke="#FFFFFF"
stroke-width="3"
stroke-linejoin="round"/>
<!-- Right mountain (red) - matches Android coordinates with white border -->
<polygon points="40,70 65,15 90,70"
fill="#F44336"
stroke="#FFFFFF"
stroke-width="3"
stroke-linejoin="round"/>
</g> </g>
</svg> </svg>

Before

Width:  |  Height:  |  Size: 913 B

After

Width:  |  Height:  |  Size: 411 B

View File

@@ -1,22 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<svg width="1024" height="1024" viewBox="0 0 1024 1024" xmlns="http://schemas.android.com/2000/svg"> <svg width="1024" height="1024" viewBox="0 0 1024 1024" xmlns="http://www.w3.org/2000/svg">
<!-- White background with rounded corners for iOS -->
<rect width="1024" height="1024" fill="#FFFFFF" rx="180" ry="180"/> <rect width="1024" height="1024" fill="#FFFFFF" rx="180" ry="180"/>
<!-- Transform to match Android layout exactly -->
<g transform="translate(512, 512) scale(4.75) translate(-54, -42.5)"> <g transform="translate(512, 512) scale(4.75) translate(-54, -42.5)">
<!-- Left mountain (yellow/amber) - matches Android coordinates --> <polygon points="8,75 35,14.25 62,75" fill="#FFC107"/>
<polygon points="15,70 35,25 55,70" <polygon points="31.25,75 65,0.75 98.75,75" fill="#F44336"/>
fill="#FFC107"
stroke="#1C1C1C"
stroke-width="3"
stroke-linejoin="round"/>
<!-- Right mountain (red) - matches Android coordinates -->
<polygon points="40,70 65,15 90,70"
fill="#F44336"
stroke="#1C1C1C"
stroke-width="3"
stroke-linejoin="round"/>
</g> </g>
</svg> </svg>

Before

Width:  |  Height:  |  Size: 878 B

After

Width:  |  Height:  |  Size: 411 B

View File

@@ -1,24 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<svg width="1024" height="1024" viewBox="0 0 1024 1024" xmlns="http://schemas.android.com/2000/svg"> <svg width="1024" height="1024" viewBox="0 0 1024 1024" xmlns="http://www.w3.org/2000/svg">
<!-- Transparent background with rounded corners for iOS tinted mode -->
<rect width="1024" height="1024" fill="transparent" rx="180" ry="180"/> <rect width="1024" height="1024" fill="transparent" rx="180" ry="180"/>
<!-- Transform to match Android layout exactly -->
<g transform="translate(512, 512) scale(4.75) translate(-54, -42.5)"> <g transform="translate(512, 512) scale(4.75) translate(-54, -42.5)">
<!-- Left mountain - matches Android coordinates, black fill for tinting --> <polygon points="8,75 35,14.25 62,75" fill="#000000" opacity="0.8"/>
<polygon points="15,70 35,25 55,70" <polygon points="31.25,75 65,0.75 98.75,75" fill="#000000" opacity="0.9"/>
fill="#000000"
stroke="#000000"
stroke-width="3"
stroke-linejoin="round"
opacity="0.8"/>
<!-- Right mountain - matches Android coordinates, black fill for tinting -->
<polygon points="40,70 65,15 90,70"
fill="#000000"
stroke="#000000"
stroke-width="3"
stroke-linejoin="round"
opacity="0.9"/>
</g> </g>
</svg> </svg>

Before

Width:  |  Height:  |  Size: 981 B

After

Width:  |  Height:  |  Size: 443 B

View File

@@ -111,7 +111,6 @@ struct ContentView: View {
Task { Task {
try? await Task.sleep(nanoseconds: 300_000_000) // 0.3 seconds try? await Task.sleep(nanoseconds: 300_000_000) // 0.3 seconds
await dataManager.onAppBecomeActive() await dataManager.onAppBecomeActive()
// Ensure health integration is verified
await dataManager.healthKitService.verifyAndRestoreIntegration() await dataManager.healthKitService.verifyAndRestoreIntegration()
} }
} }

View File

@@ -55,7 +55,6 @@ struct BackupGym: Codable {
let createdAt: String let createdAt: String
let updatedAt: String let updatedAt: String
/// Initialize from native iOS Gym model
init(from gym: Gym) { init(from gym: Gym) {
self.id = gym.id.uuidString self.id = gym.id.uuidString
self.name = gym.name self.name = gym.name
@@ -71,7 +70,6 @@ struct BackupGym: Codable {
self.updatedAt = formatter.string(from: gym.updatedAt) self.updatedAt = formatter.string(from: gym.updatedAt)
} }
/// Initialize with explicit parameters for import
init( init(
id: String, id: String,
name: String, name: String,
@@ -94,7 +92,6 @@ struct BackupGym: Codable {
self.updatedAt = updatedAt self.updatedAt = updatedAt
} }
/// Convert to native iOS Gym model
func toGym() throws -> Gym { func toGym() throws -> Gym {
let formatter = ISO8601DateFormatter() let formatter = ISO8601DateFormatter()
formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds] formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds]
@@ -137,7 +134,6 @@ struct BackupProblem: Codable {
let createdAt: String let createdAt: String
let updatedAt: String let updatedAt: String
/// Initialize from native iOS Problem model
init(from problem: Problem) { init(from problem: Problem) {
self.id = problem.id.uuidString self.id = problem.id.uuidString
self.gymId = problem.gymId.uuidString self.gymId = problem.gymId.uuidString
@@ -158,7 +154,6 @@ struct BackupProblem: Codable {
self.updatedAt = formatter.string(from: problem.updatedAt) self.updatedAt = formatter.string(from: problem.updatedAt)
} }
/// Initialize with explicit parameters for import
init( init(
id: String, id: String,
gymId: String, gymId: String,
@@ -191,7 +186,6 @@ struct BackupProblem: Codable {
self.updatedAt = updatedAt self.updatedAt = updatedAt
} }
/// Convert to native iOS Problem model
func toProblem() throws -> Problem { func toProblem() throws -> Problem {
let formatter = ISO8601DateFormatter() let formatter = ISO8601DateFormatter()
formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds] formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds]
@@ -224,7 +218,6 @@ struct BackupProblem: Codable {
) )
} }
/// Create a copy with updated image paths for import processing
func withUpdatedImagePaths(_ newImagePaths: [String]) -> BackupProblem { func withUpdatedImagePaths(_ newImagePaths: [String]) -> BackupProblem {
return BackupProblem( return BackupProblem(
id: self.id, id: self.id,
@@ -258,7 +251,6 @@ struct BackupClimbSession: Codable {
let createdAt: String let createdAt: String
let updatedAt: String let updatedAt: String
/// Initialize from native iOS ClimbSession model
init(from session: ClimbSession) { init(from session: ClimbSession) {
self.id = session.id.uuidString self.id = session.id.uuidString
self.gymId = session.gymId.uuidString self.gymId = session.gymId.uuidString
@@ -275,7 +267,6 @@ struct BackupClimbSession: Codable {
self.updatedAt = formatter.string(from: session.updatedAt) self.updatedAt = formatter.string(from: session.updatedAt)
} }
/// Initialize with explicit parameters for import
init( init(
id: String, id: String,
gymId: String, gymId: String,
@@ -300,7 +291,6 @@ struct BackupClimbSession: Codable {
self.updatedAt = updatedAt self.updatedAt = updatedAt
} }
/// Convert to native iOS ClimbSession model
func toClimbSession() throws -> ClimbSession { func toClimbSession() throws -> ClimbSession {
let formatter = ISO8601DateFormatter() let formatter = ISO8601DateFormatter()
formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds] formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds]
@@ -347,7 +337,6 @@ struct BackupAttempt: Codable {
let createdAt: String let createdAt: String
let updatedAt: String? let updatedAt: String?
/// Initialize from native iOS Attempt model
init(from attempt: Attempt) { init(from attempt: Attempt) {
self.id = attempt.id.uuidString self.id = attempt.id.uuidString
self.sessionId = attempt.sessionId.uuidString self.sessionId = attempt.sessionId.uuidString
@@ -365,7 +354,6 @@ struct BackupAttempt: Codable {
self.updatedAt = formatter.string(from: attempt.updatedAt) self.updatedAt = formatter.string(from: attempt.updatedAt)
} }
/// Initialize with explicit parameters for import
init( init(
id: String, id: String,
sessionId: String, sessionId: String,
@@ -392,7 +380,6 @@ struct BackupAttempt: Codable {
self.updatedAt = updatedAt self.updatedAt = updatedAt
} }
/// Convert to native iOS Attempt model
func toAttempt() throws -> Attempt { func toAttempt() throws -> Attempt {
let formatter = ISO8601DateFormatter() let formatter = ISO8601DateFormatter()
formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds] formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds]

View File

@@ -0,0 +1,26 @@
//
// DeltaSyncFormat.swift
// Ascently
//
// Delta sync structures for incremental data synchronization
//
import Foundation
/// Client → server payload for `POST /sync/delta`.
///
/// Carries only the items the client modified after `lastSyncTime`; the server
/// merges them (last-write-wins on timestamps) and replies with a
/// `DeltaSyncResponse` containing its own changes since that time.
struct DeltaSyncRequest: Codable {
// ISO 8601 timestamp of the client's last successful sync.
let lastSyncTime: String
let gyms: [BackupGym]
let problems: [BackupProblem]
let sessions: [BackupClimbSession]
let attempts: [BackupAttempt]
// Deletion tombstones so the server can propagate removals to other devices.
let deletedItems: [DeletedItem]
}
/// Server → client payload returned by `POST /sync/delta`.
///
/// Contains only server-side items modified after the request's
/// `lastSyncTime`. The client should persist `serverTime` as its new sync
/// cursor for the next delta round-trip.
struct DeltaSyncResponse: Codable {
// Server clock at the time of the response, ISO 8601.
let serverTime: String
let gyms: [BackupGym]
let problems: [BackupProblem]
let sessions: [BackupClimbSession]
let attempts: [BackupAttempt]
// Deletions recorded on the server since `lastSyncTime`.
let deletedItems: [DeletedItem]
}

View File

@@ -31,7 +31,6 @@ class HealthKitService: ObservableObject {
} }
} }
/// Restore active workout state
private func restoreActiveWorkout() { private func restoreActiveWorkout() {
if let startDate = userDefaults.object(forKey: workoutStartDateKey) as? Date, if let startDate = userDefaults.object(forKey: workoutStartDateKey) as? Date,
let sessionIdString = userDefaults.string(forKey: workoutSessionIdKey), let sessionIdString = userDefaults.string(forKey: workoutSessionIdKey),
@@ -43,7 +42,6 @@ class HealthKitService: ObservableObject {
} }
} }
/// Persist active workout state
private func persistActiveWorkout() { private func persistActiveWorkout() {
if let startDate = currentWorkoutStartDate, let sessionId = currentWorkoutSessionId { if let startDate = currentWorkoutStartDate, let sessionId = currentWorkoutSessionId {
userDefaults.set(startDate, forKey: workoutStartDateKey) userDefaults.set(startDate, forKey: workoutStartDateKey)
@@ -54,7 +52,6 @@ class HealthKitService: ObservableObject {
} }
} }
/// Verify and restore health integration
func verifyAndRestoreIntegration() async { func verifyAndRestoreIntegration() async {
guard isEnabled else { return } guard isEnabled else { return }

View File

@@ -136,6 +136,344 @@ class SyncService: ObservableObject {
} }
} }
/// Performs an incremental ("delta") sync against the configured server.
///
/// Uploads only items modified after the last successful sync; the server
/// replies with its own changes since that time, which are merged locally.
/// Active sessions (and attempts belonging to them) are excluded until the
/// session completes.
///
/// - Parameter dataManager: Source of truth for local gyms, problems,
///   sessions, attempts, and deletion tombstones.
/// - Throws: `SyncError.notConfigured`, `SyncError.invalidURL`,
///   `SyncError.invalidResponse`, `SyncError.unauthorized`,
///   `SyncError.serverError`, plus any encoding/decoding or networking error.
func performDeltaSync(dataManager: ClimbingDataManager) async throws {
    guard isConfigured else {
        throw SyncError.notConfigured
    }
    guard let url = URL(string: "\(serverURL)/sync/delta") else {
        throw SyncError.invalidURL
    }

    // Last sync time, or epoch if never synced (first delta sends everything).
    let lastSync = lastSyncTime ?? Date(timeIntervalSince1970: 0)

    // FIX: the sync wire format uses ISO 8601 *with milliseconds*
    // (e.g. "2024-01-15T10:30:00.000Z"), and BackupModels encodes with
    // `.withFractionalSeconds`. A default ISO8601DateFormatter cannot parse
    // fractional seconds, so `serverTime` / `deletedAt` parsing returned nil
    // and `lastSyncTime` never advanced. Format with fractional seconds, and
    // keep a plain formatter as a parsing fallback for timestamps without them.
    let formatter = ISO8601DateFormatter()
    formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds]
    let plainFormatter = ISO8601DateFormatter()
    func parseISO8601(_ string: String) -> Date? {
        formatter.date(from: string) ?? plainFormatter.date(from: string)
    }
    let lastSyncString = formatter.string(from: lastSync)

    // Collect items modified since the last sync.
    let modifiedGyms = dataManager.gyms.filter { gym in
        gym.updatedAt > lastSync
    }.map { BackupGym(from: $0) }

    let modifiedProblems = dataManager.problems.filter { problem in
        problem.updatedAt > lastSync
    }.map { problem -> BackupProblem in
        let backupProblem = BackupProblem(from: problem)
        if !problem.imagePaths.isEmpty {
            // Normalize image filenames to the canonical problem-id-based
            // scheme so server and all clients agree on image identity.
            let normalizedPaths = problem.imagePaths.enumerated().map { index, _ in
                ImageNamingUtils.generateImageFilename(
                    problemId: problem.id.uuidString, imageIndex: index)
            }
            return BackupProblem(
                id: backupProblem.id,
                gymId: backupProblem.gymId,
                name: backupProblem.name,
                description: backupProblem.description,
                climbType: backupProblem.climbType,
                difficulty: backupProblem.difficulty,
                tags: backupProblem.tags,
                location: backupProblem.location,
                imagePaths: normalizedPaths,
                isActive: backupProblem.isActive,
                dateSet: backupProblem.dateSet,
                notes: backupProblem.notes,
                createdAt: backupProblem.createdAt,
                updatedAt: backupProblem.updatedAt
            )
        }
        return backupProblem
    }

    // Sessions sync only once completed; active ones stay local.
    let modifiedSessions = dataManager.sessions.filter { session in
        session.status != .active && session.updatedAt > lastSync
    }.map { BackupClimbSession(from: $0) }

    let activeSessionIds = Set(
        dataManager.sessions.filter { $0.status == .active }.map { $0.id })
    // Attempts are filtered by createdAt (presumably write-once records) and
    // skipped while their session is still active.
    let modifiedAttempts = dataManager.attempts.filter { attempt in
        !activeSessionIds.contains(attempt.sessionId) && attempt.createdAt > lastSync
    }.map { BackupAttempt(from: $0) }

    let modifiedDeletions = dataManager.getDeletedItems().filter { item in
        if let deletedDate = parseISO8601(item.deletedAt) {
            return deletedDate > lastSync
        }
        // Unparseable tombstones are not re-sent.
        return false
    }

    print(
        "iOS DELTA SYNC: Sending gyms=\(modifiedGyms.count), problems=\(modifiedProblems.count), sessions=\(modifiedSessions.count), attempts=\(modifiedAttempts.count), deletions=\(modifiedDeletions.count)"
    )

    // Create delta request
    let deltaRequest = DeltaSyncRequest(
        lastSyncTime: lastSyncString,
        gyms: modifiedGyms,
        problems: modifiedProblems,
        sessions: modifiedSessions,
        attempts: modifiedAttempts,
        deletedItems: modifiedDeletions
    )

    let encoder = JSONEncoder()
    encoder.dateEncodingStrategy = .iso8601
    let jsonData = try encoder.encode(deltaRequest)

    var request = URLRequest(url: url)
    request.httpMethod = "POST"
    request.setValue("Bearer \(authToken)", forHTTPHeaderField: "Authorization")
    request.setValue("application/json", forHTTPHeaderField: "Content-Type")
    request.setValue("application/json", forHTTPHeaderField: "Accept")
    request.httpBody = jsonData

    let (data, response) = try await URLSession.shared.data(for: request)
    guard let httpResponse = response as? HTTPURLResponse else {
        throw SyncError.invalidResponse
    }
    switch httpResponse.statusCode {
    case 200:
        break
    case 401:
        throw SyncError.unauthorized
    default:
        throw SyncError.serverError(httpResponse.statusCode)
    }

    let decoder = JSONDecoder()
    let deltaResponse = try decoder.decode(DeltaSyncResponse.self, from: data)
    print(
        "iOS DELTA SYNC: Received gyms=\(deltaResponse.gyms.count), problems=\(deltaResponse.problems.count), sessions=\(deltaResponse.sessions.count), attempts=\(deltaResponse.attempts.count), deletions=\(deltaResponse.deletedItems.count)"
    )

    // Apply server changes to local data
    try await applyDeltaResponse(deltaResponse, dataManager: dataManager)

    // Sync only modified problem images
    try await syncModifiedImages(modifiedProblems: modifiedProblems, dataManager: dataManager)

    // Advance the sync cursor to the server's clock so the next delta picks up
    // exactly where this one left off.
    if let serverTime = parseISO8601(deltaResponse.serverTime) {
        lastSyncTime = serverTime
        userDefaults.set(lastSyncTime, forKey: Keys.lastSyncTime)
    }
}
/// Merges a delta-sync response from the server into local data.
///
/// Order matters: deletions are applied first (so tombstoned records cannot be
/// resurrected by stale payloads), images referenced by incoming problems are
/// downloaded and renamed to the canonical scheme, each entity type is merged
/// with last-write-wins conflict resolution, deletions are re-applied as a
/// safety net, and finally everything is persisted.
private func applyDeltaResponse(_ response: DeltaSyncResponse, dataManager: ClimbingDataManager)
    async throws
{
    // FIX: backup timestamps carry fractional seconds
    // (e.g. "2024-01-15T10:30:00.000Z"); format local dates the same way so
    // the lexicographic `>=` comparisons below compare like with like. With
    // default options the local string lacked ".SSS", making mixed-format
    // string comparison unsound.
    let formatter = ISO8601DateFormatter()
    formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds]

    // Merge and apply deletions first to prevent resurrection
    let allDeletions = dataManager.getDeletedItems() + response.deletedItems
    let uniqueDeletions = Array(Set(allDeletions))
    print(
        "iOS DELTA SYNC: Applying \(uniqueDeletions.count) deletion records before merging data"
    )
    applyDeletionsToDataManager(deletions: uniqueDeletions, dataManager: dataManager)

    // Fast "type:id" membership lookup for tombstoned records.
    let deletedItemSet = Set(uniqueDeletions.map { $0.type + ":" + $0.id })

    // Download images for new/modified problems from the server, saving each
    // under a canonical filename. Maps server filename -> local filename.
    var imagePathMapping: [String: String] = [:]
    for problem in response.problems {
        if deletedItemSet.contains("problem:" + problem.id) {
            continue
        }
        guard let imagePaths = problem.imagePaths, !imagePaths.isEmpty else { continue }
        for (index, imagePath) in imagePaths.enumerated() {
            let serverFilename = URL(fileURLWithPath: imagePath).lastPathComponent
            do {
                let imageData = try await downloadImage(filename: serverFilename)
                let consistentFilename = ImageNamingUtils.generateImageFilename(
                    problemId: problem.id, imageIndex: index)
                let imageManager = ImageManager.shared
                _ = try imageManager.saveImportedImage(imageData, filename: consistentFilename)
                imagePathMapping[serverFilename] = consistentFilename
            } catch SyncError.imageNotFound {
                // Best-effort: a missing image should not abort the whole sync.
                print("Image not found on server: \(serverFilename)")
                continue
            } catch {
                print("Failed to download image \(serverFilename): \(error)")
                continue
            }
        }
    }

    // Merge gyms (last-write-wins on updatedAt).
    for backupGym in response.gyms {
        if deletedItemSet.contains("gym:" + backupGym.id) {
            continue
        }
        if let index = dataManager.gyms.firstIndex(where: { $0.id.uuidString == backupGym.id })
        {
            let existing = dataManager.gyms[index]
            if backupGym.updatedAt >= formatter.string(from: existing.updatedAt) {
                dataManager.gyms[index] = try backupGym.toGym()
            }
        } else {
            dataManager.gyms.append(try backupGym.toGym())
        }
    }

    // Merge problems, rewriting image paths to the locally-saved filenames.
    for backupProblem in response.problems {
        if deletedItemSet.contains("problem:" + backupProblem.id) {
            continue
        }
        var problemToMerge = backupProblem
        if !imagePathMapping.isEmpty, let imagePaths = backupProblem.imagePaths {
            // `map` (not `compactMap`): the closure can never produce nil.
            let updatedPaths = imagePaths.map { imagePathMapping[$0] ?? $0 }
            problemToMerge = BackupProblem(
                id: backupProblem.id,
                gymId: backupProblem.gymId,
                name: backupProblem.name,
                description: backupProblem.description,
                climbType: backupProblem.climbType,
                difficulty: backupProblem.difficulty,
                tags: backupProblem.tags,
                location: backupProblem.location,
                imagePaths: updatedPaths,
                isActive: backupProblem.isActive,
                dateSet: backupProblem.dateSet,
                notes: backupProblem.notes,
                createdAt: backupProblem.createdAt,
                updatedAt: backupProblem.updatedAt
            )
        }
        if let index = dataManager.problems.firstIndex(where: {
            $0.id.uuidString == problemToMerge.id
        }) {
            let existing = dataManager.problems[index]
            if problemToMerge.updatedAt >= formatter.string(from: existing.updatedAt) {
                dataManager.problems[index] = try problemToMerge.toProblem()
            }
        } else {
            dataManager.problems.append(try problemToMerge.toProblem())
        }
    }

    // Merge sessions (last-write-wins on updatedAt).
    for backupSession in response.sessions {
        if deletedItemSet.contains("session:" + backupSession.id) {
            continue
        }
        if let index = dataManager.sessions.firstIndex(where: {
            $0.id.uuidString == backupSession.id
        }) {
            let existing = dataManager.sessions[index]
            if backupSession.updatedAt >= formatter.string(from: existing.updatedAt) {
                dataManager.sessions[index] = try backupSession.toClimbSession()
            }
        } else {
            dataManager.sessions.append(try backupSession.toClimbSession())
        }
    }

    // Merge attempts — compared on createdAt (attempts have optional updatedAt).
    for backupAttempt in response.attempts {
        if deletedItemSet.contains("attempt:" + backupAttempt.id) {
            continue
        }
        if let index = dataManager.attempts.firstIndex(where: {
            $0.id.uuidString == backupAttempt.id
        }) {
            let existing = dataManager.attempts[index]
            if backupAttempt.createdAt >= formatter.string(from: existing.createdAt) {
                dataManager.attempts[index] = try backupAttempt.toAttempt()
            }
        } else {
            dataManager.attempts.append(try backupAttempt.toAttempt())
        }
    }

    // Apply deletions again for safety
    applyDeletionsToDataManager(deletions: uniqueDeletions, dataManager: dataManager)

    // Save all changes
    dataManager.saveGyms()
    dataManager.saveProblems()
    dataManager.saveSessions()
    dataManager.saveAttempts()

    // Persist the merged tombstone set so future syncs keep propagating them.
    dataManager.clearDeletedItems()
    if let data = try? JSONEncoder().encode(uniqueDeletions) {
        UserDefaults.standard.set(data, forKey: "ascently_deleted_items")
    }

    DataStateManager.shared.updateDataState()
}
/// Removes every entity that has a deletion tombstone from the data manager's
/// in-memory collections. Matching is done on the string form of each entity's
/// UUID against the tombstone's `id`.
private func applyDeletionsToDataManager(
    deletions: [DeletedItem], dataManager: ClimbingDataManager
) {
    // Bucket tombstone IDs by entity type once, for O(1) membership tests.
    var idsByType: [String: Set<String>] = [:]
    for tombstone in deletions {
        idsByType[tombstone.type, default: []].insert(tombstone.id)
    }

    let gymIds = idsByType["gym"] ?? []
    let problemIds = idsByType["problem"] ?? []
    let sessionIds = idsByType["session"] ?? []
    let attemptIds = idsByType["attempt"] ?? []

    dataManager.gyms.removeAll { gymIds.contains($0.id.uuidString) }
    dataManager.problems.removeAll { problemIds.contains($0.id.uuidString) }
    dataManager.sessions.removeAll { sessionIds.contains($0.id.uuidString) }
    dataManager.attempts.removeAll { attemptIds.contains($0.id.uuidString) }
}
/// Re-uploads the images of problems that changed since the last delta sync.
///
/// For each modified problem this walks its `imagePaths`, renames the on-disk
/// file to the canonical name from `ImageNamingUtils` (so client and server
/// agree on filenames), and uploads the image bytes. Per-image failures are
/// logged and skipped; they do not abort the remaining uploads.
private func syncModifiedImages(
modifiedProblems: [BackupProblem], dataManager: ClimbingDataManager
) async throws {
// Nothing to do when the delta contained no problem changes.
guard !modifiedProblems.isEmpty else { return }
print("iOS DELTA SYNC: Syncing images for \(modifiedProblems.count) modified problems")
for backupProblem in modifiedProblems {
// Resolve the backup record back to the in-memory Problem; skip when the
// problem no longer exists locally (e.g. deleted after the delta was built).
guard
let problem = dataManager.problems.first(where: {
$0.id.uuidString == backupProblem.id
})
else {
continue
}
for (index, imagePath) in problem.imagePaths.enumerated() {
// Stored paths may be absolute; only the filename is authoritative here.
let filename = URL(fileURLWithPath: imagePath).lastPathComponent
// Canonical name derived from problem ID + index keeps filenames
// stable across devices.
let consistentFilename = ImageNamingUtils.generateImageFilename(
problemId: problem.id.uuidString, imageIndex: index)
let imageManager = ImageManager.shared
let fullPath = imageManager.imagesDirectory.appendingPathComponent(filename).path
if let imageData = imageManager.loadImageData(fromPath: fullPath) {
do {
if filename != consistentFilename {
// Rename on disk to the canonical filename; failure is tolerated
// (try?) because the upload below uses the canonical name anyway.
// NOTE(review): problem.imagePaths is not updated after this move —
// confirm a later migration reconciles the stored paths.
let newPath = imageManager.imagesDirectory.appendingPathComponent(
consistentFilename
).path
try? FileManager.default.moveItem(atPath: fullPath, toPath: newPath)
}
try await uploadImage(filename: consistentFilename, imageData: imageData)
print("Uploaded modified problem image: \(consistentFilename)")
} catch {
// Best-effort: log and continue with the remaining images.
print("Failed to upload image \(consistentFilename): \(error)")
}
}
}
}
}
func uploadImage(filename: String, imageData: Data) async throws { func uploadImage(filename: String, imageData: Data) async throws {
guard isConfigured else { guard isConfigured else {
throw SyncError.notConfigured throw SyncError.notConfigured
@@ -246,6 +584,17 @@ class SyncService: ObservableObject {
!serverBackup.gyms.isEmpty || !serverBackup.problems.isEmpty !serverBackup.gyms.isEmpty || !serverBackup.problems.isEmpty
|| !serverBackup.sessions.isEmpty || !serverBackup.attempts.isEmpty || !serverBackup.sessions.isEmpty || !serverBackup.attempts.isEmpty
// If both client and server have been synced before, use delta sync
if hasLocalData && hasServerData && lastSyncTime != nil {
print("iOS SYNC: Using delta sync for incremental updates")
try await performDeltaSync(dataManager: dataManager)
// Update last sync time
lastSyncTime = Date()
userDefaults.set(lastSyncTime, forKey: Keys.lastSyncTime)
return
}
if !hasLocalData && hasServerData { if !hasLocalData && hasServerData {
// Case 1: No local data - do full restore from server // Case 1: No local data - do full restore from server
print("iOS SYNC: Case 1 - No local data, performing full restore from server") print("iOS SYNC: Case 1 - No local data, performing full restore from server")
@@ -286,7 +635,6 @@ class SyncService: ObservableObject {
} }
} }
/// Parses ISO8601 timestamp to milliseconds for comparison
private func parseISO8601ToMillis(timestamp: String) -> Int64 { private func parseISO8601ToMillis(timestamp: String) -> Int64 {
let formatter = ISO8601DateFormatter() let formatter = ISO8601DateFormatter()
if let date = formatter.date(from: timestamp) { if let date = formatter.date(from: timestamp) {
@@ -1150,7 +1498,6 @@ class SyncService: ObservableObject {
// Get active session IDs to protect their attempts // Get active session IDs to protect their attempts
let activeSessionIds = Set( let activeSessionIds = Set(
local.compactMap { attempt in local.compactMap { attempt in
// This is a simplified check - in a real implementation you'd want to cross-reference with sessions
return attempt.sessionId return attempt.sessionId
}.filter { sessionId in }.filter { sessionId in
// Check if this session ID belongs to an active session // Check if this session ID belongs to an active session

View File

@@ -37,46 +37,36 @@ class DataStateManager {
print("iOS Data state updated to: \(now)") print("iOS Data state updated to: \(now)")
} }
/// Gets the current data state timestamp. This represents when any data was last modified
/// locally.
func getLastModified() -> String { func getLastModified() -> String {
if let storedTimestamp = userDefaults.string(forKey: Keys.lastModified) { if let storedTimestamp = userDefaults.string(forKey: Keys.lastModified) {
print("iOS DataStateManager returning stored timestamp: \(storedTimestamp)") print("iOS DataStateManager returning stored timestamp: \(storedTimestamp)")
return storedTimestamp return storedTimestamp
} }
// If no timestamp is stored, return epoch time to indicate very old data
// This ensures server data will be considered newer than uninitialized local data
let epochTime = "1970-01-01T00:00:00.000Z" let epochTime = "1970-01-01T00:00:00.000Z"
print("WARNING: No data state timestamp found - returning epoch time: \(epochTime)") print("No data state timestamp found - returning epoch time: \(epochTime)")
return epochTime return epochTime
} }
/// Sets the data state timestamp to a specific value. Used when importing data from server to
/// sync the state.
func setLastModified(_ timestamp: String) { func setLastModified(_ timestamp: String) {
userDefaults.set(timestamp, forKey: Keys.lastModified) userDefaults.set(timestamp, forKey: Keys.lastModified)
print("Data state set to: \(timestamp)") print("Data state set to: \(timestamp)")
} }
/// Resets the data state (for testing or complete data wipe).
func reset() { func reset() {
userDefaults.removeObject(forKey: Keys.lastModified) userDefaults.removeObject(forKey: Keys.lastModified)
userDefaults.removeObject(forKey: Keys.initialized) userDefaults.removeObject(forKey: Keys.initialized)
print("Data state reset") print("Data state reset")
} }
/// Checks if the data state has been initialized.
private func isInitialized() -> Bool { private func isInitialized() -> Bool {
return userDefaults.bool(forKey: Keys.initialized) return userDefaults.bool(forKey: Keys.initialized)
} }
/// Marks the data state as initialized.
private func markAsInitialized() { private func markAsInitialized() {
userDefaults.set(true, forKey: Keys.initialized) userDefaults.set(true, forKey: Keys.initialized)
} }
/// Gets debug information about the current state.
func getDebugInfo() -> String { func getDebugInfo() -> String {
return "DataState(lastModified=\(getLastModified()), initialized=\(isInitialized()))" return "DataState(lastModified=\(getLastModified()), initialized=\(isInitialized()))"
} }

View File

@@ -690,7 +690,6 @@ class ImageManager {
} }
private func cleanupOrphanedFiles() { private func cleanupOrphanedFiles() {
// This would need access to the data manager to check which files are actually referenced
print("Cleanup would require coordination with data manager") print("Cleanup would require coordination with data manager")
} }

View File

@@ -108,7 +108,6 @@ class ImageNamingUtils {
) )
} }
/// Generates the canonical filename that should be used for a problem image
static func getCanonicalImageFilename(problemId: String, imageIndex: Int) -> String { static func getCanonicalImageFilename(problemId: String, imageIndex: Int) -> String {
return generateImageFilename(problemId: problemId, imageIndex: imageIndex) return generateImageFilename(problemId: problemId, imageIndex: imageIndex)
} }

View File

@@ -31,7 +31,7 @@ struct OrientationAwareImage: View {
.onAppear { .onAppear {
loadImageWithCorrectOrientation() loadImageWithCorrectOrientation()
} }
.onChange(of: imagePath) { _ in .onChange(of: imagePath) { _, _ in
loadImageWithCorrectOrientation() loadImageWithCorrectOrientation()
} }
} }

View File

@@ -18,6 +18,7 @@ struct ZipUtils {
var fileEntries: [(name: String, data: Data, offset: UInt32)] = [] var fileEntries: [(name: String, data: Data, offset: UInt32)] = []
var currentOffset: UInt32 = 0 var currentOffset: UInt32 = 0
// Add metadata
let metadata = createMetadata( let metadata = createMetadata(
exportData: exportData, referencedImagePaths: referencedImagePaths) exportData: exportData, referencedImagePaths: referencedImagePaths)
let metadataData = metadata.data(using: .utf8) ?? Data() let metadataData = metadata.data(using: .utf8) ?? Data()
@@ -29,6 +30,7 @@ struct ZipUtils {
currentOffset: &currentOffset currentOffset: &currentOffset
) )
// Encode JSON data
let encoder = JSONEncoder() let encoder = JSONEncoder()
encoder.outputFormatting = .prettyPrinted encoder.outputFormatting = .prettyPrinted
encoder.dateEncodingStrategy = .custom { date, encoder in encoder.dateEncodingStrategy = .custom { date, encoder in
@@ -46,44 +48,49 @@ struct ZipUtils {
currentOffset: &currentOffset currentOffset: &currentOffset
) )
print("Processing \(referencedImagePaths.count) referenced image paths") // Process images in batches for better performance
print("Processing \(referencedImagePaths.count) images for export")
var successfulImages = 0 var successfulImages = 0
let batchSize = 10
let sortedPaths = Array(referencedImagePaths).sorted()
// Pre-allocate capacity for better memory performance
zipData.reserveCapacity(zipData.count + (referencedImagePaths.count * 200_000)) // Estimate 200KB per image
for (index, imagePath) in sortedPaths.enumerated() {
if index % batchSize == 0 {
print("Processing images \(index)/\(sortedPaths.count)")
}
for imagePath in referencedImagePaths {
print("Processing image path: \(imagePath)")
let imageURL = URL(fileURLWithPath: imagePath) let imageURL = URL(fileURLWithPath: imagePath)
let imageName = imageURL.lastPathComponent let imageName = imageURL.lastPathComponent
print("Image name: \(imageName)")
if FileManager.default.fileExists(atPath: imagePath) { guard FileManager.default.fileExists(atPath: imagePath) else {
print("Image file exists at: \(imagePath)") continue
do { }
let imageData = try Data(contentsOf: imageURL)
print("Image data size: \(imageData.count) bytes") do {
if imageData.count > 0 { let imageData = try Data(contentsOf: imageURL)
let imageEntryName = "\(IMAGES_DIR_NAME)/\(imageName)" if imageData.count > 0 {
try addFileToZip( let imageEntryName = "\(IMAGES_DIR_NAME)/\(imageName)"
filename: imageEntryName, try addFileToZip(
fileData: imageData, filename: imageEntryName,
zipData: &zipData, fileData: imageData,
fileEntries: &fileEntries, zipData: &zipData,
currentOffset: &currentOffset fileEntries: &fileEntries,
) currentOffset: &currentOffset
successfulImages += 1 )
print("Successfully added image to ZIP: \(imageEntryName)") successfulImages += 1
} else {
print("Image data is empty for: \(imagePath)")
}
} catch {
print("Failed to read image data for \(imagePath): \(error)")
} }
} else { } catch {
print("Image file does not exist at: \(imagePath)") print("Failed to read image: \(imageName)")
} }
} }
print("Export completed: \(successfulImages)/\(referencedImagePaths.count) images included") print("Export: included \(successfulImages)/\(referencedImagePaths.count) images")
// Build central directory
centralDirectory.reserveCapacity(fileEntries.count * 100) // Estimate 100 bytes per entry
for entry in fileEntries { for entry in fileEntries {
let centralDirEntry = createCentralDirectoryEntry( let centralDirEntry = createCentralDirectoryEntry(
filename: entry.name, filename: entry.name,
@@ -372,12 +379,12 @@ struct ZipUtils {
return data return data
} }
private static func calculateCRC32(data: Data) -> UInt32 { // CRC32 lookup table for faster calculation
private static let crc32Table: [UInt32] = {
let polynomial: UInt32 = 0xEDB8_8320 let polynomial: UInt32 = 0xEDB8_8320
var crc: UInt32 = 0xFFFF_FFFF var table = [UInt32](repeating: 0, count: 256)
for i in 0..<256 {
for byte in data { var crc = UInt32(i)
crc ^= UInt32(byte)
for _ in 0..<8 { for _ in 0..<8 {
if crc & 1 != 0 { if crc & 1 != 0 {
crc = (crc >> 1) ^ polynomial crc = (crc >> 1) ^ polynomial
@@ -385,6 +392,19 @@ struct ZipUtils {
crc >>= 1 crc >>= 1
} }
} }
table[i] = crc
}
return table
}()
private static func calculateCRC32(data: Data) -> UInt32 {
var crc: UInt32 = 0xFFFF_FFFF
data.withUnsafeBytes { (bytes: UnsafeRawBufferPointer) in
for byte in bytes {
let index = Int((crc ^ UInt32(byte)) & 0xFF)
crc = (crc >> 8) ^ crc32Table[index]
}
} }
return ~crc return ~crc

View File

@@ -653,9 +653,6 @@ class ClimbingDataManager: ObservableObject {
return gym(withId: mostUsedGymId) return gym(withId: mostUsedGymId)
} }
/// Clean up orphaned data - removes attempts that reference non-existent sessions
/// and removes duplicate attempts. This ensures data integrity and prevents
/// orphaned attempts from appearing in widgets
private func cleanupOrphanedData() { private func cleanupOrphanedData() {
let validSessionIds = Set(sessions.map { $0.id }) let validSessionIds = Set(sessions.map { $0.id })
let validProblemIds = Set(problems.map { $0.id }) let validProblemIds = Set(problems.map { $0.id })
@@ -761,8 +758,6 @@ class ClimbingDataManager: ObservableObject {
} }
} }
/// Validate data integrity and return a report
/// This can be called manually to check for issues
func validateDataIntegrity() -> String { func validateDataIntegrity() -> String {
let validSessionIds = Set(sessions.map { $0.id }) let validSessionIds = Set(sessions.map { $0.id })
let validProblemIds = Set(problems.map { $0.id }) let validProblemIds = Set(problems.map { $0.id })
@@ -801,8 +796,6 @@ class ClimbingDataManager: ObservableObject {
return report return report
} }
/// Manually trigger cleanup of orphaned data
/// This can be called from settings or debug menu
func manualDataCleanup() { func manualDataCleanup() {
cleanupOrphanedData() cleanupOrphanedData()
successMessage = "Data cleanup completed" successMessage = "Data cleanup completed"
@@ -830,12 +823,12 @@ class ClimbingDataManager: ObservableObject {
} }
} }
func exportData() -> Data? { func exportData() async -> Data? {
do { do {
// Create backup objects on main thread (they access MainActor-isolated properties)
let dateFormatter = DateFormatter() let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSSSSS" dateFormatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSSSSS"
// Create export data with normalized image paths
let exportData = ClimbDataBackup( let exportData = ClimbDataBackup(
exportedAt: dateFormatter.string(from: Date()), exportedAt: dateFormatter.string(from: Date()),
version: "2.0", version: "2.0",
@@ -846,19 +839,30 @@ class ClimbingDataManager: ObservableObject {
attempts: attempts.map { BackupAttempt(from: $0) } attempts: attempts.map { BackupAttempt(from: $0) }
) )
// Collect actual image paths from disk for the ZIP // Get image manager path info on main thread
let referencedImagePaths = collectReferencedImagePaths() let imagesDirectory = ImageManager.shared.imagesDirectory.path
print("Starting export with \(referencedImagePaths.count) images") let problemsForImages = problems
let zipData = try ZipUtils.createExportZip( // Move heavy I/O operations to background thread
exportData: exportData, let zipData = try await Task.detached(priority: .userInitiated) {
referencedImagePaths: referencedImagePaths // Collect actual image paths from disk for the ZIP
) let referencedImagePaths = await Self.collectReferencedImagePathsStatic(
problems: problemsForImages,
imagesDirectory: imagesDirectory)
print("Starting export with \(referencedImagePaths.count) images")
print("Export completed successfully") let zipData = try await ZipUtils.createExportZip(
successMessage = "Export completed with \(referencedImagePaths.count) images" exportData: exportData,
referencedImagePaths: referencedImagePaths
)
print("Export completed successfully")
return (zipData, referencedImagePaths.count)
}.value
successMessage = "Export completed with \(zipData.1) images"
clearMessageAfterDelay() clearMessageAfterDelay()
return zipData return zipData.0
} catch { } catch {
let errorMessage = "Export failed: \(error.localizedDescription)" let errorMessage = "Export failed: \(error.localizedDescription)"
print("ERROR: \(errorMessage)") print("ERROR: \(errorMessage)")
@@ -955,36 +959,36 @@ class ClimbingDataManager: ObservableObject {
extension ClimbingDataManager { extension ClimbingDataManager {
private func collectReferencedImagePaths() -> Set<String> { private func collectReferencedImagePaths() -> Set<String> {
let imagesDirectory = ImageManager.shared.imagesDirectory.path
return Self.collectReferencedImagePathsStatic(
problems: problems,
imagesDirectory: imagesDirectory)
}
private static func collectReferencedImagePathsStatic(
problems: [Problem], imagesDirectory: String
) -> Set<String> {
var imagePaths = Set<String>() var imagePaths = Set<String>()
print("Starting image path collection...") var missingCount = 0
print("Total problems: \(problems.count)")
for problem in problems { for problem in problems {
if !problem.imagePaths.isEmpty { if !problem.imagePaths.isEmpty {
print(
"Problem '\(problem.name ?? "Unnamed")' has \(problem.imagePaths.count) images"
)
for imagePath in problem.imagePaths { for imagePath in problem.imagePaths {
print(" - Stored path: \(imagePath)")
// Extract just the filename (migration should have normalized these) // Extract just the filename (migration should have normalized these)
let filename = URL(fileURLWithPath: imagePath).lastPathComponent let filename = URL(fileURLWithPath: imagePath).lastPathComponent
let fullPath = ImageManager.shared.getFullPath(from: filename) let fullPath = (imagesDirectory as NSString).appendingPathComponent(filename)
print(" - Full disk path: \(fullPath)")
if FileManager.default.fileExists(atPath: fullPath) { if FileManager.default.fileExists(atPath: fullPath) {
print(" ✓ File exists")
imagePaths.insert(fullPath) imagePaths.insert(fullPath)
} else { } else {
print(" ✗ WARNING: File not found at \(fullPath)") missingCount += 1
// Still add it to let ZipUtils handle the logging
imagePaths.insert(fullPath) imagePaths.insert(fullPath)
} }
} }
} }
} }
print("Collected \(imagePaths.count) total image paths for export") print("Export: Collected \(imagePaths.count) images (\(missingCount) missing)")
return imagePaths return imagePaths
} }
@@ -1273,7 +1277,9 @@ extension ClimbingDataManager {
) { [weak self] notification in ) { [weak self] notification in
if let updateCount = notification.userInfo?["updateCount"] as? Int { if let updateCount = notification.userInfo?["updateCount"] as? Int {
print("🔔 Image migration completed with \(updateCount) updates - reloading data") print("🔔 Image migration completed with \(updateCount) updates - reloading data")
self?.loadProblems() Task { @MainActor in
self?.loadProblems()
}
} }
} }
} }

View File

@@ -103,7 +103,6 @@ struct AddEditProblemView: View {
setupInitialGym() setupInitialGym()
} }
.onChange(of: dataManager.gyms) { .onChange(of: dataManager.gyms) {
// Ensure a gym is selected when gyms are loaded or changed
if selectedGym == nil && !dataManager.gyms.isEmpty { if selectedGym == nil && !dataManager.gyms.isEmpty {
selectedGym = dataManager.gyms.first selectedGym = dataManager.gyms.first
} }

View File

@@ -0,0 +1,338 @@
import SwiftUI
/// Month calendar of climbing sessions. Days with at least one session are
/// tappable; selecting a day lists its sessions below the grid, and tapping a
/// session card hands the session ID to `onNavigateToSession`.
struct CalendarView: View {
@EnvironmentObject var dataManager: ClimbingDataManager
// Sessions to plot (the caller passes completed sessions).
let sessions: [ClimbSession]
// Month currently displayed; owned by the parent so it survives view switches.
@Binding var selectedMonth: Date
// Day whose sessions are expanded under the grid (nil = none selected).
@Binding var selectedDate: Date?
// Called with the tapped session's ID so the parent can push detail.
let onNavigateToSession: (UUID) -> Void
var calendar: Calendar {
Calendar.current
}
// Header label, e.g. "October 2025".
var monthYearString: String {
let formatter = DateFormatter()
formatter.dateFormat = "MMMM yyyy"
return formatter.string(from: selectedMonth)
}
// Sessions bucketed by the midnight of their date for O(1) per-day lookups.
var sessionsByDate: [Date: [ClimbSession]] {
Dictionary(grouping: sessions) { session in
calendar.startOfDay(for: session.date)
}
}
// Grid cells for the month: nil entries pad the first and last week rows.
// NOTE(review): the leading offset (weekday - 1) assumes a Sunday-first week,
// matching the hardcoded S/M/T/W/T/F/S header below; locales whose week starts
// on Monday still render Sunday-first — confirm this is intended.
var daysInMonth: [Date?] {
guard let monthInterval = calendar.dateInterval(of: .month, for: selectedMonth),
calendar.dateInterval(of: .weekOfMonth, for: monthInterval.start) != nil
else {
return []
}
// One Date per day of the month, at midnight.
let days = calendar.generateDates(
inside: monthInterval,
matching: DateComponents(hour: 0, minute: 0, second: 0)
)
let firstDayOfMonth = days.first ?? monthInterval.start
let firstWeekday = calendar.component(.weekday, from: firstDayOfMonth)
let offset = firstWeekday - 1
var paddedDays: [Date?] = Array(repeating: nil, count: offset)
paddedDays.append(contentsOf: days.map { $0 as Date? })
// Pad the final row out to a full 7 columns.
let remainder = paddedDays.count % 7
if remainder != 0 {
paddedDays.append(contentsOf: Array(repeating: nil, count: 7 - remainder))
}
return paddedDays
}
var body: some View {
ScrollView {
VStack(spacing: 0) {
// Banner for an in-progress session, shown above the calendar.
if let activeSession = dataManager.activeSession,
let gym = dataManager.gym(withId: activeSession.gymId)
{
ActiveSessionBanner(session: activeSession, gym: gym)
.padding(.horizontal, 16)
.padding(.top, 8)
.padding(.bottom, 16)
}
// Month navigation header: prev / "Month Year" / next, plus a Today shortcut.
VStack(spacing: 8) {
HStack {
Button(action: { changeMonth(by: -1) }) {
Image(systemName: "chevron.left")
.font(.title2)
.fontWeight(.semibold)
.foregroundColor(.blue)
}
.frame(width: 44, height: 44)
Spacer()
Text(monthYearString)
.font(.title3)
.fontWeight(.semibold)
Spacer()
Button(action: { changeMonth(by: 1) }) {
Image(systemName: "chevron.right")
.font(.title2)
.fontWeight(.semibold)
.foregroundColor(.blue)
}
.frame(width: 44, height: 44)
}
// Jump back to the current month and select today.
Button(action: {
let today = Date()
selectedMonth = today
selectedDate = today
}) {
Text("Today")
.font(.subheadline)
.fontWeight(.semibold)
.foregroundColor(.white)
.padding(.horizontal, 20)
.padding(.vertical, 8)
.background(Color.blue)
.clipShape(Capsule())
}
}
.padding(.vertical, 16)
.padding(.horizontal)
// Weekday header row (Sunday-first to match daysInMonth's offset).
HStack(spacing: 0) {
ForEach(["S", "M", "T", "W", "T", "F", "S"], id: \.self) { day in
Text(day)
.font(.caption2)
.fontWeight(.semibold)
.foregroundColor(.secondary)
.frame(maxWidth: .infinity)
}
}
.padding(.horizontal)
.padding(.bottom, 8)
// 7-column day grid; nil slots render as empty placeholders.
LazyVGrid(
columns: Array(repeating: GridItem(.flexible(), spacing: 4), count: 7),
spacing: 4
) {
ForEach(daysInMonth.indices, id: \.self) { index in
if let date = daysInMonth[index] {
CalendarDayCell(
date: date,
sessions: sessionsByDate[calendar.startOfDay(for: date)] ?? [],
isSelected: selectedDate.map {
calendar.isDate($0, inSameDayAs: date)
}
?? false,
isToday: calendar.isDateInToday(date),
isInCurrentMonth: calendar.isDate(
date, equalTo: selectedMonth, toGranularity: .month)
) {
// Tap toggles selection; only days with sessions react.
if !sessionsByDate[calendar.startOfDay(for: date), default: []]
.isEmpty
{
if selectedDate.map({ calendar.isDate($0, inSameDayAs: date) })
?? false
{
selectedDate = nil
} else {
selectedDate = date
}
}
}
} else {
Color.clear
.aspectRatio(1, contentMode: .fit)
}
}
}
.padding(.horizontal)
// Detail list for the selected day, shown only when it has sessions.
if let selected = selectedDate,
let sessionsOnDate = sessionsByDate[calendar.startOfDay(for: selected)],
!sessionsOnDate.isEmpty
{
Divider()
.padding(.vertical, 16)
.padding(.horizontal)
VStack(alignment: .leading, spacing: 12) {
Text("Sessions on \(formatSelectedDate(selected))")
.font(.headline)
.fontWeight(.semibold)
.padding(.horizontal)
VStack(spacing: 12) {
ForEach(sessionsOnDate) { session in
SessionCard(
session: session,
onTap: {
onNavigateToSession(session.id)
}
)
.padding(.horizontal)
}
}
}
.padding(.bottom, 16)
}
}
}
}
/// Steps the displayed month by `value` months and clears any day selection.
func changeMonth(by value: Int) {
if let newMonth = calendar.date(byAdding: .month, value: value, to: selectedMonth) {
selectedMonth = newMonth
selectedDate = nil
}
}
/// Formats a date for the selected-day header, e.g. "October 18, 2025".
func formatSelectedDate(_ date: Date) -> String {
let formatter = DateFormatter()
formatter.dateFormat = "MMMM d, yyyy"
return formatter.string(from: date)
}
}
/// One tappable day cell in the calendar grid.
///
/// Shows the day number plus a 4pt dot when the day has sessions. Cells with
/// no sessions are disabled so only days with data respond to taps.
struct CalendarDayCell: View {
    let date: Date
    let sessions: [ClimbSession]
    let isSelected: Bool
    let isToday: Bool
    let isInCurrentMonth: Bool
    let onTap: () -> Void

    // Hoisted formatter: DateFormatter construction is expensive, and this view
    // is instantiated for every visible day on every grid refresh. Body runs on
    // the main actor, so sharing one formatter here is safe.
    private static let dayFormatter: DateFormatter = {
        let formatter = DateFormatter()
        formatter.dateFormat = "d"
        return formatter
    }()

    /// Day-of-month label, e.g. "7".
    var dayNumber: String {
        Self.dayFormatter.string(from: date)
    }

    var body: some View {
        Button(action: onTap) {
            VStack(spacing: 6) {
                Text(dayNumber)
                    .font(.system(size: 17))
                    .fontWeight(sessions.isEmpty ? .regular : .medium)
                    // Color priority: selected > today > out-of-month > session state.
                    .foregroundColor(
                        isSelected
                            ? .white
                            : isToday
                                ? .blue
                                : !isInCurrentMonth
                                    ? .secondary.opacity(0.3)
                                    : sessions.isEmpty ? .secondary : .primary
                    )
                if !sessions.isEmpty {
                    // Dot marker for days that contain at least one session.
                    Circle()
                        .fill(isSelected ? .white : .blue)
                        .frame(width: 4, height: 4)
                } else {
                    // Keep the cell height stable when there is no dot.
                    Spacer()
                        .frame(height: 4)
                }
            }
            .frame(maxWidth: .infinity)
            .frame(height: 50)
            .contentShape(Rectangle())
            .background(
                RoundedRectangle(cornerRadius: 6)
                    .fill(
                        isSelected ? Color.blue : isToday ? Color.blue.opacity(0.1) : Color.clear
                    )
            )
            .overlay(
                RoundedRectangle(cornerRadius: 6)
                    .stroke(
                        isToday && !isSelected ? Color.blue.opacity(0.3) : Color.clear, lineWidth: 1
                    )
            )
        }
        .buttonStyle(PlainButtonStyle())
        // Days without sessions are inert targets.
        .disabled(sessions.isEmpty)
    }
}
/// Card row summarizing one session on the selected calendar day: gym name,
/// optional duration and notes, with a chevron hinting at navigation.
struct SessionCard: View {
@EnvironmentObject var dataManager: ClimbingDataManager
let session: ClimbSession
// Invoked on tap; the parent decides where to navigate.
let onTap: () -> Void
// Gym lookup may fail if the gym was deleted; the body falls back to
// "Unknown Gym" in that case.
var gym: Gym? {
dataManager.gym(withId: session.gymId)
}
var body: some View {
HStack(spacing: 12) {
VStack(alignment: .leading, spacing: 6) {
Text(gym?.name ?? "Unknown Gym")
.font(.body)
.fontWeight(.semibold)
.foregroundColor(.primary)
// Duration and notes rows are omitted entirely when absent.
if let duration = session.duration {
Text("Duration: \(duration) minutes")
.font(.subheadline)
.foregroundColor(.secondary)
}
if let notes = session.notes, !notes.isEmpty {
Text(notes)
.font(.subheadline)
.foregroundColor(.secondary)
.lineLimit(2)
}
}
Spacer()
Image(systemName: "chevron.right")
.font(.body)
.fontWeight(.semibold)
.foregroundColor(Color(.tertiaryLabel))
}
.padding(16)
.background(
RoundedRectangle(cornerRadius: 10)
.fill(Color(.secondarySystemGroupedBackground))
)
// NOTE(review): onTapGesture instead of Button skips the pressed state and
// some accessibility traits — confirm this is intentional.
.onTapGesture {
onTap()
}
}
}
extension Calendar {
    /// Collects every date inside `interval` that matches `components`,
    /// always including `interval.start` itself as the first element.
    /// Dates at or past `interval.end` are excluded.
    func generateDates(
        inside interval: DateInterval,
        matching components: DateComponents
    ) -> [Date] {
        var collected: [Date] = [interval.start]
        enumerateDates(
            startingAfter: interval.start,
            matching: components,
            matchingPolicy: .nextTime
        ) { candidate, _, shouldStop in
            guard let candidate = candidate else { return }
            guard candidate < interval.end else {
                // Past the interval: end the enumeration.
                shouldStop = true
                return
            }
            collected.append(candidate)
        }
        return collected
    }
}

View File

@@ -1,9 +1,24 @@
import Combine import Combine
import SwiftUI import SwiftUI
enum SessionViewMode: String {
case list
case calendar
}
struct SessionsView: View { struct SessionsView: View {
@EnvironmentObject var dataManager: ClimbingDataManager @EnvironmentObject var dataManager: ClimbingDataManager
@State private var showingAddSession = false @State private var showingAddSession = false
@AppStorage("sessionViewMode") private var viewMode: SessionViewMode = .list
@State private var selectedMonth = Date()
@State private var selectedDate: Date? = nil
@State private var selectedSessionId: UUID? = nil
private var completedSessions: [ClimbSession] {
dataManager.sessions
.filter { $0.status == .completed }
.sorted { $0.date > $1.date }
}
var body: some View { var body: some View {
NavigationStack { NavigationStack {
@@ -11,7 +26,18 @@ struct SessionsView: View {
if dataManager.sessions.isEmpty && dataManager.activeSession == nil { if dataManager.sessions.isEmpty && dataManager.activeSession == nil {
EmptySessionsView() EmptySessionsView()
} else { } else {
SessionsList() if viewMode == .list {
SessionsList()
} else {
CalendarView(
sessions: completedSessions,
selectedMonth: $selectedMonth,
selectedDate: $selectedDate,
onNavigateToSession: { sessionId in
selectedSessionId = sessionId
}
)
}
} }
} }
.navigationTitle("Sessions") .navigationTitle("Sessions")
@@ -36,6 +62,20 @@ struct SessionsView: View {
) )
} }
// View mode toggle
if !dataManager.sessions.isEmpty || dataManager.activeSession != nil {
Button(action: {
withAnimation(.easeInOut(duration: 0.2)) {
viewMode = viewMode == .list ? .calendar : .list
selectedDate = nil
}
}) {
Image(systemName: viewMode == .list ? "calendar" : "list.bullet")
.font(.body)
.fontWeight(.semibold)
}
}
if dataManager.gyms.isEmpty { if dataManager.gyms.isEmpty {
EmptyView() EmptyView()
} else if dataManager.activeSession == nil { } else if dataManager.activeSession == nil {
@@ -52,6 +92,14 @@ struct SessionsView: View {
.sheet(isPresented: $showingAddSession) { .sheet(isPresented: $showingAddSession) {
AddEditSessionView() AddEditSessionView()
} }
.navigationDestination(isPresented: .constant(selectedSessionId != nil)) {
if let sessionId = selectedSessionId {
SessionDetailView(sessionId: sessionId)
.onDisappear {
selectedSessionId = nil
}
}
}
} }
} }
} }

View File

@@ -180,10 +180,12 @@ struct DataManagementSection: View {
private func exportDataAsync() { private func exportDataAsync() {
isExporting = true isExporting = true
Task { Task {
let data = await MainActor.run { dataManager.exportData() } let data = await dataManager.exportData()
isExporting = false await MainActor.run {
if let data = data { isExporting = false
activeSheet = .export(data) if let data = data {
activeSheet = .export(data)
}
} }
} }
} }

View File

@@ -256,10 +256,6 @@ final class AscentlyTests: XCTestCase {
// MARK: - Active Session Preservation Tests // MARK: - Active Session Preservation Tests
func testActiveSessionPreservationDuringImport() throws { func testActiveSessionPreservationDuringImport() throws {
// Test that active sessions are preserved during import operations
// This tests the fix for the bug where active sessions disappear after sync
// Simulate an active session that exists locally but not in import data
let activeSessionId = UUID() let activeSessionId = UUID()
let gymId = UUID() let gymId = UUID()

View File

@@ -13,7 +13,7 @@ import (
"time" "time"
) )
const VERSION = "2.0.0" const VERSION = "2.2.0"
func min(a, b int) int { func min(a, b int) int {
if a < b { if a < b {
@@ -39,6 +39,24 @@ type ClimbDataBackup struct {
DeletedItems []DeletedItem `json:"deletedItems"` DeletedItems []DeletedItem `json:"deletedItems"`
} }
// DeltaSyncRequest is the client payload for an incremental sync: the records
// changed locally since LastSyncTime, plus deletion tombstones.
type DeltaSyncRequest struct {
// Client's last successful sync time as a timestamp string
// (presumably RFC3339 — TODO confirm against client senders).
LastSyncTime string `json:"lastSyncTime"`
Gyms []BackupGym `json:"gyms"`
Problems []BackupProblem `json:"problems"`
Sessions []BackupClimbSession `json:"sessions"`
Attempts []BackupAttempt `json:"attempts"`
DeletedItems []DeletedItem `json:"deletedItems"`
}

// DeltaSyncResponse is the server's answer to a DeltaSyncRequest: server-side
// changes the client is missing, stamped with the server's clock.
type DeltaSyncResponse struct {
// Server time at response creation; clients can store it as their next
// LastSyncTime.
ServerTime string `json:"serverTime"`
Gyms []BackupGym `json:"gyms"`
Problems []BackupProblem `json:"problems"`
Sessions []BackupClimbSession `json:"sessions"`
Attempts []BackupAttempt `json:"attempts"`
DeletedItems []DeletedItem `json:"deletedItems"`
}
type BackupGym struct { type BackupGym struct {
ID string `json:"id"` ID string `json:"id"`
Name string `json:"name"` Name string `json:"name"`
@@ -154,6 +172,182 @@ func (s *SyncServer) loadData() (*ClimbDataBackup, error) {
return &backup, nil return &backup, nil
} }
// mergeGyms performs a last-writer-wins merge of gym records keyed by ID.
// Incoming records win on an UpdatedAt tie, so a re-sent update is applied.
// NOTE(review): UpdatedAt is compared lexicographically as a string; that is
// only correct while every timestamp uses the same fixed-width ISO-8601 layout.
func (s *SyncServer) mergeGyms(existing []BackupGym, updates []BackupGym) []BackupGym {
	merged := make(map[string]BackupGym, len(existing)+len(updates))
	for _, gym := range existing {
		merged[gym.ID] = gym
	}
	for _, incoming := range updates {
		current, ok := merged[incoming.ID]
		if !ok || incoming.UpdatedAt >= current.UpdatedAt {
			merged[incoming.ID] = incoming
		}
	}
	out := make([]BackupGym, 0, len(merged))
	for _, gym := range merged {
		out = append(out, gym)
	}
	return out
}
// mergeProblems performs a last-writer-wins merge of problem records keyed by
// ID. Incoming records win on an UpdatedAt tie.
// NOTE(review): UpdatedAt is compared lexicographically as a string; correct
// only while all timestamps share the same fixed-width ISO-8601 layout.
func (s *SyncServer) mergeProblems(existing []BackupProblem, updates []BackupProblem) []BackupProblem {
	merged := make(map[string]BackupProblem, len(existing)+len(updates))
	for _, problem := range existing {
		merged[problem.ID] = problem
	}
	for _, incoming := range updates {
		current, ok := merged[incoming.ID]
		if !ok || incoming.UpdatedAt >= current.UpdatedAt {
			merged[incoming.ID] = incoming
		}
	}
	out := make([]BackupProblem, 0, len(merged))
	for _, problem := range merged {
		out = append(out, problem)
	}
	return out
}
// mergeSessions merges client session updates into the existing server list.
//
// Same last-writer-wins policy as mergeGyms: for a duplicate ID the incoming
// session is kept when its UpdatedAt is not older than the existing one.
// Timestamps are compared as parsed RFC3339 instants when both parse, with a
// lexicographic fallback otherwise. Returned order is unspecified.
func (s *SyncServer) mergeSessions(existing []BackupClimbSession, updates []BackupClimbSession) []BackupClimbSession {
	sessionMap := make(map[string]BackupClimbSession)
	for _, session := range existing {
		sessionMap[session.ID] = session
	}

	for _, session := range updates {
		existingSession, exists := sessionMap[session.ID]
		if !exists {
			sessionMap[session.ID] = session
			continue
		}
		// Keep newer version based on updatedAt timestamp.
		keep := session.UpdatedAt >= existingSession.UpdatedAt
		if tNew, errNew := time.Parse(time.RFC3339, session.UpdatedAt); errNew == nil {
			if tOld, errOld := time.Parse(time.RFC3339, existingSession.UpdatedAt); errOld == nil {
				keep = !tNew.Before(tOld)
			}
		}
		if keep {
			sessionMap[session.ID] = session
		}
	}

	result := make([]BackupClimbSession, 0, len(sessionMap))
	for _, session := range sessionMap {
		result = append(result, session)
	}
	return result
}
// mergeAttempts merges client attempt updates into the existing server list.
//
// Attempts are immutable records, so the conflict key is CreatedAt rather
// than UpdatedAt: for a duplicate ID the incoming attempt is kept when its
// CreatedAt is not older than the existing one. Timestamps are compared as
// parsed RFC3339 instants when both parse, with a lexicographic fallback
// otherwise. Returned order is unspecified (map iteration).
func (s *SyncServer) mergeAttempts(existing []BackupAttempt, updates []BackupAttempt) []BackupAttempt {
	attemptMap := make(map[string]BackupAttempt)
	for _, attempt := range existing {
		attemptMap[attempt.ID] = attempt
	}

	for _, attempt := range updates {
		existingAttempt, exists := attemptMap[attempt.ID]
		if !exists {
			attemptMap[attempt.ID] = attempt
			continue
		}
		// Keep newer version based on createdAt timestamp.
		keep := attempt.CreatedAt >= existingAttempt.CreatedAt
		if tNew, errNew := time.Parse(time.RFC3339, attempt.CreatedAt); errNew == nil {
			if tOld, errOld := time.Parse(time.RFC3339, existingAttempt.CreatedAt); errOld == nil {
				keep = !tNew.Before(tOld)
			}
		}
		if keep {
			attemptMap[attempt.ID] = attempt
		}
	}

	result := make([]BackupAttempt, 0, len(attemptMap))
	for _, attempt := range attemptMap {
		result = append(result, attempt)
	}
	return result
}
// mergeDeletedItems merges client deletion tombstones into the server's list.
//
// Tombstones are deduplicated on the composite key (Type, ID), keeping the
// record with the newest DeletedAt (ties favor the incoming update).
// DeletedAt values are compared as parsed RFC3339 instants when both parse,
// with a lexicographic fallback otherwise. Tombstones older than 30 days are
// dropped to bound growth; records whose DeletedAt fails to parse are kept,
// since we cannot prove they are stale.
//
// NOTE(review): a client offline longer than the 30-day tombstone window can
// resurrect deleted items on its next sync — confirm this tradeoff is
// acceptable.
func (s *SyncServer) mergeDeletedItems(existing []DeletedItem, updates []DeletedItem) []DeletedItem {
	deletedMap := make(map[string]DeletedItem)
	for _, item := range existing {
		deletedMap[item.Type+":"+item.ID] = item
	}

	for _, item := range updates {
		key := item.Type + ":" + item.ID
		existingItem, exists := deletedMap[key]
		if !exists {
			deletedMap[key] = item
			continue
		}
		// Keep the tombstone with the newer deletion time.
		keep := item.DeletedAt >= existingItem.DeletedAt
		if tNew, errNew := time.Parse(time.RFC3339, item.DeletedAt); errNew == nil {
			if tOld, errOld := time.Parse(time.RFC3339, existingItem.DeletedAt); errOld == nil {
				keep = !tNew.Before(tOld)
			}
		}
		if keep {
			deletedMap[key] = item
		}
	}

	// Clean up tombstones older than 30 days to prevent unbounded growth.
	cutoffTime := time.Now().UTC().Add(-30 * 24 * time.Hour)
	result := make([]DeletedItem, 0, len(deletedMap))
	for _, item := range deletedMap {
		deletedTime, err := time.Parse(time.RFC3339, item.DeletedAt)
		if err == nil && deletedTime.Before(cutoffTime) {
			log.Printf("Cleaning up old deletion record: type=%s, id=%s, deletedAt=%s",
				item.Type, item.ID, item.DeletedAt)
			continue
		}
		result = append(result, item)
	}
	return result
}
// applyDeletions removes every gym, problem, session, and attempt from the
// backup whose (type, ID) pair appears in deletedItems. The backup is
// modified in place; slices for types with no tombstones are left untouched,
// and surviving entries keep their original relative order.
func (s *SyncServer) applyDeletions(backup *ClimbDataBackup, deletedItems []DeletedItem) {
	// Index tombstones as type -> set of IDs.
	tombstones := make(map[string]map[string]bool)
	for _, item := range deletedItems {
		ids := tombstones[item.Type]
		if ids == nil {
			ids = make(map[string]bool)
			tombstones[item.Type] = ids
		}
		ids[item.ID] = true
	}

	if ids := tombstones["gym"]; ids != nil {
		kept := []BackupGym{}
		for _, gym := range backup.Gyms {
			if !ids[gym.ID] {
				kept = append(kept, gym)
			}
		}
		backup.Gyms = kept
	}

	if ids := tombstones["problem"]; ids != nil {
		kept := []BackupProblem{}
		for _, problem := range backup.Problems {
			if !ids[problem.ID] {
				kept = append(kept, problem)
			}
		}
		backup.Problems = kept
	}

	if ids := tombstones["session"]; ids != nil {
		kept := []BackupClimbSession{}
		for _, session := range backup.Sessions {
			if !ids[session.ID] {
				kept = append(kept, session)
			}
		}
		backup.Sessions = kept
	}

	if ids := tombstones["attempt"]; ids != nil {
		kept := []BackupAttempt{}
		for _, attempt := range backup.Attempts {
			if !ids[attempt.ID] {
				kept = append(kept, attempt)
			}
		}
		backup.Attempts = kept
	}
}
func (s *SyncServer) saveData(backup *ClimbDataBackup) error { func (s *SyncServer) saveData(backup *ClimbDataBackup) error {
backup.ExportedAt = time.Now().UTC().Format(time.RFC3339) backup.ExportedAt = time.Now().UTC().Format(time.RFC3339)
@@ -167,7 +361,6 @@ func (s *SyncServer) saveData(backup *ClimbDataBackup) error {
return err return err
} }
// Ensure images directory exists
if err := os.MkdirAll(s.imagesDir, 0755); err != nil { if err := os.MkdirAll(s.imagesDir, 0755); err != nil {
return err return err
} }
@@ -315,6 +508,143 @@ func (s *SyncServer) handleImageDownload(w http.ResponseWriter, r *http.Request)
w.Write(imageData) w.Write(imageData)
} }
// handleDeltaSync implements POST /sync/delta, the incremental sync endpoint.
//
// Flow: authenticate -> decode DeltaSyncRequest -> load the full server
// backup -> merge client tombstones and apply them BEFORE merging data, so a
// deletion cannot be resurrected by an accompanying update -> merge client
// gyms/problems/sessions/attempts (last-writer-wins) -> persist -> respond
// with only the items whose UpdatedAt/CreatedAt/DeletedAt is after the
// client's lastSyncTime, excluding tombstoned items. An unparseable
// lastSyncTime falls back to the zero time, i.e. "send everything".
//
// NOTE(review): no lock is visible around the load/merge/save sequence; two
// concurrent delta syncs could interleave and lose one client's writes —
// confirm whether SyncServer is guaranteed a single caller at a time.
func (s *SyncServer) handleDeltaSync(w http.ResponseWriter, r *http.Request) {
	if !s.authenticate(r) {
		log.Printf("Unauthorized delta sync attempt from %s", r.RemoteAddr)
		http.Error(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	if r.Method != http.MethodPost {
		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
		return
	}

	var deltaRequest DeltaSyncRequest
	if err := json.NewDecoder(r.Body).Decode(&deltaRequest); err != nil {
		log.Printf("Invalid JSON from %s: %v", r.RemoteAddr, err)
		http.Error(w, "Invalid JSON", http.StatusBadRequest)
		return
	}

	log.Printf("Delta sync from %s: lastSyncTime=%s, gyms=%d, problems=%d, sessions=%d, attempts=%d, deletedItems=%d",
		r.RemoteAddr, deltaRequest.LastSyncTime,
		len(deltaRequest.Gyms), len(deltaRequest.Problems),
		len(deltaRequest.Sessions), len(deltaRequest.Attempts),
		len(deltaRequest.DeletedItems))

	// Load current server data
	serverBackup, err := s.loadData()
	if err != nil {
		log.Printf("Failed to load data: %v", err)
		http.Error(w, "Failed to load data", http.StatusInternalServerError)
		return
	}

	// Merge and apply deletions first to prevent resurrection
	serverBackup.DeletedItems = s.mergeDeletedItems(serverBackup.DeletedItems, deltaRequest.DeletedItems)
	s.applyDeletions(serverBackup, serverBackup.DeletedItems)

	log.Printf("Applied deletions: total=%d deletion records", len(serverBackup.DeletedItems))

	// Merge client changes into server data
	serverBackup.Gyms = s.mergeGyms(serverBackup.Gyms, deltaRequest.Gyms)
	serverBackup.Problems = s.mergeProblems(serverBackup.Problems, deltaRequest.Problems)
	serverBackup.Sessions = s.mergeSessions(serverBackup.Sessions, deltaRequest.Sessions)
	serverBackup.Attempts = s.mergeAttempts(serverBackup.Attempts, deltaRequest.Attempts)

	// Save merged data
	if err := s.saveData(serverBackup); err != nil {
		log.Printf("Failed to save data: %v", err)
		http.Error(w, "Failed to save data", http.StatusInternalServerError)
		return
	}

	// Parse client's last sync time
	clientLastSync, err := time.Parse(time.RFC3339, deltaRequest.LastSyncTime)
	if err != nil {
		// Zero time: every stored item sorts "after" it, so the client
		// receives a full download.
		clientLastSync = time.Time{}
		log.Printf("Warning: Could not parse lastSyncTime '%s', sending all data", deltaRequest.LastSyncTime)
	}

	// Build deleted item lookup map (keyed "type:id")
	deletedItemMap := make(map[string]bool)
	for _, item := range serverBackup.DeletedItems {
		key := item.Type + ":" + item.ID
		deletedItemMap[key] = true
	}

	// Prepare response with items modified since client's last sync.
	// Slices start non-nil so empty results serialize as [] rather than null.
	response := DeltaSyncResponse{
		ServerTime:   time.Now().UTC().Format(time.RFC3339),
		Gyms:         []BackupGym{},
		Problems:     []BackupProblem{},
		Sessions:     []BackupClimbSession{},
		Attempts:     []BackupAttempt{},
		DeletedItems: []DeletedItem{},
	}

	// Filter gyms modified after client's last sync.
	// NOTE(review): items with unparseable timestamps are silently omitted
	// from the response in all four loops below — confirm that is intended.
	for _, gym := range serverBackup.Gyms {
		if deletedItemMap["gym:"+gym.ID] {
			continue
		}
		gymTime, err := time.Parse(time.RFC3339, gym.UpdatedAt)
		if err == nil && gymTime.After(clientLastSync) {
			response.Gyms = append(response.Gyms, gym)
		}
	}

	// Filter problems modified after client's last sync
	for _, problem := range serverBackup.Problems {
		if deletedItemMap["problem:"+problem.ID] {
			continue
		}
		problemTime, err := time.Parse(time.RFC3339, problem.UpdatedAt)
		if err == nil && problemTime.After(clientLastSync) {
			response.Problems = append(response.Problems, problem)
		}
	}

	// Filter sessions modified after client's last sync
	for _, session := range serverBackup.Sessions {
		if deletedItemMap["session:"+session.ID] {
			continue
		}
		sessionTime, err := time.Parse(time.RFC3339, session.UpdatedAt)
		if err == nil && sessionTime.After(clientLastSync) {
			response.Sessions = append(response.Sessions, session)
		}
	}

	// Filter attempts created after client's last sync
	for _, attempt := range serverBackup.Attempts {
		if deletedItemMap["attempt:"+attempt.ID] {
			continue
		}
		attemptTime, err := time.Parse(time.RFC3339, attempt.CreatedAt)
		if err == nil && attemptTime.After(clientLastSync) {
			response.Attempts = append(response.Attempts, attempt)
		}
	}

	// Filter deletions after client's last sync
	for _, deletedItem := range serverBackup.DeletedItems {
		deletedTime, err := time.Parse(time.RFC3339, deletedItem.DeletedAt)
		if err == nil && deletedTime.After(clientLastSync) {
			response.DeletedItems = append(response.DeletedItems, deletedItem)
		}
	}

	log.Printf("Delta sync response to %s: gyms=%d, problems=%d, sessions=%d, attempts=%d, deletedItems=%d",
		r.RemoteAddr,
		len(response.Gyms), len(response.Problems),
		len(response.Sessions), len(response.Attempts),
		len(response.DeletedItems))

	w.Header().Set("Content-Type", "application/json")
	// NOTE(review): the Encode error is discarded; a client that disconnects
	// mid-response will not be logged.
	json.NewEncoder(w).Encode(response)
}
func (s *SyncServer) handleSync(w http.ResponseWriter, r *http.Request) { func (s *SyncServer) handleSync(w http.ResponseWriter, r *http.Request) {
switch r.Method { switch r.Method {
case http.MethodGet: case http.MethodGet:
@@ -354,6 +684,7 @@ func main() {
} }
http.HandleFunc("/sync", server.handleSync) http.HandleFunc("/sync", server.handleSync)
http.HandleFunc("/sync/delta", server.handleDeltaSync)
http.HandleFunc("/health", server.handleHealth) http.HandleFunc("/health", server.handleHealth)
http.HandleFunc("/images/upload", server.handleImageUpload) http.HandleFunc("/images/upload", server.handleImageUpload)
http.HandleFunc("/images/download", server.handleImageDownload) http.HandleFunc("/images/download", server.handleImageDownload)
@@ -362,6 +693,8 @@ func main() {
fmt.Printf("Data file: %s\n", dataFile) fmt.Printf("Data file: %s\n", dataFile)
fmt.Printf("Images directory: %s\n", imagesDir) fmt.Printf("Images directory: %s\n", imagesDir)
fmt.Printf("Health check available at /health\n") fmt.Printf("Health check available at /health\n")
fmt.Printf("Delta sync: POST /sync/delta (incremental sync)\n")
fmt.Printf("Full sync: GET /sync (download all), PUT /sync (upload all)\n")
fmt.Printf("Image upload: POST /images/upload?filename=<name>\n") fmt.Printf("Image upload: POST /images/upload?filename=<name>\n")
fmt.Printf("Image download: GET /images/download?filename=<name>\n") fmt.Printf("Image download: GET /images/download?filename=<name>\n")

501
sync/sync_test.go Normal file
View File

@@ -0,0 +1,501 @@
package main
import (
"path/filepath"
"testing"
"time"
)
// TestDeltaSyncDeletedItemResurrection verifies deleted items don't resurrect
//
// Scenario: a client that previously synced deletes a session and its 8
// attempts, pushes the tombstones, then wipes its local store and pulls from
// scratch. The server must not hand the deleted records back. The delta
// response filtering is re-implemented inline here (mirroring handleDeltaSync)
// rather than driven through the HTTP handler.
func TestDeltaSyncDeletedItemResurrection(t *testing.T) {
	tempDir := t.TempDir()
	server := &SyncServer{
		dataFile:  filepath.Join(tempDir, "test.json"),
		imagesDir: filepath.Join(tempDir, "images"),
		authToken: "test-token",
	}

	// Initial state: Server has one gym, one problem, one session with 8 attempts
	now := time.Now().UTC()
	gymID := "gym-1"
	problemID := "problem-1"
	sessionID := "session-1"

	initialBackup := &ClimbDataBackup{
		Version:       "2.0",
		FormatVersion: "2.0",
		Gyms: []BackupGym{
			{
				ID:                  gymID,
				Name:                "Test Gym",
				SupportedClimbTypes: []string{"BOULDER"},
				DifficultySystems:   []string{"V"},
				CreatedAt:           now.Add(-1 * time.Hour).Format(time.RFC3339),
				UpdatedAt:           now.Add(-1 * time.Hour).Format(time.RFC3339),
			},
		},
		Problems: []BackupProblem{
			{
				ID:        problemID,
				GymID:     gymID,
				ClimbType: "BOULDER",
				Difficulty: DifficultyGrade{
					System:       "V",
					Grade:        "V5",
					NumericValue: 5,
				},
				IsActive:  true,
				CreatedAt: now.Add(-1 * time.Hour).Format(time.RFC3339),
				UpdatedAt: now.Add(-1 * time.Hour).Format(time.RFC3339),
			},
		},
		Sessions: []BackupClimbSession{
			{
				ID:        sessionID,
				GymID:     gymID,
				Date:      now.Format("2006-01-02"),
				Status:    "completed",
				CreatedAt: now.Add(-30 * time.Minute).Format(time.RFC3339),
				UpdatedAt: now.Add(-30 * time.Minute).Format(time.RFC3339),
			},
		},
		Attempts:     []BackupAttempt{},
		DeletedItems: []DeletedItem{},
	}

	// Add 8 attempts (IDs "attempt-1" .. "attempt-8")
	for i := 0; i < 8; i++ {
		attempt := BackupAttempt{
			ID:        "attempt-" + string(rune('1'+i)),
			SessionID: sessionID,
			ProblemID: problemID,
			Result:    "COMPLETED",
			Timestamp: now.Add(time.Duration(-25+i) * time.Minute).Format(time.RFC3339),
			CreatedAt: now.Add(time.Duration(-25+i) * time.Minute).Format(time.RFC3339),
		}
		initialBackup.Attempts = append(initialBackup.Attempts, attempt)
	}

	if err := server.saveData(initialBackup); err != nil {
		t.Fatalf("Failed to save initial data: %v", err)
	}

	// Client 1 syncs - gets all data
	client1LastSync := now.Add(-2 * time.Hour).Format(time.RFC3339)
	deltaRequest1 := DeltaSyncRequest{
		LastSyncTime: client1LastSync,
		Gyms:         []BackupGym{},
		Problems:     []BackupProblem{},
		Sessions:     []BackupClimbSession{},
		Attempts:     []BackupAttempt{},
		DeletedItems: []DeletedItem{},
	}

	// Simulate delta sync for client 1
	// NOTE(review): loadData/saveData errors are ignored throughout this
	// test; a failure would surface as a nil dereference rather than a
	// clear test failure.
	serverBackup, _ := server.loadData()
	serverBackup.DeletedItems = server.mergeDeletedItems(serverBackup.DeletedItems, deltaRequest1.DeletedItems)
	server.applyDeletions(serverBackup, serverBackup.DeletedItems)

	if len(serverBackup.Sessions) != 1 {
		t.Errorf("Expected 1 session after client1 sync, got %d", len(serverBackup.Sessions))
	}
	if len(serverBackup.Attempts) != 8 {
		t.Errorf("Expected 8 attempts after client1 sync, got %d", len(serverBackup.Attempts))
	}

	// Client 1 deletes the session locally
	deleteTime := now.Format(time.RFC3339)
	deletions := []DeletedItem{
		{ID: sessionID, Type: "session", DeletedAt: deleteTime},
	}
	// Also track attempt deletions
	for _, attempt := range initialBackup.Attempts {
		deletions = append(deletions, DeletedItem{
			ID:        attempt.ID,
			Type:      "attempt",
			DeletedAt: deleteTime,
		})
	}

	// Client 1 syncs deletion
	deltaRequest2 := DeltaSyncRequest{
		LastSyncTime: now.Add(-5 * time.Minute).Format(time.RFC3339),
		Gyms:         []BackupGym{},
		Problems:     []BackupProblem{},
		Sessions:     []BackupClimbSession{},
		Attempts:     []BackupAttempt{},
		DeletedItems: deletions,
	}

	// Server processes deletion
	serverBackup, _ = server.loadData()
	serverBackup.DeletedItems = server.mergeDeletedItems(serverBackup.DeletedItems, deltaRequest2.DeletedItems)
	server.applyDeletions(serverBackup, serverBackup.DeletedItems)
	server.saveData(serverBackup)

	// Verify deletions were applied on server
	serverBackup, _ = server.loadData()
	if len(serverBackup.Sessions) != 0 {
		t.Errorf("Expected 0 sessions after deletion, got %d", len(serverBackup.Sessions))
	}
	if len(serverBackup.Attempts) != 0 {
		t.Errorf("Expected 0 attempts after deletion, got %d", len(serverBackup.Attempts))
	}
	// 1 session tombstone + 8 attempt tombstones
	if len(serverBackup.DeletedItems) != 9 {
		t.Errorf("Expected 9 deletion records, got %d", len(serverBackup.DeletedItems))
	}

	// Client does local reset and pulls from server
	// (zero-value LastSyncTime means "send everything")
	deltaRequest3 := DeltaSyncRequest{
		LastSyncTime: time.Time{}.Format(time.RFC3339),
		Gyms:         []BackupGym{},
		Problems:     []BackupProblem{},
		Sessions:     []BackupClimbSession{},
		Attempts:     []BackupAttempt{},
		DeletedItems: []DeletedItem{},
	}

	serverBackup, _ = server.loadData()
	clientLastSync, _ := time.Parse(time.RFC3339, deltaRequest3.LastSyncTime)

	// Build response
	response := DeltaSyncResponse{
		ServerTime:   time.Now().UTC().Format(time.RFC3339),
		Gyms:         []BackupGym{},
		Problems:     []BackupProblem{},
		Sessions:     []BackupClimbSession{},
		Attempts:     []BackupAttempt{},
		DeletedItems: []DeletedItem{},
	}

	// Build deleted item map
	deletedItemMap := make(map[string]bool)
	for _, item := range serverBackup.DeletedItems {
		key := item.Type + ":" + item.ID
		deletedItemMap[key] = true
	}

	// Filter sessions (excluding deleted)
	for _, session := range serverBackup.Sessions {
		if deletedItemMap["session:"+session.ID] {
			continue
		}
		sessionTime, _ := time.Parse(time.RFC3339, session.UpdatedAt)
		if sessionTime.After(clientLastSync) {
			response.Sessions = append(response.Sessions, session)
		}
	}

	// Filter attempts (excluding deleted)
	for _, attempt := range serverBackup.Attempts {
		if deletedItemMap["attempt:"+attempt.ID] {
			continue
		}
		attemptTime, _ := time.Parse(time.RFC3339, attempt.CreatedAt)
		if attemptTime.After(clientLastSync) {
			response.Attempts = append(response.Attempts, attempt)
		}
	}

	// Send deletion records
	for _, deletion := range serverBackup.DeletedItems {
		deletionTime, _ := time.Parse(time.RFC3339, deletion.DeletedAt)
		if deletionTime.After(clientLastSync) {
			response.DeletedItems = append(response.DeletedItems, deletion)
		}
	}

	if len(response.Sessions) != 0 {
		t.Errorf("Deleted session was resurrected! Got %d sessions in response", len(response.Sessions))
	}
	if len(response.Attempts) != 0 {
		t.Errorf("Deleted attempts were resurrected! Got %d attempts in response", len(response.Attempts))
	}
	if len(response.DeletedItems) < 9 {
		t.Errorf("Expected at least 9 deletion records in response, got %d", len(response.DeletedItems))
	}
}
// TestDeltaSyncAttemptCount verifies all attempts are preserved: when the
// client's lastSyncTime predates every attempt, the delta filter must count
// all 8 of them. Unlike the handler (which silently skips unparseable
// timestamps), this test fails fast on load or parse errors so a broken
// fixture cannot silently shrink the count and mask a regression.
func TestDeltaSyncAttemptCount(t *testing.T) {
	tempDir := t.TempDir()
	server := &SyncServer{
		dataFile:  filepath.Join(tempDir, "test.json"),
		imagesDir: filepath.Join(tempDir, "images"),
		authToken: "test-token",
	}

	now := time.Now().UTC()
	gymID := "gym-1"
	problemID := "problem-1"
	sessionID := "session-1"

	// Create session with 8 attempts
	initialBackup := &ClimbDataBackup{
		Version:       "2.0",
		FormatVersion: "2.0",
		Gyms:          []BackupGym{{ID: gymID, Name: "Test Gym", SupportedClimbTypes: []string{"BOULDER"}, DifficultySystems: []string{"V"}, CreatedAt: now.Format(time.RFC3339), UpdatedAt: now.Format(time.RFC3339)}},
		Problems:      []BackupProblem{{ID: problemID, GymID: gymID, ClimbType: "BOULDER", Difficulty: DifficultyGrade{System: "V", Grade: "V5", NumericValue: 5}, IsActive: true, CreatedAt: now.Format(time.RFC3339), UpdatedAt: now.Format(time.RFC3339)}},
		Sessions:      []BackupClimbSession{{ID: sessionID, GymID: gymID, Date: now.Format("2006-01-02"), Status: "completed", CreatedAt: now.Format(time.RFC3339), UpdatedAt: now.Format(time.RFC3339)}},
		Attempts:      []BackupAttempt{},
		DeletedItems:  []DeletedItem{},
	}

	// Add 8 attempts at different times
	baseTime := now.Add(-30 * time.Minute)
	for i := 0; i < 8; i++ {
		attempt := BackupAttempt{
			ID:        "attempt-" + string(rune('1'+i)),
			SessionID: sessionID,
			ProblemID: problemID,
			Result:    "COMPLETED",
			Timestamp: baseTime.Add(time.Duration(i) * time.Minute).Format(time.RFC3339),
			CreatedAt: baseTime.Add(time.Duration(i) * time.Minute).Format(time.RFC3339),
		}
		initialBackup.Attempts = append(initialBackup.Attempts, attempt)
	}

	if err := server.saveData(initialBackup); err != nil {
		t.Fatalf("Failed to save initial data: %v", err)
	}

	// Client syncs with lastSyncTime BEFORE all attempts were created
	clientLastSync := baseTime.Add(-1 * time.Hour)

	serverBackup, err := server.loadData()
	if err != nil {
		t.Fatalf("Failed to load data: %v", err)
	}

	// Count attempts that should be returned
	attemptCount := 0
	for _, attempt := range serverBackup.Attempts {
		attemptTime, err := time.Parse(time.RFC3339, attempt.CreatedAt)
		if err != nil {
			t.Fatalf("Unparseable attempt CreatedAt %q: %v", attempt.CreatedAt, err)
		}
		if attemptTime.After(clientLastSync) {
			attemptCount++
		}
	}

	if attemptCount != 8 {
		t.Errorf("Expected all 8 attempts to be returned, got %d", attemptCount)
	}
}
// TestTombstoneCleanup verifies old deletion records are cleaned up
func TestTombstoneCleanup(t *testing.T) {
	srv := &SyncServer{}
	now := time.Now().UTC()

	// One tombstone past the 30-day retention window, one well inside it.
	stale := DeletedItem{
		ID:        "old-item",
		Type:      "session",
		DeletedAt: now.Add(-31 * 24 * time.Hour).Format(time.RFC3339), // 31 days old
	}
	fresh := DeletedItem{
		ID:        "recent-item",
		Type:      "session",
		DeletedAt: now.Add(-1 * 24 * time.Hour).Format(time.RFC3339), // 1 day old
	}

	merged := srv.mergeDeletedItems([]DeletedItem{stale}, []DeletedItem{fresh})

	// Old deletion should be cleaned up, only recent one remains
	if len(merged) != 1 {
		t.Errorf("Expected 1 deletion record after cleanup, got %d", len(merged))
	}
	if len(merged) > 0 && merged[0].ID != "recent-item" {
		t.Errorf("Expected recent deletion to remain, got %s", merged[0].ID)
	}
}
// TestMergeDeletedItemsDeduplication verifies duplicate deletions are handled
func TestMergeDeletedItemsDeduplication(t *testing.T) {
	srv := &SyncServer{}
	now := time.Now().UTC()

	// Two tombstones for the same (type, ID); the newer one must win.
	older := DeletedItem{
		ID:        "item-1",
		Type:      "session",
		DeletedAt: now.Add(-1 * time.Hour).Format(time.RFC3339),
	}
	newer := DeletedItem{
		ID:        "item-1",
		Type:      "session",
		DeletedAt: now.Format(time.RFC3339), // Newer timestamp
	}

	merged := srv.mergeDeletedItems([]DeletedItem{older}, []DeletedItem{newer})

	if len(merged) != 1 {
		t.Errorf("Expected 1 deletion record, got %d", len(merged))
	}
	if len(merged) > 0 && merged[0].DeletedAt != newer.DeletedAt {
		t.Errorf("Expected newer deletion timestamp to be kept")
	}
}
// TestApplyDeletions verifies deletions are applied correctly
func TestApplyDeletions(t *testing.T) {
	srv := &SyncServer{}
	now := time.Now().UTC()
	stamp := now.Format(time.RFC3339)

	backup := &ClimbDataBackup{
		Version:       "2.0",
		FormatVersion: "2.0",
		Gyms:          []BackupGym{{ID: "gym-1", Name: "Test Gym", SupportedClimbTypes: []string{}, DifficultySystems: []string{}, CreatedAt: stamp, UpdatedAt: stamp}},
		Problems:      []BackupProblem{{ID: "problem-1", GymID: "gym-1", ClimbType: "BOULDER", Difficulty: DifficultyGrade{System: "V", Grade: "V5", NumericValue: 5}, IsActive: true, CreatedAt: stamp, UpdatedAt: stamp}},
		Sessions:      []BackupClimbSession{{ID: "session-1", GymID: "gym-1", Date: now.Format("2006-01-02"), Status: "completed", CreatedAt: stamp, UpdatedAt: stamp}},
		Attempts:      []BackupAttempt{{ID: "attempt-1", SessionID: "session-1", ProblemID: "problem-1", Result: "COMPLETED", Timestamp: stamp, CreatedAt: stamp}},
		DeletedItems:  []DeletedItem{},
	}

	// Delete the session and its attempt; the gym and problem must survive.
	srv.applyDeletions(backup, []DeletedItem{
		{ID: "session-1", Type: "session", DeletedAt: stamp},
		{ID: "attempt-1", Type: "attempt", DeletedAt: stamp},
	})

	if len(backup.Sessions) != 0 {
		t.Errorf("Expected 0 sessions after deletion, got %d", len(backup.Sessions))
	}
	if len(backup.Attempts) != 0 {
		t.Errorf("Expected 0 attempts after deletion, got %d", len(backup.Attempts))
	}
	if len(backup.Gyms) != 1 {
		t.Errorf("Expected gym to remain, got %d gyms", len(backup.Gyms))
	}
	if len(backup.Problems) != 1 {
		t.Errorf("Expected problem to remain, got %d problems", len(backup.Problems))
	}
}
// TestCascadingDeletions verifies related items are handled properly
func TestCascadingDeletions(t *testing.T) {
	srv := &SyncServer{}
	now := time.Now().UTC()
	stamp := now.Format(time.RFC3339)
	sessionID := "session-1"

	backup := &ClimbDataBackup{
		Version:       "2.0",
		FormatVersion: "2.0",
		Gyms:          []BackupGym{{ID: "gym-1", Name: "Test Gym", SupportedClimbTypes: []string{}, DifficultySystems: []string{}, CreatedAt: stamp, UpdatedAt: stamp}},
		Problems:      []BackupProblem{{ID: "problem-1", GymID: "gym-1", ClimbType: "BOULDER", Difficulty: DifficultyGrade{System: "V", Grade: "V5", NumericValue: 5}, IsActive: true, CreatedAt: stamp, UpdatedAt: stamp}},
		Sessions:      []BackupClimbSession{{ID: sessionID, GymID: "gym-1", Date: now.Format("2006-01-02"), Status: "completed", CreatedAt: stamp, UpdatedAt: stamp}},
		Attempts:      []BackupAttempt{},
		DeletedItems:  []DeletedItem{},
	}

	// Add multiple attempts for the session
	for i := 0; i < 5; i++ {
		backup.Attempts = append(backup.Attempts, BackupAttempt{
			ID:        "attempt-" + string(rune('1'+i)),
			SessionID: sessionID,
			ProblemID: "problem-1",
			Result:    "COMPLETED",
			Timestamp: stamp,
			CreatedAt: stamp,
		})
	}

	// Delete session - attempts should also be tracked as deleted
	tombstones := []DeletedItem{
		{ID: sessionID, Type: "session", DeletedAt: stamp},
	}
	for _, attempt := range backup.Attempts {
		tombstones = append(tombstones, DeletedItem{
			ID:        attempt.ID,
			Type:      "attempt",
			DeletedAt: stamp,
		})
	}

	srv.applyDeletions(backup, tombstones)

	if len(backup.Sessions) != 0 {
		t.Errorf("Expected session to be deleted, got %d sessions", len(backup.Sessions))
	}
	if len(backup.Attempts) != 0 {
		t.Errorf("Expected all attempts to be deleted, got %d attempts", len(backup.Attempts))
	}
}
// TestFullSyncAfterReset verifies the reported user scenario: a client
// deletes everything, pushes the tombstones, and a fresh pull after a local
// reset must see no resurrected data — only the deletion records.
// loadData/saveData errors fail the test instead of being silently ignored,
// so an I/O failure cannot masquerade as a passing (or nil-panicking) run.
func TestFullSyncAfterReset(t *testing.T) {
	tempDir := t.TempDir()
	server := &SyncServer{
		dataFile:  filepath.Join(tempDir, "test.json"),
		imagesDir: filepath.Join(tempDir, "images"),
		authToken: "test-token",
	}

	now := time.Now().UTC()

	// Initial sync with data
	initialData := &ClimbDataBackup{
		Version:       "2.0",
		FormatVersion: "2.0",
		Gyms:          []BackupGym{{ID: "gym-1", Name: "Test Gym", SupportedClimbTypes: []string{"BOULDER"}, DifficultySystems: []string{"V"}, CreatedAt: now.Format(time.RFC3339), UpdatedAt: now.Format(time.RFC3339)}},
		Problems:      []BackupProblem{{ID: "problem-1", GymID: "gym-1", ClimbType: "BOULDER", Difficulty: DifficultyGrade{System: "V", Grade: "V5", NumericValue: 5}, IsActive: true, CreatedAt: now.Format(time.RFC3339), UpdatedAt: now.Format(time.RFC3339)}},
		Sessions:      []BackupClimbSession{{ID: "session-1", GymID: "gym-1", Date: now.Format("2006-01-02"), Status: "completed", CreatedAt: now.Format(time.RFC3339), UpdatedAt: now.Format(time.RFC3339)}},
		Attempts:      []BackupAttempt{},
		DeletedItems:  []DeletedItem{},
	}
	for i := 0; i < 8; i++ {
		initialData.Attempts = append(initialData.Attempts, BackupAttempt{
			ID:        "attempt-" + string(rune('1'+i)),
			SessionID: "session-1",
			ProblemID: "problem-1",
			Result:    "COMPLETED",
			Timestamp: now.Add(time.Duration(i) * time.Minute).Format(time.RFC3339),
			CreatedAt: now.Add(time.Duration(i) * time.Minute).Format(time.RFC3339),
		})
	}
	if err := server.saveData(initialData); err != nil {
		t.Fatalf("Failed to save initial data: %v", err)
	}

	// Client deletes everything and syncs
	deletions := []DeletedItem{
		{ID: "gym-1", Type: "gym", DeletedAt: now.Add(10 * time.Minute).Format(time.RFC3339)},
		{ID: "problem-1", Type: "problem", DeletedAt: now.Add(10 * time.Minute).Format(time.RFC3339)},
		{ID: "session-1", Type: "session", DeletedAt: now.Add(10 * time.Minute).Format(time.RFC3339)},
	}
	for i := 0; i < 8; i++ {
		deletions = append(deletions, DeletedItem{
			ID:        "attempt-" + string(rune('1'+i)),
			Type:      "attempt",
			DeletedAt: now.Add(10 * time.Minute).Format(time.RFC3339),
		})
	}

	serverBackup, err := server.loadData()
	if err != nil {
		t.Fatalf("Failed to load data: %v", err)
	}
	serverBackup.DeletedItems = server.mergeDeletedItems(serverBackup.DeletedItems, deletions)
	server.applyDeletions(serverBackup, serverBackup.DeletedItems)
	if err := server.saveData(serverBackup); err != nil {
		t.Fatalf("Failed to save merged data: %v", err)
	}

	// Client does local reset and pulls from server
	serverBackup, err = server.loadData()
	if err != nil {
		t.Fatalf("Failed to reload data: %v", err)
	}
	if len(serverBackup.Gyms) != 0 {
		t.Errorf("Expected 0 gyms, got %d", len(serverBackup.Gyms))
	}
	if len(serverBackup.Problems) != 0 {
		t.Errorf("Expected 0 problems, got %d", len(serverBackup.Problems))
	}
	if len(serverBackup.Sessions) != 0 {
		t.Errorf("Expected 0 sessions, got %d", len(serverBackup.Sessions))
	}
	if len(serverBackup.Attempts) != 0 {
		t.Errorf("Expected 0 attempts, got %d", len(serverBackup.Attempts))
	}
	if len(serverBackup.DeletedItems) == 0 {
		t.Errorf("Expected deletion records, got 0")
	}
}