1187 lines
47 KiB
Swift
1187 lines
47 KiB
Swift
import Combine
|
|
import Foundation
|
|
|
|
/// Sync provider that synchronizes climbing data with a user-configured
/// HTTP server. Configuration (base URL, bearer token) and sync state
/// (connected flag, last sync timestamp) are persisted in `UserDefaults`.
class ServerSyncProvider: SyncProvider {

    var type: SyncProviderType { .server }

    private let userDefaults = UserDefaults.standard
    private let logTag = "ServerSyncProvider"

    /// UserDefaults keys backing the persisted properties below.
    private enum Keys {
        static let serverURL = "sync_server_url"
        static let authToken = "sync_auth_token"
        static let lastSyncTime = "last_sync_time"
        static let isConnected = "is_connected"
    }

    /// Base URL of the sync server; empty string when not configured.
    var serverURL: String {
        get { userDefaults.string(forKey: Keys.serverURL) ?? "" }
        set { userDefaults.set(newValue, forKey: Keys.serverURL) }
    }

    /// Token sent as `Authorization: Bearer …`; empty string when not configured.
    var authToken: String {
        get { userDefaults.string(forKey: Keys.authToken) ?? "" }
        set { userDefaults.set(newValue, forKey: Keys.authToken) }
    }

    /// True when both a server URL and an auth token have been provided.
    var isConfigured: Bool {
        return !serverURL.isEmpty && !authToken.isEmpty
    }

    /// Persisted flag, set after a successful `testConnection()`.
    var isConnected: Bool {
        get { userDefaults.bool(forKey: Keys.isConnected) }
        set { userDefaults.set(newValue, forKey: Keys.isConnected) }
    }

    /// Timestamp of the last successful sync; nil when never synced.
    /// Assigning nil removes the stored value from UserDefaults.
    var lastSyncTime: Date? {
        get { userDefaults.object(forKey: Keys.lastSyncTime) as? Date }
        set { userDefaults.set(newValue, forKey: Keys.lastSyncTime) }
    }
|
|
|
|
/// Marks the provider as disconnected and forgets the last sync time.
/// Server URL and auth token are intentionally preserved so the user can
/// reconnect without re-entering configuration.
func disconnect() {
    // The property setters already persist to UserDefaults: assigning nil
    // removes the stored last-sync date, and assigning false stores false.
    // The previous implementation repeated both writes with raw
    // `removeObject(forKey:)` / `set(false, forKey:)` calls, which were
    // redundant and have been dropped.
    isConnected = false
    lastSyncTime = nil
}
|
|
|
|
/// Performs a lightweight `GET /health` to verify the server is reachable
/// and answering. On success the provider is marked connected.
/// - Throws: `SyncError.notConfigured` when URL/token are missing,
///   `.invalidURL` for a malformed base URL, `.invalidResponse` for a
///   non-HTTP response, or `.serverError` carrying a non-200 status code.
func testConnection() async throws {
    guard isConfigured else { throw SyncError.notConfigured }

    guard let healthURL = URL(string: "\(serverURL)/health") else {
        throw SyncError.invalidURL
    }

    var healthRequest = URLRequest(url: healthURL)
    healthRequest.httpMethod = "GET"
    healthRequest.setValue("application/json", forHTTPHeaderField: "Accept")
    // Keep the probe snappy; a slow health check is as good as a failure.
    healthRequest.timeoutInterval = 10

    let (_, rawResponse) = try await URLSession.shared.data(for: healthRequest)

    guard let http = rawResponse as? HTTPURLResponse else {
        throw SyncError.invalidResponse
    }
    if http.statusCode != 200 {
        throw SyncError.serverError(http.statusCode)
    }

    // Server answered 200 OK — remember that we have a working connection.
    isConnected = true
}
|
|
|
|
/// Entry point for a full synchronization pass. Strategy selection:
///   - both sides have data and we synced before -> incremental delta sync
///   - only the server has data                  -> full restore to device
///   - only the device has data                  -> initial upload to server
///   - both have data but never synced           -> conservative merge
/// - Parameter dataManager: Owner of the in-memory gyms/problems/sessions/attempts.
/// - Throws: `SyncError.notConfigured` / `.notConnected` preconditions, plus
///   any network, decoding, or import error from the chosen strategy.
func sync(dataManager: ClimbingDataManager) async throws {
    guard isConfigured else { throw SyncError.notConfigured }
    guard isConnected else { throw SyncError.notConnected }

    // Snapshot local state (active sessions excluded by the backup builder)
    // and fetch the server's current state.
    let localBackup = createBackupFromDataManager(dataManager)
    let serverBackup = try await downloadData()

    let hasLocalData =
        !dataManager.gyms.isEmpty || !dataManager.problems.isEmpty
        || !dataManager.sessions.isEmpty || !dataManager.attempts.isEmpty

    let hasServerData =
        !serverBackup.gyms.isEmpty || !serverBackup.problems.isEmpty
        || !serverBackup.sessions.isEmpty || !serverBackup.attempts.isEmpty

    // Incremental path: both sides populated and a previous sync timestamp exists.
    if hasLocalData && hasServerData && lastSyncTime != nil {
        AppLogger.info("Using delta sync for incremental updates", tag: logTag)
        try await performDeltaSync(dataManager: dataManager)
        lastSyncTime = Date()
        return
    }

    if !hasLocalData && hasServerData {
        // Fresh device: pull images first so imported problems can reference them.
        AppLogger.info("Performing full restore from server", tag: logTag)
        AppLogger.info("Syncing images from server first...", tag: logTag)
        let imagePathMapping = try await syncImagesFromServer(
            backup: serverBackup, dataManager: dataManager)
        AppLogger.info("Importing data after images...", tag: logTag)
        try importBackupToDataManager(
            serverBackup, dataManager: dataManager, imagePathMapping: imagePathMapping)
        AppLogger.info("Full restore completed", tag: logTag)
    } else if hasLocalData && !hasServerData {
        // Fresh server: push everything we have.
        AppLogger.info("Uploading local data to server", tag: logTag)
        // Fix: reuse the snapshot taken above instead of rebuilding an
        // identical backup — nothing has mutated the data manager since.
        _ = try await uploadData(localBackup)
        AppLogger.info("Uploading local images to server...", tag: logTag)
        try await syncImagesToServer(dataManager: dataManager)
        AppLogger.info("Initial upload completed", tag: logTag)
    } else if hasLocalData && hasServerData {
        // Both populated but never synced before (lastSyncTime == nil):
        // merge conservatively rather than overwrite either side.
        AppLogger.info("Merging local and server data", tag: logTag)
        try await mergeDataSafely(
            localBackup: localBackup,
            serverBackup: serverBackup,
            dataManager: dataManager)
        AppLogger.info("Safe merge completed", tag: logTag)
    } else {
        AppLogger.info("No data to sync", tag: logTag)
    }

    lastSyncTime = Date()
}
|
|
|
|
// MARK: - Private Helpers
|
|
|
|
/// Fetches the complete backup currently stored on the server (`GET /sync`).
/// - Returns: The decoded `ClimbDataBackup`.
/// - Throws: `SyncError.invalidURL`, `.invalidResponse`, `.unauthorized`
///   on HTTP 401, `.serverError` for other non-200 codes, or
///   `.decodingError` wrapping the JSON failure.
private func downloadData() async throws -> ClimbDataBackup {
    guard let endpoint = URL(string: "\(serverURL)/sync") else {
        throw SyncError.invalidURL
    }

    var req = URLRequest(url: endpoint)
    req.httpMethod = "GET"
    req.setValue("Bearer \(authToken)", forHTTPHeaderField: "Authorization")
    req.setValue("application/json", forHTTPHeaderField: "Accept")

    let (payload, rawResponse) = try await URLSession.shared.data(for: req)

    guard let http = rawResponse as? HTTPURLResponse else {
        throw SyncError.invalidResponse
    }

    // 401 gets its own error so callers can prompt for a new token.
    if http.statusCode == 401 { throw SyncError.unauthorized }
    guard http.statusCode == 200 else {
        throw SyncError.serverError(http.statusCode)
    }

    do {
        return try JSONDecoder().decode(ClimbDataBackup.self, from: payload)
    } catch {
        throw SyncError.decodingError(error)
    }
}
|
|
|
|
/// Pushes a full backup to the server (`PUT /sync`) and returns the backup
/// the server responds with.
/// - Parameter backup: Snapshot to upload; dates are encoded as ISO-8601.
/// - Throws: `SyncError.invalidURL`, `.invalidResponse`, `.unauthorized`
///   (401), `.badRequest` (400), `.serverError` (other non-200), or
///   `.decodingError` if the response body fails to decode.
private func uploadData(_ backup: ClimbDataBackup) async throws -> ClimbDataBackup {
    guard let endpoint = URL(string: "\(serverURL)/sync") else {
        throw SyncError.invalidURL
    }

    // ISO-8601 dates match the server's expected wire format.
    let encoder = JSONEncoder()
    encoder.dateEncodingStrategy = .iso8601
    let body = try encoder.encode(backup)

    var req = URLRequest(url: endpoint)
    req.httpMethod = "PUT"
    req.setValue("Bearer \(authToken)", forHTTPHeaderField: "Authorization")
    req.setValue("application/json", forHTTPHeaderField: "Content-Type")
    req.setValue("application/json", forHTTPHeaderField: "Accept")
    req.httpBody = body

    let (payload, rawResponse) = try await URLSession.shared.data(for: req)

    guard let http = rawResponse as? HTTPURLResponse else {
        throw SyncError.invalidResponse
    }

    switch http.statusCode {
    case 200: break
    case 401: throw SyncError.unauthorized
    case 400: throw SyncError.badRequest
    default: throw SyncError.serverError(http.statusCode)
    }

    do {
        return try JSONDecoder().decode(ClimbDataBackup.self, from: payload)
    } catch {
        throw SyncError.decodingError(error)
    }
}
|
|
|
|
/// Performs an incremental sync (`POST /sync/delta`): sends only items
/// modified since `lastSyncTime`, applies the server's returned changes,
/// uploads images for the modified problems, and advances `lastSyncTime`
/// to the server-reported time.
/// - Throws: `SyncError.invalidURL` / `.invalidResponse` / `.unauthorized`
///   / `.serverError`, plus any encoding/decoding or apply errors.
private func performDeltaSync(dataManager: ClimbingDataManager) async throws {
    guard let url = URL(string: "\(serverURL)/sync/delta") else {
        throw SyncError.invalidURL
    }

    // Get last sync time, or use epoch if never synced (sends everything).
    let lastSync = lastSyncTime ?? Date(timeIntervalSince1970: 0)
    let formatter = ISO8601DateFormatter()
    let lastSyncString = formatter.string(from: lastSync)

    // Collect gyms modified since the last sync.
    let modifiedGyms = dataManager.gyms.filter { gym in
        gym.updatedAt > lastSync
    }.map { BackupGym(from: $0) }

    // Collect modified problems, rewriting their image paths to the
    // canonical <problemId>_<index> filenames so server and client agree.
    let modifiedProblems = dataManager.problems.filter { problem in
        problem.updatedAt > lastSync
    }.map { problem -> BackupProblem in
        let backupProblem = BackupProblem(from: problem)
        if !problem.imagePaths.isEmpty {
            let normalizedPaths = problem.imagePaths.indices.map { index in
                ImageNamingUtils.generateImageFilename(
                    problemId: problem.id.uuidString, imageIndex: index)
            }
            // BackupProblem appears immutable; rebuild it with new paths.
            return BackupProblem(
                id: backupProblem.id,
                gymId: backupProblem.gymId,
                name: backupProblem.name,
                description: backupProblem.description,
                climbType: backupProblem.climbType,
                difficulty: backupProblem.difficulty,
                tags: backupProblem.tags,
                location: backupProblem.location,
                imagePaths: normalizedPaths,
                isActive: backupProblem.isActive,
                dateSet: backupProblem.dateSet,
                notes: backupProblem.notes,
                createdAt: backupProblem.createdAt,
                updatedAt: backupProblem.updatedAt
            )
        }
        return backupProblem
    }

    // Only completed sessions are synced; active ones stay local.
    let modifiedSessions = dataManager.sessions.filter { session in
        session.status != .active && session.updatedAt > lastSync
    }.map { BackupClimbSession(from: $0) }

    // Attempts belonging to an active session are likewise excluded.
    let activeSessionIds = Set(
        dataManager.sessions.filter { $0.status == .active }.map { $0.id })
    let modifiedAttempts = dataManager.attempts.filter { attempt in
        !activeSessionIds.contains(attempt.sessionId) && attempt.createdAt > lastSync
    }.map { BackupAttempt(from: $0) }

    // Tombstones created since the last sync (deletedAt is an ISO-8601 string;
    // unparseable timestamps are skipped).
    let modifiedDeletions = dataManager.getDeletedItems().filter { item in
        if let deletedDate = formatter.date(from: item.deletedAt) {
            return deletedDate > lastSync
        }
        return false
    }

    AppLogger.info(
        "Delta Sync: Sending gyms=\(modifiedGyms.count), problems=\(modifiedProblems.count), sessions=\(modifiedSessions.count), attempts=\(modifiedAttempts.count), deletions=\(modifiedDeletions.count)",
        tag: logTag
    )

    // Create delta request
    let deltaRequest = DeltaSyncRequest(
        lastSyncTime: lastSyncString,
        gyms: modifiedGyms,
        problems: modifiedProblems,
        sessions: modifiedSessions,
        attempts: modifiedAttempts,
        deletedItems: modifiedDeletions
    )

    let encoder = JSONEncoder()
    encoder.dateEncodingStrategy = .iso8601
    let jsonData = try encoder.encode(deltaRequest)

    var request = URLRequest(url: url)
    request.httpMethod = "POST"
    request.setValue("Bearer \(authToken)", forHTTPHeaderField: "Authorization")
    request.setValue("application/json", forHTTPHeaderField: "Content-Type")
    request.setValue("application/json", forHTTPHeaderField: "Accept")
    request.httpBody = jsonData

    let (data, response) = try await URLSession.shared.data(for: request)

    guard let httpResponse = response as? HTTPURLResponse else {
        throw SyncError.invalidResponse
    }

    switch httpResponse.statusCode {
    case 200:
        break
    case 401:
        throw SyncError.unauthorized
    default:
        throw SyncError.serverError(httpResponse.statusCode)
    }

    let decoder = JSONDecoder()
    let deltaResponse = try decoder.decode(DeltaSyncResponse.self, from: data)

    AppLogger.info(
        "Delta Sync: Received gyms=\(deltaResponse.gyms.count), problems=\(deltaResponse.problems.count), sessions=\(deltaResponse.sessions.count), attempts=\(deltaResponse.attempts.count), deletions=\(deltaResponse.deletedItems.count)",
        tag: logTag
    )

    // Apply server changes to local data
    try await applyDeltaResponse(deltaResponse, dataManager: dataManager)

    // Sync only modified problem images
    try await syncModifiedImages(modifiedProblems: modifiedProblems, dataManager: dataManager)

    // Use the server's clock for the next delta window to avoid drift
    // between client and server timestamps.
    if let serverTime = formatter.date(from: deltaResponse.serverTime) {
        lastSyncTime = serverTime
    }
}
|
|
|
|
/// Applies a `DeltaSyncResponse` to local data: deletions first (to prevent
/// resurrection), then last-write-wins merges of gyms, problems, sessions,
/// and attempts, downloading any server images referenced by incoming
/// problems along the way. Persists everything at the end.
private func applyDeltaResponse(_ response: DeltaSyncResponse, dataManager: ClimbingDataManager) async throws {
    // Timestamps on backup records travel as ISO-8601 strings; comparisons
    // below are done on the string form (same-format ISO-8601 strings sort
    // chronologically).
    let formatter = ISO8601DateFormatter()

    // Merge local and server tombstones and apply them BEFORE merging data,
    // so a deleted item cannot be resurrected by an incoming record.
    let allDeletions = dataManager.getDeletedItems() + response.deletedItems
    let uniqueDeletions = Array(Set(allDeletions))

    AppLogger.info(
        "Delta Sync: Applying \(uniqueDeletions.count) deletion records before merging data",
        tag: logTag
    )
    applyDeletionsToDataManager(deletions: uniqueDeletions, dataManager: dataManager)

    // "type:id" lookup set used to skip incoming records that are deleted.
    let deletedItemSet = Set(uniqueDeletions.map { $0.type + ":" + $0.id })

    // Download images for new/modified problems from the server (best-effort:
    // missing or failed images are logged and skipped).
    var imagePathMapping: [String: String] = [:]
    for problem in response.problems {
        if deletedItemSet.contains("problem:" + problem.id) {
            continue
        }

        guard let imagePaths = problem.imagePaths, !imagePaths.isEmpty else { continue }

        for (index, imagePath) in imagePaths.enumerated() {
            // Server addresses images by bare filename, not full path.
            let serverFilename = URL(fileURLWithPath: imagePath).lastPathComponent

            do {
                let imageData = try await downloadImage(filename: serverFilename)
                // Store locally under the canonical <problemId>_<index> name.
                let consistentFilename = ImageNamingUtils.generateImageFilename(
                    problemId: problem.id, imageIndex: index)
                let imageManager = ImageManager.shared
                _ = try imageManager.saveImportedImage(imageData, filename: consistentFilename)
                imagePathMapping[serverFilename] = consistentFilename
            } catch SyncError.imageNotFound {
                AppLogger.info("Image not found on server: \(serverFilename)", tag: logTag)
                continue
            } catch {
                AppLogger.info("Failed to download image \(serverFilename): \(error)", tag: logTag)
                continue
            }
        }
    }

    // Merge gyms: take the server copy when it is at least as new as the
    // local one; otherwise keep local. Unknown IDs are appended.
    for backupGym in response.gyms {
        if deletedItemSet.contains("gym:" + backupGym.id) {
            continue
        }

        if let index = dataManager.gyms.firstIndex(where: { $0.id.uuidString == backupGym.id })
        {
            let existing = dataManager.gyms[index]
            if backupGym.updatedAt >= formatter.string(from: existing.updatedAt) {
                dataManager.gyms[index] = try backupGym.toGym()
            }
        } else {
            dataManager.gyms.append(try backupGym.toGym())
        }
    }

    // Merge problems, first rewriting image paths to the local filenames
    // established by the downloads above.
    for backupProblem in response.problems {
        if deletedItemSet.contains("problem:" + backupProblem.id) {
            continue
        }

        var problemToMerge = backupProblem
        if !imagePathMapping.isEmpty, let imagePaths = backupProblem.imagePaths {
            // NOTE(review): this closure never returns nil, so compactMap
            // behaves exactly like map here.
            let updatedPaths = imagePaths.compactMap { imagePathMapping[$0] ?? $0 }
            problemToMerge = BackupProblem(
                id: backupProblem.id,
                gymId: backupProblem.gymId,
                name: backupProblem.name,
                description: backupProblem.description,
                climbType: backupProblem.climbType,
                difficulty: backupProblem.difficulty,
                tags: backupProblem.tags,
                location: backupProblem.location,
                imagePaths: updatedPaths,
                isActive: backupProblem.isActive,
                dateSet: backupProblem.dateSet,
                notes: backupProblem.notes,
                createdAt: backupProblem.createdAt,
                updatedAt: backupProblem.updatedAt
            )
        }

        if let index = dataManager.problems.firstIndex(where: {
            $0.id.uuidString == problemToMerge.id
        }) {
            let existing = dataManager.problems[index]
            if problemToMerge.updatedAt >= formatter.string(from: existing.updatedAt) {
                dataManager.problems[index] = try problemToMerge.toProblem()
            }
        } else {
            dataManager.problems.append(try problemToMerge.toProblem())
        }
    }

    // Merge sessions with the same last-write-wins rule.
    for backupSession in response.sessions {
        if deletedItemSet.contains("session:" + backupSession.id) {
            continue
        }

        if let index = dataManager.sessions.firstIndex(where: {
            $0.id.uuidString == backupSession.id
        }) {
            let existing = dataManager.sessions[index]
            if backupSession.updatedAt >= formatter.string(from: existing.updatedAt) {
                dataManager.sessions[index] = try backupSession.toClimbSession()
            }
        } else {
            dataManager.sessions.append(try backupSession.toClimbSession())
        }
    }

    // Merge attempts; comparison uses createdAt (attempts apparently carry
    // no updatedAt — confirm against the model).
    for backupAttempt in response.attempts {
        if deletedItemSet.contains("attempt:" + backupAttempt.id) {
            continue
        }

        if let index = dataManager.attempts.firstIndex(where: {
            $0.id.uuidString == backupAttempt.id
        }) {
            let existing = dataManager.attempts[index]
            if backupAttempt.createdAt >= formatter.string(from: existing.createdAt) {
                dataManager.attempts[index] = try backupAttempt.toAttempt()
            }
        } else {
            dataManager.attempts.append(try backupAttempt.toAttempt())
        }
    }

    // Apply deletions again in case a merge above re-added a deleted item.
    applyDeletionsToDataManager(deletions: uniqueDeletions, dataManager: dataManager)

    // Persist all merged collections.
    dataManager.saveGyms()
    dataManager.saveProblems()
    dataManager.saveSessions()
    dataManager.saveAttempts()

    // Replace stored tombstones with the merged, de-duplicated list.
    dataManager.clearDeletedItems()
    if let data = try? JSONEncoder().encode(uniqueDeletions) {
        UserDefaults.standard.set(data, forKey: "ascently_deleted_items")
    }

    DataStateManager.shared.updateDataState()
}
|
|
|
|
/// Removes every entity referenced by `deletions` from the in-memory
/// collections of `dataManager`. Purely in-memory: callers are responsible
/// for persisting afterwards.
private func applyDeletionsToDataManager(
    deletions: [DeletedItem], dataManager: ClimbingDataManager
) {
    // Gather the deleted IDs for one entity type into a Set for O(1) lookups.
    func deletedIds(ofType entityType: String) -> Set<String> {
        Set(deletions.filter { $0.type == entityType }.map { $0.id })
    }

    let gymIds = deletedIds(ofType: "gym")
    let problemIds = deletedIds(ofType: "problem")
    let sessionIds = deletedIds(ofType: "session")
    let attemptIds = deletedIds(ofType: "attempt")

    dataManager.gyms.removeAll { gymIds.contains($0.id.uuidString) }
    dataManager.problems.removeAll { problemIds.contains($0.id.uuidString) }
    dataManager.sessions.removeAll { sessionIds.contains($0.id.uuidString) }
    dataManager.attempts.removeAll { attemptIds.contains($0.id.uuidString) }
}
|
|
|
|
/// Uploads images for the problems changed in this delta pass, renaming
/// local files to the canonical <problemId>_<index> filename first.
/// Upload failures are logged and skipped so one bad image does not abort
/// the sync.
private func syncModifiedImages(
    modifiedProblems: [BackupProblem], dataManager: ClimbingDataManager
) async throws {
    guard !modifiedProblems.isEmpty else { return }

    AppLogger.info("Delta Sync: Syncing images for \(modifiedProblems.count) modified problems", tag: logTag)

    for backupProblem in modifiedProblems {
        // Resolve the live problem; skip if it no longer exists locally.
        guard
            let problem = dataManager.problems.first(where: {
                $0.id.uuidString == backupProblem.id
            })
        else {
            continue
        }

        for (index, imagePath) in problem.imagePaths.enumerated() {
            let filename = URL(fileURLWithPath: imagePath).lastPathComponent
            let consistentFilename = ImageNamingUtils.generateImageFilename(
                problemId: problem.id.uuidString, imageIndex: index)

            let imageManager = ImageManager.shared
            let fullPath = imageManager.imagesDirectory.appendingPathComponent(filename).path

            // NOTE(review): if the file was already renamed by an earlier
            // sync, loading under the old name returns nil and the image is
            // silently skipped — confirm this is intended.
            if let imageData = imageManager.loadImageData(fromPath: fullPath) {
                do {
                    // Rename on disk so the local file matches the name
                    // being uploaded; failure to move is tolerated (try?).
                    if filename != consistentFilename {
                        let newPath = imageManager.imagesDirectory.appendingPathComponent(
                            consistentFilename
                        ).path
                        try? FileManager.default.moveItem(atPath: fullPath, toPath: newPath)
                    }

                    try await uploadImage(filename: consistentFilename, imageData: imageData)
                    AppLogger.info("Uploaded modified problem image: \(consistentFilename)", tag: logTag)
                } catch {
                    AppLogger.info("Failed to upload image \(consistentFilename): \(error)", tag: logTag)
                }
            }
        }
    }
}
|
|
|
|
/// Uploads raw image bytes to the server (`POST /images/upload`).
/// - Parameters:
///   - filename: Server-side filename, sent as a percent-encoded query item.
///   - imageData: Raw bytes, sent as application/octet-stream.
/// - Throws: `SyncError.invalidURL`, `.invalidResponse`, `.unauthorized`
///   (401), or `.serverError` for other non-200 codes.
private func uploadImage(filename: String, imageData: Data) async throws {
    // Build the URL with URLComponents so filenames containing spaces or
    // other reserved characters are percent-encoded. The previous string
    // interpolation produced a nil/malformed URL for such filenames.
    guard var components = URLComponents(string: "\(serverURL)/images/upload") else {
        throw SyncError.invalidURL
    }
    components.queryItems = [URLQueryItem(name: "filename", value: filename)]
    guard let url = components.url else {
        throw SyncError.invalidURL
    }

    var request = URLRequest(url: url)
    request.httpMethod = "POST"
    request.setValue("Bearer \(authToken)", forHTTPHeaderField: "Authorization")
    request.setValue("application/octet-stream", forHTTPHeaderField: "Content-Type")
    request.httpBody = imageData

    // Images can be large; allow a generous timeout and never satisfy an
    // upload from the URL cache.
    request.timeoutInterval = 60.0
    request.cachePolicy = .reloadIgnoringLocalCacheData

    let (_, response) = try await URLSession.shared.data(for: request)

    guard let httpResponse = response as? HTTPURLResponse else {
        throw SyncError.invalidResponse
    }

    switch httpResponse.statusCode {
    case 200:
        break
    case 401:
        throw SyncError.unauthorized
    default:
        throw SyncError.serverError(httpResponse.statusCode)
    }
}
|
|
|
|
/// Downloads an image's raw bytes from the server (`GET /images/download`).
/// - Parameter filename: Server-side filename, sent as a percent-encoded
///   query item.
/// - Returns: The image bytes.
/// - Throws: `SyncError.invalidURL`, `.invalidResponse`, `.unauthorized`
///   (401), `.imageNotFound` (404), or `.serverError` for other non-200
///   codes. Callers treat `.imageNotFound` as a soft failure.
private func downloadImage(filename: String) async throws -> Data {
    // Build the URL with URLComponents so filenames containing spaces or
    // other reserved characters are percent-encoded. The previous string
    // interpolation produced a nil/malformed URL for such filenames.
    guard var components = URLComponents(string: "\(serverURL)/images/download") else {
        throw SyncError.invalidURL
    }
    components.queryItems = [URLQueryItem(name: "filename", value: filename)]
    guard let url = components.url else {
        throw SyncError.invalidURL
    }

    var request = URLRequest(url: url)
    request.httpMethod = "GET"
    request.setValue("Bearer \(authToken)", forHTTPHeaderField: "Authorization")

    // Cached copies are acceptable for image downloads.
    request.timeoutInterval = 45.0
    request.cachePolicy = .returnCacheDataElseLoad

    let (data, response) = try await URLSession.shared.data(for: request)

    guard let httpResponse = response as? HTTPURLResponse else {
        throw SyncError.invalidResponse
    }

    switch httpResponse.statusCode {
    case 200:
        return data
    case 401:
        throw SyncError.unauthorized
    case 404:
        throw SyncError.imageNotFound
    default:
        throw SyncError.serverError(httpResponse.statusCode)
    }
}
|
|
|
|
/// Downloads every image referenced by `backup` and stores it locally under
/// a deterministic <problemId>_<index> filename.
/// - Returns: Mapping from server-side filename to local filename, used by
///   importers to rewrite problem image paths.
/// Best-effort: missing or failed downloads are logged and skipped.
private func syncImagesFromServer(backup: ClimbDataBackup, dataManager: ClimbingDataManager)
    async throws -> [String: String]
{
    var imagePathMapping: [String: String] = [:]

    for problem in backup.problems {
        guard let imagePaths = problem.imagePaths, !imagePaths.isEmpty else { continue }

        for (index, imagePath) in imagePaths.enumerated() {
            // Server addresses images by bare filename, not full path.
            let serverFilename = URL(fileURLWithPath: imagePath).lastPathComponent

            do {
                let imageData = try await downloadImage(filename: serverFilename)

                // Re-key the file to the canonical <problemId>_<index> name.
                let consistentFilename = ImageNamingUtils.generateImageFilename(
                    problemId: problem.id, imageIndex: index)

                let imageManager = ImageManager.shared
                _ = try imageManager.saveImportedImage(
                    imageData, filename: consistentFilename)

                imagePathMapping[serverFilename] = consistentFilename
                AppLogger.info("Downloaded and mapped image: \(serverFilename) -> \(consistentFilename)", tag: logTag)
            } catch SyncError.imageNotFound {
                AppLogger.info("Image not found on server: \(serverFilename)", tag: logTag)
                continue
            } catch {
                AppLogger.info("Failed to download image \(serverFilename): \(error)", tag: logTag)
                continue
            }
        }
    }

    return imagePathMapping
}
|
|
|
|
/// Uploads every local problem image to the server, renaming files on disk
/// to the canonical <problemId>_<index> filename as it goes. Failures are
/// logged and skipped so one bad image does not abort the sync.
private func syncImagesToServer(dataManager: ClimbingDataManager) async throws {
    // Process images by problem to ensure consistent naming
    for problem in dataManager.problems {
        guard !problem.imagePaths.isEmpty else { continue }

        for (index, imagePath) in problem.imagePaths.enumerated() {
            let filename = URL(fileURLWithPath: imagePath).lastPathComponent

            let consistentFilename = ImageNamingUtils.generateImageFilename(
                problemId: problem.id.uuidString, imageIndex: index)

            // Load image data
            let imageManager = ImageManager.shared
            let fullPath = imageManager.imagesDirectory.appendingPathComponent(filename).path

            // NOTE(review): if the file was already renamed by a previous
            // sync, loading under the old name returns nil and the image is
            // silently skipped — confirm this is intended.
            if let imageData = imageManager.loadImageData(fromPath: fullPath) {
                do {
                    // If filename changed, rename local file
                    if filename != consistentFilename {
                        let newPath = imageManager.imagesDirectory.appendingPathComponent(
                            consistentFilename
                        ).path
                        do {
                            try FileManager.default.moveItem(atPath: fullPath, toPath: newPath)
                            AppLogger.info("Renamed local image: \(filename) -> \(consistentFilename)", tag: logTag)

                            // Update problem's image path in memory for consistency
                        } catch {
                            // Rename failure is tolerated; upload proceeds
                            // with the data already read from the old path.
                            AppLogger.info("Failed to rename local image, using original: \(error)", tag: logTag)
                        }
                    }

                    try await uploadImage(filename: consistentFilename, imageData: imageData)
                    AppLogger.info("Successfully uploaded image: \(consistentFilename)", tag: logTag)
                } catch {
                    AppLogger.info("Failed to upload image \(consistentFilename): \(error)", tag: logTag)
                    // Continue with other images even if one fails
                }
            }
        }
    }
}
|
|
|
|
/// Builds a `ClimbDataBackup` snapshot of the data manager's current state.
/// Active (in-progress) sessions and their attempts are deliberately left
/// out so that syncing cannot clobber a session the user is still climbing.
private func createBackupFromDataManager(_ dataManager: ClimbingDataManager) -> ClimbDataBackup
{
    // Identify in-progress sessions so their attempts can be excluded too.
    let inProgressIds = Set(
        dataManager.sessions.filter { $0.status == .active }.map { $0.id })
    let finishedSessions = dataManager.sessions.filter { $0.status != .active }
    let finishedAttempts = dataManager.attempts.filter {
        !inProgressIds.contains($0.sessionId)
    }

    AppLogger.info(
        "Excluding \(dataManager.sessions.count - finishedSessions.count) active sessions and \(dataManager.attempts.count - finishedAttempts.count) active session attempts from sync",
        tag: logTag
    )

    return ClimbDataBackup(
        exportedAt: DataStateManager.shared.getLastModified(),
        gyms: dataManager.gyms.map { BackupGym(from: $0) },
        problems: dataManager.problems.map { BackupProblem(from: $0) },
        sessions: finishedSessions.map { BackupClimbSession(from: $0) },
        attempts: finishedAttempts.map { BackupAttempt(from: $0) },
        deletedItems: dataManager.getDeletedItems()
    )
}
|
|
|
|
/// First-time merge used when both client and server already hold data but
/// have never synced: downloads server images, delegates conflict
/// resolution to `SyncMerger`, persists the merged result locally, then
/// uploads the merged state back so both sides converge.
private func mergeDataSafely(
    localBackup: ClimbDataBackup,
    serverBackup: ClimbDataBackup,
    dataManager: ClimbingDataManager
) async throws {
    // Download server images first so merged problems can reference them.
    let imagePathMapping = try await syncImagesFromServer(
        backup: serverBackup, dataManager: dataManager)

    // Conflict resolution itself lives in SyncMerger.
    let mergedResult = try SyncMerger.mergeDataSafely(
        localBackup: localBackup,
        serverBackup: serverBackup,
        dataManager: dataManager,
        imagePathMapping: imagePathMapping
    )

    // Replace in-memory collections with the merged result.
    dataManager.gyms = mergedResult.gyms
    dataManager.problems = mergedResult.problems
    dataManager.sessions = mergedResult.sessions
    dataManager.attempts = mergedResult.attempts

    // Persist everything, including the active-session record.
    dataManager.saveGyms()
    dataManager.saveProblems()
    dataManager.saveSessions()
    dataManager.saveAttempts()
    dataManager.saveActiveSession()

    // Replace local tombstones with the merged deletion list.
    dataManager.clearDeletedItems()
    if let data = try? JSONEncoder().encode(mergedResult.uniqueDeletions) {
        UserDefaults.standard.set(data, forKey: "ascently_deleted_items")
    }

    // Upload merged data back to server
    let mergedBackup = createBackupFromDataManager(dataManager)
    _ = try await uploadData(mergedBackup)
    try await syncImagesToServer(dataManager: dataManager)

    // Update timestamp
    DataStateManager.shared.updateDataState()
}
|
|
|
|
/// Replaces local data with `backup` via the app's ZIP import path, while
/// preserving any currently active sessions (and their attempts) that were
/// not deleted, rewriting problem image paths through `imagePathMapping`,
/// and filtering out anything listed in the backup's deletion records.
/// - Parameters:
///   - backup: Server-provided dataset to import.
///   - dataManager: Target of the import.
///   - imagePathMapping: server filename -> local filename map produced by
///     `syncImagesFromServer`; empty when no images were downloaded.
/// - Throws: `SyncError.importFailed` wrapping any underlying error.
/// NOTE(review): the two branches below duplicate the deleted-ID filtering
/// verbatim and differ only in whether problems are rewritten — candidates
/// for extraction into a shared helper.
private func importBackupToDataManager(
    _ backup: ClimbDataBackup, dataManager: ClimbingDataManager,
    imagePathMapping: [String: String] = [:]
) throws {
    do {
        // Capture active sessions and their attempts before the import wipes
        // local state — but drop any that appear in a deletion record.
        let localDeletedItems = dataManager.getDeletedItems()
        let allDeletedSessionIds = Set(
            (backup.deletedItems + localDeletedItems)
                .filter { $0.type == "session" }
                .map { $0.id }
        )
        let activeSessions = dataManager.sessions.filter {
            $0.status == .active && !allDeletedSessionIds.contains($0.id.uuidString)
        }
        let activeSessionIds = Set(activeSessions.map { $0.id })
        let allDeletedAttemptIds = Set(
            (backup.deletedItems + localDeletedItems)
                .filter { $0.type == "attempt" }
                .map { $0.id }
        )
        let activeAttempts = dataManager.attempts.filter {
            activeSessionIds.contains($0.sessionId)
                && !allDeletedAttemptIds.contains($0.id.uuidString)
        }

        AppLogger.info(
            "Preserving \(activeSessions.count) active sessions and \(activeAttempts.count) active attempts during import",
            tag: logTag
        )

        // Build the backup actually imported: image paths rewritten to local
        // filenames (when a mapping exists) and deleted items filtered out.
        let updatedBackup: ClimbDataBackup
        if !imagePathMapping.isEmpty {
            let updatedProblems = backup.problems.map { problem in
                // NOTE(review): the closure never returns nil, so compactMap
                // behaves exactly like map here.
                let updatedImagePaths = problem.imagePaths?.compactMap { oldPath in
                    imagePathMapping[oldPath] ?? oldPath
                }
                return BackupProblem(
                    id: problem.id,
                    gymId: problem.gymId,
                    name: problem.name,
                    description: problem.description,
                    climbType: problem.climbType,
                    difficulty: problem.difficulty,
                    tags: problem.tags,
                    location: problem.location,
                    imagePaths: updatedImagePaths,
                    isActive: problem.isActive,
                    dateSet: problem.dateSet,
                    notes: problem.notes,
                    createdAt: problem.createdAt,
                    updatedAt: problem.updatedAt
                )
            }
            // Filter out deleted items before creating updated backup
            let deletedGymIds = Set(
                backup.deletedItems.filter { $0.type == "gym" }.map { $0.id })
            let deletedProblemIds = Set(
                backup.deletedItems.filter { $0.type == "problem" }.map { $0.id })
            let deletedSessionIds = Set(
                backup.deletedItems.filter { $0.type == "session" }.map { $0.id })
            let deletedAttemptIds = Set(
                backup.deletedItems.filter { $0.type == "attempt" }.map { $0.id })

            let filteredGyms = backup.gyms.filter { !deletedGymIds.contains($0.id) }
            let filteredProblems = updatedProblems.filter { !deletedProblemIds.contains($0.id) }
            let filteredSessions = backup.sessions.filter { !deletedSessionIds.contains($0.id) }
            let filteredAttempts = backup.attempts.filter { !deletedAttemptIds.contains($0.id) }

            updatedBackup = ClimbDataBackup(
                exportedAt: backup.exportedAt,
                version: backup.version,
                formatVersion: backup.formatVersion,
                gyms: filteredGyms,
                problems: filteredProblems,
                sessions: filteredSessions,
                attempts: filteredAttempts,
                deletedItems: backup.deletedItems
            )
        } else {
            // Filter out deleted items even when no image path mapping
            let deletedGymIds = Set(
                backup.deletedItems.filter { $0.type == "gym" }.map { $0.id })
            let deletedProblemIds = Set(
                backup.deletedItems.filter { $0.type == "problem" }.map { $0.id })
            let deletedSessionIds = Set(
                backup.deletedItems.filter { $0.type == "session" }.map { $0.id })
            let deletedAttemptIds = Set(
                backup.deletedItems.filter { $0.type == "attempt" }.map { $0.id })

            let filteredGyms = backup.gyms.filter { !deletedGymIds.contains($0.id) }
            let filteredProblems = backup.problems.filter { !deletedProblemIds.contains($0.id) }
            let filteredSessions = backup.sessions.filter { !deletedSessionIds.contains($0.id) }
            let filteredAttempts = backup.attempts.filter { !deletedAttemptIds.contains($0.id) }

            updatedBackup = ClimbDataBackup(
                exportedAt: backup.exportedAt,
                version: backup.version,
                formatVersion: backup.formatVersion,
                gyms: filteredGyms,
                problems: filteredProblems,
                sessions: filteredSessions,
                attempts: filteredAttempts,
                deletedItems: backup.deletedItems
            )
        }

        // Route the data through the app's existing ZIP-based import so all
        // restoration side effects stay in one code path.
        let zipData = try createMinimalZipFromBackup(updatedBackup)

        // Use existing import method which properly handles data restoration
        try dataManager.importData(from: zipData, showSuccessMessage: false)

        // Re-attach the preserved active sessions and attempts.
        for session in activeSessions {
            AppLogger.info("Restoring active session: \(session.id)", tag: logTag)
            dataManager.sessions.append(session)
            if session.id == dataManager.activeSession?.id {
                dataManager.activeSession = session
            }
        }

        for attempt in activeAttempts {
            dataManager.attempts.append(attempt)
        }

        // Save restored data
        dataManager.saveSessions()
        dataManager.saveAttempts()
        dataManager.saveActiveSession()

        // Import deletion records to prevent future resurrections
        dataManager.clearDeletedItems()
        if let data = try? JSONEncoder().encode(backup.deletedItems) {
            UserDefaults.standard.set(data, forKey: "ascently_deleted_items")
            AppLogger.info("Imported \(backup.deletedItems.count) deletion records", tag: logTag)
        }

        // Update local data state to match imported data timestamp
        DataStateManager.shared.setLastModified(backup.exportedAt)
        AppLogger.info("Data state synchronized to imported timestamp: \(backup.exportedAt)", tag: logTag)
    } catch {
        throw SyncError.importFailed(error)
    }
}
|
|
|
|
// Copied from SyncService.swift
|
|
/// Serializes `backup` to pretty-printed JSON and packages it — together with
/// a `metadata.txt` and every problem image found on disk — into an
/// uncompressed ("stored") ZIP archive that the existing import mechanism can
/// consume.
///
/// - Parameter backup: The snapshot to package.
/// - Returns: Raw bytes of the generated ZIP archive.
/// - Throws: Any error raised while JSON-encoding the backup or appending
///   entries to the archive.
private func createMinimalZipFromBackup(_ backup: ClimbDataBackup) throws -> Data {
    let encoder = JSONEncoder()
    encoder.outputFormatting = .prettyPrinted

    // Create the formatter once and capture it: DateFormatter construction is
    // expensive, and rebuilding it for every encoded date (as the original
    // code did) is pure overhead.
    // NOTE(review): the formatter uses the device locale/time zone; a fixed
    // en_US_POSIX locale is usually safer for wire formats — confirm before
    // changing, since that would alter the emitted timestamps.
    let dateFormatter = DateFormatter()
    dateFormatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSSSSS"
    encoder.dateEncodingStrategy = .custom { date, encoder in
        var container = encoder.singleValueContainer()
        try container.encode(dateFormatter.string(from: date))
    }
    let jsonData = try encoder.encode(backup)

    // Collect image data for every image referenced by the persisted problem
    // list (stored under the same UserDefaults key ClimbingDataManager uses).
    let imageManager = ImageManager.shared
    var imageFiles: [(filename: String, data: Data)] = []

    if let problemsData = userDefaults.data(forKey: "ascently_problems"),
        let problems = try? JSONDecoder().decode([Problem].self, from: problemsData)
    {
        for problem in problems {
            for (index, imagePath) in problem.imagePaths.enumerated() {
                // Resolve the actual on-disk location from the stored path's
                // final component.
                let actualFilename = URL(fileURLWithPath: imagePath).lastPathComponent
                let fullPath = imageManager.imagesDirectory.appendingPathComponent(
                    actualFilename
                ).path

                // Inside the ZIP the image gets a normalized, index-based name.
                let normalizedFilename = ImageNamingUtils.generateImageFilename(
                    problemId: problem.id.uuidString, imageIndex: index)

                if let imageData = imageManager.loadImageData(fromPath: fullPath) {
                    imageFiles.append((filename: normalizedFilename, data: imageData))
                }
            }
        }
    }

    // Assemble the archive: data.json, metadata.txt, then images/…
    // (Restored the `&currentOffset` inout arguments, which had been
    // corrupted to "¤tOffset" by an HTML-entity mangling.)
    var zipData = Data()
    var fileEntries: [(name: String, data: Data, offset: UInt32)] = []
    var currentOffset: UInt32 = 0

    try addFileToMinimalZip(
        filename: "data.json",
        fileData: jsonData,
        zipData: &zipData,
        fileEntries: &fileEntries,
        currentOffset: &currentOffset
    )

    // Metadata records the actual number of bundled images.
    let metadata = "export_version=2.0\nformat_version=2.0\nimage_count=\(imageFiles.count)"
    let metadataData = metadata.data(using: .utf8) ?? Data()
    try addFileToMinimalZip(
        filename: "metadata.txt",
        fileData: metadataData,
        zipData: &zipData,
        fileEntries: &fileEntries,
        currentOffset: &currentOffset
    )

    for imageFile in imageFiles {
        try addFileToMinimalZip(
            filename: "images/\(imageFile.filename)",
            fileData: imageFile.data,
            zipData: &zipData,
            fileEntries: &fileEntries,
            currentOffset: &currentOffset
        )
    }

    // Central directory: one header per entry, followed by the end-of-
    // central-directory record that points back at it.
    var centralDirectory = Data()
    for entry in fileEntries {
        centralDirectory.append(createCentralDirectoryHeader(entry: entry))
    }

    let endOfCentralDir = createEndOfCentralDirectoryRecord(
        fileCount: UInt16(fileEntries.count),
        centralDirSize: UInt32(centralDirectory.count),
        centralDirOffset: currentOffset
    )

    zipData.append(centralDirectory)
    zipData.append(endOfCentralDir)

    return zipData
}
|
|
|
|
/// Appends one stored (uncompressed) entry to the in-progress archive:
/// writes the local file header plus the raw bytes into `zipData`, records
/// the entry for the later central directory, and advances the running
/// byte offset.
///
/// - Parameters:
///   - filename: Entry path inside the archive.
///   - fileData: Raw, uncompressed contents of the entry.
///   - zipData: Archive buffer being built up; appended to in place.
///   - fileEntries: Accumulated entries for the central directory.
///   - currentOffset: Byte offset of the next local header; updated in place.
private func addFileToMinimalZip(
    filename: String,
    fileData: Data,
    zipData: inout Data,
    fileEntries: inout [(name: String, data: Data, offset: UInt32)],
    currentOffset: inout UInt32
) throws {
    let header = createLocalFileHeader(filename: filename, fileSize: UInt32(fileData.count))

    // Record the entry at the offset where its local header begins.
    fileEntries.append((name: filename, data: fileData, offset: currentOffset))

    zipData.append(header)
    zipData.append(fileData)
    currentOffset += UInt32(header.count + fileData.count)
}
|
|
|
|
/// Builds the 30-byte ZIP local file header (PKWARE APPNOTE §4.3.7) followed
/// by the UTF-8 file name. Entries are stored uncompressed; the timestamp and
/// CRC-32 fields are deliberately left zero, matching the central directory
/// produced by `createCentralDirectoryHeader`.
private func createLocalFileHeader(filename: String, fileSize: UInt32) -> Data {
    var header = Data()

    // Little-endian field writers, appending directly to `header`.
    func le16(_ value: UInt16) {
        withUnsafeBytes(of: value.littleEndian) { header.append(Data($0)) }
    }
    func le32(_ value: UInt32) {
        withUnsafeBytes(of: value.littleEndian) { header.append(Data($0)) }
    }

    let nameBytes = Data(filename.utf8)

    header.append(contentsOf: [0x50, 0x4b, 0x03, 0x04])  // signature "PK\3\4"
    le16(0x0014)                  // version needed to extract: 2.0
    le16(0)                       // general purpose bit flag
    le16(0)                       // compression method: 0 = stored
    le16(0)                       // last mod file time (not tracked)
    le16(0)                       // last mod file date (not tracked)
    le32(0)                       // CRC-32 left zero (dummy — no compression)
    le32(fileSize)                // compressed size (stored == uncompressed)
    le32(fileSize)                // uncompressed size
    le16(UInt16(nameBytes.count)) // file name length
    le16(0)                       // extra field length
    header.append(nameBytes)      // file name

    return header
}
|
|
|
|
/// Builds the 46-byte ZIP central directory header (PKWARE APPNOTE §4.3.12)
/// for one entry, followed by its UTF-8 file name. Fields mirror the local
/// header: stored (no compression), zero timestamp and zero CRC-32.
private func createCentralDirectoryHeader(entry: (name: String, data: Data, offset: UInt32))
    -> Data
{
    var header = Data()

    // Little-endian field writers, appending directly to `header`.
    func le16(_ value: UInt16) {
        withUnsafeBytes(of: value.littleEndian) { header.append(Data($0)) }
    }
    func le32(_ value: UInt32) {
        withUnsafeBytes(of: value.littleEndian) { header.append(Data($0)) }
    }

    let nameBytes = Data(entry.name.utf8)
    let storedSize = UInt32(entry.data.count)

    header.append(contentsOf: [0x50, 0x4b, 0x01, 0x02])  // signature "PK\1\2"
    le16(0x0014)                  // version made by: 2.0
    le16(0x0014)                  // version needed to extract: 2.0
    le16(0)                       // general purpose bit flag
    le16(0)                       // compression method: 0 = stored
    le16(0)                       // last mod file time (not tracked)
    le16(0)                       // last mod file date (not tracked)
    le32(0)                       // CRC-32 (zero, matches local header)
    le32(storedSize)              // compressed size (stored == uncompressed)
    le32(storedSize)              // uncompressed size
    le16(UInt16(nameBytes.count)) // file name length
    le16(0)                       // extra field length
    le16(0)                       // file comment length
    le16(0)                       // disk number start
    le16(0)                       // internal file attributes
    le32(0)                       // external file attributes
    le32(entry.offset)            // relative offset of the local header
    header.append(nameBytes)      // file name

    return header
}
|
|
|
|
/// Builds the 22-byte ZIP end-of-central-directory record (PKWARE APPNOTE
/// §4.3.16) for a single-disk archive with no trailing comment.
///
/// - Parameters:
///   - fileCount: Number of entries in the central directory.
///   - centralDirSize: Total size in bytes of all central directory headers.
///   - centralDirOffset: Byte offset where the central directory begins.
private func createEndOfCentralDirectoryRecord(
    fileCount: UInt16, centralDirSize: UInt32, centralDirOffset: UInt32
) -> Data {
    var record = Data()

    // Little-endian field writers, appending directly to `record`.
    func le16(_ value: UInt16) {
        withUnsafeBytes(of: value.littleEndian) { record.append(Data($0)) }
    }
    func le32(_ value: UInt32) {
        withUnsafeBytes(of: value.littleEndian) { record.append(Data($0)) }
    }

    record.append(contentsOf: [0x50, 0x4b, 0x05, 0x06])  // signature "PK\5\6"
    le16(0)                 // number of this disk
    le16(0)                 // disk holding the start of the central directory
    le16(fileCount)         // central directory entries on this disk
    le16(fileCount)         // total central directory entries
    le32(centralDirSize)    // size of the central directory
    le32(centralDirOffset)  // offset of the start of the central directory
    le16(0)                 // ZIP file comment length

    return record
}
|
|
}
|