Skip to content

Commit e6c6226

Browse files
authored
fix: serialize WebDAV operations in code watch (#331)
* fix: serialize WebDAV operations in code watch to prevent lock conflicts

  The debounced processChanges could fire concurrently when uploads took longer than the debounce window, causing overlapping WebDAV operations (zip/upload/unzip/delete) that produced server-side lock errors.

  - Add isProcessing mutex so only one batch runs at a time; new events accumulate in the sets and are picked up by a while loop
  - Clear opposite set on each file event so the sets always reflect the latest desired action per file
  - Re-queue files on upload failure so they retry after the 5s rate-limit wait instead of being silently dropped
  - Rate-limit path now waits instead of returning early (which stranded pending items if no new events arrived)

* chore: add changeset for watch serialization fix
1 parent e65bd15 commit e6c6226

2 files changed

Lines changed: 115 additions & 88 deletions

File tree

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'@salesforce/b2c-tooling-sdk': patch
3+
---
4+
5+
Fix `code watch` WebDAV lock conflicts by serializing upload and delete operations so only one batch runs at a time. Failed uploads are now retried automatically.

packages/b2c-tooling-sdk/src/operations/code/watch.ts

Lines changed: 110 additions & 88 deletions
Original file line numberDiff line numberDiff line change
@@ -156,111 +156,131 @@ export async function watchCartridges(
156156
const filesToUpload = new Set<string>();
157157
const filesToDelete = new Set<string>();
158158
let lastErrorTime = 0;
159+
let isProcessing = false;
159160

160161
/**
161-
* Processes batched file changes.
162+
* Processes all pending file changes, serializing WebDAV operations.
163+
* Only one instance runs at a time — if new changes accumulate during
164+
* processing, the while loop picks them up in the next iteration.
162165
*/
163-
const processChanges = debounce(async () => {
164-
const now = Date.now();
165-
166-
// Rate limit on errors
167-
if (now - lastErrorTime < 5000) {
168-
logger.debug('Rate limiting after recent error, waiting...');
169-
return;
170-
}
171-
172-
const uploadFiles = Array.from(filesToUpload)
173-
.map((f) => fileToCartridgePath(f, cartridges))
174-
.filter((f): f is NonNullable<typeof f> => f !== undefined);
166+
async function runProcessing(): Promise<void> {
167+
if (isProcessing) return;
168+
isProcessing = true;
169+
170+
try {
171+
while (filesToUpload.size > 0 || filesToDelete.size > 0) {
172+
// Rate limit on errors — wait instead of dropping items
173+
const timeSinceError = Date.now() - lastErrorTime;
174+
if (timeSinceError < 5000) {
175+
const waitTime = 5000 - timeSinceError;
176+
logger.debug({waitTime}, 'Rate limiting after recent error, waiting...');
177+
await new Promise((resolve) => setTimeout(resolve, waitTime));
178+
}
175179

176-
const deleteFiles = Array.from(filesToDelete)
177-
.map((f) => fileToCartridgePath(f, cartridges))
178-
.filter((f): f is NonNullable<typeof f> => f !== undefined);
180+
const uploadFiles = Array.from(filesToUpload)
181+
.map((f) => fileToCartridgePath(f, cartridges))
182+
.filter((f): f is NonNullable<typeof f> => f !== undefined);
179183

180-
filesToUpload.clear();
181-
filesToDelete.clear();
184+
const deleteFiles = Array.from(filesToDelete)
185+
.map((f) => fileToCartridgePath(f, cartridges))
186+
.filter((f): f is NonNullable<typeof f> => f !== undefined);
182187

183-
// Filter out files that no longer exist
184-
const validUploadFiles = uploadFiles.filter((f) => {
185-
if (!fs.existsSync(f.src)) {
186-
logger.debug({file: f.src}, 'Skipping missing file');
187-
return false;
188-
}
189-
return true;
190-
});
188+
filesToUpload.clear();
189+
filesToDelete.clear();
191190

192-
// Upload files
193-
if (validUploadFiles.length > 0) {
194-
const uploadPath = `${webdavLocation}/_upload-${now}.zip`;
191+
// Filter out files that no longer exist
192+
const validUploadFiles = uploadFiles.filter((f) => {
193+
if (!fs.existsSync(f.src)) {
194+
logger.debug({file: f.src}, 'Skipping missing file');
195+
return false;
196+
}
197+
return true;
198+
});
195199

196-
try {
197-
const zip = new JSZip();
200+
// Upload files
201+
if (validUploadFiles.length > 0) {
202+
const uploadPath = `${webdavLocation}/_upload-${Date.now()}.zip`;
198203

199-
for (const f of validUploadFiles) {
200204
try {
201-
const content = await fs.promises.readFile(f.src);
202-
zip.file(f.dest, content);
205+
const zip = new JSZip();
206+
207+
for (const f of validUploadFiles) {
208+
try {
209+
const content = await fs.promises.readFile(f.src);
210+
zip.file(f.dest, content);
211+
} catch (error) {
212+
logger.warn({file: f.src, error}, 'Failed to add file to archive');
213+
}
214+
}
215+
216+
const buffer = await zip.generateAsync({
217+
type: 'nodebuffer',
218+
compression: 'DEFLATE',
219+
compressionOptions: {level: 5},
220+
});
221+
222+
await webdav.put(uploadPath, buffer, 'application/zip');
223+
logger.debug({uploadPath}, 'Archive uploaded');
224+
225+
const response = await webdav.request(uploadPath, {
226+
method: 'POST',
227+
body: UNZIP_BODY,
228+
headers: {
229+
'Content-Type': 'application/x-www-form-urlencoded',
230+
},
231+
});
232+
233+
if (!response.ok) {
234+
throw new Error(`Unzip failed: ${response.status}`);
235+
}
236+
237+
await webdav.delete(uploadPath);
238+
239+
logger.debug(
240+
{fileCount: validUploadFiles.length, server: instance.config.hostname},
241+
`Uploaded ${validUploadFiles.length} file(s)`,
242+
);
243+
244+
options.onUpload?.(validUploadFiles.map((f) => f.dest));
203245
} catch (error) {
204-
logger.warn({file: f.src, error}, 'Failed to add file to archive');
246+
lastErrorTime = Date.now();
247+
// Re-queue so the while loop retries after rate-limit wait
248+
for (const f of validUploadFiles) {
249+
filesToUpload.add(f.src);
250+
}
251+
const err = error instanceof Error ? error : new Error(String(error));
252+
logger.error({error: err}, `Upload error: ${err.message}`);
253+
options.onError?.(err);
205254
}
206255
}
207256

208-
const buffer = await zip.generateAsync({
209-
type: 'nodebuffer',
210-
compression: 'DEFLATE',
211-
compressionOptions: {level: 5},
212-
});
213-
214-
await webdav.put(uploadPath, buffer, 'application/zip');
215-
logger.debug({uploadPath}, 'Archive uploaded');
216-
217-
const response = await webdav.request(uploadPath, {
218-
method: 'POST',
219-
body: UNZIP_BODY,
220-
headers: {
221-
'Content-Type': 'application/x-www-form-urlencoded',
222-
},
223-
});
257+
// Skip deletes for any file that was also uploaded in this batch (disk state wins)
258+
const uploadedPaths = new Set(validUploadFiles.map((f) => f.dest));
259+
const filesToDeleteFiltered = deleteFiles.filter((f) => !uploadedPaths.has(f.dest));
260+
261+
if (filesToDeleteFiltered.length > 0) {
262+
logger.debug({fileCount: filesToDeleteFiltered.length}, `Deleting ${filesToDeleteFiltered.length} file(s)`);
263+
264+
for (const f of filesToDeleteFiltered) {
265+
const deletePath = `${webdavLocation}/${f.dest}`;
266+
try {
267+
await webdav.delete(deletePath);
268+
logger.info({path: deletePath}, `Deleted: ${deletePath}`);
269+
} catch (error) {
270+
logger.debug({path: deletePath, error}, `Failed to delete ${deletePath}`);
271+
}
272+
}
224273

225-
if (!response.ok) {
226-
throw new Error(`Unzip failed: ${response.status}`);
274+
options.onDelete?.(filesToDeleteFiltered.map((f) => f.dest));
227275
}
228-
229-
await webdav.delete(uploadPath);
230-
231-
logger.debug(
232-
{fileCount: validUploadFiles.length, server: instance.config.hostname},
233-
`Uploaded ${validUploadFiles.length} file(s)`,
234-
);
235-
236-
options.onUpload?.(validUploadFiles.map((f) => f.dest));
237-
} catch (error) {
238-
lastErrorTime = now;
239-
const err = error instanceof Error ? error : new Error(String(error));
240-
logger.error({error: err}, `Upload error: ${err.message}`);
241-
options.onError?.(err);
242276
}
277+
} finally {
278+
isProcessing = false;
243279
}
280+
}
244281

245-
// Delete files (filter out recently uploaded to prevent conflicts)
246-
const recentlyUploadedPaths = new Set(validUploadFiles.map((f) => f.dest));
247-
const filesToDeleteFiltered = deleteFiles.filter((f) => !recentlyUploadedPaths.has(f.dest));
248-
249-
if (filesToDeleteFiltered.length > 0) {
250-
logger.debug({fileCount: filesToDeleteFiltered.length}, `Deleting ${filesToDeleteFiltered.length} file(s)`);
251-
252-
for (const f of filesToDeleteFiltered) {
253-
const deletePath = `${webdavLocation}/${f.dest}`;
254-
try {
255-
await webdav.delete(deletePath);
256-
logger.info({path: deletePath}, `Deleted: ${deletePath}`);
257-
} catch (error) {
258-
logger.debug({path: deletePath, error}, `Failed to delete ${deletePath}`);
259-
}
260-
}
261-
262-
options.onDelete?.(filesToDeleteFiltered.map((f) => f.dest));
263-
}
282+
const scheduleProcessing = debounce(() => {
283+
void runProcessing();
264284
}, debounceTime);
265285

266286
// Set up file watcher
@@ -278,10 +298,12 @@ export async function watchCartridges(
278298

279299
if (event === 'change' || event === 'add') {
280300
filesToUpload.add(fullPath);
281-
processChanges();
301+
filesToDelete.delete(fullPath);
302+
scheduleProcessing();
282303
} else if (event === 'unlink') {
283304
filesToDelete.add(fullPath);
284-
processChanges();
305+
filesToUpload.delete(fullPath);
306+
scheduleProcessing();
285307
}
286308
});
287309

0 commit comments

Comments (0)