Mirror of https://gitee.com/actions-mirror/cache.git (synced 2025-11-04 12:31:20 +08:00)
Make uploads serial

commit 83f86c103f
parent 64668e22dd
dist/restore/index.js (vendored, 11 changed lines)
@@ -1623,18 +1623,19 @@ function saveCache(cacheId, archivePath) {
         const fileSize = fs.statSync(archivePath).size;
         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
         const uploads = [];
+        const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
         let offset = 0;
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const end = offset + chunkSize - 1;
-            core.debug(`Offset: ${offset}`);
-            const chunk = fs.createReadStream(archivePath, { start: offset, end });
-            uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
+            const chunk = fs.createReadStream(archivePath, { fd, start: offset, end });
+            uploads.push(yield uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial
             offset += MAX_CHUNK_SIZE;
         }
+        fs.closeSync(fd);
         core.debug("Awaiting all uploads");
-        const responses = yield Promise.all(uploads);
-        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
+        //const responses = await Promise.all(uploads);
+        const failedResponse = uploads.find(x => !isSuccessStatusCode(x.statusCode));
         if (failedResponse) {
             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
         }
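The dist bundle above replaces the parallel pattern (collect promises, then Promise.all) with a serial loop that awaits each chunk before the next one starts. A minimal TypeScript sketch of the two patterns, with uploadChunk reduced to a hypothetical stand-in (only the name comes from the diff; the signature and response shape here are assumed):

import * as fs from "fs";

// Hypothetical stand-in for the action's uploadChunk helper; only its name
// appears in the diff, the signature and response shape are assumed.
declare function uploadChunk(
    chunk: NodeJS.ReadableStream,
    start: number,
    end: number
): Promise<{ statusCode: number }>;

const MAX_CHUNK_SIZE = 4 * 1024 * 1024; // assumed size; the constant's value is not part of this hunk

// Old behavior: start every chunk upload, then wait for all of them at once.
async function uploadParallel(archivePath: string, fileSize: number) {
    const uploads: Promise<{ statusCode: number }>[] = [];
    for (let offset = 0; offset < fileSize; offset += MAX_CHUNK_SIZE) {
        const end = Math.min(offset + MAX_CHUNK_SIZE, fileSize) - 1;
        const chunk = fs.createReadStream(archivePath, { start: offset, end });
        uploads.push(uploadChunk(chunk, offset, end));
    }
    return Promise.all(uploads); // every chunk in flight at the same time
}

// New behavior (this commit): await each chunk before reading the next one.
async function uploadSerial(archivePath: string, fileSize: number) {
    const responses: { statusCode: number }[] = [];
    for (let offset = 0; offset < fileSize; offset += MAX_CHUNK_SIZE) {
        const end = Math.min(offset + MAX_CHUNK_SIZE, fileSize) - 1;
        const chunk = fs.createReadStream(archivePath, { start: offset, end });
        responses.push(await uploadChunk(chunk, offset, end)); // one chunk in flight
    }
    return responses;
}

The serial form keeps a single chunk and a single request in flight at any time; the parallel form can finish sooner on a fast link but holds every chunk's request open at once.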
							
								
								
									
dist/save/index.js (vendored, 11 changed lines)
@@ -1623,18 +1623,19 @@ function saveCache(cacheId, archivePath) {
         const fileSize = fs.statSync(archivePath).size;
         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
         const uploads = [];
+        const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
         let offset = 0;
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const end = offset + chunkSize - 1;
-            core.debug(`Offset: ${offset}`);
-            const chunk = fs.createReadStream(archivePath, { start: offset, end });
-            uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
+            const chunk = fs.createReadStream(archivePath, { fd, start: offset, end });
+            uploads.push(yield uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial
             offset += MAX_CHUNK_SIZE;
         }
+        fs.closeSync(fd);
         core.debug("Awaiting all uploads");
-        const responses = yield Promise.all(uploads);
-        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
+        //const responses = await Promise.all(uploads);
+        const failedResponse = uploads.find(x => !isSuccessStatusCode(x.statusCode));
         if (failedResponse) {
             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
         }
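Both compiled files also open one file descriptor up front and hand it to every fs.createReadStream call, with the in-diff comment asking whether the same fd can be reused. One point worth keeping in mind from the Node.js fs API: when an fd is supplied, createReadStream ignores the path argument and, under the default autoClose: true, closes that descriptor when the stream ends, so sharing one descriptor across several streams generally calls for autoClose: false. A small sketch under that assumption (file name and byte ranges are made up):

import * as fs from "fs";

// Build a stream over one byte range of an already-open descriptor without
// letting the stream close it; the caller remains responsible for closeSync.
function chunkStream(fd: number, start: number, end: number): fs.ReadStream {
    return fs.createReadStream("", {
        fd,
        start,
        end,
        autoClose: false // keep the descriptor usable for the next range
    });
}

// Usage sketch: stream two ranges serially from the same descriptor,
// counting bytes where the real code would upload them.
async function readRanges(path: string): Promise<void> {
    const fd = fs.openSync(path, "r");
    try {
        for (const [start, end] of [[0, 1023], [1024, 2047]]) {
            let bytes = 0;
            for await (const data of chunkStream(fd, start, end)) {
                bytes += (data as Buffer).length;
            }
            console.log(`read ${bytes} bytes for range ${start}-${end}`);
        }
    } finally {
        fs.closeSync(fd);
    }
}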
@@ -184,21 +184,24 @@ export async function saveCache(
     // Upload Chunks
     const fileSize = fs.statSync(archivePath).size;
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
-    const uploads: Promise<IRestResponse<void>>[] = [];
+    const uploads: IRestResponse<void>[] = [];
+
+    const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
     let offset = 0;
     while (offset < fileSize) {
         const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
         const end = offset + chunkSize - 1;
-        core.debug(`Offset: ${offset}`);
-        const chunk = fs.createReadStream(archivePath, { start: offset, end });
-        uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
+        const chunk = fs.createReadStream(archivePath, { fd, start: offset, end });
+        uploads.push(await uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial
         offset += MAX_CHUNK_SIZE;
     }
 
+    fs.closeSync(fd);
+
     core.debug("Awaiting all uploads");
-    const responses = await Promise.all(uploads);
+    //const responses = await Promise.all(uploads);
 
-    const failedResponse = responses.find(
+    const failedResponse = uploads.find(
         x => !isSuccessStatusCode(x.statusCode)
     );
     if (failedResponse) {
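In the TypeScript source, uploads changes type from Promise<IRestResponse<void>>[] to IRestResponse<void>[]: the loop now stores resolved responses, so the failure check runs over the array directly instead of over Promise.all's result. A compact illustration of that check, with a minimal local IRestResponse shape and an assumed body for isSuccessStatusCode (neither is part of this hunk):

// Minimal local stand-in for typed-rest-client's IRestResponse.
interface IRestResponse<T> {
    statusCode: number;
    result: T | null;
}

// Assumed implementation: treat any 2xx status as success.
function isSuccessStatusCode(statusCode: number): boolean {
    return statusCode >= 200 && statusCode < 300;
}

// With serial uploads the array already holds responses, not promises,
// so a plain find() surfaces the first failed chunk.
function throwOnFailedChunk(uploads: IRestResponse<void>[]): void {
    const failedResponse = uploads.find(
        x => !isSuccessStatusCode(x.statusCode)
    );
    if (failedResponse) {
        throw new Error(
            `Cache service responded with ${failedResponse.statusCode} during chunk upload.`
        );
    }
}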