mirror of https://github.com/actions/cache.git
synced 2025-10-31 10:24:19 +08:00

Test out 16 concurrency with 32mb chunks

Author: Josh Gross
commit 2ce22df8c4
parent 8c77f01f0b

dist/restore/index.js | 10 (vendored)

@@ -1497,7 +1497,6 @@ const Handlers_1 = __webpack_require__(941);
 const HttpClient_1 = __webpack_require__(874);
 const RestClient_1 = __webpack_require__(105);
 const utils = __importStar(__webpack_require__(443));
-const MAX_CHUNK_SIZE = 4000000; // 4 MB Chunks
 function isSuccessStatusCode(statusCode) {
     return statusCode >= 200 && statusCode < 300;
 }
@@ -1621,13 +1620,14 @@ function uploadFile(restClient, cacheId, archivePath) {
         const fileSize = fs.statSync(archivePath).size;
         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
         const responses = [];
-        const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
-        const concurrency = 16; // # of HTTP requests in parallel
-        core.debug(`Concurrency: ${concurrency}`);
-        const threads = [...new Array(concurrency).keys()];
+        const fd = fs.openSync(archivePath, "r");
+        const concurrency = 16; // # of HTTP requests in parallel
+        const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
+        core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
+        const parallelUploads = [...new Array(concurrency).keys()];
         core.debug("Awaiting all uploads");
         let offset = 0;
-        yield Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
+        yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
             while (offset < fileSize) {
                 const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
                 const start = offset;

dist/save/index.js | 10 (vendored)

@@ -1497,7 +1497,6 @@ const Handlers_1 = __webpack_require__(941);
 const HttpClient_1 = __webpack_require__(874);
 const RestClient_1 = __webpack_require__(105);
 const utils = __importStar(__webpack_require__(443));
-const MAX_CHUNK_SIZE = 4000000; // 4 MB Chunks
 function isSuccessStatusCode(statusCode) {
     return statusCode >= 200 && statusCode < 300;
 }
@@ -1621,13 +1620,14 @@ function uploadFile(restClient, cacheId, archivePath) {
         const fileSize = fs.statSync(archivePath).size;
         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
         const responses = [];
-        const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
-        const concurrency = 16; // # of HTTP requests in parallel
-        core.debug(`Concurrency: ${concurrency}`);
-        const threads = [...new Array(concurrency).keys()];
+        const fd = fs.openSync(archivePath, "r");
+        const concurrency = 16; // # of HTTP requests in parallel
+        const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
+        core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
+        const parallelUploads = [...new Array(concurrency).keys()];
         core.debug("Awaiting all uploads");
         let offset = 0;
-        yield Promise.all(threads.map(() => __awaiter(this, void 0, void 0, function* () {
+        yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
             while (offset < fileSize) {
                 const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
                 const start = offset;

src/cacheHttpClient.ts

@@ -16,8 +16,6 @@ import {
 } from "./contracts";
 import * as utils from "./utils/actionUtils";
 
-const MAX_CHUNK_SIZE = 4000000; // 4 MB Chunks
-
 function isSuccessStatusCode(statusCode: number): boolean {
     return statusCode >= 200 && statusCode < 300;
 }
@@ -179,14 +177,15 @@ async function uploadFile(restClient: RestClient, cacheId: number, archivePath:
     const fileSize = fs.statSync(archivePath).size;
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
     const responses: IRestResponse<void>[] = [];
-    const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
+    const fd = fs.openSync(archivePath, "r");
 
     const concurrency = 16; // # of HTTP requests in parallel
-    core.debug(`Concurrency: ${concurrency}`);
-    const threads = [...new Array(concurrency).keys()];
+    const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
+    core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
+    const parallelUploads = [...new Array(concurrency).keys()];
     core.debug("Awaiting all uploads");
     let offset = 0;
-    await Promise.all(threads.map(async () => {
+    await Promise.all(parallelUploads.map(async () => {
         while (offset < fileSize) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const start = offset;
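
For readers skimming the diff, here is a minimal, self-contained sketch of the upload pattern this commit tests: a fixed pool of 16 concurrent workers sharing one offset counter, each claiming the next 32 MB byte range of the archive and uploading it. The uploadChunk helper and uploadFileInChunks wrapper below are hypothetical stand-ins (the real action sends each range to resourceUrl through its RestClient); the chunk-claiming logic mirrors the diff above.

import * as fs from "fs";

const CONCURRENCY = 16; // # of HTTP requests in parallel, as in the commit
const MAX_CHUNK_SIZE = 32000000; // 32 MB chunks, as in the commit

// Hypothetical helper: stands in for the action's real HTTP call, which
// uploads the bytes [start, start + size) of `fd` to the cache service.
async function uploadChunk(fd: number, start: number, size: number): Promise<void> {
    const buffer = Buffer.alloc(size);
    fs.readSync(fd, buffer, 0, size, start); // positioned read; safe on a shared fd
    // ... send `buffer` as a byte range of the archive ...
}

async function uploadFileInChunks(archivePath: string): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    const fd = fs.openSync(archivePath, "r"); // one shared fd; every read passes an explicit position
    let offset = 0;

    // Each worker claims the next chunk by reading and advancing `offset`
    // synchronously, before its first await; on Node's single-threaded event
    // loop this means no two workers ever claim the same byte range.
    const workers = [...new Array(CONCURRENCY).keys()].map(async () => {
        while (offset < fileSize) {
            const start = offset;
            const chunkSize = Math.min(MAX_CHUNK_SIZE, fileSize - offset);
            offset += chunkSize;
            await uploadChunk(fd, start, chunkSize);
        }
    });
    await Promise.all(workers);
    fs.closeSync(fd);
}

At these settings up to 16 × 32 MB = 512 MB of the archive can be in flight at once, versus 64 MB with the previous 4 MB chunks; that memory-for-throughput tradeoff is presumably what this experiment probes.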