From cdec5dec0db8456760a3d6b1a5d6f7c34f1f7fdd Mon Sep 17 00:00:00 2001
From: Josh Gross <jogros@microsoft.com>
Date: Wed, 18 Dec 2019 10:59:51 -0500
Subject: [PATCH] Format and update tests

---
 __tests__/save.test.ts |  68 +++++++++++++---------
 src/cacheHttpClient.ts | 129 ++++++++++++++++++++++++-----------------
 src/restore.ts         |   1 -
 src/save.ts            |   5 +-
 4 files changed, 122 insertions(+), 81 deletions(-)

diff --git a/__tests__/save.test.ts b/__tests__/save.test.ts
index b0eb462..221e722 100644
--- a/__tests__/save.test.ts
+++ b/__tests__/save.test.ts
@@ -212,14 +212,14 @@ test("save with large cache outputs warning", async () => {
     const IS_WINDOWS = process.platform === "win32";
     const args = IS_WINDOWS
         ? [
-              "-cz",
-              "--force-local",
-              "-f",
-              archivePath.replace(/\\/g, "/"),
-              "-C",
-              cachePath.replace(/\\/g, "/"),
-              "."
-          ]
+            "-cz",
+            "--force-local",
+            "-f",
+            archivePath.replace(/\\/g, "/"),
+            "-C",
+            cachePath.replace(/\\/g, "/"),
+            "."
+        ]
         : ["-cz", "-f", archivePath, "-C", cachePath, "."];
 
     expect(execMock).toHaveBeenCalledTimes(1);
@@ -259,6 +259,11 @@ test("save with server error outputs warning", async () => {
     const cachePath = path.resolve(inputPath);
     testUtils.setInput(Inputs.Path, inputPath);
 
+    const cacheId = 4;
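+    // Reserve a cache ID up front so run() continues on to the saveCache call.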
+    const reserveCacheMock = jest
+        .spyOn(cacheHttpClient, "reserveCache")
+        .mockImplementationOnce(() => {
+            return Promise.resolve(cacheId);
+        });
+
     const execMock = jest.spyOn(exec, "exec");
 
     const saveCacheMock = jest
@@ -269,26 +274,29 @@ test("save with server error outputs warning", async () => {
 
     await run();
 
+    expect(reserveCacheMock).toHaveBeenCalledTimes(1);
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
+
     const archivePath = path.join("/foo/bar", "cache.tgz");
 
     const IS_WINDOWS = process.platform === "win32";
     const args = IS_WINDOWS
         ? [
-              "-cz",
-              "--force-local",
-              "-f",
-              archivePath.replace(/\\/g, "/"),
-              "-C",
-              cachePath.replace(/\\/g, "/"),
-              "."
-          ]
+            "-cz",
+            "--force-local",
+            "-f",
+            archivePath.replace(/\\/g, "/"),
+            "-C",
+            cachePath.replace(/\\/g, "/"),
+            "."
+        ]
         : ["-cz", "-f", archivePath, "-C", cachePath, "."];
 
     expect(execMock).toHaveBeenCalledTimes(1);
     expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
 
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
 
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
@@ -321,32 +329,40 @@ test("save with valid inputs uploads a cache", async () => {
     const cachePath = path.resolve(inputPath);
     testUtils.setInput(Inputs.Path, inputPath);
 
+    const cacheId = 4;
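+    // Reserve a cache ID up front so run() continues on to upload the cache.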
+    const reserveCacheMock = jest
+        .spyOn(cacheHttpClient, "reserveCache")
+        .mockImplementationOnce(() => {
+            return Promise.resolve(cacheId);
+        });
+
     const execMock = jest.spyOn(exec, "exec");
 
     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
 
     await run();
 
+    expect(reserveCacheMock).toHaveBeenCalledTimes(1);
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
+
     const archivePath = path.join("/foo/bar", "cache.tgz");
 
     const IS_WINDOWS = process.platform === "win32";
     const args = IS_WINDOWS
         ? [
-              "-cz",
-              "--force-local",
-              "-f",
-              archivePath.replace(/\\/g, "/"),
-              "-C",
-              cachePath.replace(/\\/g, "/"),
-              "."
-          ]
+            "-cz",
+            "--force-local",
+            "-f",
+            archivePath.replace(/\\/g, "/"),
+            "-C",
+            cachePath.replace(/\\/g, "/"),
+            "."
+        ]
         : ["-cz", "-f", archivePath, "-C", cachePath, "."];
 
     expect(execMock).toHaveBeenCalledTimes(1);
     expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
 
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
 
     expect(failedMock).toHaveBeenCalledTimes(0);
 });
diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts
index 24a22dc..9501762 100644
--- a/src/cacheHttpClient.ts
+++ b/src/cacheHttpClient.ts
@@ -107,9 +107,7 @@ export async function downloadCache(
 }
 
 // Reserve Cache
-export async function reserveCache(
-    key: string
-): Promise<number> {
+export async function reserveCache(key: string): Promise<number> {
     const restClient = createRestClient();
 
     const reserveCacheRequest: ReserveCacheRequest = {
@@ -133,14 +131,6 @@ function getContentRange(start: number, end: number): string {
     return `bytes ${start}-${end}/*`;
 }
 
-// function bufferToStream(buffer: Buffer): NodeJS.ReadableStream {
-//     const stream = new Duplex();
-//     stream.push(buffer);
-//     stream.push(null);
-
-//     return stream;
-// }
-
 async function uploadChunk(
     restClient: RestClient,
     resourceUrl: string,
@@ -148,14 +138,87 @@ async function uploadChunk(
     start: number,
     end: number
 ): Promise<IRestResponse<void>> {
-    core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+    core.debug(
+        `Uploading chunk of size ${end -
+            start +
+            1} bytes at offset ${start} with content range: ${getContentRange(
+            start,
+            end
+        )}`
+    );
     const requestOptions = getRequestOptions();
     requestOptions.additionalHeaders = {
         "Content-Type": "application/octet-stream",
         "Content-Range": getContentRange(start, end)
     };
 
-    return await restClient.uploadStream<void>("PATCH", resourceUrl, data, requestOptions);
+    return await restClient.uploadStream<void>(
+        "PATCH",
+        resourceUrl,
+        data,
+        requestOptions
+    );
+}
+
+async function uploadFile(
+    restClient: RestClient,
+    cacheId: number,
+    archivePath: string
+): Promise<void> {
+    // Upload Chunks
+    const fileSize = fs.statSync(archivePath).size;
+    const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
+    const responses: IRestResponse<void>[] = [];
+    const fd = fs.openSync(archivePath, "r");
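+    // Open the archive once; every chunk stream below reads from this shared descriptor.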
+
+    const concurrency = 4; // # of HTTP requests in parallel
+    const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
+    core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
+
+    const parallelUploads = [...new Array(concurrency).keys()];
+    core.debug("Awaiting all uploads");
+    let offset = 0;
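+    // Each worker in the pool grabs its next chunk by reading and then advancing
+    // the shared offset; both happen synchronously before the await, so no two
+    // workers ever upload the same byte range.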
+    await Promise.all(
+        parallelUploads.map(async () => {
+            while (offset < fileSize) {
+                const chunkSize =
+                    offset + MAX_CHUNK_SIZE > fileSize
+                        ? fileSize - offset
+                        : MAX_CHUNK_SIZE;
+                const start = offset;
+                const end = offset + chunkSize - 1;
+                offset += MAX_CHUNK_SIZE;
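+                // autoClose stays off so one stream ending doesn't close the
+                // descriptor out from under the other parallel uploads.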
+                const chunk = fs.createReadStream(archivePath, {
+                    fd,
+                    start,
+                    end,
+                    autoClose: false
+                });
+                responses.push(
+                    await uploadChunk(
+                        restClient,
+                        resourceUrl,
+                        chunk,
+                        start,
+                        end
+                    )
+                );
+            }
+        })
+    );
+
+    fs.closeSync(fd);
+
+    const failedResponse = responses.find(
+        x => !isSuccessStatusCode(x.statusCode)
+    );
+    if (failedResponse) {
+        throw new Error(
+            `Cache service responded with ${failedResponse.statusCode} during chunk upload.`
+        );
+    }
+
+    return;
 }
 
 async function commitCache(
@@ -172,44 +235,6 @@ async function commitCache(
     );
 }
 
-async function uploadFile(restClient: RestClient, cacheId: number, archivePath: string): Promise<void> {
-    // Upload Chunks
-    const fileSize = fs.statSync(archivePath).size;
-    const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
-    const responses: IRestResponse<void>[] = [];
-    const fd = fs.openSync(archivePath, "r");
-
-    const concurrency = 16; // # of HTTP requests in parallel
-    const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
-    core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
-    const parallelUploads = [...new Array(concurrency).keys()];
-    core.debug("Awaiting all uploads");
-    let offset = 0;
-    await Promise.all(parallelUploads.map(async () => {
-        while (offset < fileSize) {
-            const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
-            const start = offset;
-            const end = offset + chunkSize - 1;
-            offset += MAX_CHUNK_SIZE;
-            const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
-            responses.push(await uploadChunk(restClient, resourceUrl, chunk, start, end));
-        }
-    }));
-
-    fs.closeSync(fd);
-
-    const failedResponse = responses.find(
-        x => !isSuccessStatusCode(x.statusCode)
-    );
-    if (failedResponse) {
-        throw new Error(
-            `Cache service responded with ${failedResponse.statusCode} during chunk upload.`
-        );
-    }
-
-    return;
-}
-
 export async function saveCache(
     cacheId: number,
     archivePath: string
@@ -219,8 +244,8 @@ export async function saveCache(
     core.debug("Upload cache");
     await uploadFile(restClient, cacheId, archivePath);
 
-    core.debug("Commiting cache");
     // Commit Cache
+    core.debug("Commiting cache");
     const cacheSize = utils.getArchiveFileSize(archivePath);
     const commitCacheResponse = await commitCache(
         restClient,
diff --git a/src/restore.ts b/src/restore.ts
index 562d455..09a3d2f 100644
--- a/src/restore.ts
+++ b/src/restore.ts
@@ -82,7 +82,6 @@ async function run(): Promise<void> {
                 cacheEntry?.archiveLocation,
                 archivePath
             );
-            await exec(`md5sum`, [archivePath]);
 
             const archiveFileSize = utils.getArchiveFileSize(archivePath);
             core.info(
diff --git a/src/save.ts b/src/save.ts
index 78f5733..418d698 100644
--- a/src/save.ts
+++ b/src/save.ts
@@ -38,7 +38,9 @@ async function run(): Promise<void> {
         core.debug("Reserving Cache");
         const cacheId = await cacheHttpClient.reserveCache(primaryKey);
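+        // reserveCache resolves to a negative ID when the key could not be reserved.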
         if (cacheId < 0) {
-            core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`);
+            core.info(
+                `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
+            );
             return;
         }
         core.debug(`Cache ID: ${cacheId}`);
@@ -84,7 +86,6 @@ async function run(): Promise<void> {
             return;
         }
 
-        await exec(`md5sum`, [archivePath]);
         core.debug("Saving Cache");
         await cacheHttpClient.saveCache(cacheId, archivePath);
     } catch (error) {