Diffstat (limited to 'packages/BackupEncryption')
-rw-r--r--  packages/BackupEncryption/src/com/android/server/backup/encryption/chunking/BackupFileBuilder.java | 232
-rw-r--r--  packages/BackupEncryption/src/com/android/server/backup/encryption/client/CryptoBackupServer.java | 67
-rw-r--r--  packages/BackupEncryption/src/com/android/server/backup/encryption/tasks/EncryptedBackupTask.java | 243
-rw-r--r--  packages/BackupEncryption/src/com/android/server/backup/encryption/tasks/KvBackupEncrypter.java | 179
-rw-r--r--  packages/BackupEncryption/test/robolectric/Android.bp | 2
-rw-r--r--  packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/chunking/BackupFileBuilderTest.java | 614
-rw-r--r--  packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/tasks/EncryptedBackupTaskTest.java | 397
-rw-r--r--  packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/tasks/KvBackupEncrypterTest.java | 287
-rw-r--r--  packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/testing/DiffScriptProcessor.java | 256
-rw-r--r--  packages/BackupEncryption/test/robolectric/src/com/android/server/testing/CryptoTestUtils.java | 72
-rw-r--r--  packages/BackupEncryption/test/robolectric/src/com/android/server/testing/shadows/DataEntity.java | 100
-rw-r--r--  packages/BackupEncryption/test/robolectric/src/com/android/server/testing/shadows/ShadowBackupDataInput.java | 106
12 files changed, 2554 insertions(+), 1 deletion(-)
diff --git a/packages/BackupEncryption/src/com/android/server/backup/encryption/chunking/BackupFileBuilder.java b/packages/BackupEncryption/src/com/android/server/backup/encryption/chunking/BackupFileBuilder.java
new file mode 100644
index 000000000000..3d3fb552bb58
--- /dev/null
+++ b/packages/BackupEncryption/src/com/android/server/backup/encryption/chunking/BackupFileBuilder.java
@@ -0,0 +1,232 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.server.backup.encryption.chunking;
+
+import static com.android.internal.util.Preconditions.checkArgument;
+import static com.android.internal.util.Preconditions.checkNotNull;
+import static com.android.internal.util.Preconditions.checkState;
+
+import android.annotation.Nullable;
+import android.util.Slog;
+
+import com.android.server.backup.encryption.chunk.ChunkHash;
+import com.android.server.backup.encryption.chunk.ChunkListingMap;
+import com.android.server.backup.encryption.protos.nano.ChunksMetadataProto;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Writes batches of {@link EncryptedChunk} to a diff script, and generates the associated {@link
+ * ChunksMetadataProto.ChunkListing} and {@link ChunksMetadataProto.ChunkOrdering}.
+ */
+public class BackupFileBuilder {
+ private static final String TAG = "BackupFileBuilder";
+
+ private static final int BYTES_PER_KILOBYTE = 1024;
+
+ private final BackupWriter mBackupWriter;
+ private final EncryptedChunkEncoder mEncryptedChunkEncoder;
+ private final ChunkListingMap mOldChunkListing;
+ private final ChunksMetadataProto.ChunkListing mNewChunkListing;
+ private final ChunksMetadataProto.ChunkOrdering mChunkOrdering;
+ private final List<ChunksMetadataProto.Chunk> mKnownChunks = new ArrayList<>();
+ private final List<Integer> mKnownStarts = new ArrayList<>();
+ private final Map<ChunkHash, Long> mChunkStartPositions;
+
+ private long mNewChunksSizeBytes;
+ private boolean mFinished;
+
+ /**
+ * Constructs a new instance which writes raw data to the given {@link OutputStream}, without
+ * generating a diff.
+ *
+ * <p>This class never closes the output stream.
+ */
+ public static BackupFileBuilder createForNonIncremental(OutputStream outputStream) {
+ return new BackupFileBuilder(
+ new RawBackupWriter(outputStream), new ChunksMetadataProto.ChunkListing());
+ }
+
+ /**
+ * Constructs a new instance which writes a diff script to the given {@link OutputStream} using
+ * a {@link SingleStreamDiffScriptWriter}.
+ *
+ * <p>This class never closes the output stream.
+ *
+ * @param oldChunkListing against which the diff will be generated.
+ */
+ public static BackupFileBuilder createForIncremental(
+ OutputStream outputStream, ChunksMetadataProto.ChunkListing oldChunkListing) {
+ return new BackupFileBuilder(
+ DiffScriptBackupWriter.newInstance(outputStream), oldChunkListing);
+ }
+
+ private BackupFileBuilder(
+ BackupWriter backupWriter, ChunksMetadataProto.ChunkListing oldChunkListing) {
+ this.mBackupWriter = backupWriter;
+ // TODO(b/77188289): Use InlineLengthsEncryptedChunkEncoder for key-value backups
+ this.mEncryptedChunkEncoder = new LengthlessEncryptedChunkEncoder();
+ this.mOldChunkListing = ChunkListingMap.fromProto(oldChunkListing);
+
+ mNewChunkListing = new ChunksMetadataProto.ChunkListing();
+ mNewChunkListing.cipherType = ChunksMetadataProto.AES_256_GCM;
+ mNewChunkListing.chunkOrderingType = ChunksMetadataProto.CHUNK_ORDERING_TYPE_UNSPECIFIED;
+
+ mChunkOrdering = new ChunksMetadataProto.ChunkOrdering();
+ mChunkStartPositions = new HashMap<>();
+ }
+
+ /**
+ * Writes the given chunks to the output stream, and adds them to the new chunk listing and
+ * chunk ordering.
+ *
+ * <p>Sorts the chunks in lexicographical order before writing.
+ *
+ * @param allChunks The hashes of all the chunks, in the order they appear in the plaintext.
+ * @param newChunks A map from hash to {@link EncryptedChunk} containing the new chunks not
+ * present in the previous backup.
+ */
+ public void writeChunks(List<ChunkHash> allChunks, Map<ChunkHash, EncryptedChunk> newChunks)
+ throws IOException {
+ checkState(!mFinished, "Cannot write chunks after flushing.");
+
+ List<ChunkHash> sortedChunks = new ArrayList<>(allChunks);
+ Collections.sort(sortedChunks);
+ for (ChunkHash chunkHash : sortedChunks) {
+ // Only write a chunk the first time we see it, so that identical chunks are
+ // deduplicated within the backup file.
+ if (!mChunkStartPositions.containsKey(chunkHash)) {
+ // getBytesWritten() gives us the start of the chunk.
+ mChunkStartPositions.put(chunkHash, mBackupWriter.getBytesWritten());
+
+ writeChunkToFileAndListing(chunkHash, newChunks);
+ }
+ }
+
+ long totalSizeKb = mBackupWriter.getBytesWritten() / BYTES_PER_KILOBYTE;
+ long newChunksSizeKb = mNewChunksSizeBytes / BYTES_PER_KILOBYTE;
+ Slog.d(
+ TAG,
+ "Total backup size: "
+ + totalSizeKb
+ + " kb, new chunks size: "
+ + newChunksSizeKb
+ + " kb");
+
+ for (ChunkHash chunkHash : allChunks) {
+ mKnownStarts.add(mChunkStartPositions.get(chunkHash).intValue());
+ }
+ }
+
+ /**
+ * Returns a new listing for all of the chunks written so far, setting the given fingerprint
+ * mixer salt (this overrides the {@link ChunksMetadataProto.ChunkListing#fingerprintMixerSalt}
+ * in the old {@link ChunksMetadataProto.ChunkListing} passed into the
+ * {@link #BackupFileBuilder}).
+ */
+ public ChunksMetadataProto.ChunkListing getNewChunkListing(
+ @Nullable byte[] fingerprintMixerSalt) {
+ // TODO: b/141537803 Add check to ensure this is called only once per instance
+ mNewChunkListing.fingerprintMixerSalt =
+ fingerprintMixerSalt != null
+ ? Arrays.copyOf(fingerprintMixerSalt, fingerprintMixerSalt.length)
+ : new byte[0];
+ mNewChunkListing.chunks = mKnownChunks.toArray(new ChunksMetadataProto.Chunk[0]);
+ return mNewChunkListing;
+ }
+
+ /** Returns a new ordering for all of the chunks written so far, setting the given checksum. */
+ public ChunksMetadataProto.ChunkOrdering getNewChunkOrdering(byte[] checksum) {
+ // TODO: b/141537803 Add check to ensure this is called only once per instance
+ mChunkOrdering.starts = new int[mKnownStarts.size()];
+ for (int i = 0; i < mKnownStarts.size(); i++) {
+ mChunkOrdering.starts[i] = mKnownStarts.get(i).intValue();
+ }
+ mChunkOrdering.checksum = Arrays.copyOf(checksum, checksum.length);
+ return mChunkOrdering;
+ }
+
+ /**
+ * Finishes the backup file by writing the chunk metadata and metadata position.
+ *
+ * <p>Once this is called, calling {@link #writeChunks(List, Map)} will throw {@link
+ * IllegalStateException}.
+ */
+ public void finish(ChunksMetadataProto.ChunksMetadata metadata) throws IOException {
+ checkNotNull(metadata, "Metadata cannot be null");
+
+ long startOfMetadata = mBackupWriter.getBytesWritten();
+ mBackupWriter.writeBytes(ChunksMetadataProto.ChunksMetadata.toByteArray(metadata));
+ mBackupWriter.writeBytes(toByteArray(startOfMetadata));
+
+ mBackupWriter.flush();
+ mFinished = true;
+ }
+
+ /**
+ * Checks if the given chunk hash references an existing chunk or a new chunk, and adds this
+ * chunk to the backup file and new chunk listing.
+ */
+ private void writeChunkToFileAndListing(
+ ChunkHash chunkHash, Map<ChunkHash, EncryptedChunk> newChunks) throws IOException {
+ checkNotNull(chunkHash, "Hash cannot be null");
+
+ if (mOldChunkListing.hasChunk(chunkHash)) {
+ ChunkListingMap.Entry oldChunk = mOldChunkListing.getChunkEntry(chunkHash);
+ mBackupWriter.writeChunk(oldChunk.getStart(), oldChunk.getLength());
+
+ checkArgument(oldChunk.getLength() >= 0, "Chunk must have zero or positive length");
+ addChunk(chunkHash.getHash(), oldChunk.getLength());
+ } else if (newChunks.containsKey(chunkHash)) {
+ EncryptedChunk newChunk = newChunks.get(chunkHash);
+ mEncryptedChunkEncoder.writeChunkToWriter(mBackupWriter, newChunk);
+ int length = mEncryptedChunkEncoder.getEncodedLengthOfChunk(newChunk);
+ mNewChunksSizeBytes += length;
+
+ checkArgument(length >= 0, "Chunk must have zero or positive length");
+ addChunk(chunkHash.getHash(), length);
+ } else {
+ throw new IllegalArgumentException(
+ "Chunk did not exist in old chunks or new chunks: " + chunkHash);
+ }
+ }
+
+ private void addChunk(byte[] chunkHash, int length) {
+ ChunksMetadataProto.Chunk chunk = new ChunksMetadataProto.Chunk();
+ chunk.hash = Arrays.copyOf(chunkHash, chunkHash.length);
+ chunk.length = length;
+ mKnownChunks.add(chunk);
+ }
+
+ private static byte[] toByteArray(long value) {
+ // Note that this code needs to stay compatible with GWT, which has known
+ // bugs when narrowing byte casts of long values occur.
+ byte[] result = new byte[8];
+ for (int i = 7; i >= 0; i--) {
+ result[i] = (byte) (value & 0xffL);
+ value >>= 8;
+ }
+ return result;
+ }
+}
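
A usage sketch, not part of the change above: BackupFileBuilder expects one or more writeChunks() batches, then getNewChunkOrdering() with the plaintext digest, then finish() with the chunks metadata, and finally getNewChunkListing(). The helper below is hypothetical, assumes the imports used elsewhere in this change (OutputStream, SecretKey, GeneralSecurityException, the proto classes), and uses encryptOrdering() as a stand-in for the ordering encryption done in EncryptedBackupTask.

    // Hypothetical sketch of driving BackupFileBuilder for a non-incremental backup.
    // encryptOrdering() is a placeholder for EncryptedBackupTask#encryptChunkOrdering.
    static ChunksMetadataProto.ChunkListing buildBackupFile(
            OutputStream output,
            List<ChunkHash> allChunks,
            Map<ChunkHash, EncryptedChunk> newChunks,
            byte[] plaintextDigest,
            @Nullable byte[] fingerprintMixerSalt,
            SecretKey tertiaryKey)
            throws IOException, GeneralSecurityException {
        BackupFileBuilder builder = BackupFileBuilder.createForNonIncremental(output);

        // Writes the ciphertext of each new chunk once, in lexicographical order of hash.
        builder.writeChunks(allChunks, newChunks);

        // The ordering records where each chunk starts in the file, in plaintext order.
        ChunksMetadataProto.ChunkOrdering ordering = builder.getNewChunkOrdering(plaintextDigest);

        ChunksMetadataProto.ChunksMetadata metadata = new ChunksMetadataProto.ChunksMetadata();
        metadata.cipherType = ChunksMetadataProto.AES_256_GCM;
        metadata.checksumType = ChunksMetadataProto.SHA_256;
        metadata.chunkOrdering = encryptOrdering(tertiaryKey, ordering); // hypothetical helper

        // finish() appends the metadata proto plus an 8-byte big-endian offset pointing at it.
        builder.finish(metadata);

        return builder.getNewChunkListing(fingerprintMixerSalt);
    }

An incremental backup differs only in calling createForIncremental() with the previous ChunkListing, as EncryptedBackupTask does below.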
diff --git a/packages/BackupEncryption/src/com/android/server/backup/encryption/client/CryptoBackupServer.java b/packages/BackupEncryption/src/com/android/server/backup/encryption/client/CryptoBackupServer.java
new file mode 100644
index 000000000000..d7f7dc7d0472
--- /dev/null
+++ b/packages/BackupEncryption/src/com/android/server/backup/encryption/client/CryptoBackupServer.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.server.backup.encryption.client;
+
+import com.android.server.backup.encryption.protos.nano.WrappedKeyProto;
+
+import java.util.Map;
+
+/**
+ * Contains methods for communicating with the parts of the backup server relevant to encryption.
+ */
+public interface CryptoBackupServer {
+ /**
+ * Uploads an incremental backup to the server.
+ *
+ * <p>Handles setting up and tearing down the connection.
+ *
+ * @param packageName the package to associate the data with
+ * @param oldDocId the id of the previous backup doc in Drive
+ * @param diffScript containing the actual backup data
+ * @param tertiaryKey the wrapped key used to encrypt this backup
+ * @return the id of the new backup doc in Drive.
+ */
+ String uploadIncrementalBackup(
+ String packageName,
+ String oldDocId,
+ byte[] diffScript,
+ WrappedKeyProto.WrappedKey tertiaryKey);
+
+ /**
+ * Uploads non-incremental backup to the server.
+ *
+ * <p>Handles setting up and tearing down the connection.
+ *
+ * @param packageName the package to associate the data with
+ * @param data the actual backup data
+ * @param tertiaryKey the wrapped key used to encrypt this backup
+ * @return the id of the new backup doc in Drive.
+ */
+ String uploadNonIncrementalBackup(
+ String packageName, byte[] data, WrappedKeyProto.WrappedKey tertiaryKey);
+
+ /**
+ * Sets the alias of the active secondary key. This is the alias used to refer to the key in
+ * the {@link java.security.KeyStore}, and it is also used to key the storage of tertiary keys
+ * on the backup server. Implementations must also upload all existing tertiary keys, wrapped
+ * with the new secondary key.
+ *
+ * @param keyAlias the alias of the new active secondary key
+ * @param tertiaryKeys the tertiary keys, wrapped with the new secondary key
+ */
+ void setActiveSecondaryKeyAlias(
+ String keyAlias, Map<String, WrappedKeyProto.WrappedKey> tertiaryKeys);
+}
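
To make the contract concrete, here is a minimal hypothetical in-memory fake of CryptoBackupServer, the kind of shape a test double might take; the document-id scheme and field names are invented for illustration, java.util.HashMap is assumed to be imported alongside Map, and nothing is uploaded anywhere.

    // Hypothetical in-memory fake of CryptoBackupServer (illustration only).
    class InMemoryCryptoBackupServer implements CryptoBackupServer {
        private final Map<String, byte[]> mDocs = new HashMap<>();
        private final Map<String, WrappedKeyProto.WrappedKey> mWrappedTertiaryKeys = new HashMap<>();
        private String mActiveSecondaryKeyAlias;
        private int mNextDocId;

        @Override
        public String uploadIncrementalBackup(String packageName, String oldDocId,
                byte[] diffScript, WrappedKeyProto.WrappedKey tertiaryKey) {
            // A real server would apply the diff script to the old doc; here we just store it.
            String docId = packageName + "-" + mNextDocId++;
            mDocs.put(docId, diffScript);
            mWrappedTertiaryKeys.put(packageName, tertiaryKey);
            return docId;
        }

        @Override
        public String uploadNonIncrementalBackup(String packageName, byte[] data,
                WrappedKeyProto.WrappedKey tertiaryKey) {
            String docId = packageName + "-" + mNextDocId++;
            mDocs.put(docId, data);
            mWrappedTertiaryKeys.put(packageName, tertiaryKey);
            return docId;
        }

        @Override
        public void setActiveSecondaryKeyAlias(String keyAlias,
                Map<String, WrappedKeyProto.WrappedKey> tertiaryKeys) {
            // The caller supplies every tertiary key wrapped with the new secondary key,
            // so the fake simply records the alias and the re-wrapped keys.
            mActiveSecondaryKeyAlias = keyAlias;
            mWrappedTertiaryKeys.putAll(tertiaryKeys);
        }
    }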
diff --git a/packages/BackupEncryption/src/com/android/server/backup/encryption/tasks/EncryptedBackupTask.java b/packages/BackupEncryption/src/com/android/server/backup/encryption/tasks/EncryptedBackupTask.java
new file mode 100644
index 000000000000..ef13f23e799d
--- /dev/null
+++ b/packages/BackupEncryption/src/com/android/server/backup/encryption/tasks/EncryptedBackupTask.java
@@ -0,0 +1,243 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.server.backup.encryption.tasks;
+
+import android.annotation.Nullable;
+import android.annotation.TargetApi;
+import android.os.Build.VERSION_CODES;
+import android.util.Slog;
+
+import com.android.server.backup.encryption.chunk.ChunkHash;
+import com.android.server.backup.encryption.chunking.BackupFileBuilder;
+import com.android.server.backup.encryption.chunking.EncryptedChunk;
+import com.android.server.backup.encryption.client.CryptoBackupServer;
+import com.android.server.backup.encryption.protos.nano.ChunksMetadataProto;
+import com.android.server.backup.encryption.protos.nano.WrappedKeyProto;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.security.GeneralSecurityException;
+import java.security.InvalidAlgorithmParameterException;
+import java.security.InvalidKeyException;
+import java.security.NoSuchAlgorithmException;
+import java.security.SecureRandom;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CancellationException;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import javax.crypto.BadPaddingException;
+import javax.crypto.Cipher;
+import javax.crypto.IllegalBlockSizeException;
+import javax.crypto.NoSuchPaddingException;
+import javax.crypto.SecretKey;
+import javax.crypto.ShortBufferException;
+import javax.crypto.spec.GCMParameterSpec;
+
+/**
+ * Task which reads encrypted chunks from a {@link BackupEncrypter}, builds a backup file and
+ * uploads it to the server.
+ */
+@TargetApi(VERSION_CODES.P)
+public class EncryptedBackupTask {
+ private static final String CIPHER_ALGORITHM = "AES/GCM/NoPadding";
+ private static final int GCM_NONCE_LENGTH_BYTES = 12;
+ private static final int GCM_TAG_LENGTH_BYTES = 16;
+ private static final int BITS_PER_BYTE = 8;
+
+ private static final String TAG = "EncryptedBackupTask";
+
+ private final CryptoBackupServer mCryptoBackupServer;
+ private final SecureRandom mSecureRandom;
+ private final String mPackageName;
+ private final ByteArrayOutputStream mBackupDataOutput;
+ private final BackupEncrypter mBackupEncrypter;
+ private final AtomicBoolean mCancelled;
+
+ /** Creates a new instance which reads backup data from the given {@link BackupEncrypter}. */
+ public EncryptedBackupTask(
+ CryptoBackupServer cryptoBackupServer,
+ SecureRandom secureRandom,
+ String packageName,
+ BackupEncrypter backupEncrypter) {
+ mCryptoBackupServer = cryptoBackupServer;
+ mSecureRandom = secureRandom;
+ mPackageName = packageName;
+ mBackupEncrypter = backupEncrypter;
+
+ mBackupDataOutput = new ByteArrayOutputStream();
+ mCancelled = new AtomicBoolean(false);
+ }
+
+ /**
+ * Creates a non-incremental backup file and uploads it to the server.
+ *
+ * @param fingerprintMixerSalt Fingerprint mixer salt used for content-defined chunking during a
+ * full backup. May be {@code null} for a key-value backup.
+ */
+ public ChunksMetadataProto.ChunkListing performNonIncrementalBackup(
+ SecretKey tertiaryKey,
+ WrappedKeyProto.WrappedKey wrappedTertiaryKey,
+ @Nullable byte[] fingerprintMixerSalt)
+ throws IOException, GeneralSecurityException {
+
+ ChunksMetadataProto.ChunkListing newChunkListing =
+ performBackup(
+ tertiaryKey,
+ fingerprintMixerSalt,
+ BackupFileBuilder.createForNonIncremental(mBackupDataOutput),
+ new HashSet<>());
+
+ throwIfCancelled();
+
+ newChunkListing.documentId =
+ mCryptoBackupServer.uploadNonIncrementalBackup(
+ mPackageName, mBackupDataOutput.toByteArray(), wrappedTertiaryKey);
+
+ return newChunkListing;
+ }
+
+ /** Creates an incremental backup file and uploads it to the server. */
+ public ChunksMetadataProto.ChunkListing performIncrementalBackup(
+ SecretKey tertiaryKey,
+ WrappedKeyProto.WrappedKey wrappedTertiaryKey,
+ ChunksMetadataProto.ChunkListing oldChunkListing)
+ throws IOException, GeneralSecurityException {
+
+ ChunksMetadataProto.ChunkListing newChunkListing =
+ performBackup(
+ tertiaryKey,
+ oldChunkListing.fingerprintMixerSalt,
+ BackupFileBuilder.createForIncremental(mBackupDataOutput, oldChunkListing),
+ getChunkHashes(oldChunkListing));
+
+ throwIfCancelled();
+
+ String oldDocumentId = oldChunkListing.documentId;
+ Slog.v(TAG, "Old doc id: " + oldDocumentId);
+
+ newChunkListing.documentId =
+ mCryptoBackupServer.uploadIncrementalBackup(
+ mPackageName,
+ oldDocumentId,
+ mBackupDataOutput.toByteArray(),
+ wrappedTertiaryKey);
+ return newChunkListing;
+ }
+
+ /**
+ * Signals to the task that the backup has been cancelled. If the upload has not yet started
+ * then the task will not upload any data to the server or save the new chunk listing.
+ */
+ public void cancel() {
+ mCancelled.getAndSet(true);
+ }
+
+ private void throwIfCancelled() {
+ if (mCancelled.get()) {
+ throw new CancellationException("EncryptedBackupTask was cancelled");
+ }
+ }
+
+ private ChunksMetadataProto.ChunkListing performBackup(
+ SecretKey tertiaryKey,
+ @Nullable byte[] fingerprintMixerSalt,
+ BackupFileBuilder backupFileBuilder,
+ Set<ChunkHash> existingChunkHashes)
+ throws IOException, GeneralSecurityException {
+ BackupEncrypter.Result result =
+ mBackupEncrypter.backup(tertiaryKey, fingerprintMixerSalt, existingChunkHashes);
+ backupFileBuilder.writeChunks(result.getAllChunks(), buildChunkMap(result.getNewChunks()));
+
+ ChunksMetadataProto.ChunkOrdering chunkOrdering =
+ backupFileBuilder.getNewChunkOrdering(result.getDigest());
+ backupFileBuilder.finish(buildMetadata(tertiaryKey, chunkOrdering));
+
+ return backupFileBuilder.getNewChunkListing(fingerprintMixerSalt);
+ }
+
+ /** Returns a set containing the hashes of every chunk in the given listing. */
+ private static Set<ChunkHash> getChunkHashes(ChunksMetadataProto.ChunkListing chunkListing) {
+ Set<ChunkHash> hashes = new HashSet<>();
+ for (ChunksMetadataProto.Chunk chunk : chunkListing.chunks) {
+ hashes.add(new ChunkHash(chunk.hash));
+ }
+ return hashes;
+ }
+
+ /** Returns a map from chunk hash to chunk containing every chunk in the given list. */
+ private static Map<ChunkHash, EncryptedChunk> buildChunkMap(List<EncryptedChunk> chunks) {
+ Map<ChunkHash, EncryptedChunk> chunkMap = new HashMap<>();
+ for (EncryptedChunk chunk : chunks) {
+ chunkMap.put(chunk.key(), chunk);
+ }
+ return chunkMap;
+ }
+
+ private ChunksMetadataProto.ChunksMetadata buildMetadata(
+ SecretKey tertiaryKey, ChunksMetadataProto.ChunkOrdering chunkOrdering)
+ throws InvalidKeyException, IllegalBlockSizeException, BadPaddingException,
+ InvalidAlgorithmParameterException, NoSuchAlgorithmException,
+ ShortBufferException, NoSuchPaddingException {
+ ChunksMetadataProto.ChunksMetadata metaData = new ChunksMetadataProto.ChunksMetadata();
+ metaData.cipherType = ChunksMetadataProto.AES_256_GCM;
+ metaData.checksumType = ChunksMetadataProto.SHA_256;
+ metaData.chunkOrdering = encryptChunkOrdering(tertiaryKey, chunkOrdering);
+ return metaData;
+ }
+
+ private byte[] encryptChunkOrdering(
+ SecretKey tertiaryKey, ChunksMetadataProto.ChunkOrdering chunkOrdering)
+ throws InvalidKeyException, IllegalBlockSizeException, BadPaddingException,
+ NoSuchPaddingException, NoSuchAlgorithmException,
+ InvalidAlgorithmParameterException, ShortBufferException {
+ Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM);
+
+ byte[] nonce = generateNonce();
+
+ cipher.init(
+ Cipher.ENCRYPT_MODE,
+ tertiaryKey,
+ new GCMParameterSpec(GCM_TAG_LENGTH_BYTES * BITS_PER_BYTE, nonce));
+
+ byte[] orderingBytes = ChunksMetadataProto.ChunkOrdering.toByteArray(chunkOrdering);
+ // We prepend the nonce to the ordering.
+ byte[] output =
+ Arrays.copyOf(
+ nonce,
+ GCM_NONCE_LENGTH_BYTES + orderingBytes.length + GCM_TAG_LENGTH_BYTES);
+
+ cipher.doFinal(
+ orderingBytes,
+ /*inputOffset=*/ 0,
+ /*inputLen=*/ orderingBytes.length,
+ output,
+ /*outputOffset=*/ GCM_NONCE_LENGTH_BYTES);
+
+ return output;
+ }
+
+ private byte[] generateNonce() {
+ byte[] nonce = new byte[GCM_NONCE_LENGTH_BYTES];
+ mSecureRandom.nextBytes(nonce);
+ return nonce;
+ }
+}
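
The ordering blob produced by encryptChunkOrdering() above has the layout [12-byte nonce][ciphertext of the ChunkOrdering proto][16-byte GCM tag]. A hypothetical restore-side counterpart, reusing the constants and imports of EncryptedBackupTask, would look roughly like this (not part of this change):

    // Hypothetical inverse of encryptChunkOrdering(), shown to document the blob layout.
    static ChunksMetadataProto.ChunkOrdering decryptChunkOrdering(
            SecretKey tertiaryKey, byte[] encryptedOrdering)
            throws GeneralSecurityException, IOException {
        Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM);
        // The nonce occupies the first 12 bytes of the blob.
        cipher.init(
                Cipher.DECRYPT_MODE,
                tertiaryKey,
                new GCMParameterSpec(
                        GCM_TAG_LENGTH_BYTES * BITS_PER_BYTE,
                        encryptedOrdering,
                        /*offset=*/ 0,
                        /*len=*/ GCM_NONCE_LENGTH_BYTES));
        // The remainder is the ciphertext followed by the GCM tag.
        byte[] orderingBytes =
                cipher.doFinal(
                        encryptedOrdering,
                        /*inputOffset=*/ GCM_NONCE_LENGTH_BYTES,
                        /*inputLen=*/ encryptedOrdering.length - GCM_NONCE_LENGTH_BYTES);
        return ChunksMetadataProto.ChunkOrdering.parseFrom(orderingBytes);
    }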
diff --git a/packages/BackupEncryption/src/com/android/server/backup/encryption/tasks/KvBackupEncrypter.java b/packages/BackupEncryption/src/com/android/server/backup/encryption/tasks/KvBackupEncrypter.java
new file mode 100644
index 000000000000..d20cd4c07f88
--- /dev/null
+++ b/packages/BackupEncryption/src/com/android/server/backup/encryption/tasks/KvBackupEncrypter.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.server.backup.encryption.tasks;
+
+import static com.android.internal.util.Preconditions.checkState;
+
+import android.annotation.Nullable;
+import android.app.backup.BackupDataInput;
+
+import com.android.server.backup.encryption.chunk.ChunkHash;
+import com.android.server.backup.encryption.chunking.ChunkEncryptor;
+import com.android.server.backup.encryption.chunking.ChunkHasher;
+import com.android.server.backup.encryption.chunking.EncryptedChunk;
+import com.android.server.backup.encryption.kv.KeyValueListingBuilder;
+import com.android.server.backup.encryption.protos.nano.KeyValueListingProto;
+import com.android.server.backup.encryption.protos.nano.KeyValuePairProto;
+
+import java.io.IOException;
+import java.security.GeneralSecurityException;
+import java.security.InvalidKeyException;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.security.SecureRandom;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+
+import javax.crypto.IllegalBlockSizeException;
+import javax.crypto.SecretKey;
+
+/**
+ * Reads key value backup data from an input, converts each pair into a chunk and encrypts the
+ * chunks.
+ *
+ * <p>The caller should pass in the key value listing from the previous backup, if there is one.
+ * This class emits chunks for both existing and new pairs, using the provided listing to
+ * determine the hashes of pairs that already exist. During the backup it computes the new listing,
+ * which the caller should store on disk and pass in at the start of the next backup.
+ *
+ * <p>Also computes the message digest, which is {@code SHA-256(chunk hashes sorted
+ * lexicographically)}.
+ */
+public class KvBackupEncrypter implements BackupEncrypter {
+ private final BackupDataInput mBackupDataInput;
+
+ private KeyValueListingProto.KeyValueListing mOldKeyValueListing;
+ @Nullable private KeyValueListingBuilder mNewKeyValueListing;
+
+ /**
+ * Constructs a new instance which reads data from the given input.
+ *
+ * <p>By default this performs a non-incremental backup; call {@link #setOldKeyValueListing} to
+ * perform an incremental backup.
+ */
+ public KvBackupEncrypter(BackupDataInput backupDataInput) {
+ mBackupDataInput = backupDataInput;
+ mOldKeyValueListing = KeyValueListingBuilder.emptyListing();
+ }
+
+ /** Sets the old listing to perform incremental backup against. */
+ public void setOldKeyValueListing(KeyValueListingProto.KeyValueListing oldKeyValueListing) {
+ mOldKeyValueListing = oldKeyValueListing;
+ }
+
+ @Override
+ public Result backup(
+ SecretKey secretKey,
+ @Nullable byte[] unusedFingerprintMixerSalt,
+ Set<ChunkHash> unusedExistingChunks)
+ throws IOException, GeneralSecurityException {
+ ChunkHasher chunkHasher = new ChunkHasher(secretKey);
+ ChunkEncryptor chunkEncryptor = new ChunkEncryptor(secretKey, new SecureRandom());
+ mNewKeyValueListing = new KeyValueListingBuilder();
+ List<ChunkHash> allChunks = new ArrayList<>();
+ List<EncryptedChunk> newChunks = new ArrayList<>();
+
+ Map<String, ChunkHash> existingChunksToReuse = buildPairMap(mOldKeyValueListing);
+
+ while (mBackupDataInput.readNextHeader()) {
+ String key = mBackupDataInput.getKey();
+ Optional<byte[]> value = readEntireValue(mBackupDataInput);
+
+ // As this pair exists in the new backup, we don't need to add it from the previous
+ // backup.
+ existingChunksToReuse.remove(key);
+
+ // If the value is not present then this key has been deleted.
+ if (value.isPresent()) {
+ EncryptedChunk newChunk =
+ createEncryptedChunk(chunkHasher, chunkEncryptor, key, value.get());
+ allChunks.add(newChunk.key());
+ newChunks.add(newChunk);
+ mNewKeyValueListing.addPair(key, newChunk.key());
+ }
+ }
+
+ allChunks.addAll(existingChunksToReuse.values());
+
+ mNewKeyValueListing.addAll(existingChunksToReuse);
+
+ return new Result(allChunks, newChunks, createMessageDigest(allChunks));
+ }
+
+ /**
+ * Returns a listing containing the pairs in the new backup.
+ *
+ * <p>You must call {@link #backup} first.
+ */
+ public KeyValueListingProto.KeyValueListing getNewKeyValueListing() {
+ checkState(mNewKeyValueListing != null, "Must call backup() first");
+ return mNewKeyValueListing.build();
+ }
+
+ private static Map<String, ChunkHash> buildPairMap(
+ KeyValueListingProto.KeyValueListing listing) {
+ Map<String, ChunkHash> map = new HashMap<>();
+ for (KeyValueListingProto.KeyValueEntry entry : listing.entries) {
+ map.put(entry.key, new ChunkHash(entry.hash));
+ }
+ return map;
+ }
+
+ private EncryptedChunk createEncryptedChunk(
+ ChunkHasher chunkHasher, ChunkEncryptor chunkEncryptor, String key, byte[] value)
+ throws InvalidKeyException, IllegalBlockSizeException {
+ KeyValuePairProto.KeyValuePair pair = new KeyValuePairProto.KeyValuePair();
+ pair.key = key;
+ pair.value = Arrays.copyOf(value, value.length);
+
+ byte[] plaintext = KeyValuePairProto.KeyValuePair.toByteArray(pair);
+ return chunkEncryptor.encrypt(chunkHasher.computeHash(plaintext), plaintext);
+ }
+
+ private static byte[] createMessageDigest(List<ChunkHash> allChunks)
+ throws NoSuchAlgorithmException {
+ MessageDigest messageDigest =
+ MessageDigest.getInstance(BackupEncrypter.MESSAGE_DIGEST_ALGORITHM);
+ // TODO:b/141531271 Extract sorted chunks code to utility class
+ List<ChunkHash> sortedChunks = new ArrayList<>(allChunks);
+ Collections.sort(sortedChunks);
+ for (ChunkHash hash : sortedChunks) {
+ messageDigest.update(hash.getHash());
+ }
+ return messageDigest.digest();
+ }
+
+ private static Optional<byte[]> readEntireValue(BackupDataInput input) throws IOException {
+ // A negative data size indicates that this key should be deleted.
+ if (input.getDataSize() < 0) {
+ return Optional.empty();
+ }
+
+ byte[] value = new byte[input.getDataSize()];
+ int bytesRead = 0;
+ while (bytesRead < value.length) {
+ bytesRead += input.readEntityData(value, bytesRead, value.length - bytesRead);
+ }
+ return Optional.of(value);
+ }
+}
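
A hypothetical driver for the class above, not part of this change, showing the expected call order (construct, optionally setOldKeyValueListing(), backup(), then getNewKeyValueListing()). It reuses the imports already present in KvBackupEncrypter.java and assumes the caller supplies the data input, old listing and tertiary key.

    // Hypothetical driver showing the expected KvBackupEncrypter call order.
    static KeyValueListingProto.KeyValueListing encryptKeyValueData(
            BackupDataInput backupDataInput,
            KeyValueListingProto.KeyValueListing oldListing,
            SecretKey tertiaryKey)
            throws IOException, GeneralSecurityException {
        KvBackupEncrypter encrypter = new KvBackupEncrypter(backupDataInput);
        encrypter.setOldKeyValueListing(oldListing); // omit this call for a non-incremental backup

        // The salt and existing-chunks arguments are unused by the key-value implementation.
        BackupEncrypter.Result result =
                encrypter.backup(
                        tertiaryKey,
                        /*unusedFingerprintMixerSalt=*/ null,
                        /*unusedExistingChunks=*/ Collections.emptySet());

        // result.getAllChunks() names every pair in this backup (reused and new),
        // result.getNewChunks() carries ciphertext only for pairs added or changed since
        // oldListing, and result.getDigest() is SHA-256 over the sorted chunk hashes.
        // The caller persists the new listing so the next backup can be incremental.
        return encrypter.getNewKeyValueListing();
    }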
diff --git a/packages/BackupEncryption/test/robolectric/Android.bp b/packages/BackupEncryption/test/robolectric/Android.bp
index 4e42ce7366f0..2a36dcf0baba 100644
--- a/packages/BackupEncryption/test/robolectric/Android.bp
+++ b/packages/BackupEncryption/test/robolectric/Android.bp
@@ -16,7 +16,7 @@ android_robolectric_test {
name: "BackupEncryptionRoboTests",
srcs: [
"src/**/*.java",
- ":FrameworksServicesRoboShadows",
+// ":FrameworksServicesRoboShadows",
],
java_resource_dirs: ["config"],
libs: [
diff --git a/packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/chunking/BackupFileBuilderTest.java b/packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/chunking/BackupFileBuilderTest.java
new file mode 100644
index 000000000000..590938efe148
--- /dev/null
+++ b/packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/chunking/BackupFileBuilderTest.java
@@ -0,0 +1,614 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.server.backup.encryption.chunking;
+
+import static com.android.server.backup.encryption.protos.nano.ChunksMetadataProto.AES_256_GCM;
+import static com.android.server.backup.encryption.protos.nano.ChunksMetadataProto.CHUNK_ORDERING_TYPE_UNSPECIFIED;
+import static com.android.server.backup.testing.CryptoTestUtils.newChunk;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assertWithMessage;
+
+import static junit.framework.Assert.fail;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.testng.Assert.assertThrows;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import android.platform.test.annotations.Presubmit;
+
+import com.android.server.backup.encryption.chunk.ChunkHash;
+import com.android.server.backup.encryption.protos.nano.ChunksMetadataProto;
+import com.android.server.backup.encryption.testing.DiffScriptProcessor;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.io.Files;
+import com.google.common.primitives.Bytes;
+import com.google.common.primitives.Longs;
+
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.junit.runner.RunWith;
+import org.robolectric.RobolectricTestRunner;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+@RunWith(RobolectricTestRunner.class)
+@Presubmit
+public class BackupFileBuilderTest {
+ private static final String TEST_DATA_1 =
+ "I'm already there or close to [T7-9/executive level] in terms of big-picture vision";
+ private static final String TEST_DATA_2 =
+ "I was known for Real Games and should have been brought in for advice";
+ private static final String TEST_DATA_3 =
+ "Pride is rooted in the delusional belief held by all humans in an unchanging self";
+
+ private static final byte[] TEST_FINGERPRINT_MIXER_SALT =
+ Arrays.copyOf(new byte[] {22}, ChunkHash.HASH_LENGTH_BYTES);
+
+ private static final ChunkHash TEST_HASH_1 =
+ new ChunkHash(Arrays.copyOf(new byte[] {0}, EncryptedChunk.KEY_LENGTH_BYTES));
+ private static final ChunkHash TEST_HASH_2 =
+ new ChunkHash(Arrays.copyOf(new byte[] {1}, EncryptedChunk.KEY_LENGTH_BYTES));
+ private static final ChunkHash TEST_HASH_3 =
+ new ChunkHash(Arrays.copyOf(new byte[] {2}, EncryptedChunk.KEY_LENGTH_BYTES));
+
+ private static final byte[] TEST_NONCE =
+ Arrays.copyOf(new byte[] {3}, EncryptedChunk.NONCE_LENGTH_BYTES);
+
+ private static final EncryptedChunk TEST_CHUNK_1 =
+ EncryptedChunk.create(TEST_HASH_1, TEST_NONCE, TEST_DATA_1.getBytes(UTF_8));
+ private static final EncryptedChunk TEST_CHUNK_2 =
+ EncryptedChunk.create(TEST_HASH_2, TEST_NONCE, TEST_DATA_2.getBytes(UTF_8));
+ private static final EncryptedChunk TEST_CHUNK_3 =
+ EncryptedChunk.create(TEST_HASH_3, TEST_NONCE, TEST_DATA_3.getBytes(UTF_8));
+
+ private static final byte[] TEST_CHECKSUM = {1, 2, 3, 4, 5, 6};
+
+ @Rule public TemporaryFolder mTemporaryFolder = new TemporaryFolder();
+
+ private File mOldFile;
+ private ChunksMetadataProto.ChunkListing mOldChunkListing;
+ private EncryptedChunkEncoder mEncryptedChunkEncoder;
+
+ @Before
+ public void setUp() {
+ mEncryptedChunkEncoder = new LengthlessEncryptedChunkEncoder();
+ }
+
+ @Test
+ public void writeChunks_nonIncremental_writesCorrectRawData() throws Exception {
+ ByteArrayOutputStream output = new ByteArrayOutputStream();
+ BackupFileBuilder backupFileBuilder = BackupFileBuilder.createForNonIncremental(output);
+
+ backupFileBuilder.writeChunks(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_2),
+ getNewChunkMap(TEST_HASH_1, TEST_HASH_2));
+
+ byte[] actual = output.toByteArray();
+ byte[] expected =
+ Bytes.concat(
+ TEST_CHUNK_1.nonce(),
+ TEST_CHUNK_1.encryptedBytes(),
+ TEST_CHUNK_2.nonce(),
+ TEST_CHUNK_2.encryptedBytes());
+ assertThat(actual).asList().containsExactlyElementsIn(Bytes.asList(expected)).inOrder();
+ }
+
+ @Test
+ public void writeChunks_nonIncrementalWithDuplicates_writesEachChunkOnlyOnce()
+ throws Exception {
+ ByteArrayOutputStream output = new ByteArrayOutputStream();
+ BackupFileBuilder backupFileBuilder = BackupFileBuilder.createForNonIncremental(output);
+
+ backupFileBuilder.writeChunks(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_2, TEST_HASH_1),
+ getNewChunkMap(TEST_HASH_1, TEST_HASH_2));
+
+ byte[] actual = output.toByteArray();
+ byte[] expected =
+ Bytes.concat(
+ TEST_CHUNK_1.nonce(),
+ TEST_CHUNK_1.encryptedBytes(),
+ TEST_CHUNK_2.nonce(),
+ TEST_CHUNK_2.encryptedBytes());
+ assertThat(actual).asList().containsExactlyElementsIn(Bytes.asList(expected)).inOrder();
+ }
+
+ @Test
+ public void writeChunks_incremental_writesParsableDiffScript() throws Exception {
+ // We will insert chunk 2 in between chunks 1 and 3.
+ setUpOldBackupWithChunks(ImmutableList.of(TEST_CHUNK_1, TEST_CHUNK_3));
+ ByteArrayOutputStream diffOutputStream = new ByteArrayOutputStream();
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(diffOutputStream, mOldChunkListing);
+
+ backupFileBuilder.writeChunks(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_2, TEST_HASH_3),
+ getNewChunkMap(TEST_HASH_2));
+ backupFileBuilder.finish(getTestMetadata());
+
+ byte[] actual =
+ stripMetadataAndPositionFromOutput(parseDiffScript(diffOutputStream.toByteArray()));
+ byte[] expected =
+ Bytes.concat(
+ TEST_CHUNK_1.nonce(),
+ TEST_CHUNK_1.encryptedBytes(),
+ TEST_CHUNK_2.nonce(),
+ TEST_CHUNK_2.encryptedBytes(),
+ TEST_CHUNK_3.nonce(),
+ TEST_CHUNK_3.encryptedBytes());
+ assertThat(actual).asList().containsExactlyElementsIn(Bytes.asList(expected)).inOrder();
+ }
+
+ @Test
+ public void writeChunks_incrementalWithDuplicates_writesEachChunkOnlyOnce() throws Exception {
+ // We will insert chunk 2 twice in between chunks 1 and 3.
+ setUpOldBackupWithChunks(ImmutableList.of(TEST_CHUNK_1, TEST_CHUNK_3));
+ ByteArrayOutputStream diffOutputStream = new ByteArrayOutputStream();
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(diffOutputStream, mOldChunkListing);
+
+ backupFileBuilder.writeChunks(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_2, TEST_HASH_2, TEST_HASH_3),
+ getNewChunkMap(TEST_HASH_2));
+ backupFileBuilder.finish(getTestMetadata());
+
+ byte[] actual =
+ stripMetadataAndPositionFromOutput(parseDiffScript(diffOutputStream.toByteArray()));
+ byte[] expected =
+ Bytes.concat(
+ TEST_CHUNK_1.nonce(),
+ TEST_CHUNK_1.encryptedBytes(),
+ TEST_CHUNK_2.nonce(),
+ TEST_CHUNK_2.encryptedBytes(),
+ TEST_CHUNK_3.nonce(),
+ TEST_CHUNK_3.encryptedBytes());
+ assertThat(actual).asList().containsExactlyElementsIn(Bytes.asList(expected)).inOrder();
+ }
+
+ @Test
+ public void writeChunks_writesChunksInOrderOfHash() throws Exception {
+ setUpOldBackupWithChunks(ImmutableList.of());
+ ByteArrayOutputStream diffOutputStream = new ByteArrayOutputStream();
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(diffOutputStream, mOldChunkListing);
+
+ // Write chunks out of order.
+ backupFileBuilder.writeChunks(
+ ImmutableList.of(TEST_HASH_2, TEST_HASH_1),
+ getNewChunkMap(TEST_HASH_2, TEST_HASH_1));
+ backupFileBuilder.finish(getTestMetadata());
+
+ byte[] actual =
+ stripMetadataAndPositionFromOutput(parseDiffScript(diffOutputStream.toByteArray()));
+ byte[] expected =
+ Bytes.concat(
+ TEST_CHUNK_1.nonce(),
+ TEST_CHUNK_1.encryptedBytes(),
+ TEST_CHUNK_2.nonce(),
+ TEST_CHUNK_2.encryptedBytes());
+ assertThat(actual).asList().containsExactlyElementsIn(Bytes.asList(expected)).inOrder();
+ }
+
+ @Test
+ public void writeChunks_alreadyFlushed_throwsException() throws Exception {
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(
+ new ByteArrayOutputStream(), new ChunksMetadataProto.ChunkListing());
+ backupFileBuilder.finish(getTestMetadata());
+
+ assertThrows(
+ IllegalStateException.class,
+ () -> backupFileBuilder.writeChunks(ImmutableList.of(), getNewChunkMap()));
+ }
+
+ @Test
+ public void getNewChunkListing_hasChunksInOrderOfKey() throws Exception {
+ // We will insert chunk 2 in between chunks 1 and 3.
+ setUpOldBackupWithChunks(ImmutableList.of(TEST_CHUNK_1, TEST_CHUNK_3));
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(
+ new ByteArrayOutputStream(), mOldChunkListing);
+
+ // Write chunks out of order.
+ backupFileBuilder.writeChunks(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_3, TEST_HASH_2),
+ getNewChunkMap(TEST_HASH_2));
+ backupFileBuilder.finish(getTestMetadata());
+
+ ChunksMetadataProto.ChunkListing expected = expectedChunkListing();
+ ChunksMetadataProto.ChunkListing actual =
+ backupFileBuilder.getNewChunkListing(TEST_FINGERPRINT_MIXER_SALT);
+ assertListingsEqual(actual, expected);
+ }
+
+ @Test
+ public void getNewChunkListing_writeChunksInTwoBatches_returnsListingContainingAllChunks()
+ throws Exception {
+ // We will insert chunk 2 in between chunks 1 and 3.
+ setUpOldBackupWithChunks(ImmutableList.of(TEST_CHUNK_1, TEST_CHUNK_3));
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(
+ new ByteArrayOutputStream(), mOldChunkListing);
+
+ backupFileBuilder.writeChunks(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_2), getNewChunkMap(TEST_HASH_2));
+ backupFileBuilder.writeChunks(ImmutableList.of(TEST_HASH_3), getNewChunkMap(TEST_HASH_2));
+ backupFileBuilder.finish(getTestMetadata());
+
+ ChunksMetadataProto.ChunkListing expected = expectedChunkListing();
+ ChunksMetadataProto.ChunkListing actual =
+ backupFileBuilder.getNewChunkListing(TEST_FINGERPRINT_MIXER_SALT);
+ assertListingsEqual(actual, expected);
+ }
+
+ @Test
+ public void getNewChunkListing_writeDuplicateChunks_writesEachChunkOnlyOnce() throws Exception {
+ // We will append [2][3][3][2] onto [1].
+ setUpOldBackupWithChunks(ImmutableList.of(TEST_CHUNK_1));
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(
+ new ByteArrayOutputStream(), mOldChunkListing);
+
+ backupFileBuilder.writeChunks(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_2, TEST_HASH_3),
+ getNewChunkMap(TEST_HASH_3, TEST_HASH_2));
+ backupFileBuilder.writeChunks(
+ ImmutableList.of(TEST_HASH_3, TEST_HASH_2),
+ getNewChunkMap(TEST_HASH_3, TEST_HASH_2));
+ backupFileBuilder.finish(getTestMetadata());
+
+ ChunksMetadataProto.ChunkListing expected = expectedChunkListing();
+ ChunksMetadataProto.ChunkListing actual =
+ backupFileBuilder.getNewChunkListing(TEST_FINGERPRINT_MIXER_SALT);
+ assertListingsEqual(actual, expected);
+ }
+
+ @Test
+ public void getNewChunkListing_nonIncrementalWithNoSalt_doesNotThrowOnSerialisation() {
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForNonIncremental(new ByteArrayOutputStream());
+
+ ChunksMetadataProto.ChunkListing newChunkListing =
+ backupFileBuilder.getNewChunkListing(/*fingerprintMixerSalt=*/ null);
+
+ // Does not throw.
+ ChunksMetadataProto.ChunkListing.toByteArray(newChunkListing);
+ }
+
+ @Test
+ public void getNewChunkListing_incrementalWithNoSalt_doesNotThrowOnSerialisation()
+ throws Exception {
+
+ setUpOldBackupWithChunks(ImmutableList.of());
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(
+ new ByteArrayOutputStream(), mOldChunkListing);
+
+ ChunksMetadataProto.ChunkListing newChunkListing =
+ backupFileBuilder.getNewChunkListing(/*fingerprintMixerSalt=*/ null);
+
+ // Does not throw.
+ ChunksMetadataProto.ChunkListing.toByteArray(newChunkListing);
+ }
+
+ @Test
+ public void getNewChunkListing_nonIncrementalWithNoSalt_hasEmptySalt() {
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForNonIncremental(new ByteArrayOutputStream());
+
+ ChunksMetadataProto.ChunkListing newChunkListing =
+ backupFileBuilder.getNewChunkListing(/*fingerprintMixerSalt=*/ null);
+
+ assertThat(newChunkListing.fingerprintMixerSalt).isEmpty();
+ }
+
+ @Test
+ public void getNewChunkListing_incrementalWithNoSalt_hasEmptySalt() throws Exception {
+ setUpOldBackupWithChunks(ImmutableList.of());
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(
+ new ByteArrayOutputStream(), mOldChunkListing);
+
+ ChunksMetadataProto.ChunkListing newChunkListing =
+ backupFileBuilder.getNewChunkListing(/*fingerprintMixerSalt=*/ null);
+
+ assertThat(newChunkListing.fingerprintMixerSalt).isEmpty();
+ }
+
+ @Test
+ public void getNewChunkListing_nonIncrementalWithSalt_hasGivenSalt() {
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForNonIncremental(new ByteArrayOutputStream());
+
+ ChunksMetadataProto.ChunkListing newChunkListing =
+ backupFileBuilder.getNewChunkListing(TEST_FINGERPRINT_MIXER_SALT);
+
+ assertThat(newChunkListing.fingerprintMixerSalt).isEqualTo(TEST_FINGERPRINT_MIXER_SALT);
+ }
+
+ @Test
+ public void getNewChunkListing_incrementalWithSalt_hasGivenSalt() throws Exception {
+ setUpOldBackupWithChunks(ImmutableList.of());
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(
+ new ByteArrayOutputStream(), mOldChunkListing);
+
+ ChunksMetadataProto.ChunkListing newChunkListing =
+ backupFileBuilder.getNewChunkListing(TEST_FINGERPRINT_MIXER_SALT);
+
+ assertThat(newChunkListing.fingerprintMixerSalt).isEqualTo(TEST_FINGERPRINT_MIXER_SALT);
+ }
+
+ @Test
+ public void getNewChunkListing_nonIncremental_hasCorrectCipherTypeAndChunkOrderingType() {
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForNonIncremental(new ByteArrayOutputStream());
+
+ ChunksMetadataProto.ChunkListing newChunkListing =
+ backupFileBuilder.getNewChunkListing(/*fingerprintMixerSalt=*/ null);
+
+ assertThat(newChunkListing.cipherType).isEqualTo(ChunksMetadataProto.AES_256_GCM);
+ assertThat(newChunkListing.chunkOrderingType)
+ .isEqualTo(ChunksMetadataProto.CHUNK_ORDERING_TYPE_UNSPECIFIED);
+ }
+
+ @Test
+ public void getNewChunkListing_incremental_hasCorrectCipherTypeAndChunkOrderingType()
+ throws Exception {
+ setUpOldBackupWithChunks(ImmutableList.of());
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(
+ new ByteArrayOutputStream(), mOldChunkListing);
+
+ ChunksMetadataProto.ChunkListing newChunkListing =
+ backupFileBuilder.getNewChunkListing(/*fingerprintMixerSalt=*/ null);
+
+ assertThat(newChunkListing.cipherType).isEqualTo(ChunksMetadataProto.AES_256_GCM);
+ assertThat(newChunkListing.chunkOrderingType)
+ .isEqualTo(ChunksMetadataProto.CHUNK_ORDERING_TYPE_UNSPECIFIED);
+ }
+
+ @Test
+ public void getNewChunkOrdering_chunksHaveCorrectStartPositions() throws Exception {
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(
+ new ByteArrayOutputStream(), new ChunksMetadataProto.ChunkListing());
+
+ // Write out of order by key to check that ordering is maintained.
+ backupFileBuilder.writeChunks(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_3, TEST_HASH_2),
+ getNewChunkMap(TEST_HASH_1, TEST_HASH_3, TEST_HASH_2));
+ backupFileBuilder.finish(getTestMetadata());
+
+ ChunksMetadataProto.ChunkOrdering actual =
+ backupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM);
+ // The chunks are listed in the order they are written above, but the start positions are
+ // determined by the order in the encrypted blob (which is lexicographical by key).
+ int chunk1Start = 0;
+ int chunk2Start =
+ chunk1Start + mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_1);
+ int chunk3Start =
+ chunk2Start + mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_2);
+
+ int[] expected = {chunk1Start, chunk3Start, chunk2Start};
+ assertThat(actual.starts.length).isEqualTo(expected.length);
+ for (int i = 0; i < actual.starts.length; i++) {
+ assertThat(expected[i]).isEqualTo(actual.starts[i]);
+ }
+ }
+
+ @Test
+ public void getNewChunkOrdering_duplicateChunks_writesDuplicates() throws Exception {
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(
+ new ByteArrayOutputStream(), new ChunksMetadataProto.ChunkListing());
+
+ backupFileBuilder.writeChunks(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_2, TEST_HASH_2),
+ getNewChunkMap(TEST_HASH_1, TEST_HASH_2));
+ backupFileBuilder.writeChunks(
+ ImmutableList.of(TEST_HASH_3, TEST_HASH_3), getNewChunkMap(TEST_HASH_3));
+ backupFileBuilder.finish(getTestMetadata());
+
+ ChunksMetadataProto.ChunkOrdering actual =
+ backupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM);
+ int chunk1Start = 0;
+ int chunk2Start =
+ chunk1Start + mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_1);
+ int chunk3Start =
+ chunk2Start + mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_2);
+
+ int[] expected = {chunk1Start, chunk2Start, chunk2Start, chunk3Start, chunk3Start};
+ assertThat(actual.starts.length).isEqualTo(expected.length);
+ for (int i = 0; i < actual.starts.length; i++) {
+ assertThat(expected[i]).isEqualTo(actual.starts[i]);
+ }
+ }
+
+ @Test
+ public void getNewChunkOrdering_returnsOrderingWithChecksum() throws Exception {
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(
+ new ByteArrayOutputStream(), new ChunksMetadataProto.ChunkListing());
+
+ backupFileBuilder.writeChunks(ImmutableList.of(TEST_HASH_1), getNewChunkMap(TEST_HASH_1));
+ backupFileBuilder.finish(getTestMetadata());
+
+ ChunksMetadataProto.ChunkOrdering actual =
+ backupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM);
+ assertThat(actual.checksum).isEqualTo(TEST_CHECKSUM);
+ }
+
+ @Test
+ public void finish_writesMetadata() throws Exception {
+ ByteArrayOutputStream output = new ByteArrayOutputStream();
+ BackupFileBuilder builder = BackupFileBuilder.createForNonIncremental(output);
+ ChunksMetadataProto.ChunksMetadata expectedMetadata = getTestMetadata();
+
+ builder.finish(expectedMetadata);
+
+ // The output is [metadata]+[8-byte long giving the start position of the metadata].
+ byte[] metadataBytes =
+ Arrays.copyOfRange(output.toByteArray(), 0, output.size() - Long.BYTES);
+ ChunksMetadataProto.ChunksMetadata actualMetadata =
+ ChunksMetadataProto.ChunksMetadata.parseFrom(metadataBytes);
+ assertThat(actualMetadata.checksumType).isEqualTo(ChunksMetadataProto.SHA_256);
+ assertThat(actualMetadata.cipherType).isEqualTo(ChunksMetadataProto.AES_256_GCM);
+ }
+
+ @Test
+ public void finish_writesMetadataPosition() throws Exception {
+ ByteArrayOutputStream output = new ByteArrayOutputStream();
+ BackupFileBuilder builder = BackupFileBuilder.createForNonIncremental(output);
+
+ builder.writeChunks(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_2),
+ getNewChunkMap(TEST_HASH_1, TEST_HASH_2));
+ builder.writeChunks(ImmutableList.of(TEST_HASH_3), getNewChunkMap(TEST_HASH_3));
+ builder.finish(getTestMetadata());
+
+ long expectedPosition =
+ (long) mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_1)
+ + mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_2)
+ + mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_3);
+ long actualPosition =
+ Longs.fromByteArray(
+ Arrays.copyOfRange(
+ output.toByteArray(), output.size() - Long.BYTES, output.size()));
+ assertThat(actualPosition).isEqualTo(expectedPosition);
+ }
+
+ @Test
+ public void finish_flushesOutputStream() throws Exception {
+ OutputStream diffOutputStream = mock(OutputStream.class);
+ BackupFileBuilder backupFileBuilder =
+ BackupFileBuilder.createForIncremental(
+ diffOutputStream, new ChunksMetadataProto.ChunkListing());
+
+ backupFileBuilder.writeChunks(ImmutableList.of(TEST_HASH_1), getNewChunkMap(TEST_HASH_1));
+ backupFileBuilder.finish(getTestMetadata());
+
+ verify(diffOutputStream).flush();
+ }
+
+ private void setUpOldBackupWithChunks(List<EncryptedChunk> chunks) throws Exception {
+ mOldFile = mTemporaryFolder.newFile();
+ ChunksMetadataProto.ChunkListing chunkListing = new ChunksMetadataProto.ChunkListing();
+ chunkListing.fingerprintMixerSalt =
+ Arrays.copyOf(TEST_FINGERPRINT_MIXER_SALT, TEST_FINGERPRINT_MIXER_SALT.length);
+ chunkListing.cipherType = AES_256_GCM;
+ chunkListing.chunkOrderingType = CHUNK_ORDERING_TYPE_UNSPECIFIED;
+
+ List<ChunksMetadataProto.Chunk> knownChunks = new ArrayList<>();
+ try (FileOutputStream outputStream = new FileOutputStream(mOldFile)) {
+ for (EncryptedChunk chunk : chunks) {
+ // Chunks are encoded in the format [nonce]+[data].
+ outputStream.write(chunk.nonce());
+ outputStream.write(chunk.encryptedBytes());
+
+ knownChunks.add(createChunkFor(chunk));
+ }
+
+ outputStream.flush();
+ }
+
+ chunkListing.chunks = knownChunks.toArray(new ChunksMetadataProto.Chunk[0]);
+ mOldChunkListing = chunkListing;
+ }
+
+ private byte[] parseDiffScript(byte[] diffScript) throws Exception {
+ File newFile = mTemporaryFolder.newFile();
+ new DiffScriptProcessor(mOldFile, newFile).process(new ByteArrayInputStream(diffScript));
+ return Files.toByteArray(newFile);
+ }
+
+ private void assertListingsEqual(
+ ChunksMetadataProto.ChunkListing result, ChunksMetadataProto.ChunkListing expected) {
+ assertThat(result.chunks.length).isEqualTo(expected.chunks.length);
+ for (int i = 0; i < result.chunks.length; i++) {
+ assertWithMessage("Chunk " + i)
+ .that(result.chunks[i].length)
+ .isEqualTo(expected.chunks[i].length);
+ assertWithMessage("Chunk " + i)
+ .that(result.chunks[i].hash)
+ .isEqualTo(expected.chunks[i].hash);
+ }
+ }
+
+ private static ImmutableMap<ChunkHash, EncryptedChunk> getNewChunkMap(ChunkHash... hashes) {
+ ImmutableMap.Builder<ChunkHash, EncryptedChunk> builder = ImmutableMap.builder();
+ for (ChunkHash hash : hashes) {
+ if (TEST_HASH_1.equals(hash)) {
+ builder.put(TEST_HASH_1, TEST_CHUNK_1);
+ } else if (TEST_HASH_2.equals(hash)) {
+ builder.put(TEST_HASH_2, TEST_CHUNK_2);
+ } else if (TEST_HASH_3.equals(hash)) {
+ builder.put(TEST_HASH_3, TEST_CHUNK_3);
+ } else {
+ fail("Hash was not recognised: " + hash);
+ }
+ }
+ return builder.build();
+ }
+
+ private static ChunksMetadataProto.ChunksMetadata getTestMetadata() {
+ ChunksMetadataProto.ChunksMetadata metadata = new ChunksMetadataProto.ChunksMetadata();
+ metadata.checksumType = ChunksMetadataProto.SHA_256;
+ metadata.cipherType = AES_256_GCM;
+ return metadata;
+ }
+
+ private static byte[] stripMetadataAndPositionFromOutput(byte[] output) {
+ long metadataStart =
+ Longs.fromByteArray(
+ Arrays.copyOfRange(output, output.length - Long.BYTES, output.length));
+ return Arrays.copyOfRange(output, 0, (int) metadataStart);
+ }
+
+ private ChunksMetadataProto.ChunkListing expectedChunkListing() {
+ ChunksMetadataProto.ChunkListing chunkListing = new ChunksMetadataProto.ChunkListing();
+ chunkListing.fingerprintMixerSalt =
+ Arrays.copyOf(TEST_FINGERPRINT_MIXER_SALT, TEST_FINGERPRINT_MIXER_SALT.length);
+ chunkListing.cipherType = AES_256_GCM;
+ chunkListing.chunkOrderingType = CHUNK_ORDERING_TYPE_UNSPECIFIED;
+ chunkListing.chunks = new ChunksMetadataProto.Chunk[3];
+ chunkListing.chunks[0] = createChunkFor(TEST_CHUNK_1);
+ chunkListing.chunks[1] = createChunkFor(TEST_CHUNK_2);
+ chunkListing.chunks[2] = createChunkFor(TEST_CHUNK_3);
+ return chunkListing;
+ }
+
+ private ChunksMetadataProto.Chunk createChunkFor(EncryptedChunk encryptedChunk) {
+ byte[] chunkHash = encryptedChunk.key().getHash();
+ byte[] hashCopy = Arrays.copyOf(chunkHash, chunkHash.length);
+ return newChunk(hashCopy, mEncryptedChunkEncoder.getEncodedLengthOfChunk(encryptedChunk));
+ }
+}
diff --git a/packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/tasks/EncryptedBackupTaskTest.java b/packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/tasks/EncryptedBackupTaskTest.java
new file mode 100644
index 000000000000..f6914efd6d83
--- /dev/null
+++ b/packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/tasks/EncryptedBackupTaskTest.java
@@ -0,0 +1,397 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.server.backup.encryption.tasks;
+
+import static com.android.server.backup.encryption.protos.nano.ChunksMetadataProto.AES_256_GCM;
+import static com.android.server.backup.encryption.protos.nano.ChunksMetadataProto.CHUNK_ORDERING_TYPE_UNSPECIFIED;
+import static com.android.server.backup.encryption.protos.nano.ChunksMetadataProto.SHA_256;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertThrows;
+
+import android.platform.test.annotations.Presubmit;
+
+import com.android.server.backup.encryption.chunk.ChunkHash;
+import com.android.server.backup.encryption.chunking.BackupFileBuilder;
+import com.android.server.backup.encryption.chunking.EncryptedChunk;
+import com.android.server.backup.encryption.chunking.EncryptedChunkEncoder;
+import com.android.server.backup.encryption.chunking.LengthlessEncryptedChunkEncoder;
+import com.android.server.backup.encryption.client.CryptoBackupServer;
+import com.android.server.backup.encryption.keys.TertiaryKeyGenerator;
+import com.android.server.backup.encryption.protos.nano.ChunksMetadataProto;
+import com.android.server.backup.encryption.protos.nano.ChunksMetadataProto.ChunkListing;
+import com.android.server.backup.encryption.protos.nano.ChunksMetadataProto.ChunkOrdering;
+import com.android.server.backup.encryption.protos.nano.ChunksMetadataProto.ChunksMetadata;
+import com.android.server.backup.encryption.protos.nano.WrappedKeyProto.WrappedKey;
+import com.android.server.backup.encryption.tasks.BackupEncrypter.Result;
+import com.android.server.backup.testing.CryptoTestUtils;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.protobuf.nano.MessageNano;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.robolectric.RobolectricTestRunner;
+import org.robolectric.annotation.Config;
+import org.robolectric.annotation.Implementation;
+import org.robolectric.annotation.Implements;
+
+import java.io.OutputStream;
+import java.security.SecureRandom;
+import java.util.Arrays;
+import java.util.concurrent.CancellationException;
+
+import javax.crypto.Cipher;
+import javax.crypto.SecretKey;
+import javax.crypto.spec.GCMParameterSpec;
+
+@Config(shadows = {EncryptedBackupTaskTest.ShadowBackupFileBuilder.class})
+@RunWith(RobolectricTestRunner.class)
+@Presubmit
+public class EncryptedBackupTaskTest {
+
+ private static final String CIPHER_ALGORITHM = "AES/GCM/NoPadding";
+ private static final int GCM_NONCE_LENGTH_BYTES = 12;
+ private static final int GCM_TAG_LENGTH_BYTES = 16;
+ private static final int BITS_PER_BYTE = 8;
+
+ private static final byte[] TEST_FINGERPRINT_MIXER_SALT =
+ Arrays.copyOf(new byte[] {22}, ChunkHash.HASH_LENGTH_BYTES);
+
+ private static final byte[] TEST_NONCE =
+ Arrays.copyOf(new byte[] {55}, EncryptedChunk.NONCE_LENGTH_BYTES);
+
+ private static final ChunkHash TEST_HASH_1 =
+ new ChunkHash(Arrays.copyOf(new byte[] {1}, ChunkHash.HASH_LENGTH_BYTES));
+ private static final ChunkHash TEST_HASH_2 =
+ new ChunkHash(Arrays.copyOf(new byte[] {2}, ChunkHash.HASH_LENGTH_BYTES));
+ private static final ChunkHash TEST_HASH_3 =
+ new ChunkHash(Arrays.copyOf(new byte[] {3}, ChunkHash.HASH_LENGTH_BYTES));
+
+ private static final EncryptedChunk TEST_CHUNK_1 =
+ EncryptedChunk.create(TEST_HASH_1, TEST_NONCE, new byte[] {1, 2, 3, 4, 5});
+ private static final EncryptedChunk TEST_CHUNK_2 =
+ EncryptedChunk.create(TEST_HASH_2, TEST_NONCE, new byte[] {6, 7, 8, 9, 10});
+ private static final EncryptedChunk TEST_CHUNK_3 =
+ EncryptedChunk.create(TEST_HASH_3, TEST_NONCE, new byte[] {11, 12, 13, 14, 15});
+
+    private static final byte[] TEST_CHECKSUM = Arrays.copyOf(new byte[] {10}, 256 / 8);
+ private static final String TEST_PACKAGE_NAME = "com.example.package";
+ private static final String TEST_OLD_DOCUMENT_ID = "old_doc_1";
+ private static final String TEST_NEW_DOCUMENT_ID = "new_doc_1";
+
+ @Captor private ArgumentCaptor<ChunksMetadata> mMetadataCaptor;
+
+ @Mock private CryptoBackupServer mCryptoBackupServer;
+ @Mock private BackupEncrypter mBackupEncrypter;
+ @Mock private BackupFileBuilder mBackupFileBuilder;
+
+ private ChunkListing mOldChunkListing;
+ private SecretKey mTertiaryKey;
+ private WrappedKey mWrappedTertiaryKey;
+ private EncryptedChunkEncoder mEncryptedChunkEncoder;
+ private EncryptedBackupTask mTask;
+
+ @Before
+ public void setUp() throws Exception {
+ MockitoAnnotations.initMocks(this);
+
+ SecureRandom secureRandom = new SecureRandom();
+ mTertiaryKey = new TertiaryKeyGenerator(secureRandom).generate();
+ mWrappedTertiaryKey = new WrappedKey();
+
+ mEncryptedChunkEncoder = new LengthlessEncryptedChunkEncoder();
+
+ ShadowBackupFileBuilder.sInstance = mBackupFileBuilder;
+
+ mTask =
+ new EncryptedBackupTask(
+ mCryptoBackupServer, secureRandom, TEST_PACKAGE_NAME, mBackupEncrypter);
+ }
+
+ @Test
+ public void performNonIncrementalBackup_performsBackup() throws Exception {
+ setUpWithoutExistingBackup();
+
+ // Chunk listing and ordering don't matter for this test.
+ when(mBackupFileBuilder.getNewChunkListing(any())).thenReturn(new ChunkListing());
+ when(mBackupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM)).thenReturn(new ChunkOrdering());
+
+ when(mCryptoBackupServer.uploadNonIncrementalBackup(eq(TEST_PACKAGE_NAME), any(), any()))
+ .thenReturn(TEST_NEW_DOCUMENT_ID);
+
+ mTask.performNonIncrementalBackup(
+ mTertiaryKey, mWrappedTertiaryKey, TEST_FINGERPRINT_MIXER_SALT);
+
+ verify(mBackupFileBuilder)
+ .writeChunks(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_2),
+ ImmutableMap.of(TEST_HASH_1, TEST_CHUNK_1, TEST_HASH_2, TEST_CHUNK_2));
+ verify(mBackupFileBuilder).finish(any());
+ verify(mCryptoBackupServer)
+ .uploadNonIncrementalBackup(eq(TEST_PACKAGE_NAME), any(), eq(mWrappedTertiaryKey));
+ }
+
+ @Test
+ public void performIncrementalBackup_performsBackup() throws Exception {
+ setUpWithExistingBackup();
+
+ // Chunk listing and ordering don't matter for this test.
+ when(mBackupFileBuilder.getNewChunkListing(any())).thenReturn(new ChunkListing());
+ when(mBackupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM)).thenReturn(new ChunkOrdering());
+
+ when(mCryptoBackupServer.uploadIncrementalBackup(
+ eq(TEST_PACKAGE_NAME), eq(TEST_OLD_DOCUMENT_ID), any(), any()))
+ .thenReturn(TEST_NEW_DOCUMENT_ID);
+
+ mTask.performIncrementalBackup(mTertiaryKey, mWrappedTertiaryKey, mOldChunkListing);
+
+ verify(mBackupFileBuilder)
+ .writeChunks(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_2, TEST_HASH_3),
+ ImmutableMap.of(TEST_HASH_2, TEST_CHUNK_2));
+ verify(mBackupFileBuilder).finish(any());
+ verify(mCryptoBackupServer)
+ .uploadIncrementalBackup(
+ eq(TEST_PACKAGE_NAME),
+ eq(TEST_OLD_DOCUMENT_ID),
+ any(),
+ eq(mWrappedTertiaryKey));
+ }
+
+ @Test
+ public void performIncrementalBackup_returnsNewChunkListingWithDocId() throws Exception {
+ setUpWithExistingBackup();
+
+ ChunkListing chunkListingWithoutDocId =
+ CryptoTestUtils.newChunkListingWithoutDocId(
+ TEST_FINGERPRINT_MIXER_SALT,
+ AES_256_GCM,
+ CHUNK_ORDERING_TYPE_UNSPECIFIED,
+ createChunkProtoFor(TEST_HASH_1, TEST_CHUNK_1),
+ createChunkProtoFor(TEST_HASH_2, TEST_CHUNK_2));
+ when(mBackupFileBuilder.getNewChunkListing(any())).thenReturn(chunkListingWithoutDocId);
+
+ // Chunk ordering doesn't matter for this test.
+ when(mBackupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM)).thenReturn(new ChunkOrdering());
+
+ when(mCryptoBackupServer.uploadIncrementalBackup(
+ eq(TEST_PACKAGE_NAME), eq(TEST_OLD_DOCUMENT_ID), any(), any()))
+ .thenReturn(TEST_NEW_DOCUMENT_ID);
+
+ ChunkListing actualChunkListing =
+ mTask.performIncrementalBackup(mTertiaryKey, mWrappedTertiaryKey, mOldChunkListing);
+
+ ChunkListing expectedChunkListing = CryptoTestUtils.clone(chunkListingWithoutDocId);
+ expectedChunkListing.documentId = TEST_NEW_DOCUMENT_ID;
+ assertChunkListingsAreEqual(actualChunkListing, expectedChunkListing);
+ }
+
+ @Test
+ public void performNonIncrementalBackup_returnsNewChunkListingWithDocId() throws Exception {
+ setUpWithoutExistingBackup();
+
+ ChunkListing chunkListingWithoutDocId =
+ CryptoTestUtils.newChunkListingWithoutDocId(
+ TEST_FINGERPRINT_MIXER_SALT,
+ AES_256_GCM,
+ CHUNK_ORDERING_TYPE_UNSPECIFIED,
+ createChunkProtoFor(TEST_HASH_1, TEST_CHUNK_1),
+ createChunkProtoFor(TEST_HASH_2, TEST_CHUNK_2));
+ when(mBackupFileBuilder.getNewChunkListing(any())).thenReturn(chunkListingWithoutDocId);
+
+ // Chunk ordering doesn't matter for this test.
+ when(mBackupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM)).thenReturn(new ChunkOrdering());
+
+ when(mCryptoBackupServer.uploadNonIncrementalBackup(eq(TEST_PACKAGE_NAME), any(), any()))
+ .thenReturn(TEST_NEW_DOCUMENT_ID);
+
+ ChunkListing actualChunkListing =
+ mTask.performNonIncrementalBackup(
+ mTertiaryKey, mWrappedTertiaryKey, TEST_FINGERPRINT_MIXER_SALT);
+
+ ChunkListing expectedChunkListing = CryptoTestUtils.clone(chunkListingWithoutDocId);
+ expectedChunkListing.documentId = TEST_NEW_DOCUMENT_ID;
+ assertChunkListingsAreEqual(actualChunkListing, expectedChunkListing);
+ }
+
+ @Test
+ public void performNonIncrementalBackup_buildsCorrectChunkMetadata() throws Exception {
+ setUpWithoutExistingBackup();
+
+ // Chunk listing doesn't matter for this test.
+ when(mBackupFileBuilder.getNewChunkListing(any())).thenReturn(new ChunkListing());
+
+ ChunkOrdering expectedOrdering =
+ CryptoTestUtils.newChunkOrdering(new int[10], TEST_CHECKSUM);
+ when(mBackupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM)).thenReturn(expectedOrdering);
+
+ when(mCryptoBackupServer.uploadNonIncrementalBackup(eq(TEST_PACKAGE_NAME), any(), any()))
+ .thenReturn(TEST_NEW_DOCUMENT_ID);
+
+ mTask.performNonIncrementalBackup(
+ mTertiaryKey, mWrappedTertiaryKey, TEST_FINGERPRINT_MIXER_SALT);
+
+ verify(mBackupFileBuilder).finish(mMetadataCaptor.capture());
+
+ ChunksMetadata actualMetadata = mMetadataCaptor.getValue();
+ assertThat(actualMetadata.checksumType).isEqualTo(SHA_256);
+ assertThat(actualMetadata.cipherType).isEqualTo(AES_256_GCM);
+
+ ChunkOrdering actualOrdering = decryptChunkOrdering(actualMetadata.chunkOrdering);
+ assertThat(actualOrdering.checksum).isEqualTo(TEST_CHECKSUM);
+ assertThat(actualOrdering.starts).isEqualTo(expectedOrdering.starts);
+ }
+
+ @Test
+ public void cancel_incrementalBackup_doesNotUploadOrSaveChunkListing() throws Exception {
+ setUpWithExistingBackup();
+
+ // Chunk listing and ordering don't matter for this test.
+ when(mBackupFileBuilder.getNewChunkListing(any())).thenReturn(new ChunkListing());
+ when(mBackupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM)).thenReturn(new ChunkOrdering());
+
+ mTask.cancel();
+ assertThrows(
+ CancellationException.class,
+ () ->
+ mTask.performIncrementalBackup(
+ mTertiaryKey, mWrappedTertiaryKey, mOldChunkListing));
+
+ verify(mCryptoBackupServer, never()).uploadIncrementalBackup(any(), any(), any(), any());
+ verify(mCryptoBackupServer, never()).uploadNonIncrementalBackup(any(), any(), any());
+ }
+
+ @Test
+ public void cancel_nonIncrementalBackup_doesNotUploadOrSaveChunkListing() throws Exception {
+ setUpWithoutExistingBackup();
+
+ // Chunk listing and ordering don't matter for this test.
+ when(mBackupFileBuilder.getNewChunkListing(any())).thenReturn(new ChunkListing());
+ when(mBackupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM)).thenReturn(new ChunkOrdering());
+
+ mTask.cancel();
+ assertThrows(
+ CancellationException.class,
+ () ->
+ mTask.performNonIncrementalBackup(
+ mTertiaryKey, mWrappedTertiaryKey, TEST_FINGERPRINT_MIXER_SALT));
+
+ verify(mCryptoBackupServer, never()).uploadIncrementalBackup(any(), any(), any(), any());
+ verify(mCryptoBackupServer, never()).uploadNonIncrementalBackup(any(), any(), any());
+ }
+
+ /** Sets up a backup of [CHUNK 1][CHUNK 2] with no existing data. */
+ private void setUpWithoutExistingBackup() throws Exception {
+ Result result =
+ new Result(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_2),
+ ImmutableList.of(TEST_CHUNK_1, TEST_CHUNK_2),
+ TEST_CHECKSUM);
+ when(mBackupEncrypter.backup(any(), eq(TEST_FINGERPRINT_MIXER_SALT), eq(ImmutableSet.of())))
+ .thenReturn(result);
+ }
+
+ /**
+ * Sets up a backup of [CHUNK 1][CHUNK 2][CHUNK 3] where the previous backup contained [CHUNK
+ * 1][CHUNK 3].
+ */
+ private void setUpWithExistingBackup() throws Exception {
+ mOldChunkListing =
+ CryptoTestUtils.newChunkListing(
+ TEST_OLD_DOCUMENT_ID,
+ TEST_FINGERPRINT_MIXER_SALT,
+ AES_256_GCM,
+ CHUNK_ORDERING_TYPE_UNSPECIFIED,
+ createChunkProtoFor(TEST_HASH_1, TEST_CHUNK_1),
+ createChunkProtoFor(TEST_HASH_3, TEST_CHUNK_3));
+
+ Result result =
+ new Result(
+ ImmutableList.of(TEST_HASH_1, TEST_HASH_2, TEST_HASH_3),
+ ImmutableList.of(TEST_CHUNK_2),
+ TEST_CHECKSUM);
+ when(mBackupEncrypter.backup(
+ any(),
+ eq(TEST_FINGERPRINT_MIXER_SALT),
+ eq(ImmutableSet.of(TEST_HASH_1, TEST_HASH_3))))
+ .thenReturn(result);
+ }
+
+ private ChunksMetadataProto.Chunk createChunkProtoFor(
+ ChunkHash chunkHash, EncryptedChunk encryptedChunk) {
+ return CryptoTestUtils.newChunk(
+ chunkHash, mEncryptedChunkEncoder.getEncodedLengthOfChunk(encryptedChunk));
+ }
+
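+    // The encrypted ordering is laid out as [12-byte GCM nonce || ciphertext], so the nonce at the
+    // start of the array is used as the IV and the remainder is decrypted.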
+ private ChunkOrdering decryptChunkOrdering(byte[] encryptedOrdering) throws Exception {
+ Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM);
+ cipher.init(
+ Cipher.DECRYPT_MODE,
+ mTertiaryKey,
+ new GCMParameterSpec(
+ GCM_TAG_LENGTH_BYTES * BITS_PER_BYTE,
+ encryptedOrdering,
+ /*offset=*/ 0,
+ GCM_NONCE_LENGTH_BYTES));
+ byte[] decrypted =
+ cipher.doFinal(
+ encryptedOrdering,
+ GCM_NONCE_LENGTH_BYTES,
+ encryptedOrdering.length - GCM_NONCE_LENGTH_BYTES);
+ return ChunkOrdering.parseFrom(decrypted);
+ }
+
+    // This method is needed because nano protobuf generated classes don't implement .equals.
+ private void assertChunkListingsAreEqual(ChunkListing a, ChunkListing b) {
+ byte[] aBytes = MessageNano.toByteArray(a);
+ byte[] bBytes = MessageNano.toByteArray(b);
+
+ assertThat(aBytes).isEqualTo(bBytes);
+ }
+
+ @Implements(BackupFileBuilder.class)
+ public static class ShadowBackupFileBuilder {
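+        // Routes the static factory methods to the mock installed in setUp(), so the task under
+        // test interacts with mBackupFileBuilder instead of a real builder.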
+
+ private static BackupFileBuilder sInstance;
+
+ @Implementation
+ public static BackupFileBuilder createForNonIncremental(OutputStream outputStream) {
+ return sInstance;
+ }
+
+ @Implementation
+ public static BackupFileBuilder createForIncremental(
+ OutputStream outputStream, ChunkListing oldChunkListing) {
+ return sInstance;
+ }
+ }
+}
diff --git a/packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/tasks/KvBackupEncrypterTest.java b/packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/tasks/KvBackupEncrypterTest.java
new file mode 100644
index 000000000000..ccfbfa4b25e9
--- /dev/null
+++ b/packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/tasks/KvBackupEncrypterTest.java
@@ -0,0 +1,287 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.server.backup.encryption.tasks;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import static org.testng.Assert.assertThrows;
+
+import android.app.backup.BackupDataInput;
+import android.platform.test.annotations.Presubmit;
+import android.util.Pair;
+
+import com.android.server.backup.encryption.chunk.ChunkHash;
+import com.android.server.backup.encryption.chunking.ChunkHasher;
+import com.android.server.backup.encryption.chunking.EncryptedChunk;
+import com.android.server.backup.encryption.kv.KeyValueListingBuilder;
+import com.android.server.backup.encryption.protos.nano.KeyValueListingProto.KeyValueListing;
+import com.android.server.backup.encryption.protos.nano.KeyValuePairProto.KeyValuePair;
+import com.android.server.backup.encryption.tasks.BackupEncrypter.Result;
+import com.android.server.testing.shadows.DataEntity;
+import com.android.server.testing.shadows.ShadowBackupDataInput;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Ordering;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.RobolectricTestRunner;
+import org.robolectric.annotation.Config;
+
+import java.security.MessageDigest;
+import java.util.Arrays;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import javax.crypto.Cipher;
+import javax.crypto.SecretKey;
+import javax.crypto.spec.GCMParameterSpec;
+import javax.crypto.spec.SecretKeySpec;
+
+@RunWith(RobolectricTestRunner.class)
+@Presubmit
+@Config(shadows = {ShadowBackupDataInput.class})
+public class KvBackupEncrypterTest {
+ private static final String KEY_ALGORITHM = "AES";
+ private static final String CIPHER_ALGORITHM = "AES/GCM/NoPadding";
+ private static final int GCM_TAG_LENGTH_BYTES = 16;
+
+ private static final byte[] TEST_TERTIARY_KEY = Arrays.copyOf(new byte[0], 256 / Byte.SIZE);
+ private static final String TEST_KEY_1 = "test_key_1";
+ private static final String TEST_KEY_2 = "test_key_2";
+ private static final String TEST_KEY_3 = "test_key_3";
+ private static final byte[] TEST_VALUE_1 = {10, 11, 12};
+ private static final byte[] TEST_VALUE_2 = {13, 14, 15};
+ private static final byte[] TEST_VALUE_2B = {13, 14, 15, 16};
+ private static final byte[] TEST_VALUE_3 = {16, 17, 18};
+
+ private SecretKey mSecretKey;
+ private ChunkHasher mChunkHasher;
+
+ @Before
+ public void setUp() {
+ mSecretKey = new SecretKeySpec(TEST_TERTIARY_KEY, KEY_ALGORITHM);
+ mChunkHasher = new ChunkHasher(mSecretKey);
+
+ ShadowBackupDataInput.reset();
+ }
+
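+    // ShadowBackupDataInput ignores the FileDescriptor constructor argument, so a null descriptor
+    // is enough to construct the BackupDataInput that the encrypter reads from.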
+ private KvBackupEncrypter createEncrypter(KeyValueListing keyValueListing) {
+ KvBackupEncrypter encrypter = new KvBackupEncrypter(new BackupDataInput(null));
+ encrypter.setOldKeyValueListing(keyValueListing);
+ return encrypter;
+ }
+
+ @Test
+ public void backup_noExistingBackup_encryptsAllPairs() throws Exception {
+ ShadowBackupDataInput.addEntity(TEST_KEY_1, TEST_VALUE_1);
+ ShadowBackupDataInput.addEntity(TEST_KEY_2, TEST_VALUE_2);
+
+ KeyValueListing emptyKeyValueListing = new KeyValueListingBuilder().build();
+ ImmutableSet<ChunkHash> emptyExistingChunks = ImmutableSet.of();
+ KvBackupEncrypter encrypter = createEncrypter(emptyKeyValueListing);
+
+ Result result =
+ encrypter.backup(
+ mSecretKey, /*unusedFingerprintMixerSalt=*/ null, emptyExistingChunks);
+
+ assertThat(result.getAllChunks()).hasSize(2);
+ EncryptedChunk chunk1 = result.getNewChunks().get(0);
+ EncryptedChunk chunk2 = result.getNewChunks().get(1);
+ assertThat(chunk1.key()).isEqualTo(getChunkHash(TEST_KEY_1, TEST_VALUE_1));
+ KeyValuePair pair1 = decryptChunk(chunk1);
+ assertThat(pair1.key).isEqualTo(TEST_KEY_1);
+ assertThat(pair1.value).isEqualTo(TEST_VALUE_1);
+ assertThat(chunk2.key()).isEqualTo(getChunkHash(TEST_KEY_2, TEST_VALUE_2));
+ KeyValuePair pair2 = decryptChunk(chunk2);
+ assertThat(pair2.key).isEqualTo(TEST_KEY_2);
+ assertThat(pair2.value).isEqualTo(TEST_VALUE_2);
+ }
+
+ @Test
+ public void backup_existingBackup_encryptsNewAndUpdatedPairs() throws Exception {
+ Pair<KeyValueListing, Set<ChunkHash>> initialResult = runInitialBackupOfPairs1And2();
+
+ // Update key 2 and add the new key 3.
+ ShadowBackupDataInput.reset();
+ ShadowBackupDataInput.addEntity(TEST_KEY_2, TEST_VALUE_2B);
+ ShadowBackupDataInput.addEntity(TEST_KEY_3, TEST_VALUE_3);
+
+ KvBackupEncrypter encrypter = createEncrypter(initialResult.first);
+ BackupEncrypter.Result secondResult =
+ encrypter.backup(
+ mSecretKey, /*unusedFingerprintMixerSalt=*/ null, initialResult.second);
+
+ assertThat(secondResult.getAllChunks()).hasSize(3);
+ assertThat(secondResult.getNewChunks()).hasSize(2);
+ EncryptedChunk newChunk2 = secondResult.getNewChunks().get(0);
+ EncryptedChunk newChunk3 = secondResult.getNewChunks().get(1);
+ assertThat(newChunk2.key()).isEqualTo(getChunkHash(TEST_KEY_2, TEST_VALUE_2B));
+ assertThat(decryptChunk(newChunk2).value).isEqualTo(TEST_VALUE_2B);
+ assertThat(newChunk3.key()).isEqualTo(getChunkHash(TEST_KEY_3, TEST_VALUE_3));
+ assertThat(decryptChunk(newChunk3).value).isEqualTo(TEST_VALUE_3);
+ }
+
+ @Test
+ public void backup_allChunksContainsHashesOfAllChunks() throws Exception {
+ Pair<KeyValueListing, Set<ChunkHash>> initialResult = runInitialBackupOfPairs1And2();
+
+ ShadowBackupDataInput.reset();
+ ShadowBackupDataInput.addEntity(TEST_KEY_3, TEST_VALUE_3);
+
+ KvBackupEncrypter encrypter = createEncrypter(initialResult.first);
+ BackupEncrypter.Result secondResult =
+ encrypter.backup(
+ mSecretKey, /*unusedFingerprintMixerSalt=*/ null, initialResult.second);
+
+ assertThat(secondResult.getAllChunks())
+ .containsExactly(
+ getChunkHash(TEST_KEY_1, TEST_VALUE_1),
+ getChunkHash(TEST_KEY_2, TEST_VALUE_2),
+ getChunkHash(TEST_KEY_3, TEST_VALUE_3));
+ }
+
+ @Test
+ public void backup_negativeSize_deletesKeyFromExistingBackup() throws Exception {
+ Pair<KeyValueListing, Set<ChunkHash>> initialResult = runInitialBackupOfPairs1And2();
+
+ ShadowBackupDataInput.reset();
+ ShadowBackupDataInput.addEntity(new DataEntity(TEST_KEY_2));
+
+ KvBackupEncrypter encrypter = createEncrypter(initialResult.first);
+ Result secondResult =
+ encrypter.backup(
+ mSecretKey, /*unusedFingerprintMixerSalt=*/ null, initialResult.second);
+
+ assertThat(secondResult.getAllChunks())
+ .containsExactly(getChunkHash(TEST_KEY_1, TEST_VALUE_1));
+ assertThat(secondResult.getNewChunks()).isEmpty();
+ }
+
+ @Test
+ public void backup_returnsMessageDigestOverChunkHashes() throws Exception {
+ Pair<KeyValueListing, Set<ChunkHash>> initialResult = runInitialBackupOfPairs1And2();
+
+ ShadowBackupDataInput.reset();
+ ShadowBackupDataInput.addEntity(TEST_KEY_3, TEST_VALUE_3);
+
+ KvBackupEncrypter encrypter = createEncrypter(initialResult.first);
+ Result secondResult =
+ encrypter.backup(
+ mSecretKey, /*unusedFingerprintMixerSalt=*/ null, initialResult.second);
+
+ MessageDigest messageDigest =
+ MessageDigest.getInstance(BackupEncrypter.MESSAGE_DIGEST_ALGORITHM);
+ ImmutableList<ChunkHash> sortedHashes =
+ Ordering.natural()
+ .immutableSortedCopy(
+ ImmutableList.of(
+ getChunkHash(TEST_KEY_1, TEST_VALUE_1),
+ getChunkHash(TEST_KEY_2, TEST_VALUE_2),
+ getChunkHash(TEST_KEY_3, TEST_VALUE_3)));
+ messageDigest.update(sortedHashes.get(0).getHash());
+ messageDigest.update(sortedHashes.get(1).getHash());
+ messageDigest.update(sortedHashes.get(2).getHash());
+ assertThat(secondResult.getDigest()).isEqualTo(messageDigest.digest());
+ }
+
+ @Test
+ public void getNewKeyValueListing_noExistingBackup_returnsCorrectListing() throws Exception {
+ KeyValueListing keyValueListing = runInitialBackupOfPairs1And2().first;
+
+ assertThat(keyValueListing.entries.length).isEqualTo(2);
+ assertThat(keyValueListing.entries[0].key).isEqualTo(TEST_KEY_1);
+ assertThat(keyValueListing.entries[0].hash)
+ .isEqualTo(getChunkHash(TEST_KEY_1, TEST_VALUE_1).getHash());
+ assertThat(keyValueListing.entries[1].key).isEqualTo(TEST_KEY_2);
+ assertThat(keyValueListing.entries[1].hash)
+ .isEqualTo(getChunkHash(TEST_KEY_2, TEST_VALUE_2).getHash());
+ }
+
+ @Test
+ public void getNewKeyValueListing_existingBackup_returnsCorrectListing() throws Exception {
+ Pair<KeyValueListing, Set<ChunkHash>> initialResult = runInitialBackupOfPairs1And2();
+
+ ShadowBackupDataInput.reset();
+ ShadowBackupDataInput.addEntity(TEST_KEY_2, TEST_VALUE_2B);
+ ShadowBackupDataInput.addEntity(TEST_KEY_3, TEST_VALUE_3);
+
+ KvBackupEncrypter encrypter = createEncrypter(initialResult.first);
+ encrypter.backup(mSecretKey, /*unusedFingerprintMixerSalt=*/ null, initialResult.second);
+
+ ImmutableMap<String, ChunkHash> keyValueListing =
+ listingToMap(encrypter.getNewKeyValueListing());
+ assertThat(keyValueListing).hasSize(3);
+ assertThat(keyValueListing)
+ .containsEntry(TEST_KEY_1, getChunkHash(TEST_KEY_1, TEST_VALUE_1));
+ assertThat(keyValueListing)
+ .containsEntry(TEST_KEY_2, getChunkHash(TEST_KEY_2, TEST_VALUE_2B));
+ assertThat(keyValueListing)
+ .containsEntry(TEST_KEY_3, getChunkHash(TEST_KEY_3, TEST_VALUE_3));
+ }
+
+ @Test
+    public void getNewKeyValueListing_beforeBackup_throws() throws Exception {
+ KvBackupEncrypter encrypter = createEncrypter(new KeyValueListing());
+ assertThrows(IllegalStateException.class, encrypter::getNewKeyValueListing);
+ }
+
+ private ImmutableMap<String, ChunkHash> listingToMap(KeyValueListing listing) {
+        // We can't use the ImmutableMap collector directly because it isn't supported in the
+        // Android version of Guava.
+ return ImmutableMap.copyOf(
+ Arrays.stream(listing.entries)
+ .collect(
+ Collectors.toMap(
+ entry -> entry.key, entry -> new ChunkHash(entry.hash))));
+ }
+
+ private Pair<KeyValueListing, Set<ChunkHash>> runInitialBackupOfPairs1And2() throws Exception {
+ ShadowBackupDataInput.addEntity(TEST_KEY_1, TEST_VALUE_1);
+ ShadowBackupDataInput.addEntity(TEST_KEY_2, TEST_VALUE_2);
+
+ KeyValueListing initialKeyValueListing = new KeyValueListingBuilder().build();
+ ImmutableSet<ChunkHash> initialExistingChunks = ImmutableSet.of();
+ KvBackupEncrypter encrypter = createEncrypter(initialKeyValueListing);
+ Result firstResult =
+ encrypter.backup(
+ mSecretKey, /*unusedFingerprintMixerSalt=*/ null, initialExistingChunks);
+
+ return Pair.create(
+ encrypter.getNewKeyValueListing(), ImmutableSet.copyOf(firstResult.getAllChunks()));
+ }
+
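+    // Computes the hash over the serialized KeyValuePair proto, which is the plaintext this test
+    // expects each encrypted chunk to contain (see decryptChunk).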
+ private ChunkHash getChunkHash(String key, byte[] value) throws Exception {
+ KeyValuePair pair = new KeyValuePair();
+ pair.key = key;
+ pair.value = Arrays.copyOf(value, value.length);
+ return mChunkHasher.computeHash(KeyValuePair.toByteArray(pair));
+ }
+
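+    // Each EncryptedChunk stores its GCM nonce separately from the ciphertext, so nonce() is used
+    // directly as the IV and encryptedBytes() is decrypted as a whole.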
+ private KeyValuePair decryptChunk(EncryptedChunk encryptedChunk) throws Exception {
+ Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM);
+ cipher.init(
+ Cipher.DECRYPT_MODE,
+ mSecretKey,
+ new GCMParameterSpec(GCM_TAG_LENGTH_BYTES * Byte.SIZE, encryptedChunk.nonce()));
+ byte[] decryptedBytes = cipher.doFinal(encryptedChunk.encryptedBytes());
+ return KeyValuePair.parseFrom(decryptedBytes);
+ }
+}
diff --git a/packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/testing/DiffScriptProcessor.java b/packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/testing/DiffScriptProcessor.java
new file mode 100644
index 000000000000..faddb6cf129c
--- /dev/null
+++ b/packages/BackupEncryption/test/robolectric/src/com/android/server/backup/encryption/testing/DiffScriptProcessor.java
@@ -0,0 +1,256 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.server.backup.encryption.testing;
+
+import static com.android.internal.util.Preconditions.checkArgument;
+import static com.android.internal.util.Preconditions.checkNotNull;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.RandomAccessFile;
+import java.util.Locale;
+import java.util.Optional;
+import java.util.Scanner;
+import java.util.regex.Pattern;
+
+/**
+ * To be used as part of a fake backup server. Processes a Scotty diff script.
+ *
+ * <p>A Scotty diff script consists of an ASCII line denoting a command, optionally followed by a
+ * range of bytes. Command format is either
+ *
+ * <ul>
+ * <li>A single 64-bit integer, followed by a new line: this denotes that the given number of
+ * bytes are to follow in the stream. These bytes should be written directly to the new file.
+ * <li>Two 64-bit integers, separated by a hyphen, followed by a new line: this says that the
+ * given range of bytes from the original file ought to be copied into the new file.
+ * </ul>
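+ *
+ * <p>For example (illustrative), if the original file contains "abcdef", the script
+ * "5\nhello\n0-2\n" writes the five literal bytes "hello" to the new file and then copies bytes
+ * 0-2 ("abc") of the original file, producing "helloabc".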
+ */
+public class DiffScriptProcessor {
+
+ private static final int COPY_BUFFER_SIZE = 1024;
+
+ private static final String READ_MODE = "r";
+ private static final Pattern VALID_COMMAND_PATTERN = Pattern.compile("^\\d+(-\\d+)?$");
+
+ private final File mInput;
+ private final File mOutput;
+ private final long mInputLength;
+
+ /**
+ * A new instance, with {@code input} as previous file, and {@code output} as new file.
+ *
+ * @param input Previous file from which ranges of bytes are to be copied. This file should be
+ * immutable.
+ * @param output Output file, to which the new data should be written.
+ * @throws IllegalArgumentException if input does not exist.
+ */
+ public DiffScriptProcessor(File input, File output) {
+ checkArgument(input.exists(), "input file did not exist.");
+ mInput = input;
+ mInputLength = input.length();
+ mOutput = checkNotNull(output);
+ }
+
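+    /**
+     * Applies the given diff script to the input file, writing the result to the output file.
+     *
+     * @param diffScript The diff script to process.
+     * @throws IOException if there was a problem reading or writing.
+     * @throws MalformedDiffScriptException if the script is not a valid diff script.
+     */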
+ public void process(InputStream diffScript) throws IOException, MalformedDiffScriptException {
+ RandomAccessFile randomAccessInput = new RandomAccessFile(mInput, READ_MODE);
+
+ try (FileOutputStream outputStream = new FileOutputStream(mOutput)) {
+ while (true) {
+ Optional<String> commandString = readCommand(diffScript);
+ if (!commandString.isPresent()) {
+ return;
+ }
+ Command command = Command.parse(commandString.get());
+
+ if (command.mIsRange) {
+ checkFileRange(command.mCount, command.mLimit);
+ copyRange(randomAccessInput, outputStream, command.mCount, command.mLimit);
+ } else {
+ long bytesCopied = copyBytes(diffScript, outputStream, command.mCount);
+ if (bytesCopied < command.mCount) {
+ throw new MalformedDiffScriptException(
+ String.format(
+ Locale.US,
+ "Command to copy %d bytes from diff script, but only %d"
+ + " bytes available",
+ command.mCount,
+ bytesCopied));
+ }
+ if (diffScript.read() != '\n') {
+ throw new MalformedDiffScriptException("Expected new line after bytes.");
+ }
+ }
+ }
+ }
+ }
+
+ private void checkFileRange(long start, long end) throws MalformedDiffScriptException {
+ if (end < start) {
+ throw new MalformedDiffScriptException(
+ String.format(
+ Locale.US,
+ "Command to copy %d-%d bytes from original file, but %2$d < %1$d.",
+ start,
+ end));
+ }
+
+ if (end >= mInputLength) {
+ throw new MalformedDiffScriptException(
+ String.format(
+ Locale.US,
+ "Command to copy %d-%d bytes from original file, but file is only %d"
+ + " bytes long.",
+ start,
+ end,
+ mInputLength));
+ }
+ }
+
+ /**
+ * Reads a command from the input stream.
+ *
+ * @param inputStream The input.
+ * @return Optional of command, or empty if EOF.
+ */
+ private static Optional<String> readCommand(InputStream inputStream) throws IOException {
+ ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+
+ int b;
+ while (!isEndOfCommand(b = inputStream.read())) {
+ byteArrayOutputStream.write(b);
+ }
+
+ byte[] bytes = byteArrayOutputStream.toByteArray();
+ if (bytes.length == 0) {
+ return Optional.empty();
+ } else {
+ return Optional.of(new String(bytes, UTF_8));
+ }
+ }
+
+ /**
+     * Returns whether the given output from {@link InputStream#read()} is the end of a command,
+     * i.e., a newline or EOF.
+ *
+ * @param b The byte or -1.
+ * @return {@code true} if ends the command.
+ */
+ private static boolean isEndOfCommand(int b) {
+ return b == -1 || b == '\n';
+ }
+
+ /**
+ * Copies {@code n} bytes from {@code inputStream} to {@code outputStream}.
+ *
+ * @return The number of bytes copied.
+ * @throws IOException if there was a problem reading or writing.
+ */
+ private static long copyBytes(InputStream inputStream, OutputStream outputStream, long n)
+ throws IOException {
+ byte[] buffer = new byte[COPY_BUFFER_SIZE];
+ long copied = 0;
+        while (n - copied > COPY_BUFFER_SIZE) {
+            long read = copyBlock(inputStream, outputStream, buffer, COPY_BUFFER_SIZE);
+            if (read <= 0) {
+                return copied;
+            }
+            copied += read;
+        }
+ while (n - copied > 0) {
+ copied += copyBlock(inputStream, outputStream, buffer, (int) (n - copied));
+ }
+ return copied;
+ }
+
+ private static long copyBlock(
+ InputStream inputStream, OutputStream outputStream, byte[] buffer, int size)
+ throws IOException {
+ int read = inputStream.read(buffer, 0, size);
+ outputStream.write(buffer, 0, read);
+ return read;
+ }
+
+ /**
+ * Copies the given range of bytes from the input file to the output stream.
+ *
+ * @param input The input file.
+ * @param output The output stream.
+ * @param start Start position in the input file.
+     * @param end End position in the input file (inclusive).
+ * @throws IOException if there was a problem reading or writing.
+ */
+ private static void copyRange(RandomAccessFile input, OutputStream output, long start, long end)
+ throws IOException {
+ input.seek(start);
+
+ // Inefficient but obviously correct. If tests become slow, optimize.
+ for (; start <= end; start++) {
+ output.write(input.read());
+ }
+ }
+
+ /** Error thrown for a malformed diff script. */
+ public static class MalformedDiffScriptException extends Exception {
+ public MalformedDiffScriptException(String message) {
+ super(message);
+ }
+ }
+
+ /**
+ * A command telling the processor either to insert n bytes, which follow, or copy n-m bytes
+ * from the original file.
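+     *
+     * <p>For example, "1024" parses to an insert command ({@code mCount == 1024}), while "0-1023"
+     * parses to a range command ({@code mCount == 0}, {@code mLimit == 1023}).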
+ */
+ private static class Command {
+ private final long mCount;
+ private final long mLimit;
+ private final boolean mIsRange;
+
+ private Command(long count, long limit, boolean isRange) {
+ mCount = count;
+ mLimit = limit;
+ mIsRange = isRange;
+ }
+
+ /**
+ * Attempts to parse the command string into a usable structure.
+ *
+ * @param command The command string, without a new line at the end.
+ * @throws MalformedDiffScriptException if the command is not a valid diff script command.
+ * @return The parsed command.
+ */
+ private static Command parse(String command) throws MalformedDiffScriptException {
+ if (!VALID_COMMAND_PATTERN.matcher(command).matches()) {
+ throw new MalformedDiffScriptException("Bad command: " + command);
+ }
+
+ Scanner commandScanner = new Scanner(command);
+ commandScanner.useDelimiter("-");
+ long n = commandScanner.nextLong();
+ if (!commandScanner.hasNextLong()) {
+ return new Command(n, 0L, /*isRange=*/ false);
+ }
+ long m = commandScanner.nextLong();
+ return new Command(n, m, /*isRange=*/ true);
+ }
+ }
+}
diff --git a/packages/BackupEncryption/test/robolectric/src/com/android/server/testing/CryptoTestUtils.java b/packages/BackupEncryption/test/robolectric/src/com/android/server/testing/CryptoTestUtils.java
index 3f3494d2c22c..b9055cecd502 100644
--- a/packages/BackupEncryption/test/robolectric/src/com/android/server/testing/CryptoTestUtils.java
+++ b/packages/BackupEncryption/test/robolectric/src/com/android/server/testing/CryptoTestUtils.java
@@ -16,7 +16,11 @@
package com.android.server.backup.testing;
+import com.android.server.backup.encryption.chunk.ChunkHash;
+import com.android.server.backup.encryption.protos.nano.ChunksMetadataProto;
+
import java.security.NoSuchAlgorithmException;
+import java.util.Arrays;
import java.util.Random;
import javax.crypto.KeyGenerator;
@@ -42,4 +46,72 @@ public class CryptoTestUtils {
random.nextBytes(bytes);
return bytes;
}
+
+ public static ChunksMetadataProto.Chunk newChunk(ChunkHash hash, int length) {
+ return newChunk(hash.getHash(), length);
+ }
+
+ public static ChunksMetadataProto.Chunk newChunk(byte[] hash, int length) {
+ ChunksMetadataProto.Chunk newChunk = new ChunksMetadataProto.Chunk();
+ newChunk.hash = Arrays.copyOf(hash, hash.length);
+ newChunk.length = length;
+ return newChunk;
+ }
+
+ public static ChunksMetadataProto.ChunkListing newChunkListing(
+ String docId,
+ byte[] fingerprintSalt,
+ int cipherType,
+ int orderingType,
+ ChunksMetadataProto.Chunk... chunks) {
+ ChunksMetadataProto.ChunkListing chunkListing =
+ newChunkListingWithoutDocId(fingerprintSalt, cipherType, orderingType, chunks);
+ chunkListing.documentId = docId;
+ return chunkListing;
+ }
+
+ public static ChunksMetadataProto.ChunkListing newChunkListingWithoutDocId(
+ byte[] fingerprintSalt,
+ int cipherType,
+ int orderingType,
+ ChunksMetadataProto.Chunk... chunks) {
+ ChunksMetadataProto.ChunkListing chunkListing = new ChunksMetadataProto.ChunkListing();
+ chunkListing.fingerprintMixerSalt = Arrays.copyOf(fingerprintSalt, fingerprintSalt.length);
+ chunkListing.cipherType = cipherType;
+ chunkListing.chunkOrderingType = orderingType;
+ chunkListing.chunks = chunks;
+ return chunkListing;
+ }
+
+ public static ChunksMetadataProto.ChunkOrdering newChunkOrdering(
+ int[] starts, byte[] checksum) {
+ ChunksMetadataProto.ChunkOrdering chunkOrdering = new ChunksMetadataProto.ChunkOrdering();
+ chunkOrdering.starts = Arrays.copyOf(starts, starts.length);
+ chunkOrdering.checksum = Arrays.copyOf(checksum, checksum.length);
+ return chunkOrdering;
+ }
+
+ public static ChunksMetadataProto.ChunkListing clone(
+ ChunksMetadataProto.ChunkListing original) {
+ ChunksMetadataProto.Chunk[] clonedChunks;
+ if (original.chunks == null) {
+ clonedChunks = null;
+ } else {
+ clonedChunks = new ChunksMetadataProto.Chunk[original.chunks.length];
+ for (int i = 0; i < original.chunks.length; i++) {
+ clonedChunks[i] = clone(original.chunks[i]);
+ }
+ }
+
+ return newChunkListing(
+ original.documentId,
+ original.fingerprintMixerSalt,
+ original.cipherType,
+ original.chunkOrderingType,
+ clonedChunks);
+ }
+
+ public static ChunksMetadataProto.Chunk clone(ChunksMetadataProto.Chunk original) {
+ return newChunk(original.hash, original.length);
+ }
}
diff --git a/packages/BackupEncryption/test/robolectric/src/com/android/server/testing/shadows/DataEntity.java b/packages/BackupEncryption/test/robolectric/src/com/android/server/testing/shadows/DataEntity.java
new file mode 100644
index 000000000000..6d3b5e9f1d7b
--- /dev/null
+++ b/packages/BackupEncryption/test/robolectric/src/com/android/server/testing/shadows/DataEntity.java
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.server.testing.shadows;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+
+/**
+ * Represents a key value pair in {@link ShadowBackupDataInput} and {@link ShadowBackupDataOutput}.
+ */
+public class DataEntity {
+ public final String mKey;
+ public final byte[] mValue;
+ public final int mSize;
+
+ /**
+ * Constructs a pair with a string value. The value will be converted to a byte array in {@link
+ * StandardCharsets#UTF_8}.
+ */
+ public DataEntity(String key, String value) {
+ this.mKey = checkNotNull(key);
+ this.mValue = value.getBytes(StandardCharsets.UTF_8);
+ mSize = this.mValue.length;
+ }
+
+ /**
+ * Constructs a new entity with the given key but a negative size. This represents a deleted
+ * pair.
+ */
+ public DataEntity(String key) {
+ this.mKey = checkNotNull(key);
+ mSize = -1;
+ mValue = null;
+ }
+
+ /** Constructs a new entity where the size of the value is the entire array. */
+ public DataEntity(String key, byte[] value) {
+ this(key, value, value.length);
+ }
+
+ /**
+ * Constructs a new entity.
+ *
+ * @param key the key of the pair
+ * @param data the value to associate with the key
+ * @param size the length of the value in bytes
+ */
+ public DataEntity(String key, byte[] data, int size) {
+ this.mKey = checkNotNull(key);
+ this.mSize = size;
+        mValue = new byte[size];
+        System.arraycopy(data, 0, mValue, 0, size);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ DataEntity that = (DataEntity) o;
+
+ if (mSize != that.mSize) {
+ return false;
+ }
+ if (!mKey.equals(that.mKey)) {
+ return false;
+ }
+ return Arrays.equals(mValue, that.mValue);
+ }
+
+ @Override
+ public int hashCode() {
+ int result = mKey.hashCode();
+ result = 31 * result + Arrays.hashCode(mValue);
+ result = 31 * result + mSize;
+ return result;
+ }
+}
diff --git a/packages/BackupEncryption/test/robolectric/src/com/android/server/testing/shadows/ShadowBackupDataInput.java b/packages/BackupEncryption/test/robolectric/src/com/android/server/testing/shadows/ShadowBackupDataInput.java
new file mode 100644
index 000000000000..7ac6ec40508d
--- /dev/null
+++ b/packages/BackupEncryption/test/robolectric/src/com/android/server/testing/shadows/ShadowBackupDataInput.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.server.testing.shadows;
+
+import static com.google.common.base.Preconditions.checkState;
+
+import android.annotation.Nullable;
+import android.app.backup.BackupDataInput;
+
+import org.robolectric.annotation.Implementation;
+import org.robolectric.annotation.Implements;
+
+import java.io.ByteArrayInputStream;
+import java.io.FileDescriptor;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/** Shadow for BackupDataInput. */
+@Implements(BackupDataInput.class)
+public class ShadowBackupDataInput {
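+    // Typical usage in a test (sketch): call reset() between cases, queue entries with
+    // addEntity(), then hand any BackupDataInput to the code under test; readNextHeader(),
+    // getKey(), getDataSize() and readEntityData() will serve the queued entries in order.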
+ private static final List<DataEntity> ENTITIES = new ArrayList<>();
+ @Nullable private static IOException sReadNextHeaderException;
+
+ @Nullable private ByteArrayInputStream mCurrentEntityInputStream;
+ private int mCurrentEntity = -1;
+
+ /** Resets the shadow, clearing any entities or exception. */
+ public static void reset() {
+ ENTITIES.clear();
+ sReadNextHeaderException = null;
+ }
+
+ /** Sets the exception which the input will throw for any call to {@link #readNextHeader}. */
+ public static void setReadNextHeaderException(@Nullable IOException readNextHeaderException) {
+ ShadowBackupDataInput.sReadNextHeaderException = readNextHeaderException;
+ }
+
+ /** Adds the given entity to the input. */
+ public static void addEntity(DataEntity e) {
+ ENTITIES.add(e);
+ }
+
+ /** Adds an entity to the input with the given key and value. */
+ public static void addEntity(String key, byte[] value) {
+ ENTITIES.add(new DataEntity(key, value, value.length));
+ }
+
+ public void __constructor__(FileDescriptor fd) {}
+
+ @Implementation
+ public boolean readNextHeader() throws IOException {
+ if (sReadNextHeaderException != null) {
+ throw sReadNextHeaderException;
+ }
+
+ mCurrentEntity++;
+
+ if (mCurrentEntity >= ENTITIES.size()) {
+ return false;
+ }
+
+ byte[] value = ENTITIES.get(mCurrentEntity).mValue;
+ if (value == null) {
+ mCurrentEntityInputStream = new ByteArrayInputStream(new byte[0]);
+ } else {
+ mCurrentEntityInputStream = new ByteArrayInputStream(value);
+ }
+ return true;
+ }
+
+ @Implementation
+ public String getKey() {
+ return ENTITIES.get(mCurrentEntity).mKey;
+ }
+
+ @Implementation
+ public int getDataSize() {
+ return ENTITIES.get(mCurrentEntity).mSize;
+ }
+
+ @Implementation
+ public void skipEntityData() {
+ // Do nothing.
+ }
+
+ @Implementation
+ public int readEntityData(byte[] data, int offset, int size) {
+ checkState(mCurrentEntityInputStream != null, "Must call readNextHeader() first");
+ return mCurrentEntityInputStream.read(data, offset, size);
+ }
+}