Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Testing - explicit create blob container #23051

Draft
wants to merge 7 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions server/gitrest/packages/gitrest-base/src/routes/summaries.ts
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,7 @@ async function createSummary(
persistLatestFullEphemeralSummary = false,
enableLowIoWrite: "initial" | boolean = false,
optimizeForInitialSummary: boolean = false,
enableContainerPerDocTimeStamp: number = 0,
): Promise<IWriteSummaryResponse | IWholeFlatSummary> {
const lumberjackProperties = {
...getLumberjackBasePropertiesFromRepoManagerParams(repoManagerParams),
Expand All @@ -152,12 +153,14 @@ async function createSummary(
{
enableLowIoWrite,
optimizeForInitialSummary,
enableContainerPerDocTimeStamp,
},
);

Lumberjack.info("Creating summary", lumberjackProperties);

const { isNew, writeSummaryResponse } = await wholeSummaryManager.writeSummary(
fileSystemManager,
payload,
isInitialSummary,
);
Expand Down Expand Up @@ -256,6 +259,16 @@ export function create(
const repoPerDocEnabled: boolean = store.get("git:repoPerDocEnabled") ?? false;
const enforceStrictPersistedFullSummaryReads: boolean =
store.get("git:enforceStrictPersistedFullSummaryReads") ?? false;
// Get the containerPerDoc enablement timestamp, which is the first element in the config array.
const containerPerDocMigrationCutoffTS: string =
store.get("azureBlobFs:containerPerDocMigrationCutoffTS") ?? undefined;
let enableContainerPerDocTimeStamp: number = 0;
if (containerPerDocMigrationCutoffTS) {
const migrationCutoffTimeStamp = containerPerDocMigrationCutoffTS.split(",").shift();
if (migrationCutoffTimeStamp) {
enableContainerPerDocTimeStamp = new Date(migrationCutoffTimeStamp.trim()).getTime();
}
}

/**
* Retrieves a summary.
Expand Down Expand Up @@ -315,6 +328,7 @@ export function create(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
router.post("/repos/:owner/:repo/git/summaries", async (request, response) => {
const repoManagerParams = getRepoManagerParamsFromRequest(request);
// Todo: Add cmk encryption scope here.
const tenantId = repoManagerParams.storageRoutingId?.tenantId;
const documentId = repoManagerParams.storageRoutingId?.documentId;
// request.query type is { [string]: string } but it's actually { [string]: any }
Expand Down Expand Up @@ -384,6 +398,7 @@ export function create(
persistLatestFullEphemeralSummary,
enableLowIoWrite,
optimizeForInitialSummary,
enableContainerPerDocTimeStamp,
);
})().catch((error) => logAndThrowApiError(error, request, repoManagerParams));
handleResponse(resultP, response, undefined, undefined, 201);
Expand Down
44 changes: 35 additions & 9 deletions server/gitrest/packages/gitrest-base/src/test/summaries.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -261,7 +261,11 @@ testFileSystems.forEach((fileSystem) => {

// Test standard summary flow and storage access frequency.
it("Can create and read an initial summary and a subsequent incremental summary", async () => {
const fsManager = fsManagerFactory.create({
rootDir: repoManager.path,
});
const initialWriteResponse = await getWholeSummaryManager().writeSummary(
fsManager,
sampleInitialSummaryUpload,
true,
);
Expand Down Expand Up @@ -292,6 +296,7 @@ testFileSystems.forEach((fileSystem) => {
);

const channelWriteResponse = await getWholeSummaryManager().writeSummary(
fsManager,
sampleChannelSummaryUpload,
false,
);
Expand All @@ -311,6 +316,7 @@ testFileSystems.forEach((fileSystem) => {
);

const containerWriteResponse = await getWholeSummaryManager().writeSummary(
fsManager,
// Replace the referenced channel summary with the one we just wrote.
// This matters when low-io write is enabled, because it alters how the tree is stored.
replaceTestShas(sampleContainerSummaryUpload, [
Expand Down Expand Up @@ -379,7 +385,11 @@ testFileSystems.forEach((fileSystem) => {
* 6. Wait until Client Summary is written
*/
it("Can create and read multiple summaries", async () => {
const fsManager = fsManagerFactory.create({
rootDir: repoManager.path,
});
const initialWriteResponse = await getWholeSummaryManager().writeSummary(
fsManager,
ElaborateInitialPayload,
true,
);
Expand All @@ -403,6 +413,7 @@ testFileSystems.forEach((fileSystem) => {
);

const firstChannelWriteResponse = await getWholeSummaryManager().writeSummary(
fsManager,
ElaborateFirstChannelPayload,
false,
);
Expand All @@ -422,6 +433,7 @@ testFileSystems.forEach((fileSystem) => {
);

const firstContainerWriteResponse = await getWholeSummaryManager().writeSummary(
fsManager,
// Replace the referenced channel summary with the one we just wrote.
// This matters when low-io write is enabled, because it alters how the tree is stored.
replaceTestShas(ElaborateFirstContainerPayload, [
Expand Down Expand Up @@ -463,6 +475,7 @@ testFileSystems.forEach((fileSystem) => {

const firstServiceContainerWriteResponse =
await getWholeSummaryManager().writeSummary(
fsManager,
// Replace the referenced channel summary with the one we just wrote.
// This matters when low-io write is enabled, because it alters how the tree is stored.
replaceTestShas(ElaborateFirstServiceContainerPayload, [
Expand Down Expand Up @@ -492,6 +505,7 @@ testFileSystems.forEach((fileSystem) => {
);

const secondChannelWriteResponse = await getWholeSummaryManager().writeSummary(
fsManager,
// Replace the referenced container summary with the one we just wrote.
replaceTestShas(ElaborateSecondChannelPayload, [
{
Expand All @@ -517,6 +531,7 @@ testFileSystems.forEach((fileSystem) => {
);

const secondContainerWriteResponse = await getWholeSummaryManager().writeSummary(
fsManager,
// Replace the referenced channel summary with the one we just wrote.
// This matters when low-io write is enabled, because it alters how the tree is stored.
replaceTestShas(ElaborateSecondContainerPayload, [
Expand Down Expand Up @@ -557,8 +572,12 @@ testFileSystems.forEach((fileSystem) => {
* Validates that after deletion we cannot read and subsequent delete attempts are no-ops, not errors.
*/
it("Can hard-delete a document's summary data", async () => {
const fsManager = fsManagerFactory.create({
rootDir: repoManager.path,
});
// Write and validate initial summary.
const initialWriteResponse = await getWholeSummaryManager().writeSummary(
fsManager,
ElaborateInitialPayload,
true,
);
Expand All @@ -581,9 +600,6 @@ testFileSystems.forEach((fileSystem) => {
);

// Delete document.
const fsManager = fsManagerFactory.create({
rootDir: repoManager.path,
});
await getWholeSummaryManager().deleteSummary(fsManager, false /* softDelete */);
// Validate that we cannot read the summary.
await assert.rejects(
Expand All @@ -607,8 +623,12 @@ testFileSystems.forEach((fileSystem) => {
* Validates that after deletion we cannot read and subsequent delete attempts are no-ops, not errors.
*/
it("Can soft-delete a document's summary data", async () => {
const fsManager = fsManagerFactory.create({
rootDir: repoManager.path,
});
// Write and validate initial summary.
const initialWriteResponse = await getWholeSummaryManager().writeSummary(
fsManager,
ElaborateInitialPayload,
true,
);
Expand All @@ -631,9 +651,6 @@ testFileSystems.forEach((fileSystem) => {
);

// Delete document.
const fsManager = fsManagerFactory.create({
rootDir: repoManager.path,
});
await getWholeSummaryManager().deleteSummary(fsManager, true /* softDelete */);
// Validate that soft-deletion flag is detected.
assert.rejects(
Expand Down Expand Up @@ -670,9 +687,12 @@ testFileSystems.forEach((fileSystem) => {
it(`Can read from and write to an initial summary stored ${
enableLowIoWrite ? "with" : "without"
} low-io write`, async () => {
const fsManager = fsManagerFactory.create({
rootDir: repoManager.path,
});
await getWholeSummaryManager({
enableLowIoWrite,
}).writeSummary(sampleInitialSummaryUpload, true);
}).writeSummary(fsManager, sampleInitialSummaryUpload, true);

const initialReadResponse =
await getWholeSummaryManager().readSummary(LatestSummaryId);
Expand All @@ -682,10 +702,12 @@ testFileSystems.forEach((fileSystem) => {
"Initial summary read response should match expected response.",
);
const channelWriteResponse = await getWholeSummaryManager().writeSummary(
fsManager,
sampleChannelSummaryUpload,
false,
);
const containerWriteResponse = await getWholeSummaryManager().writeSummary(
fsManager,
// Replace the referenced channel summary with the one we just wrote.
// This matters when low-io write is enabled, because it alters how the tree is stored.
JSON.parse(
Expand All @@ -711,15 +733,19 @@ testFileSystems.forEach((fileSystem) => {
it(`Can read an incremental summary stored ${
enableLowIoWrite ? "with" : "without"
} low-io write`, async () => {
const fsManager = fsManagerFactory.create({
rootDir: repoManager.path,
});
await getWholeSummaryManager({
enableLowIoWrite,
}).writeSummary(sampleInitialSummaryUpload, true);
}).writeSummary(fsManager, sampleInitialSummaryUpload, true);
const channelWriteResponse = await getWholeSummaryManager({
enableLowIoWrite,
}).writeSummary(sampleChannelSummaryUpload, false);
}).writeSummary(fsManager, sampleChannelSummaryUpload, false);
const containerWriteResponse = await getWholeSummaryManager({
enableLowIoWrite,
}).writeSummary(
fsManager,
// Replace the referenced channel summary with the one we just wrote.
// This matters when low-io write is enabled, because it alters how the tree is stored.
JSON.parse(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ import { getSoftDeletedMarkerPath } from "./helpers";
// Default feature-flag values used when a caller does not supply its own
// ISummaryWriteFeatureFlags. All write optimizations are disabled by default,
// and a timestamp of 0 means container-per-doc creation is off (see the
// enableContainerPerDocTimeStamp field docs on ISummaryWriteFeatureFlags).
const DefaultSummaryWriteFeatureFlags: ISummaryWriteFeatureFlags = {
enableLowIoWrite: false,
optimizeForInitialSummary: false,
enableContainerPerDocTimeStamp: 0,
};

export { isChannelSummary, isContainerSummary } from "./wholeSummary";
Expand Down Expand Up @@ -92,20 +93,35 @@ export class GitWholeSummaryManager {
* If the summary is a "channel" summary, the tree sha will be returned so that it can be referenced by a future "container" summary.
*/
public async writeSummary(
fileSystemManager: IFileSystemManager,
payload: IWholeSummaryPayload,
isInitial?: boolean,
): Promise<IWriteSummaryInfo> {
const lumberjackProperties: Record<string, any> = {
...this.lumberjackProperties,
enableLowIoWrite: this.summaryWriteFeatureFlags.enableLowIoWrite,
optimizeForInitialSummary: this.summaryWriteFeatureFlags.optimizeForInitialSummary,
enableContainerPerDocTS: this.summaryWriteFeatureFlags.enableContainerPerDocTimeStamp,
isInitial,
};
const writeSummaryMetric = Lumberjack.newLumberMetric(
GitRestLumberEventName.WholeSummaryManagerWriteSummary,
lumberjackProperties,
);
try {
// Create the blob container if this is the initial summary and blobContainerPerDoc is enabled.
if (
isInitial &&
this.summaryWriteFeatureFlags.enableContainerPerDocTimeStamp &&
Date.now() > this.summaryWriteFeatureFlags.enableContainerPerDocTimeStamp
) {
const summaryFolderPath = this.repoManager.path;
await fileSystemManager.promises.mkdir(summaryFolderPath, "create-blob-container");
Lumberjack.warning(
`[Azfs-debug] Created blob container for initial summary`,
lumberjackProperties,
);
}
if (isChannelSummary(payload)) {
lumberjackProperties.summaryType = "channel";
writeSummaryMetric.setProperty("summaryType", "channel");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,11 @@ export interface ISummaryWriteFeatureFlags {
* to avoid unnecessary storage operations. This can improve performance when creating a new document.
*/
optimizeForInitialSummary: boolean;
/**
* If the runtime timestamp is greater than enableContainerPerDocTimeStamp, blobContainerPerDoc is
* considered enabled, and the Azure blob container for the document will be explicitly created
* before the initial summary upload.
*/
enableContainerPerDocTimeStamp: number;
}

/**
Expand Down
Loading