Skip to content
2 changes: 1 addition & 1 deletion netlify.toml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[build]
command="yarn website:build"
command="DOCKER=true yarn cli build specs all && yarn website:build"
publish="website/build"
ignore="git diff --quiet $COMMIT_REF $CACHED_COMMIT_REF -- website/"

Expand Down
200 changes: 122 additions & 78 deletions scripts/buildSpecs.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,17 @@ const ALGOLIASEARCH_LITE_OPERATIONS = [
'post',
];

async function propagateTagsToOperations(
bundledPath: string,
client: string
): Promise<boolean> {
async function propagateTagsToOperations({
bundledPath,
withDoc,
clientName,
alias,
}: {
bundledPath: string;
withDoc: boolean;
clientName: string;
alias?: string;
}): Promise<void> {
if (!(await exists(bundledPath))) {
throw new Error(`Bundled file not found ${bundledPath}.`);
}
Expand All @@ -25,9 +32,36 @@ async function propagateTagsToOperations(
await fsp.readFile(bundledPath, 'utf8')
) as Spec;

for (const pathMethods of Object.values(bundledSpec.paths)) {
for (const specMethod of Object.values(pathMethods)) {
specMethod.tags = [client];
let bundledDocSpec: Spec | undefined;
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why load the YAML twice? Can you simply copy the file?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The problem in JS is the pointer reference: I cannot simply reuse the same object.
So either I copy the file first and then load it,
or load the same file twice, which is equivalent I would say. No strong opinion.

if (withDoc) {
bundledDocSpec = yaml.load(await fsp.readFile(bundledPath, 'utf8')) as Spec;
}
const tagsDefinitions = bundledSpec.tags;

for (const [pathKey, pathMethods] of Object.entries(bundledSpec.paths)) {
for (const [method, specMethod] of Object.entries(pathMethods)) {
// In the main bundle we only need the clientName, because the open-api generator will use it to determine the name of the client
specMethod.tags = [clientName];

if (!withDoc || !bundledDocSpec!.paths[pathKey][method].tags) {
continue;
}

// Checks that specified tags are well defined at root level
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is there a reason we want to ensure that?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

because if you don't define tags at the root level then nothing will appear in the doc

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It will still default to the base tag, so I guess it's fine. What I mean is that this feature is for the doc and shouldn't prevent dev work — but it's just my opinion.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Once it's done it's done, if you add a tag that means you expect to change the doc. So I'm not too worried about this impacting dev flow.

for (const tag of bundledDocSpec!.paths[pathKey][method].tags) {
if (tag === clientName || (alias && tag === alias)) {
return;
}

const tagExists = tagsDefinitions
? tagsDefinitions.find((t) => t.name === tag)
: null;
if (!tagExists) {
throw new Error(
`Tag "${tag}" in "client[${clientName}] -> operation[${specMethod.operationId}]" is not defined`
);
}
}
}
}

Expand All @@ -38,32 +72,38 @@ async function propagateTagsToOperations(
})
);

return true;
if (withDoc) {
const pathToDoc = bundledPath.replace('.yml', '.doc.yml');
await fsp.writeFile(
pathToDoc,
yaml.dump(bundledDocSpec, {
noRefs: true,
})
);
}
}

async function lintCommon(verbose: boolean, useCache: boolean): Promise<void> {
const spinner = createSpinner('linting common spec', verbose).start();

let hash = '';
const cacheFile = toAbsolutePath(`specs/dist/common.cache`);
if (useCache) {
const { cacheExists, hash: newCache } = await checkForCache(
{
job: 'common specs',
folder: toAbsolutePath('specs/'),
generatedFiles: [],
filesToCache: ['common'],
cacheFile,
},
verbose
);
const { cacheExists, hash: newCache } = await checkForCache({
folder: toAbsolutePath('specs/'),
generatedFiles: [],
filesToCache: ['common'],
cacheFile,
});

if (cacheExists) {
spinner.succeed("job skipped, cache found for 'common' spec");
return;
}

hash = newCache;
}

const spinner = createSpinner('linting common spec', verbose).start();
await run(`yarn specs:lint common`, { verbose });

if (hash) {
Expand All @@ -78,17 +118,21 @@ async function lintCommon(verbose: boolean, useCache: boolean): Promise<void> {
* Creates a lite search spec with the `ALGOLIASEARCH_LITE_OPERATIONS` methods
* from the `search` spec.
*/
async function buildLiteSpec(
spec: string,
bundledPath: string,
outputFormat: string,
verbose: boolean
): Promise<void> {
const searchSpec = yaml.load(
async function buildLiteSpec({
spec,
bundledPath,
outputFormat,
}: {
spec: string;
bundledPath: string;
outputFormat: string;
}): Promise<void> {
const parsed = yaml.load(
await fsp.readFile(toAbsolutePath(bundledPath), 'utf8')
) as Spec;

searchSpec.paths = Object.entries(searchSpec.paths).reduce(
// Filter methods.
parsed.paths = Object.entries(parsed.paths).reduce(
(acc, [path, operations]) => {
for (const [method, operation] of Object.entries(operations)) {
if (
Expand All @@ -105,95 +149,95 @@ async function buildLiteSpec(
);

const liteBundledPath = `specs/bundled/${spec}.${outputFormat}`;
await fsp.writeFile(toAbsolutePath(liteBundledPath), yaml.dump(searchSpec));

if (
!(await propagateTagsToOperations(toAbsolutePath(liteBundledPath), spec))
) {
throw new Error(
`Unable to propage tags to operations for \`${spec}\` spec.`
);
}
await fsp.writeFile(toAbsolutePath(liteBundledPath), yaml.dump(parsed));

await run(`yarn specs:fix bundled/${spec}.${outputFormat}`, {
verbose,
await propagateTagsToOperations({
bundledPath: toAbsolutePath(liteBundledPath),
clientName: spec,
withDoc: false,
});
}

/**
* Build spec file.
*/
async function buildSpec(
spec: string,
outputFormat: string,
verbose: boolean,
useCache: boolean
): Promise<void> {
const shouldBundleLiteSpec = spec === 'algoliasearch-lite';
const client = shouldBundleLiteSpec ? 'search' : spec;
const cacheFile = toAbsolutePath(`specs/dist/${client}.cache`);
const isLite = spec === 'algoliasearch-lite';
const specBase = isLite ? 'search' : spec; // In case of lite we use a different base because the base only exists virtually.
const cacheFile = toAbsolutePath(`specs/dist/${spec}.cache`);
let hash = '';

createSpinner(`'${client}' spec`, verbose).start().info();
const spinner = createSpinner(`starting '${spec}' spec`, verbose).start();

if (useCache) {
const generatedFiles = [`bundled/${client}.yml`];

if (shouldBundleLiteSpec) {
generatedFiles.push(`bundled/${spec}.yml`);
spinner.info(`checking cache for '${specBase}'`);
const generatedFiles: string[] = [`bundled/${spec}.yml`];
if (!isLite) {
generatedFiles.push(`bundled/${spec}.doc.yml`);
}

const { cacheExists, hash: newCache } = await checkForCache(
{
job: `'${client}' specs`,
folder: toAbsolutePath('specs/'),
generatedFiles,
filesToCache: [client, 'common'],
cacheFile,
},
verbose
);
const { cacheExists, hash: newCache } = await checkForCache({
folder: toAbsolutePath('specs/'),
generatedFiles,
filesToCache: [specBase, 'common'],
cacheFile,
});

if (cacheExists) {
spinner.succeed(`job skipped, cache found for '${specBase}'`);
return;
}

spinner.info(`cache not found for '${specBase}'`);
hash = newCache;
}

const spinner = createSpinner(`building ${client} spec`, verbose).start();
const bundledPath = `specs/bundled/${client}.${outputFormat}`;
// First linting the base
spinner.text = `linting '${spec}' spec`;
await run(`yarn specs:fix ${specBase}`, { verbose });

// Then bundle the file
const bundledPath = `specs/bundled/${spec}.${outputFormat}`;
await run(
`yarn openapi bundle specs/${client}/spec.yml -o ${bundledPath} --ext ${outputFormat}`,
`yarn openapi bundle specs/${specBase}/spec.yml -o ${bundledPath} --ext ${outputFormat}`,
{ verbose }
);

if (!(await propagateTagsToOperations(toAbsolutePath(bundledPath), client))) {
spinner.fail();
throw new Error(
`Unable to propage tags to operations for \`${client}\` spec.`
);
// Add the correct tags to be able to generate the proper client
if (!isLite) {
await propagateTagsToOperations({
bundledPath: toAbsolutePath(bundledPath),
clientName: spec,
withDoc: true,
});
} else {
await buildLiteSpec({
spec,
bundledPath: toAbsolutePath(bundledPath),
outputFormat,
});
}

spinner.text = `linting ${client} spec`;
await run(`yarn specs:fix ${client}`, { verbose });

spinner.text = `validating ${client} spec`;
await run(`yarn openapi lint specs/bundled/${client}.${outputFormat}`, {
// Validate and lint the final bundle
spinner.text = `validating '${spec}' bundled spec`;
await run(`yarn openapi lint specs/bundled/${spec}.${outputFormat}`, {
verbose,
});

spinner.text = `linting '${client}' bundled spec`;
await run(`yarn specs:fix bundled/${client}.${outputFormat}`, { verbose });

if (shouldBundleLiteSpec) {
spinner.text = `Building and linting '${spec}' spec`;
await buildLiteSpec(spec, bundledPath, outputFormat, verbose);
}
spinner.text = `linting '${spec}' bundled spec`;
await run(`yarn specs:fix bundled/${spec}.${outputFormat}`, { verbose });

if (hash) {
spinner.text = `storing ${client} spec cache`;
spinner.text = `storing '${spec}' spec cache`;
await fsp.writeFile(cacheFile, hash);
}

spinner.succeed(`building complete for '${client}' spec`);
spinner.succeed(`building complete for '${spec}' spec`);
}

export async function buildSpecs(
Expand Down
41 changes: 15 additions & 26 deletions scripts/common.ts
Original file line number Diff line number Diff line change
Expand Up @@ -202,17 +202,12 @@ export async function gitCommit({
});
}

export async function checkForCache(
{
job,
folder,
generatedFiles,
filesToCache,
cacheFile,
}: CheckForCacheOptions,
verbose: boolean
): Promise<CheckForCache> {
const spinner = createSpinner(`checking cache for ${job}`, verbose).start();
export async function checkForCache({
folder,
generatedFiles,
filesToCache,
cacheFile,
}: CheckForCacheOptions): Promise<CheckForCache> {
const cache: CheckForCache = {
cacheExists: false,
hash: '',
Expand All @@ -235,38 +230,32 @@ export async function checkForCache(
if (generatedFilesExists && (await exists(cacheFile))) {
const storedHash = (await fsp.readFile(cacheFile)).toString();
if (storedHash === cache.hash) {
spinner.succeed(`job skipped, cache found for ${job}`);
return {
cacheExists: true,
hash: cache.hash,
};
}
}

spinner.info(`cache not found for ${job}`);

return cache;
}

export async function buildCustomGenerators(verbose: boolean): Promise<void> {
const spinner = createSpinner('building custom generators', verbose).start();

const cacheFile = toAbsolutePath('generators/.cache');
const { cacheExists, hash } = await checkForCache(
{
job: 'custom generators',
folder: toAbsolutePath('generators/'),
generatedFiles: ['build'],
filesToCache: ['src', 'build.gradle', 'settings.gradle'],
cacheFile,
},
verbose
);
const { cacheExists, hash } = await checkForCache({
folder: toAbsolutePath('generators/'),
generatedFiles: ['build'],
filesToCache: ['src', 'build.gradle', 'settings.gradle'],
cacheFile,
});

if (cacheExists) {
spinner.succeed('job skipped, cache found for custom generators');
return;
}

const spinner = createSpinner('building custom generators', verbose).start();

await run('./gradle/gradlew --no-daemon -p generators assemble', {
verbose,
});
Expand Down
1 change: 0 additions & 1 deletion scripts/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ export type AdditionalProperties = Partial<{
Record<string, any>;

export type CheckForCacheOptions = {
job: string;
folder: string;
generatedFiles: string[];
filesToCache: string[];
Expand Down
2 changes: 2 additions & 0 deletions specs/search/paths/advanced/getLogs.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
get:
tags:
- Advanced
operationId: getLogs
description: Return the lastest log entries.
summary: Return the lastest log entries.
Expand Down
2 changes: 2 additions & 0 deletions specs/search/paths/advanced/getTask.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
get:
tags:
- Indices
Comment on lines +2 to +3
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Would it make sense to avoid any manual changes added in the specs, and default to the name of the folder the spec is stored in?

This way, we only have to handle where to put the file, not what tag it should match

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

So with today's structure, the tag for getTask would be advanced, but we can also create a new folder for indices, and we default to it

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nope because of deleteIndex and saveObject living in the same rest path

Copy link
Member

@shortcuts shortcuts Apr 27, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah, sorry, I did not fully explain why, but looking at the REST path, search, settings, and objects all have the same one, so I guess we can do whatever we want here with the folders, wdyt?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

we could, but it would need more care in spec.yml; experience tells me deducing something from the filesystem will blow up in your face at some point.
And deleteIndex and saveObject are not splittable because of the limitation of the $ref object, so my point still stands.

Copy link
Member

@shortcuts shortcuts Apr 27, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

They are both index operations, so it wouldn't shock me to see both of them under an index folder/tag.

My point is only that we don't have extra logic but I get yours too.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

one is an index operation, the other a record operation. It's definitely debatable, but as a user I would find it weird :p

operationId: getTask
description: Check the current status of a given task.
summary: Check the current status of a given task.
Expand Down
2 changes: 2 additions & 0 deletions specs/search/paths/dictionaries/batchDictionaryEntries.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
post:
tags:
- Dictionnaries
operationId: batchDictionaryEntries
description: Send a batch of dictionary entries.
summary: Send a batch of dictionary entries.
Expand Down
Loading