mirror of https://github.com/tauri-apps/tauri-search.git
synced 2026-02-04 10:51:19 +01:00
chore: added link tests
1 packages/docs/src/components.d.ts (vendored)
@@ -4,7 +4,6 @@
 declare module 'vue' {
   export interface GlobalComponents {
-    'AntDesign:apiTwotone': typeof import('~icons/ant-design/api-twotone')['default']
     'AntDesign:fileMarkdownOutlined': typeof import('~icons/ant-design/file-markdown-outlined')['default']
     'Bx:bxCheckbox': typeof import('~icons/bx/bx-checkbox')['default']
     'Bx:bxCheckboxChecked': typeof import('~icons/bx/bx-checkbox-checked')['default']
@@ -41,15 +41,15 @@ const details = () => {
       <div class="description flex flex-grow font-light text-sm truncate text-gray-500">{{doc.description}}</div>
     </div>
 
-    <div v-if="doc._idx === 'api'">
+    <div v-if="doc._idx === 'api'" class="flex flex-row flex-grow space-x-2 place-items-center items-center">
       <vscode-icons:file-type-typescript-official v-if="doc.language === 'typescript'" class="flex flex-shrink-0" />
       <vscode-icons:file-type-rust v-if="doc.language === 'rust'" class="flex" />
       <span class="flex">{{ doc.name }}</span>
     </div>
 
-    <div v-if="doc._idx === 'prose'">
+    <div v-if="doc._idx === 'prose'" class="flex flex-row flex-grow space-x-2 place-items-center items-center">
       <ant-design:file-markdown-outlined class="flex" />
       <div class="name">{{doc.title}}</div>
     </div>
 
     <div v-if="doc._idx === 'consolidated'" class="w-full">
@@ -3,15 +3,8 @@
 import { acceptHMRUpdate, defineStore } from "pinia";
 import type { UserModule } from "~/types";
 import { MeiliSearchHealth, MeiliSearchIndex, MeiliSearchInterface, MeiliSearchResponse, MeiliSearchStats } from "~/types/meilisearch";
-import {ApiModel, ProseModel } from "tauri-search";
+import { IMeilisearchIndexSettings } from "tauri-search";
 
-export function createIndexes() {
-  console.group("Establishing known search indexes");
-
-  ApiModel.query.createIndex().then(() => console.log(`ApiModel index created`));
-  ProseModel.query.createIndex().then(() => console.log(`ApiModel index created`));
-  ApiModel.query.createIndex().then(() => console.log(`ApiModel index created`));
-}
 
 //#region STORE
 export interface SearchState {
@@ -27,6 +20,9 @@ export interface SearchState {
   /** database stats for MeiliSearch */
   stats?: MeiliSearchStats;
 
+  /** index settings */
+  indexSettings: Record<string, IMeilisearchIndexSettings<any>>;
+
   searchStatus: "ready" | "searching" | "error" | "not-ready";
 
   searchResults: {id: string; _idx: string; [key: string]: unknown}[];
@@ -36,6 +32,7 @@ export const useSearch = defineStore("search", ({
   state: () => ({
     health: "initializing",
     indexes: [],
+    indexSettings: {},
     searchUsing: ["consolidated"],
     stats: undefined,
     searchStatus: "not-ready",
@@ -10,6 +10,8 @@
     "clean": "rimraf dist/* bin/*",
     "lint": "eslint src --ext ts,js,tsx,jsx --fix --no-error-on-unmatched-pattern",
     "prune": "docker system prune",
+    "clear-caches": "node bin/clear-caches.js",
+    "rebuild-caches": "node bin/rebuild-caches.js",
     "sitemap": "node bin/sitemap.js",
     "current-indexes": "node bin/current-indexes.js",
     "create-indexes": "node bin/create-indexes.js",
@@ -38,6 +40,7 @@
   "dependencies": {
     "cheerio": "^1.0.0-rc.10",
     "dotenv": "^14.3.2",
+    "fast-glob": "^3.2.11",
     "gray-matter": "^4.0.3",
     "inferred-types": "^0.18.4",
     "native-dash": "^1.21.5",
@@ -139,13 +139,13 @@ function simpleParse(f: string, content: string) {
     if (Array.isArray(node.content)) {
       headings[tag].push(node.content[0] as { content: string; type: string });
       if (node.content.length > 1) {
-        console.error(
-          `A heading tag in "${f}" was found which accumulated ${
-            node.content.length
-          } content elements in a single entry; only expected 1: ${node.content
-            .map((n) => n.type)
-            .join(", ")}`
-        );
+        // console.error(
+        //   `A heading tag in "${f}" was found which accumulated ${
+        //     node.content.length
+        //   } content elements in a single entry; only expected 1: ${node.content
+        //     .map((n) => n.type)
+        //     .join(", ")}`
+        // );
       }
     } else {
       console.error(
8 packages/tauri-search/src/cli/clear-caches.ts (new file)
@@ -0,0 +1,8 @@
+import { clearCaches } from "~/pipelines";
+
+(async () => {
+  console.log(`- clearing all cache files in repo`);
+  const files = await clearCaches();
+  console.log(`- removed ${files.length} cache files`);
+  console.log("- run 'pnpm run rebuild-caches' to rebuild");
+})();
@@ -9,7 +9,7 @@ import {
 import { pushConsolidatedDocs } from "~/pipelines/pushConsolidatedDocs";
 import { communicateTaskStatus } from "~/utils/communicateTaskStatus";
 import { getEnv } from "~/utils/getEnv";
-import { ConsolidatedModel } from "..";
+import { ConsolidatedModel } from "~/models";
 
 (async () => {
   console.log(`- pushing all models into consolidated index`);
6 packages/tauri-search/src/cli/rebuild-caches.ts (new file)
@@ -0,0 +1,6 @@
+import { rebuildCaches } from "~/pipelines/rebuildCaches";
+
+(async () => {
+  console.log(`- Rebuilding all caches files`);
+  await rebuildCaches();
+})();
@@ -1,12 +1,12 @@
 import { writeFile } from "fs/promises";
-import { buildDocsSitemap } from "~/utils/github/buildDocsSitemap";
 import { config } from "dotenv";
+import { refreshSitemap } from "~/pipelines/refreshSitemap";
 
 (async () => {
   config();
   const repo = process.env.REPO || "tauri";
   const ref = process.env.BRANCH || "dev";
-  const sitemap = await buildDocsSitemap({ ref, repo });
+  const sitemap = await refreshSitemap({ ref, repo });
   await writeFile(
     `src/generated/sitemap-${repo}-${ref}.json`,
     JSON.stringify(sitemap),
File diff suppressed because one or more lines are too long
@@ -15,5 +15,5 @@ export const ProseMapper: ModelMapper<MarkdownAst, IProseModel> = (i) => ({
   subSections: i.h3.map((i) => i.content),
   code: i.programmingLanguages,
   text: i.text,
-  url: `${TAURI_BASE_URL}/docs/${i.filepath}/${i.filename.replace(".md", "")}`,
+  url: `${TAURI_BASE_URL}/${i.filepath}/${i.filename.replace(".md", "")}`,
 });
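The dropped `/docs` segment changes every generated prose URL; a minimal before/after sketch with invented values (the real `TAURI_BASE_URL` constant is not shown in this diff):

// illustrative only; filepath/filename values are invented
const TAURI_BASE_URL = "https://example.com"; // stand-in value
const i = { filepath: "guides/testing", filename: "intro.md" };
const before = `${TAURI_BASE_URL}/docs/${i.filepath}/${i.filename.replace(".md", "")}`;
const after = `${TAURI_BASE_URL}/${i.filepath}/${i.filename.replace(".md", "")}`;
// before -> https://example.com/docs/guides/testing/intro
// after  -> https://example.com/guides/testing/intro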
13 packages/tauri-search/src/pipelines/clearCaches.ts (new file)
@@ -0,0 +1,13 @@
+import fg from "fast-glob";
+import { rm } from "fs/promises";
+
+/**
+ * clears all cache files from the `src/generated` directory
+ */
+export async function clearCaches() {
+  const files = await fg("src/generated/**/*.json");
+  for (const f of files) {
+    await rm(f);
+  }
+  return files;
+}
@@ -1,5 +1,5 @@
 import { ProseModel, ApiModel, RepoModel } from "~/models";
-import { MsSettingsUpdate } from "..";
+import { IMeilisearchIndexSettings } from "..";
 
 const models = {
   api: ApiModel,
@@ -22,7 +22,7 @@ export async function createIndexes() {
     // create the index
     console.log(await model.query.createIndex());
     // then update settings
-    const indexSettings: MsSettingsUpdate<any> = {
+    const indexSettings: IMeilisearchIndexSettings<any> = {
       ...(model.index.displayed ? { displayedAttributes: model.index.displayed } : {}),
       ...(model.index.searchable
         ? { searchableAttributes: model.index.searchable }
@@ -5,3 +5,5 @@ export * from "./pushTypescriptDocs";
 export * from "./refreshProse";
 export * from "./refreshRepos";
 export * from "./refreshTypescript";
+export * from "./refreshSitemap";
+export * from "./clearCaches";
14 packages/tauri-search/src/pipelines/rebuildCaches.ts (new file)
@@ -0,0 +1,14 @@
+import { getEnv } from "~/utils/getEnv";
+import { refreshProse, refreshRepos, refreshTypescript } from ".";
+import { refreshSitemap } from "./refreshSitemap";
+
+export async function rebuildCaches() {
+  const { repo, branch } = getEnv();
+
+  await refreshSitemap();
+  await Promise.all([
+    refreshProse(repo, branch),
+    refreshRepos(),
+    refreshTypescript(repo, branch),
+  ]);
+}
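Together with `clearCaches`, this enables a full clear-then-rebuild cycle; a sketch combining the two new entry points (the programmatic equivalent of `pnpm run clear-caches` followed by `pnpm run rebuild-caches`):

import { clearCaches } from "~/pipelines";
import { rebuildCaches } from "~/pipelines/rebuildCaches";

(async () => {
  const removed = await clearCaches();   // deletes src/generated/**/*.json
  console.log(`removed ${removed.length} cache files; rebuilding`);
  await rebuildCaches();                 // refreshes sitemap, prose, repo, and TS caches
})();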
@@ -1,12 +1,12 @@
 import axios from "axios";
 import { existsSync, mkdirSync } from "fs";
-import { readFile, rm, writeFile } from "fs/promises";
+import { readFile, writeFile } from "fs/promises";
 import path, { join } from "node:path";
 import { parseMarkdown } from "~/ast/parseMarkdown";
 import { ProseMapper } from "~/mappers";
 import { IProseModel } from "~/models/ProseModel";
 import { flattenSitemap, sitemapDictionary } from "~/utils/convertSitemap";
-import { buildDocsSitemap, IDocsSitemap } from "~/utils/github/buildDocsSitemap";
+import { IDocsSitemap, refreshSitemap } from "./refreshSitemap";
 
 /* eslint-disable no-console */
 export interface IRefreshProseOptions {
@@ -39,7 +39,6 @@ async function cacheMarkdownAst(file: string, url: string, repo: string, branch:
   const content = (await axios.get(url)).data;
   const ast = await parseMarkdown({ file, content });
   await write(jsonFile, JSON.stringify(ast));
-  console.log(`- wrote markdown AST file: ${jsonFile}`);
   return ast;
 }
@@ -62,7 +61,7 @@ export async function refreshProse(
     ? sitemapDictionary(JSON.parse(await readFile(sitemapFile, "utf-8")) as IDocsSitemap)
     : {};
 
-  const newSitemap = await buildDocsSitemap({ repo, ref: branch });
+  const newSitemap = await refreshSitemap({ repo, ref: branch });
   const flatmap = flattenSitemap(newSitemap);
   const documents: IProseModel[] = [];
   const unchangedDocuments: IProseModel[] = [];
@@ -90,7 +89,7 @@ export async function refreshProse(
       }
     } else {
       changed.push(file.filepath);
-      console.log(`- change in "${file.filepath}" detected`);
+      // console.log(`- change in "${file.filepath}" detected`);
       documents.push(
         ProseMapper(
           await cacheMarkdownAst(file.filepath, file.download_url, repo, branch)
@@ -119,16 +118,23 @@ export async function refreshProse(
   if (currentSitemap) {
     // look for files which have been removed, since last time
-    const current = flattenSitemap(JSON.parse(await readFile(sitemapFile, "utf-8")));
-    const lookup = sitemapDictionary(newSitemap);
-    const removed = current.filter((c) => !lookup[c.filepath]).map((i) => i.filepath);
-    if (removed.length > 0) {
-      console.log(
-        `- detected ${removed.length} files which no longer exist: ${removed.join(", ")}`
-      );
-      for (const file of removed) {
-        await rm(jsonFileFromMarkdown(file, repo, branch));
-      }
-    }
+    // const current = flattenSitemap(JSON.parse(await readFile(sitemapFile, "utf-8")));
+    // const lookup = sitemapDictionary(newSitemap);
+    // const removed = current.filter((c) => !lookup[c.filepath]).map((i) => i.filepath);
+    // if (removed.length > 0) {
+    //   console.log(
+    //     `- detected ${removed.length} files which no longer exist: ${removed.join(", ")}`
+    //   );
+    //   for (const file of removed) {
+    //     await rm(jsonFileFromMarkdown(file, repo, branch));
+    //   }
+    // }
   }
 
+  const sitemap = `src/generated/sitemap-${repo}-${branch}.json`;
+
+  await writeFile(sitemap, JSON.stringify(currentSitemap), "utf-8");
+  console.log(`- wrote Repo Sitemap to: ${sitemap}`);
+
+  return { sitemap };
 }
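From the new trailing block, `refreshProse` now writes the sitemap cache itself and appears to return the cache path; a hypothetical caller (return shape assumed from the hunk above):

import { refreshProse } from "~/pipelines";
import { getEnv } from "~/utils/getEnv";

(async () => {
  const { repo, branch } = getEnv();
  const { sitemap } = await refreshProse(repo, branch); // assumed { sitemap: string }
  console.log(`prose refresh complete; sitemap cached at ${sitemap}`);
})();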
@@ -1,9 +1,8 @@
 import { Endpoints } from "@octokit/types";
 import axios from "axios";
 import { join } from "node:path";
 import { GITHUB_API_BASE } from "~/constants";
 import { GithubContentsReq, GithubContentsResp } from "~/types";
-import { getEnv } from "../getEnv";
+import { getEnv } from "~/utils/getEnv";
 
 const DEFAULT: GithubContentsReq = {
   owner: "tauri-apps",
@@ -76,7 +75,7 @@ function reduceClutter(
 /**
  * Uses Github API to build a sitemap of markdown files for a given repo
  */
-export async function buildDocsSitemap(options: Partial<GithubContentsReq> = DEFAULT) {
+export async function refreshSitemap(options: Partial<GithubContentsReq> = DEFAULT) {
   const o = { ...DEFAULT, ...options };
   const [files, children] = reduceClutter(o.path, (await getDirectory(o)).data);
   const sitemap: IDocsSitemap = {
@@ -89,7 +88,7 @@ export async function buildDocsSitemap(options: Partial<GithubContentsReq> = DEF
   for (const child of children) {
     const p = join(o.path, `/${child}`);
     const mo = { ...o, path: p };
-    waitFor.push(buildDocsSitemap(mo));
+    waitFor.push(refreshSitemap(mo));
   }
   const resolved = await Promise.all(waitFor);
   sitemap.children = resolved;
@@ -57,7 +57,7 @@ export interface MsSettingsResponse<T extends {}> {
   distinctAttribute: null | (keyof T)[] | ["*"];
 }
 
-export interface MsSettingsUpdate<T extends {}> {
+export interface IMeilisearchIndexSettings<T extends {}> {
   /** List of associated words treated similarly. A word associated to an array of word as synonyms. */
   synonyms?: Record<string, string[]>;
   /** List of words ignored when present in search queries. */
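For orientation, a concrete value of the renamed interface might look like the sketch below; `synonyms` is shown in this hunk, `displayedAttributes` and `searchableAttributes` appear in the createIndexes() hunk earlier in this diff, and `stopWords` is an assumption based on Meilisearch's standard settings payload:

const settings: IMeilisearchIndexSettings<{ title: string; text: string }> = {
  synonyms: { app: ["application"] },        // field shown in this hunk
  displayedAttributes: ["title"],            // used by createIndexes() earlier in this diff
  searchableAttributes: ["title", "text"],   // used by createIndexes() earlier in this diff
  stopWords: ["the", "a"],                   // assumed: standard Meilisearch setting
};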
@@ -217,7 +217,9 @@ export interface MeiliSearchQueryApi<TDoc extends {}> {
   search: (text: string) => Promise<MeiliSearchResponse>;
 
   getAllIndexSettings: () => Promise<MsSettingsResponse<TDoc>>;
-  updateIndexSettings: (settings: MsSettingsUpdate<TDoc>) => Promise<MsTaskStatus>;
+  updateIndexSettings: (
+    settings: IMeilisearchIndexSettings<TDoc>
+  ) => Promise<MsTaskStatus>;
 
   resetIndexSettings: () => Promise<MsTaskStatus>;
   updateRankingRules: () => Promise<MsTaskStatus>;
@@ -9,7 +9,7 @@ import {
   MsTaskStatus,
   MsKey,
   MsIndexStatusResponse,
-  MsSettingsUpdate,
+  IMeilisearchIndexSettings,
   MeiliSearchQueryApi,
   ISearchConfig,
   MsAllTasks,
@@ -139,8 +139,11 @@ export function MeiliSearchApi<TDoc extends {}>(
       put<MsAddOrReplace>(`indexes/${idx}/documents`, JSON.stringify(doc), o),
     search: (text: string) => get<MeiliSearchResponse>(`indexes/${idx}/search?q=${text}`),
     getAllIndexSettings: () => get<MsSettingsResponse<TDoc>>(`indexes/${idx}/settings`),
-    updateIndexSettings: (settings: MsSettingsUpdate<TDoc>) =>
-      post<MsTaskStatus, MsSettingsUpdate<TDoc>>(`indexes/${idx}/settings`, settings),
+    updateIndexSettings: (settings: IMeilisearchIndexSettings<TDoc>) =>
+      post<MsTaskStatus, IMeilisearchIndexSettings<TDoc>>(
+        `indexes/${idx}/settings`,
+        settings
+      ),
     resetIndexSettings: () => del<MsTaskStatus>(`indexes/${idx}/settings`),
     updateRankingRules: () => post<MsTaskStatus>(`indexes/${idx}/settings/ranking-rules`),
     updateDistinctAttribute: () =>
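A hypothetical call site for the retyped method; it assumes a model's `query` object implements `MeiliSearchQueryApi`, which the `ProseModel.query.createIndex()` calls elsewhere in this diff suggest:

const settings: IMeilisearchIndexSettings<IProseModel> = {
  searchableAttributes: ["title", "text"],   // field names assumed from IProseModel usage
};
const task = await ProseModel.query.updateIndexSettings(settings);
// task is an MsTaskStatus; Meilisearch applies settings updates asynchronously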
@@ -1,6 +1,6 @@
 import { omit } from "native-dash";
 import { join } from "path";
-import { IDocsSitemap } from "./github/buildDocsSitemap";
+import { IDocsSitemap } from "~/pipelines/refreshSitemap";
 
 export interface IFlatSitemap {
   /** the full "relative path" (aka, dir and filename combined) */
@@ -0,0 +1,85 @@
+import { existsSync } from "fs";
+import { readFile } from "fs/promises";
+import { LinkExists, linkExists, LinkMissing } from "../tools/linkExists";
+import { beforeAll, describe, expect, it } from "vitest";
+import { REPO_DOCS_CACHE, TS_DOCS_CACHE } from "~/constants";
+import { ConsolidatedMapper } from "~/mappers/ConsolidatedMapper";
+import { IApiModel, IProseModel, IRepoModel } from "~/models";
+import {
+  proseDocsCacheFile,
+  refreshProse,
+  refreshRepos,
+  refreshTypescript,
+} from "~/pipelines";
+import { getEnv } from "~/utils/getEnv";
+const { repo, branch } = getEnv();
+
+describe("link testing of consolidated index", () => {
+  // let docs: IConsolidatedModel[];
+  beforeAll(async () => {
+    if (!existsSync(TS_DOCS_CACHE)) {
+      await refreshTypescript(repo, branch);
+    }
+    if (!existsSync(REPO_DOCS_CACHE)) {
+      await refreshRepos();
+    }
+    if (!existsSync(proseDocsCacheFile(repo, branch))) {
+      await refreshProse(repo, branch);
+    }
+  });
+
+  it("test links originating from prose", async () => {
+    const docs = (
+      JSON.parse(
+        await readFile(proseDocsCacheFile(repo, branch), "utf-8")
+      ) as IProseModel[]
+    ).map((i) => ConsolidatedMapper(i));
+    const links: Promise<LinkExists | LinkMissing>[] = [];
+    for (const doc of docs) {
+      links.push(linkExists(doc.url));
+    }
+    const results = await Promise.all(links);
+    const broken = results.filter((i) => !i.ok);
+    expect(
+      results.every((i) => i.ok),
+      `${broken.length} of ${docs.length} prose links don't seem to exist:\n\t${broken
+        .map((i) => `${i.url} -> ${(i as LinkMissing).error}`)
+        .join("\n\t")}`
+    ).toBeTruthy();
+  });
+  it("test links originating from Typescript API", async () => {
+    const docs = (JSON.parse(await readFile(TS_DOCS_CACHE, "utf-8")) as IApiModel[]).map(
+      (i) => ConsolidatedMapper(i)
+    );
+    const links: Promise<LinkExists | LinkMissing>[] = [];
+    for (const doc of docs) {
+      links.push(linkExists(doc.url));
+    }
+    const results = await Promise.all(links);
+    const broken = results.filter((i) => !i.ok);
+    expect(
+      results.every((i) => i.ok),
+      `${broken.length} of ${docs.length} prose links don't seem to exist:\n\t${broken
+        .map((i) => `${i.url} -> ${(i as LinkMissing).error}`)
+        .join("\n\t")}`
+    ).toBeTruthy();
+  });
+  it.todo("test links originating from Rust API", async () => {});
+  it("test links originating from repos", async () => {
+    const docs = (
+      JSON.parse(await readFile(REPO_DOCS_CACHE, "utf-8")) as IRepoModel[]
+    ).map((i) => ConsolidatedMapper(i));
+    const links: Promise<LinkExists | LinkMissing>[] = [];
+    for (const doc of docs) {
+      links.push(linkExists(doc.url));
+    }
+    const results = await Promise.all(links);
+    const broken = results.filter((i) => !i.ok);
+    expect(
+      results.every((i) => i.ok),
+      `${broken.length} of ${docs.length} prose links don't seem to exist:\n\t${broken
+        .map((i) => `${i.url} -> ${(i as LinkMissing).error}`)
+        .join("\n\t")}`
+    ).toBeTruthy();
+  });
+});
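All three tests repeat the same fetch-and-assert block, and the TypeScript and repo variants still say "prose links" in their failure messages; a shared helper along these lines (hypothetical, not part of the commit) would remove the duplication and fix the copied message:

async function expectLinksOk(docs: { url: string }[], label: string) {
  const results = await Promise.all(docs.map((d) => linkExists(d.url)));
  const broken = results.filter((r) => !r.ok) as LinkMissing[];
  expect(
    broken.length,
    `${broken.length} of ${docs.length} ${label} links don't seem to exist:\n\t${broken
      .map((b) => `${b.url} -> ${b.error}`)
      .join("\n\t")}`
  ).toBe(0);
}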
31 packages/tauri-search/test/tools/linkExists.ts (new file)
@@ -0,0 +1,31 @@
+import axios from "axios";
+
+export interface LinkExists {
+  url: string;
+  ok: true;
+  code: number;
+}
+export interface LinkMissing {
+  url: string;
+  ok: false;
+  code: number;
+  error: string;
+}
+
+export async function linkExists(url: string) {
+  try {
+    const res = await axios.head(url, { timeout: 3000 });
+    return { url, ok: true, code: res.status } as LinkExists;
+  } catch (err) {
+    if (axios.isAxiosError(err)) {
+      return {
+        url,
+        ok: false,
+        code: err.response?.status || -1,
+        error: err.response?.data?.message || err.response?.statusText,
+      } as LinkMissing;
+    }
+
+    return { url, ok: false, code: -1, error: (err as Error).message } as LinkMissing;
+  }
+}
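Quick usage sketch of the helper (URL illustrative); because `ok` is typed as a literal `true`/`false` on the two interfaces, the union narrows without casts:

import { linkExists } from "./linkExists";

(async () => {
  const result = await linkExists("https://example.com/docs/intro");
  if (!result.ok) {
    // result is narrowed to LinkMissing here
    console.error(`broken (${result.code}): ${result.url} -> ${result.error}`);
  }
})();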
@@ -1,7 +1,7 @@
 import { readFile } from "fs/promises";
 import { beforeAll, describe, expect, it } from "vitest";
 import { flattenSitemap, sitemapDictionary } from "~/utils/convertSitemap";
-import { IDocsSitemap } from "~/utils/github/buildDocsSitemap";
+import { IDocsSitemap } from "~/pipelines/buildDocsSitemap";
 
 let sitemap: IDocsSitemap;
2 pnpm-lock.yaml (generated)
@@ -148,6 +148,7 @@ importers:
     eslint-plugin-import: ^2.25.4
     eslint-plugin-prettier: ^4.0.0
     eslint-plugin-promise: ^6.0.0
+    fast-glob: ^3.2.11
     fx: ^20.0.2
     gray-matter: ^4.0.3
     husky: ^7.0.4
@@ -170,6 +171,7 @@ importers:
   dependencies:
     cheerio: 1.0.0-rc.10
     dotenv: 14.3.2
+    fast-glob: 3.2.11
     gray-matter: 4.0.3
     inferred-types: 0.18.4
     native-dash: 1.21.5