refactor: adjusted both CJS and ESM code so they interact better with respect to stage

Ken Snyder
2022-02-07 19:53:40 -08:00
parent 5aaaafe7ff
commit deb541fafe
47 changed files with 799 additions and 623 deletions
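The thrust of the change is that model factories and pipeline entry points now receive the stage (and an admin key) explicitly, resolved once via getEnv(), rather than each module reading the environment implicitly. Below is a minimal sketch of the resulting call pattern, assembled from the call sites in the diffs that follow; it is illustrative only, and assumes the signatures shown there (Node/CJS entry points import getEnv from ~/utils/getEnv/node/getEnv, ESM/Vite code from ~/utils/getEnv/esm/getEnv).

import { ApiModel, ProseModel } from "~/models";
import { getEnv } from "~/utils/getEnv/node/getEnv";

(async () => {
  // Resolve stage, keys, repo settings, etc. once from the environment
  const o = getEnv();

  // Every model factory now accepts the stage plus MeiliSearch options,
  // so the same code can target local, staging, or production servers
  const api = ApiModel(o.stage, { admin_key: o.adminKey });
  const prose = ProseModel(o.stage, { admin_key: o.adminKey });

  // Example: list the indexes currently present on the selected server
  const active = (await api.query.currentIndexes()).map((i) => i.name);
  console.log(`[${o.stage}] active indexes: ${active.join(", ")}`);
})();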

View File

@@ -20,6 +20,9 @@ services:
ports:
- 7700:7700
- 2222:22
restart: unless-stopped
security_opt:
- no-new-privileges:true
volumes:
- search_db:/home/db

View File

@@ -2,6 +2,7 @@
"name": "tauri-search-monorepo",
"private": true,
"license": "MIT",
"type": "module",
"author": "Ken Snyder<ken@ken.net>",
"scripts": {
"cli:reset-index-config": "run-s cli:drop-indexes cli:create-indexes cli:push-caches",

View File

@@ -24,25 +24,25 @@
"docs-searchbar.js": "^2.1.0",
"floating-vue": "^2.0.0-beta.5",
"inferred-types": "^0.18.4",
"markdown-it-expandable": "^1.0.0",
"markdown-it-expandable": "^1.0.2",
"nprogress": "^0.2.0",
"pinia": "^2.0.11",
"prism-theme-vars": "^0.2.2",
"tauri-search": "workspace:*",
"vue": "^3.2.29",
"vue": "^3.2.30",
"vue-demi": "^0.12.1",
"vue-i18n": "^9.1.9",
"vue-router": "^4.0.12"
},
"devDependencies": {
"@antfu/eslint-config": "^0.16.1",
"@iconify/json": "^2.0.33",
"@intlify/vite-plugin-vue-i18n": "^3.2.1",
"@iconify/json": "^2.0.34",
"@intlify/vite-plugin-vue-i18n": "^3.2.2",
"@types/markdown-it-link-attributes": "^3.0.1",
"@types/nprogress": "^0.2.0",
"@vitejs/plugin-vue": "^2.1.0",
"@vue/compiler-sfc": "^3.2.29",
"@vue/server-renderer": "^3.2.29",
"@vue/compiler-sfc": "^3.2.30",
"@vue/server-renderer": "^3.2.30",
"@vue/test-utils": "^2.0.0-rc.18",
"critters": "^0.0.16",
"cross-env": "^7.0.3",
@@ -57,16 +57,16 @@
"typescript": "^4.5.5",
"unplugin-auto-import": "^0.5.11",
"unplugin-icons": "^0.13.0",
"unplugin-vue-components": "^0.17.15",
"unplugin-vue-components": "^0.17.17",
"vite": "^2.7.13",
"vite-plugin-inspect": "^0.3.13",
"vite-plugin-md": "^0.11.7",
"vite-plugin-pages": "^0.20.1",
"vite-plugin-pages": "^0.20.2",
"vite-plugin-pwa": "^0.11.13",
"vite-plugin-vue-layouts": "^0.5.0",
"vite-plugin-windicss": "^1.6.3",
"vite-ssg": "^0.17.9",
"vitest": "^0.2.6",
"vue-tsc": "^0.31.1"
"vite-ssg": "^0.17.10",
"vitest": "^0.2.7",
"vue-tsc": "^0.31.2"
}
}

View File

@@ -0,0 +1,7 @@
lockfileVersion: 5.3
specifiers:
meili-searchbar: ^2.1.0
dependencies:
meili-searchbar: link:../../../../../../../../../Users/ken/pnpm-global/5/node_modules/meili-searchbar

View File

@@ -1,17 +1,21 @@
<script setup lang="ts">
import docsSearchBar from "docs-searchbar.js";
// import docsSearchBar from "meili-searchbar";
docsSearchBar({
hostUrl: "http://localhost:7700",
apiKey: "",
indexUid: "docs",
inputSelector: "#search-bar-input",
onBeforeMount(() => {
// docsSearchBar({
// hostUrl: "http://localhost:7700",
// apiKey: "",
// indexUid: "docs",
// inputSelector: "#search-bar-input",
// });
// console.log(docsSearchBar);
});
</script>
<template>
<div>
<input id="search-bar-input" type="text">
<input id="search-bar-input" type="search">
</div>
</template>

View File

@@ -79,9 +79,9 @@ const searchable =s.indexSettings[doc.value._idx as any].searchableAttributes as
<!-- PROSE -->
<div v-if="doc.from === 'prose'" class="flex flex-row flex-grow space-x-2 place-items-center items-center">
<teenyicons:text-document-solid class="flex" />
<teenyicons:text-document-solid class="flex flex-shrink-0" />
<div class="title font-semibold flex-shrink-0">{{doc.hierarchy_lvl0}}</div>
<div class="title font-light truncate text-gray-500 flex flex-grow">{{doc.hierarchy_lvl1}}</div>
<div class="title font-light truncate text-gray-500 flex flex-grow">{{doc.hierarchy_lvl1 || doc.content}}</div>
<link-validation :url="(doc.url as string)" />
</div>

View File

@@ -66,5 +66,6 @@ onStartTyping(() => {
<search-results :query="searchText" />
</div>
</div>
</div>
</template>

View File

@@ -15,4 +15,4 @@ declare module "*.vue" {
export default component;
}
declare module "docs-searchbar.js";
declare module "meili-searchbar";

View File

@@ -2,7 +2,7 @@
"compilerOptions": {
"baseUrl": ".",
"module": "ESNext",
"target": "es2016",
"target": "ES2020",
"lib": ["DOM", "ESNext"],
"strict": true,
"esModuleInterop": true,

View File

@@ -0,0 +1,15 @@
GH_USER="ksnyde"
GH_TOKEN="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"
API_KEY=""
DOCS_PATH="docs"
ADMIN_KEY="MtgOqm4Nc9c2c3d8285f41e3e81d240ee48872b217380d7d80725cdf03dc78d327a19b6f"
SEARCH_KEY="XZEH8BS90ee09c45215a8421c06857bcbde5c1a6797bdf4859a57a3ac1228a2b81df0994"
MEILI_URL="http://localhost:7700"
VITE_GH_USER="ksnyde"
VITE_GH_TOKEN="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"
VITE_API_KEY=""
VITE_DOCS_PATH="docs"
VITE_ADMIN_KEY="MtgOqm4Nc9c2c3d8285f41e3e81d240ee48872b217380d7d80725cdf03dc78d327a19b6f"
VITE_SEARCH_KEY="XZEH8BS90ee09c45215a8421c06857bcbde5c1a6797bdf4859a57a3ac1228a2b81df0994"
VITE_MEILI_URL="https://search2.tauri.studio"

View File

@@ -5,39 +5,41 @@
"description": "Search Engine for Tauri website",
"license": "MIT",
"author": "Ken Snyder<ken@ken.net>",
"main": "dist/index.js",
"module": "dist/index.mjs",
"main": "dist/index.cjs",
"module": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"bin": {},
"scripts": {
"build": "run-p build:*",
"build:cli": "tsup src/cli/*.ts --format=esm,cjs --clean --sourcemap -d bin",
"build:npm": "tsup src/index.ts --dts --format=esm,cjs --sourcemap --clean -d dist ",
"clean": "rimraf dist/* bin/*",
"clear-caches": "rimraf src/generated/ast && node bin/clear-caches.js",
"create-indexes": "node bin/create-indexes.js",
"drop-indexes": "node bin/drop-indexes.js",
"current-indexes": "node bin/current-indexes.js",
"clear-caches": "rimraf src/generated/ast && node bin/clear-caches.cjs",
"create-indexes": "node bin/create-indexes.cjs",
"drop-indexes": "node bin/drop-indexes.cjs",
"current-indexes": "node bin/current-indexes.cjs",
"lint": "eslint src --ext ts,js,tsx,jsx --fix --no-error-on-unmatched-pattern",
"prune": "docker system prune",
"delta-update": "run-s rebuild-caches ",
"full-update": "run-s drop-indexes clear-caches rebuild-caches create-indexes push-consolidated push-prose push-repos push-typescript",
"push-caches": "node bin/push-cache.js",
"push-consolidated": "node bin/push-consolidated.js",
"push-prose": "node bin/push-prose.js",
"push-repos": "node bin/push-repos.js",
"push-typescript": "node bin/push-typescript.js",
"rebuild-caches": "node bin/rebuild-caches.js",
"refresh-prose": "node bin/refresh-prose.js",
"refresh-sitemap": "node bin/refresh-sitemap.js",
"refresh-repos": "node bin/refresh-repos.js",
"refresh-typescript": "node bin/refresh-typescript.js",
"full-update:staging": "NODE_ENV=staging pnpm run full-update",
"push-caches": "node bin/push-cache.cjs",
"push-consolidated": "node bin/push-consolidated.cjs",
"push-prose": "node bin/push-prose.cjs",
"push-repos": "node bin/push-repos.cjs",
"push-typescript": "node bin/push-typescript.cjs",
"rebuild-caches": "node bin/rebuild-caches.cjs",
"refresh-prose": "node bin/refresh-prose.cjs",
"refresh-sitemap": "node bin/refresh-sitemap.cjs",
"refresh-repos": "node bin/refresh-repos.cjs",
"refresh-typescript": "node bin/refresh-typescript.cjs",
"restart": "docker compose restart",
"sitemap": "node bin/sitemap.js",
"sitemap": "node bin/sitemap.cjs",
"test": "vitest run",
"test:watch": "vitest watch --ui",
"ts-ast": "node ./bin/ts-ast.js",
"ts-ast-overview": "node ./bin/ts-ast-overview.js",
"ts-ast": "node ./bin/ts-ast.cjs",
"ts-ast-overview": "node ./bin/ts-ast-overview.cjs",
"watch": "run-p watch:*",
"watch:cli": "tsup src/cli/*.ts --format=esm,cjs --sourcemap -d bin --watch",
"watch:npm": "tsup src/index.ts --dts --format=esm,cjs --sourcemap -d dist --watch"
@@ -58,9 +60,9 @@
"@type-challenges/utils": "^0.1.1",
"@types/markdown-it": "^12.2.3",
"@types/node": "^14.18.10",
"@typescript-eslint/eslint-plugin": "^5.10.2",
"@typescript-eslint/parser": "^5.10.2",
"@vitest/ui": "^0.2.6",
"@typescript-eslint/eslint-plugin": "^5.11.0",
"@typescript-eslint/parser": "^5.11.0",
"@vitest/ui": "^0.2.7",
"changeset": "^0.2.6",
"eslint": "^8.8.0",
"eslint-config-prettier": "^8.3.0",
@@ -73,13 +75,13 @@
"npm-run-all": "^4.1.5",
"prettier": "^2.5.1",
"rimraf": "^3.0.2",
"ts-node": "^10.4.0",
"ts-node": "^10.5.0",
"tsup": "^5.11.13",
"typescript": "^4.5.5",
"vite": "^2.7.13",
"vite-plugin-dts": "^0.9.9",
"vite-plugin-inspect": "^0.3.13",
"vitest": "^0.2.6"
"vitest": "^0.2.7"
},
"engines": {
"node": ">=14",

View File

@@ -3,7 +3,7 @@
import { readFile } from "fs/promises";
// import xxhash from "xxhash-wasm";
import matter from "gray-matter";
import {convert} from "html-to-text";
import { convert } from "html-to-text";
import smd from "simple-markdown-2";
import { ITauriFrontmatter, MarkdownAst } from "~/types/markdown";

View File

@@ -1,9 +1,13 @@
/* eslint-disable no-console */
import { createIndexes } from "~/pipelines/createIndexes";
import { getEnv } from "~/utils/getEnv/node/getEnv";
(async () => {
try {
const { skipping, created } = await createIndexes();
const options = getEnv();
console.log(`- creating Meilisearch indexes [${options.stage}]`);
const { skipping, created } = await createIndexes(options);
if (skipping.length > 0) {
console.log(
`- the following indexes -- ${skipping.join(

View File

@@ -1,10 +1,15 @@
/* eslint-disable no-console */
import { ApiModel } from "~/models";
import { getEnv } from "~/utils/getEnv/node/getEnv";
(async () => {
const active = (await ApiModel().query.currentIndexes()).map((i) => i.name);
const o = getEnv();
const active = (
await ApiModel(o.stage, { admin_key: o.adminKey }).query.currentIndexes()
).map((i) => i.name);
console.log(`- clearing all active indexes: ${active.join(", ")}`);
for (const idx of active) {
await ApiModel().query.deleteIndex(idx);
await ApiModel(o.stage, { admin_key: o.adminKey }).query.deleteIndex(idx);
}
})();

View File

@@ -2,15 +2,24 @@
import { pushConsolidatedDocs } from "~/pipelines/pushConsolidatedDocs";
import { communicateTaskStatus } from "~/utils/communicateTaskStatus";
import { ConsolidatedModel } from "~/models";
import { getEnv } from "~/utils/getEnv/node/getEnv";
(async () => {
console.log(`- pushing all individual models into a consolidated index`);
const { tasks } = await pushConsolidatedDocs();
const o = getEnv();
console.log(
`- pushing all individual doc caches into a consolidated index [${o.stage}]`
);
const { tasks } = await pushConsolidatedDocs(o);
console.log();
console.log(
`- all consolidated documents [${tasks.length}] have been pushed to MeiliSearch queue`
);
communicateTaskStatus(ConsolidatedModel(), tasks, { timeout: 75000 });
await communicateTaskStatus(
ConsolidatedModel(o.stage, { admin_key: o.adminKey }),
tasks,
{
timeout: 75000,
}
);
})();

View File

@@ -2,13 +2,18 @@
import { pushProseDocs } from "~/pipelines";
import { communicateTaskStatus } from "~/utils/communicateTaskStatus";
import { ProseModel } from "~/models";
import { getEnv } from "~/utils/getEnv/node/getEnv";
(async () => {
console.log(`- Pushing "prose" documents to MeiliSearch`);
const tasks = await pushProseDocs();
const o = getEnv();
console.log(`- Pushing "prose" documents to MeiliSearch [${o.stage}]`);
const tasks = await pushProseDocs(o);
console.log(
`- all ${tasks.length} documents were pushed via API; monitoring task status ...`
);
await communicateTaskStatus(ProseModel(), tasks, { timeout: 75000 });
await communicateTaskStatus(ProseModel(o.stage, { admin_key: o.adminKey }), tasks, {
timeout: 75000,
});
})();

View File

@@ -1,12 +1,14 @@
/* eslint-disable no-console */
import { pushRepoDocs } from "~/pipelines";
import { communicateTaskStatus } from "~/utils/communicateTaskStatus";
import { RepoModel } from "..";
import { RepoModel } from "~/models";
import { getEnv } from "~/utils/getEnv/node/getEnv";
(async () => {
const o = getEnv();
console.log(`- Pushing Repo document cache into MeiliSearch`);
const { docs, errors, tasks } = await pushRepoDocs();
const { docs, errors, tasks } = await pushRepoDocs(o);
console.log();
if (errors.length > 0) {
console.log(
@@ -20,6 +22,6 @@ import { RepoModel } from "..";
`- Completed pushing all ${docs.length} Repo docs to MeiliSearch; monitoring queue status`
);
await communicateTaskStatus(RepoModel(), tasks);
await communicateTaskStatus(RepoModel(o.stage, { admin_key: o.adminKey }), tasks);
}
})();

View File

@@ -2,10 +2,15 @@
import { ApiModel } from "~/models";
import { pushTypescriptDocs } from "~/pipelines/pushTypescriptDocs";
import { communicateTaskStatus } from "~/utils/communicateTaskStatus";
import { getEnv } from "~/utils/getEnv/node/getEnv";
(async () => {
console.log(`- pushing Typescript API documents to Meilisearch`);
const { errors, tasks } = await pushTypescriptDocs({ branch: "feat/generate-js-ast" });
const o = getEnv();
console.log(`- pushing Typescript API documents to Meilisearch [${o.stage}]`);
const { errors, tasks } = await pushTypescriptDocs({
...o,
branch: "feat/generate-js-ast",
});
console.log();
if (errors.length > 0) {
@@ -21,6 +26,8 @@ import { communicateTaskStatus } from "~/utils/communicateTaskStatus";
console.log(
`- Completed pushing all Typescript docs [${tasks.length}] to MeiliSearch. Now monitoring task progress ...`
);
communicateTaskStatus(ApiModel(), tasks, { timeout: 65000 });
communicateTaskStatus(ApiModel(o.stage, { admin_key: o.adminKey }), tasks, {
timeout: 65000,
});
}
})();

View File

@@ -1,6 +1,6 @@
/* eslint-disable no-console */
import { refreshProse } from "~/pipelines/refreshProse";
import { getEnv } from "~/utils/getEnv";
import { getEnv } from "~/utils/getEnv/node/getEnv";
(async () => {
const { repo, branch } = getEnv();

View File

@@ -1,6 +1,6 @@
/* eslint-disable no-console */
import { refreshSitemap } from "~/pipelines/refreshSitemap";
import { getEnv } from "~/utils/getEnv";
import { getEnv } from "~/utils/getEnv/node/getEnv";
(async () => {
console.log(`- refreshing sitemap for prose content`);

packages/tauri-search/src/env.d.ts (new file)
View File

@@ -0,0 +1,15 @@
import { Stage } from "~/types";
export interface ImportMeta {
env: {
VITE_TITLE?: string;
VITE_GH_USER?: string;
VITE_GH_TOKEN?: string;
VITE_ADMIN_KEY?: string;
VITE_STAGE?: Stage;
BASE_URL: string;
MODE: string;
PROD: boolean;
DEV: boolean;
};
}

File diff suppressed because one or more lines are too long

View File

@@ -12,22 +12,28 @@ export const ConsolidatedMapper: ModelMapper<
IConsolidatedModel
> = (i): IConsolidatedModel => ({
objectID: i.id,
hierarchy_lvl0: isRepoDocument(i) ? i.name : isApiDocument(i) ? i.name : i.title,
hierarchy_lvl0: isRepoDocument(i)
? i.name
: isApiDocument(i)
? i.name || null
: i.title,
hierarchy_lvl1: isRepoDocument(i)
? i.topics?.join(" ") || null
: isApiDocument(i)
? i.module || null
: i.tags?.join(" ") || null,
hierarchy_lvl2: isRepoDocument(i)
? i.kind === "unknown"
? null
: i.kind || null
: isApiDocument(i)
? i.language
: i.sections?.join(" ") || null,
hierarchy_lvl3: isRepoDocument(i)
? i.description || null
: isApiDocument(i)
? i.module
: i.tags?.join(" ") || null,
hierarchy_lvl2: isRepoDocument(i)
? i.kind || null
: isApiDocument(i)
? null
: i.sections?.join(" ") || null,
hierarchy_lvl3: isRepoDocument(i)
? i.topics?.join(" ") || null
: isApiDocument(i)
? i.language || null
: i.subSections?.join(" ") || null,
hierarchy_lvl4: isRepoDocument(i)
? i.language || null
: isApiDocument(i)
@@ -51,7 +57,13 @@ export const ConsolidatedMapper: ModelMapper<
? i.language
: i.code?.pop() || null,
tags: isRepoDocument(i) ? i.topics || null : isApiDocument(i) ? null : i.tags || null,
content: isRepoDocument(i) ? i.text : isApiDocument(i) ? i.comment || null : i.text,
content: isRepoDocument(i)
? i.topics?.join(" ") || null
: isApiDocument(i)
? i.declaration || null
: i.subSections?.join(" ") || null,
text: isApiDocument(i) ? i.comment || null : i.text || null,
rank: isRepoDocument(i)
? IndexRank.repo
: isApiDocument(i)

View File

@@ -8,7 +8,10 @@ import { sanitizeDocId } from "~/utils/sanitizeDocId";
*/
export const ProseMapper: ModelMapper<MarkdownAst, IProseModel> = (i) => ({
id: sanitizeDocId(`prose_${i.filepath}_${i.filename}`),
title: i.frontmatter.title?.content || i.h1.shift() || "UNKNOWN",
title:
typeof i.frontmatter.title === "object" && "content" in i.frontmatter.title
? i.frontmatter.title?.content
: i.frontmatter.title || i.h1.shift() || "UNKNOWN",
tags: i.frontmatter.tags as string[],
category: i.frontmatter.section as string,
sections: i.h2.map((i) => i.content),

View File

@@ -8,6 +8,7 @@ export type IConsolidatedModel = IScrapeSelectorTargets & {
symbol: string | null;
tags: null | string[];
language: string | null;
text: string | null;
};
export const ConsolidatedModel = createModel<IConsolidatedModel>("consolidated", (c) =>
@@ -22,14 +23,16 @@ export const ConsolidatedModel = createModel<IConsolidatedModel>("consolidated",
})
.filterable("from", "language", "symbol")
.searchable(
"content",
"hierarchy_lvl0",
"hierarchy_lvl1",
"symbol",
"tags",
"hierarchy_lvl3",
"hierarchy_lvl2",
"hierarchy_lvl1",
"rank",
"content"
"hierarchy_lvl3",
"hierarchy_lvl2"
)
.rankingRules((r) =>
r.words().typo().sort().attribute().proximity().ASC("rank").exactness()
)
.rankingRules((r) => r.words().typo().sort().attribute().proximity().ASC("rank").exactness())
);

View File

@@ -1,22 +1,36 @@
/* eslint-disable no-console */
import { ProseModel, ApiModel, RepoModel, ConsolidatedModel } from "~/models";
import { ISearchModel, Stage, IEnv } from "~/types";
import { getEnv } from "~/utils/getEnv/esm/getEnv";
const models = {
api: ApiModel(),
repo: RepoModel(),
prose: ProseModel(),
consolidated: ConsolidatedModel(),
};
const models = (stage: Stage, admin_key: string) => ({
api: ApiModel(stage, { admin_key }),
repo: RepoModel(stage, { admin_key }),
prose: ProseModel(stage, { admin_key }),
consolidated: ConsolidatedModel(stage, { admin_key }),
});
/**
* Will add -- and configure -- all known indexes to MeiliSearch which aren't already
* present in server.
*/
export async function createIndexes() {
const skipping = (await ProseModel().query.currentIndexes()).map((i) => i.name);
export async function createIndexes(options: Partial<IEnv> = {}) {
const { stage, adminKey } = { ...getEnv(), ...options };
if (!adminKey && stage !== "local") {
throw new Error(
`- To publish to ${stage}, you will need to set an ADMIN key!\n${Object.keys(
import.meta.env || {}
).join(", ")}`
);
}
const skipping = (
await ProseModel(stage, { admin_key: adminKey }).query.currentIndexes()
).map((i) => i.name);
const created: string[] = [];
for (const key of Object.keys(models)) {
const model = models[key as keyof typeof models];
for (const key of ["api", "repo", "prose", "consolidated"]) {
const model = models(stage, adminKey || "")[
key as keyof typeof models
] as ISearchModel<any>;
if (!skipping.includes(model.name)) {
created.push(key);
// create the index and configure it

View File

@@ -1,8 +1,8 @@
import { ConsolidatedMapper } from "~/mappers/ConsolidatedMapper";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { getEnv } from "~/utils/getEnv/esm/getEnv";
import { ConsolidatedModel, IConsolidatedModel } from "~/models";
import { IMonitoredTask } from "~/types";
import { IMonitoredTask, IEnv } from "~/types";
export async function pushConsolidatedDocs(options: Partial<IEnv> = {}) {
const o = { ...getEnv(), ...options };
@@ -24,7 +24,9 @@ export async function pushConsolidatedDocs(options: Partial<IEnv> = {}) {
const errors: IConsolidatedModel[] = [];
const tasks: IMonitoredTask[] = [];
for (const doc of docs) {
const res = await ConsolidatedModel().query.addOrReplaceDocuments(doc);
const res = await ConsolidatedModel(o.stage, {
admin_key: o.adminKey,
}).query.addOrReplaceDocuments(doc);
if (res.status !== "enqueued") {
errors.push(doc);
} else {

View File

@@ -1,20 +1,21 @@
import { ProseModel } from "~/models/ProseModel";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { IMonitoredTask } from "~/types";
import { getEnv } from "~/utils/getEnv/esm/getEnv";
import { IMonitoredTask, IEnv } from "~/types";
/**
* Pushes the cached prose documents into the MeiliSearch "prose" index
*/
export async function pushProseDocs(options: Partial<IEnv> = {}) {
const o = { ...getEnv(), ...options };
const { cache } = await getCache(CacheKind.proseDocs, o);
const tasks: IMonitoredTask[] = [];
for (const doc of cache) {
tasks.push(
await ProseModel()
await ProseModel(o.stage, { admin_key: o.adminKey })
.query.addOrReplaceDocuments(doc)
.then((i) => ({ docId: doc.id, taskId: i.uid }))
);

View File

@@ -1,7 +1,7 @@
import { IRepoModel, RepoModel } from "~/models";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { IMonitoredTask } from "..";
import { getEnv } from "~/utils/getEnv/esm/getEnv";
import { IMonitoredTask, IEnv } from "~/types";
/**
* Pushes the cached REPO documents into the MeiliSearch "repo" index
@@ -13,7 +13,9 @@ export async function pushRepoDocs(options: Partial<IEnv> = {}) {
const tasks: IMonitoredTask[] = [];
for (const doc of docs) {
const res = await RepoModel().query.addOrReplaceDocuments(doc);
const res = await RepoModel(o.stage, {
admin_key: o.adminKey,
}).query.addOrReplaceDocuments(doc);
if (res.status !== "enqueued") {
errors.push(doc);
} else {

View File

@@ -1,8 +1,8 @@
import { ApiModel, IApiModel } from "~/models";
import { IMonitoredTask } from "~/types";
import { IMonitoredTask, IEnv } from "~/types";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { refreshTypescript } from ".";
import { getEnv } from "~/utils/getEnv/esm/getEnv";
import { refreshTypescript } from "./refreshTypescript";
/**
* Iterates over each Typescript module and all of the
@@ -20,7 +20,9 @@ export async function pushTypescriptDocs(options: Partial<IEnv> = {}) {
const tasks: IMonitoredTask[] = [];
for (const doc of docs) {
const res = await ApiModel().query.addOrReplaceDocuments(doc);
const res = await ApiModel(o.stage, {
admin_key: o.adminKey,
}).query.addOrReplaceDocuments(doc);
if (res.status !== "enqueued") {
errors.push(doc);
} else {

View File

@@ -4,9 +4,10 @@ import { join } from "node:path";
import { parseMarkdown } from "~/ast/parseMarkdown";
import { ProseMapper } from "~/mappers";
import { IProseModel } from "~/models/ProseModel";
import { IEnv } from "~/types";
import { flattenSitemap } from "~/utils/convertSitemap";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { getEnv } from "~/utils/getEnv/esm/getEnv";
import { writeCacheFile } from "~/utils/writeCacheFile";
import { refreshSitemap } from "./refreshSitemap";

View File

@@ -1,8 +1,8 @@
import { join } from "path";
import { GithubContentsResp } from "~/types";
import { GithubContentsResp, IEnv } from "~/types";
import { flattenSitemap, IFlatSitemap, sitemapDictionary } from "~/utils/convertSitemap";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { getEnv } from "~/utils/getEnv/esm/getEnv";
import { getDirectory } from "~/utils/github/getDirectory";
import { writeCacheFile } from "~/utils/writeCacheFile";

View File

@@ -1,10 +1,11 @@
import { parseTypescriptAst } from "~/ast/parseTypescriptAst";
import { TypescriptMapper } from "~/mappers";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { getEnv } from "~/utils/getEnv/esm/getEnv";
import { getRepoFile } from "~/utils/github/getRepoFile";
import { writeCacheFile } from "~/utils/writeCacheFile";
import { IApiModel, TypescriptBlock } from "..";
import { TypescriptBlock, IEnv } from "~/types";
import { IApiModel } from "~/models";
/**
* Refreshes the document cache

View File

@@ -0,0 +1,22 @@
import { Stage } from "./model";
export interface IEnv {
org: string;
repo: string;
branch: string;
stage: Stage;
docsPath: string;
/**
* the full filename path to the AST JSON file exported by
*/
tsAstPath: string;
adminKey?: string;
searchKey?: string;
github_token?: string;
github_user?: string;
force?: boolean;
}

View File

@@ -1,6 +1,7 @@
export * from "./scraper";
export * from "./ts-ast";
export * from "./apis";
export * from "./env";
export * from "./github";
export * from "./type-guards";
export * from "./mapping";

View File

@@ -73,3 +73,7 @@ export type ISearchModel<TDoc extends {}> = {
};
export type ISearchConfig<TDoc extends {}> = Omit<ISearchModel<TDoc>, "query">;
export type PartialModel<T extends {}> = Omit<Partial<ISearchModel<T>>, "index"> & {
index: Partial<ISearchModel<T>["index"]>;
};

View File

@@ -10,14 +10,14 @@ import {
MsIndexStatusResponse,
IMeilisearchIndexSettings,
IMeiliSearchQueryApi,
ISearchConfig,
IMeilisearchAllTasks,
ISearchConfig,
} from "~/types";
import { getEnv } from "./getEnv";
export interface MeiliSearchOptions {
url?: string;
search_key?: string;
admin_key?: string;
}
export type PagingOptions = {
limit?: number;
@@ -28,16 +28,11 @@ export type ApiOptions = Omit<AxiosRequestConfig, "method">;
export function MeiliSearchApi<TDoc extends {}>(
model: ISearchConfig<TDoc>,
options: MeiliSearchOptions = {}
searchOptions: MeiliSearchOptions = {}
) {
const baseURL = options.url || "http://localhost:7700";
const baseURL = searchOptions.url || "http://localhost:7700";
const idx = model.name;
const { adminKey, searchKey } = getEnv();
const headers = {
"X-Meili-API-Key": options.search_key || adminKey || searchKey || "",
"Access-Control-Allow-Origin": "*",
};
// const { adminKey, searchKey } = getEnv();
const call = async <T>(
method: "get" | "post" | "put" | "delete",
@@ -45,16 +40,24 @@ export function MeiliSearchApi<TDoc extends {}>(
options: AxiosRequestConfig = {}
): Promise<T> => {
const fullUrl = `${baseURL}/${url.startsWith("/") ? url.slice(1) : url}`;
const token = searchOptions.admin_key || searchOptions.search_key || "";
const headers: Record<string, any> = {
"Access-Control-Allow-Origin": "*",
"Content-Type": "application/json",
};
if (token && token.length > 0) {
headers["X-Meili-API-Key"] = token;
headers["Authorization"] = `Bearer ${token}`;
}
const res = await axios({
method,
url: fullUrl,
...{
...options,
headers: {
"Content-Type": options.data ? "application/json" : "application/text",
...headers,
},
...options,
},
}).catch((err) => {
if (axios.isAxiosError(err)) {

View File

@@ -1,83 +1,8 @@
import { SERVERS } from "~/constants";
import {
IndexApi,
ISearchConfig,
ISearchModel,
RankingRule,
RankingRulesApi,
Stage,
} from "~/types";
import { getEnv } from "./getEnv";
import { MeiliSearchApi } from "./MeiliSearchApi";
import { rankingRules } from "./model-api/rankingRules";
export type PartialModel<T extends {}> = Omit<Partial<ISearchModel<T>>, "index"> & {
index: Partial<ISearchModel<T>["index"]>;
};
const modelConfigApi = <TDoc extends {}>(update: (s: PartialModel<TDoc>) => void) => {
const api = <TExclude extends string = never, M extends string = never>(): IndexApi<
TDoc,
TExclude
> =>
({
pk(pk: string) {
update({ index: { pk } });
return api<TExclude | "pk", M>();
},
searchable(...props) {
if (props?.length > 0) {
update({ index: { searchable: props } });
}
return api<TExclude | "searchable", M>();
},
displayed(...props) {
if (props?.length > 0) {
update({ index: { displayed: props } });
}
return api<TExclude | "displayed", M>();
},
distinct(...props) {
if (props?.length > 0) {
update({ index: { distinct: props } });
}
return api<TExclude | "distinct", M>();
},
filterable(...props) {
if (props?.length > 0) {
update({ index: { filterable: props } });
}
return api<TExclude | "filterable", M>();
},
sortable(...props) {
if (props?.length > 0) {
update({ index: { sortable: props } });
}
return api<TExclude | "searchable", M>();
},
stopWords(words) {
update({ index: { stopWords: words } });
return api<TExclude | "stopWords">();
},
synonyms(synonyms) {
update({ index: { synonyms } });
return api<TExclude | "synonyms">();
},
rankingRules(cb: (r: RankingRulesApi<TDoc>) => void) {
const updateRules = (r: RankingRule<TDoc>[]) => {
update({ index: { rules: r } });
};
const ruleApi = rankingRules(updateRules);
cb(ruleApi);
return api<TExclude | "rankingRules">();
},
} as IndexApi<TDoc, TExclude>);
return api();
};
import { IndexApi, ISearchConfig, ISearchModel, PartialModel, Stage } from "~/types";
import { getEnv } from "./getEnv/esm/getEnv";
import { MeiliSearchApi, MeiliSearchOptions } from "./MeiliSearchApi";
import { modelConfigApi } from "./model-api/modelConfigApi";
export const createModel = <TDoc extends Record<string, any>>(
/** the MeiliSearch index name which this model is servicing */
@@ -101,14 +26,15 @@ export const createModel = <TDoc extends Record<string, any>>(
cb(modelConfigApi<TDoc>(updateState));
}
return (stage?: Stage) => {
const url = stage ? SERVERS[stage]?.url : SERVERS[getEnv().stage]?.url;
const search_key = stage
? SERVERS[stage]?.search_key
: SERVERS[getEnv().stage]?.search_key;
return (stage?: Stage, options: MeiliSearchOptions = {}) => {
const { adminKey, searchKey, stage: s } = stage ? { ...getEnv(), stage } : getEnv();
const url = SERVERS[s]?.url;
const search_key = SERVERS[s]?.search_key || searchKey;
const admin_key: string | undefined = adminKey || options.admin_key || "";
return {
...state,
query: MeiliSearchApi<TDoc>(state, { url, search_key }),
query: MeiliSearchApi<TDoc>(state, { url, search_key, admin_key, ...options }),
toString() {
return `Model(${name}[${state.index.pk}])`;
},

View File

@@ -2,7 +2,7 @@
import { readFile } from "fs/promises";
import { IDocsSitemap } from "~/pipelines";
import { IApiModel, IProseModel, IRepoModel } from "..";
import { getEnv } from "./getEnv";
import { getEnv } from "./getEnv/node/getEnv";
export enum CacheKind {
sitemap = "Sitemap of Markdown files",

View File

@@ -0,0 +1,28 @@
import { IEnv, Stage } from "~/types";
export function getEnv(): IEnv {
return {
org: (import.meta?.env?.ORG as string) || "tauri-apps",
repo: (import.meta?.env?.REPO as string) || "tauri-docs",
branch: (import.meta?.env?.BRANCH as string) || "dev",
stage: (import.meta?.env?.NODE_ENV as Stage) || "local",
docsPath: (import.meta?.env?.DOCS_PATH as string) || "docs",
tsAstPath: (import.meta?.env?.TS_AST_PATH as string) || "docs/api/js/js-api.json",
adminKey:
(import.meta?.env?.ADMIN_KEY as string) ||
(import.meta?.env?.VITE_ADMIN_KEY as string) ||
undefined,
searchKey:
(import.meta?.env?.SEARCH_KEY as string) ||
(import.meta?.env?.VITE_SEARCH_KEY as string) ||
undefined,
github_token:
(import.meta?.env?.GH_TOKEN as string | undefined) ||
(import.meta?.env?.GITHUB_TOKEN as string | undefined) ||
undefined,
github_user: (import.meta?.env?.GH_USER as string | undefined) || undefined,
force: import.meta?.env?.FORCE ? Boolean(import.meta?.env?.FORCE) : false,
};
}

View File

@@ -1,26 +1,5 @@
import { config } from "dotenv";
import { Stage } from "~/types";
export interface IEnv {
org: string;
repo: string;
branch: string;
stage: Stage;
docsPath: string;
/**
* the full filename path to the AST JSON file exported by
*/
tsAstPath: string;
adminKey?: string;
searchKey?: string;
github_token?: string;
github_user?: string;
force?: boolean;
}
import { IEnv, Stage } from "~/types";
export function getEnv(): IEnv {
config();

View File

@@ -1,6 +1,6 @@
import { GITHUB_API_BASE } from "~/constants";
import { getEnv, IEnv } from "../getEnv";
import {GithubContentsResp } from "~/types";
import { getEnv, IEnv } from "../getEnv/node/getEnv";
import { GithubContentsResp } from "~/types";
import axios from "axios";
/**
@@ -9,7 +9,7 @@ import axios from "axios";
export async function getDirectory(o: IEnv) {
const { github_token, github_user } = getEnv();
const url = `${GITHUB_API_BASE}/repos/${o.org}/${o.repo}/contents/${o.docsPath}?ref=${o.branch}`;
try {
const res = await axios.get<GithubContentsResp>(url, {
httpAgent: "Tauri Search",
@@ -31,4 +31,4 @@ export async function getDirectory(o: IEnv) {
}`
);
}
}
}

View File

@@ -1,7 +1,7 @@
import axios from "axios";
import { GITHUB_API_BASE } from "~/constants";
import { GithubRepoResp } from "~/types";
import { getEnv } from "../getEnv";
import { getEnv } from "../getEnv/node/getEnv";
export async function getRepo(repo: `${string}/${string}`): Promise<GithubRepoResp> {
const url = `${GITHUB_API_BASE}/repos/${repo}`;

View File

@@ -1,7 +1,7 @@
import axios from "axios";
import { GITHUB_API_BASE } from "~/constants";
import { GithubContentsResp } from "~/types";
import { getEnv } from "../getEnv";
import { getEnv } from "../getEnv/node/getEnv";
/** returns the markdown text in the README.md file in the root of a repo */
export async function getRepoReadme(

View File

@@ -0,0 +1,68 @@
import { IndexApi, PartialModel, RankingRule, RankingRulesApi } from "~/types";
import { rankingRules } from "./rankingRules";
export const modelConfigApi = <TDoc extends {}>(
update: (s: PartialModel<TDoc>) => void
) => {
const api = <TExclude extends string = never, M extends string = never>(): IndexApi<
TDoc,
TExclude
> =>
({
pk(pk: string) {
update({ index: { pk } });
return api<TExclude | "pk", M>();
},
searchable(...props) {
if (props?.length > 0) {
update({ index: { searchable: props } });
}
return api<TExclude | "searchable", M>();
},
displayed(...props) {
if (props?.length > 0) {
update({ index: { displayed: props } });
}
return api<TExclude | "displayed", M>();
},
distinct(...props) {
if (props?.length > 0) {
update({ index: { distinct: props } });
}
return api<TExclude | "distinct", M>();
},
filterable(...props) {
if (props?.length > 0) {
update({ index: { filterable: props } });
}
return api<TExclude | "filterable", M>();
},
sortable(...props) {
if (props?.length > 0) {
update({ index: { sortable: props } });
}
return api<TExclude | "searchable", M>();
},
stopWords(words) {
update({ index: { stopWords: words } });
return api<TExclude | "stopWords">();
},
synonyms(synonyms) {
update({ index: { synonyms } });
return api<TExclude | "synonyms">();
},
rankingRules(cb: (r: RankingRulesApi<TDoc>) => void) {
const updateRules = (r: RankingRule<TDoc>[]) => {
update({ index: { rules: r } });
};
const ruleApi = rankingRules(updateRules);
cb(ruleApi);
return api<TExclude | "rankingRules">();
},
} as IndexApi<TDoc, TExclude>);
return api();
};

View File

@@ -2,6 +2,9 @@ import path from "path";
import { defineConfig, UserConfig } from "vite";
import inspect from "vite-plugin-inspect";
import dts from "vite-plugin-dts";
import { config } from "dotenv";
config();
export default defineConfig({
resolve: {

pnpm-lock.yaml (generated)

File diff suppressed because it is too large