chore: created placeholder files for GitHub Actions

refactor: reworked all CLI and pipeline code to be more consistent and use better abstractions

feature: added the ability to configure ranking properties in a model

refactor: reduced the number of cache files and removed those unnecessary for delta updates from git
Ken Snyder
2022-02-07 00:08:42 -08:00
parent faf048b2cf
commit e3e371d3a8
45 changed files with 524 additions and 442 deletions

.github/workflows/build.yaml vendored Normal file (empty)

.github/workflows/lint.yaml vendored Normal file (empty)

.github/workflows/prose.yaml vendored Normal file (empty)

.github/workflows/publish.yaml vendored Normal file

@@ -0,0 +1 @@
- name: Publish index and document updates to Prod Search Server

.github/workflows/rs-api.yaml vendored Normal file (empty)

.github/workflows/sitemap.yaml vendored Normal file (empty)

.github/workflows/test.yaml vendored Normal file (empty)

.github/workflows/ts-api.yaml vendored Normal file (empty)


@@ -10,9 +10,11 @@
"cli:push-caches": "pnpm -C ./packages/tauri-search run push-caches",
"cli:clear-caches": "pnpm -C ./packages/tauri-search run clear-caches",
"clean": "pnpm run --filter ./packages run clean",
"start": "pnpm -r install && pnpm run start:tauri-search && pnpm run start:docs && pnpm run up",
"start": "pnpm -r install && run-s up start:tauri-search start:docs full-update",
"start:tauri-search": "pnpm -C ./packages/tauri-search run watch",
"start:docs": "pnpm -C ./packages/docs run watch",
"full-update": "pnpm -C ./packages/tauri-search run full-update",
"delta-update": "pnpm -C ./packages/tauri-search run delta-update",
"build": "run-p build:*",
"build:cli": "pnpm -C ./packages/tauri-search run build:cli",
"build:npm": "pnpm -C ./packages/tauri-search run build:npm",


@@ -45,6 +45,7 @@
"@vue/test-utils": "^2.0.0-rc.18",
"critters": "^0.0.16",
"cross-env": "^7.0.3",
"dotenv": "^14.3.2",
"eslint": "^8.8.0",
"eslint-plugin-cypress": "^2.12.1",
"https-localhost": "^4.7.0",


@@ -1,16 +1,18 @@
export const SERVERS = [
{
default: true,
name: "local",
url: "http://localhost:7700",
},
{
name: "prod",
url: "https://search.tauri.studio",
indexes: [""],
token: "",
indexes: ["unknown"],
},
{
name: "pre-prod",
name: "staging",
url: "https://search2.tauri.studio",
token: "XZEH8BS90ee09c45215a8421c06857bcbde5c1a6797bdf4859a57a3ac1228a2b81df0994",
indexes: ["consolidated"],
},
];
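As an aside, a minimal sketch of how a consumer might pick one of these entries by name — the import path and the `getServer` helper are assumptions for illustration, not code from this commit:

```ts
import { SERVERS } from "~/constants"; // assumed location of the SERVERS array

// Hypothetical helper: select a server entry by name, falling back to the
// entry flagged `default: true` (the local dockerized instance).
export function getServer(name?: string) {
  return (
    SERVERS.find((s) => s.name === name) ??
    SERVERS.find((s) => "default" in s && s.default === true)
  );
}

getServer("staging"); // => { name: "staging", url: "https://search2.tauri.studio", ... }
```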


@@ -37,29 +37,49 @@ pnpm run watch
```
>>>
## Secrets and ENV variables
## Secrets, ENV variables, and Environments
>>> DotENV
- We use the popular DotEnv **npm** package so that users can set ENV variables without having them checked into the repository.
- Simply add a `.env` file with any variables you want to use locally; these can be both secret and non-secret variables
- With the local _dockerized_ MeiliSearch you won't really need any secrets, but if you're rebuilding the document caches you'll be calling the Github API often enough (and in parallel) that providing a Github "personal access token" is a good idea.
- Use the `GH_TOKEN` and `GH_USER` env variables to have Github API calls use your personal access token (versus being an anonymous user)
- There are also some non-secret ENV variables you may want to adjust:
- the `REPO` variable is used to determine which Github repository hosts Markdown/Prose documents
- This will default to `tauri` for now if no ENV is detected; this will likely change in the future to `tauri-docs`.
- the `BRANCH` variable is used to specify which branch to use; it will default to `dev` if not found
>>>
>>> The Meilisearch Master Key
- the dockerized container has no master key set (though you can set one), allowing all operations to be done via the API
- a _production_ container should always be set up with a Master Key immediately
- the master key gives you access to all API endpoints but must be included in the header as a bearer token
+++ ENV Variables
- >>> `GH_TOKEN` and `GH_USER`
- Use the `GH_TOKEN` and `GH_USER` env variables to have Github API calls use your personal access token (versus being an anonymous user)
- If you're using the API anonymously you'll quickly exhaust the quota, whereas once authorized the same calls barely register against the much larger authorized quota
- >>> `REPO`, `BRANCH`, and `DOCS_PATH`
- the `REPO` variable is used to determine which **Github** repo hosts the Markdown/Prose documents
- the `BRANCH` variable is used to specify which branch to use
- the `DOCS_PATH` variable indicates where in the repo's directory structure the docs reside
- >>> `FORCE`
- by default CLI commands will leverage file caches as much as possible, but if you set the `FORCE` variable to `true` then no caches will be used
- >>> `MASTER_KEY`, `ADMIN_KEY`, and `SEARCH_KEY`
- >>> the `MASTER_KEY` should be set up immediately on all production environments as a one-time task
- There is only a single Master Key and, unlike other keys, it is not set up via the API but rather with Meilisearch's setup script.
- While the Master Key can be passed to any API endpoint as a bearer token, it should probably only be used to set up other keys
- +++Production keys:
- >>> the `ADMIN_KEY` allows for most operations (outside of key management)
- you'll need to have this set to push documents or manage indexes on both `staging` and `production` environments
- when using the CLI commands to make changes on Meilisearch you'll need to ensure not only that `ADMIN_KEY` is set but also that `NODE_ENV` is set to "staging" or "production"
- >>> the `SEARCH_KEY` only gives access to searching
- it gives the right to search but nothing else; while it may block some nefarious traffic it is not considered a "secret"
- setting this ENV variable isn't very useful in this repo, as the local Meilisearch doesn't require any keys and you'll need more privileged keys to push new documents to a production environment
- note the variables below as a more useful alternative
> **Note:** the interactive playground provides a view into searches but also some ability to modify the local server instance via the API. Regardless of what `NODE_ENV` is set to, it keeps its focus on the local environment; however, if you set the `VITE_SEARCH_STAGING` or `VITE_SEARCH_PRODUCTION` variables to a search key for those environments, it will allow switching searches to that environment.
+++
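To make the variables above concrete, here is a minimal sketch of a local `.env` file — every value below is a placeholder:

```
# .env — placeholder values only; never commit real credentials
GH_USER=your-github-username
GH_TOKEN=ghp_your_personal_access_token
REPO=tauri-docs
BRANCH=dev
DOCS_PATH=docs
# only needed when targeting staging/production with the CLI
# NODE_ENV=staging
# ADMIN_KEY=...
```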
>>> Server Environments
- **LOCAL** - the default environment for both CLI commands and the Playground is the dockerized local server running on `http://localhost:7700`.
- **STAGING** - when `NODE_ENV` is set to "staging" the CLI will interact with this env, but the Playground will still default to the local environment. Assuming you've provided an `ADMIN_KEY`, however, the Playground will offer some interaction with this env
- **PRODUCTION** - this is the server which has all official search docs/indexes and serves the Tauri website. Behaviorally it acts the same as STAGING.
>>>
## Models
Central to using this library to build and refresh your search indexes is understanding the concept of a `Model`.
- A Model has a `1:1` relationship with search indexes (or at least _potential_ indexes)
- A Model is intended to represent:
- >>> A Model is intended to represent:
- the **document structure** that will be used for docs in the index
- allows for **configuring the index** itself (e.g., stop words, synonyms, etc.)
- allows you to embed data mappers which map from one document structure to another
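To make the Model concept concrete, a rough TypeScript sketch of what a Model carries — every name here is hypothetical and does not reflect the library's actual API:

```ts
// Hypothetical shape only — illustrates the 1:1 Model-to-index idea plus the
// newly added ranking configuration; not the real interface.
interface ModelSketch<TDoc> {
  /** index name — a Model maps 1:1 to a (potential) search index */
  name: string;
  /** index configuration such as stop words, synonyms, and the new ranking properties */
  index?: {
    stopWords?: string[];
    synonyms?: Record<string, string[]>;
    rankingRules?: string[]; // e.g. ["words", "typo", "proximity"]
  };
  /** embedded data mapper from another document structure into TDoc */
  mapFrom?: (input: unknown) => TDoc;
}
```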
@@ -114,11 +134,11 @@ Central to using this library to build and refresh your search indexes is unders
## External Resources
- General Documentation
- >>> Documentation
- <span class="bg-green-500 rounded px-2 py-1 text-white">GET</span> - [MeiliSearch Website Docs](https://docs.meilisearch.com/learn/what_is_meilisearch/)
- API Docs
- <span class="bg-green-500 rounded px-2 py-1 text-white">GET</span> - [Open API for MeiliSearch](https://bump.sh/doc/meilisearch)
- <span class="bg-green-500 rounded px-2 py-1 text-white">GET</span> - [API Docs from MeiliSearch Website](https://docs.meilisearch.com/reference/api/)
- <span class="bg-green-500 rounded px-2 py-1 text-white">GET</span> - [Postman Collection of MeiliSearch API](https://docs.meilisearch.com/postman/meilisearch-collection.json)
- Interactive
- <span class="bg-green-500 rounded px-2 py-1 text-white">GET</span> - [MeiliSearch Dashboard](http://localhost:7700/)
- <span class="bg-green-500 rounded px-2 py-1 text-white">GET</span> - [MeiliSearch Dashboard](http://localhost:7700/)


@@ -20,6 +20,8 @@
"current-indexes": "node bin/current-indexes.js",
"lint": "eslint src --ext ts,js,tsx,jsx --fix --no-error-on-unmatched-pattern",
"prune": "docker system prune",
"delta-update": "run-s rebuild-caches ",
"full-update": "run-s drop-indexes clear-caches rebuild-caches create-indexes push-consolidated push-prose push-repos push-typescript",
"push-caches": "node bin/push-cache.js",
"push-consolidated": "node bin/push-consolidated.js",
"push-prose": "node bin/push-prose.js",


@@ -3,7 +3,7 @@ import { getContent } from "~/utils/getContent";
import { TsDocProject, TypescriptBlock, TypescriptSymbol } from "~/types";
import { TypescriptKind } from "~/enums";
const fetchContent = getContent({ file: "test/fixtures/tsdoc.json" });
const fixtureContent = getContent({ file: "test/fixtures/tsdoc.json" });
function parseModule(mod: TypescriptBlock) {
const modDefn: TypescriptSymbol = {
@@ -53,9 +53,8 @@ function parseModule(mod: TypescriptBlock) {
* @param source if not specified will use historically factual fixture data, if a URL it will load over network, if a file then will load over file system
*/
export async function parseTypescriptAst(
source?: Parameters<typeof fetchContent>[0]
content: TypescriptBlock
): Promise<TsDocProject> {
const content = JSON.parse(await fetchContent(source)) as TypescriptBlock;
/**
 * The top level "project" probably isn't worth putting into the index,
* but instead we'll start at the modules level.


@@ -1,34 +1,12 @@
/* eslint-disable no-console */
import { existsSync } from "node:fs";
import { REPO_DOCS_CACHE, TS_DOCS_CACHE } from "~/constants";
import {
proseDocsCacheFile,
refreshProse,
refreshRepos,
refreshTypescript,
} from "~/pipelines";
import { pushConsolidatedDocs } from "~/pipelines/pushConsolidatedDocs";
import { communicateTaskStatus } from "~/utils/communicateTaskStatus";
import { getEnv } from "~/utils/getEnv";
import { ConsolidatedModel } from "~/models";
(async () => {
console.log(`- pushing all models into consolidated index`);
const { repo, branch } = getEnv();
console.log(`- pushing all individual models into a consolidated index`);
if (!existsSync(TS_DOCS_CACHE)) {
console.log(`- The Typescript documents cache wasn't found; creating first`);
await refreshTypescript(repo, branch);
}
if (!existsSync(REPO_DOCS_CACHE)) {
console.log("- No cache for Repo documents found, so refreshing cache first");
await refreshRepos();
}
if (!existsSync(proseDocsCacheFile(repo, branch))) {
await refreshProse(repo, branch);
}
const { tasks } = await pushConsolidatedDocs(repo, branch);
const { tasks } = await pushConsolidatedDocs();
console.log();
console.log(
`- all consolidated documents [${tasks.length}] have been pushed to MeiliSearch queue`


@@ -1,21 +1,14 @@
/* eslint-disable no-console */
import { existsSync } from "fs";
import { pushProseDocs } from "~/pipelines/pushProseDocs";
import { proseDocsCacheFile, refreshProse } from "~/pipelines/refreshProse";
import { communicateTaskStatus } from "~/utils/communicateTaskStatus";
import { getEnv } from "~/utils/getEnv";
import { ProseModel } from "..";
(async () => {
const { repo, branch } = getEnv();
if (!existsSync(proseDocsCacheFile(repo, branch))) {
await refreshProse(repo, branch);
}
console.log(`- Pushing "prose" documents to MeiliSearch`);
const tasks = await pushProseDocs(repo, branch);
const tasks = await pushProseDocs();
console.log(
`- all ${tasks.length} documents were pushed via API; monitoring task status ...`
);
await communicateTaskStatus(ProseModel, tasks, { timeout: 45000 });
await communicateTaskStatus(ProseModel, tasks, { timeout: 75000 });
})();


@@ -1,17 +1,11 @@
/* eslint-disable no-console */
import { existsSync } from "fs";
import { REPO_DOCS_CACHE } from "~/constants";
import { pushRepoDocs, refreshRepos } from "~/pipelines";
import { pushRepoDocs } from "~/pipelines";
import { communicateTaskStatus } from "~/utils/communicateTaskStatus";
import { RepoModel } from "..";
(async () => {
console.log(`- Pushing Repo document cache into MeiliSearch`);
if (!existsSync(REPO_DOCS_CACHE)) {
console.log("- No cache for Repo documents found, so refreshing cache first");
await refreshRepos();
}
const { docs, errors, tasks } = await pushRepoDocs();
console.log();
if (errors.length > 0) {


@@ -1,21 +1,11 @@
/* eslint-disable no-console */
import { existsSync } from "node:fs";
import { TS_DOCS_CACHE } from "~/constants";
import { ApiModel } from "~/models";
import { pushTypescriptDocs } from "~/pipelines/pushTypescriptDocs";
import { refreshTypescript } from "~/pipelines/refreshTypescript";
import { communicateTaskStatus } from "~/utils/communicateTaskStatus";
import { getEnv } from "~/utils/getEnv";
import { ApiModel } from "..";
(async () => {
const { repo, branch } = getEnv();
if (!existsSync(TS_DOCS_CACHE)) {
console.log(`- The Typescript documents cache wasn't found; creating first`);
await refreshTypescript(repo, branch);
}
console.log(`- Starting update process for Typescript API documents`);
const { errors, tasks } = await pushTypescriptDocs();
console.log(`- pushing Typescript API documents to Meilisearch`);
const { errors, tasks } = await pushTypescriptDocs({ branch: "feat/generate-js-ast" });
console.log();
if (errors.length > 0) {
@@ -31,6 +21,6 @@ import { ApiModel } from "..";
console.log(
`- Completed pushing all Typescript docs [${tasks.length}] to MeiliSearch. Now monitoring task progress ...`
);
communicateTaskStatus(ApiModel, tasks, { timeout: 45000 });
communicateTaskStatus(ApiModel, tasks, { timeout: 65000 });
}
})();


@@ -3,5 +3,14 @@ import { rebuildCaches } from "~/pipelines/rebuildCaches";
(async () => {
console.log(`- Rebuilding all cache files`);
await rebuildCaches();
const results = await rebuildCaches();
console.log(
`- there were ${results.prose[1]} prose docs; saved to ${results.prose[0]}`
);
console.log(
`- there were ${results.typescript[1]} typescript API docs; saved to ${results.typescript[0]}`
);
console.log(
`- there were ${results.repos[1]} repos docs; saved to ${results.repos[0]}`
);
})();


@@ -3,9 +3,10 @@ import { refreshProse } from "~/pipelines/refreshProse";
import { getEnv } from "~/utils/getEnv";
(async () => {
const { repo, branch, force } = getEnv();
const { repo, branch } = getEnv();
console.log(`- refreshing all prose from ${repo}@${branch}`);
await refreshProse(repo, branch, { force });
console.log(`- completed updates of prose [${repo}@${branch}] `);
const { docs, cacheFile } = await refreshProse();
console.log(`- prose documents [${docs?.length}] saved to cache file: ${cacheFile}`);
})();


@@ -2,7 +2,7 @@
import { refreshRepos } from "~/pipelines/refreshRepos";
(async () => {
console.log(`- writing Repo docs to cache`);
const repos = await refreshRepos();
console.log(`- all ${repos.length} repos have been updated`);
console.log(`- refreshing Repo document cache`);
const { docs, cacheFile } = await refreshRepos();
console.log(`- all ${docs.length} repos have been updated and saved to: ${cacheFile}`);
})();


@@ -1,65 +1,44 @@
/* eslint-disable no-console */
import { refreshSitemap } from "~/pipelines/refreshSitemap";
import { flattenSitemap, sitemapDictionary } from "~/utils/convertSitemap";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv } from "~/utils/getEnv";
import { writeGeneratedFile } from "~/utils/writeGeneratedFile";
(async () => {
const { repo, branch} = getEnv();
const filename = `src/generated/sitemap-${repo}-${branch}.json`;
const existingSitemap = sitemapDictionary(await getCache(CacheKind.sitemap));
const existingFlatmap = flattenSitemap(await getCache(CacheKind.sitemap));
console.log(`- refreshing sitemap for prose content`);
console.log(`- refreshing prose sitemap for ${repo}@${branch}${existingSitemap ? `; using existing sitemap for deltas [${existingFlatmap?.length} docs]` : ""}`);
const o = getEnv();
const r = await refreshSitemap(o);
const sitemap = await refreshSitemap({ ref: branch, repo });
const contents = flattenSitemap( sitemap);
const changed: string[] = [];
const added: string[] = [];
const removed: string[] = [];
if(existingSitemap) {
for (const doc of contents) {
if(existingSitemap[doc.filepath as any].sha && existingSitemap[doc.filepath as any].sha !== doc.sha) {
changed.push(doc.filepath);
} else if (!existingSitemap[doc.filepath as any]?.filepath && doc.sha !== existingSitemap[doc.filepath as any]?.sha ) {
added.push(doc.filepath);
}
console.log(`- updated sitemap has ${r.count} documents`);
if (r.hasDeltaInfo) {
if (r.changes?.added?.length > 0) {
console.log(
`- ${
r.changes.added.length
} files added since last check: ${r.changes.added.join(", ")}`
);
}
}
if(existingFlatmap) {
for (const doc of existingFlatmap) {
if(!contents[doc.filepath as any]) {
removed.push(doc.filepath);
}
if (r.changes.changed.length > 0) {
console.log(
`- ${
r.changes.changed.length
} files changed since last check: ${r.changes.changed.join(", ")}`
);
}
}
console.log(`- updated sitemap has ${contents.length} documents`);
if(existingSitemap) {
if(added.length > 0) {
console.log(`- ${added.length} files added since last check: `);
if (r.changes.removed.length > 0) {
console.log(
`- ${
r.changes.removed.length
} files removed since last check: ${r.changes.removed.join(", ")}`
);
}
if(changed.length > 0) {
console.log(`- ${changed.length} files changed since last check: ${changed.join(", ")}`);
}
if(removed.length > 0) {
console.log(`- ${removed.length} files added since last check: ${removed.join(", ")}`);
}
if([added, changed,removed].every(i => i.length === 0)) {
if (
[r.changes.added, r.changes.changed, r.changes.removed].every((i) => i.length === 0)
) {
console.log(`- no files changed since the last check`);
}
} else {
console.log(`- no prior cache info so no delta analysis was done`);
}
await writeGeneratedFile(
filename,
JSON.stringify(sitemap)
);
console.log(`- files saved to: ${filename}`);
console.log(`- cache file now resides at: ${r.cacheFile}`);
})();


@@ -1,14 +1,13 @@
/* eslint-disable no-console */
import { TS_AST_CACHE, TS_DOCS_CACHE } from "~/constants";
import { refreshTypescript } from "~/pipelines/refreshTypescript";
import { getEnv } from "~/utils/getEnv";
(async () => {
const { repo, branch } = getEnv();
console.log(`- refreshing Typescript ASTs and Docs cache`);
const docs = await refreshTypescript(repo, branch);
console.log(`- completed caching of ${docs.length} TS API documents:`);
console.log(` - AST Cache: ${TS_AST_CACHE}`);
console.log(` - Doc Cache: ${TS_DOCS_CACHE}`);
const { docs, cacheFile, repo } = await refreshTypescript({
branch: "feat/generate-js-ast",
});
console.log(`- completed caching of ${docs.length} TS API documents from ${repo}:`);
console.log(` - Doc Cache: ${cacheFile}`);
console.log();
})();

Four file diffs suppressed because one or more lines are too long


@@ -1,11 +1,11 @@
/* eslint-disable no-console */
import { ProseModel, ApiModel, RepoModel } from "~/models";
import { ProseModel, ApiModel, RepoModel, ConsolidatedModel } from "~/models";
const models = {
api: ApiModel,
repo: RepoModel,
prose: ProseModel,
// consolidated: ConsolidatedModel,
consolidated: ConsolidatedModel,
};
/**


@@ -1,40 +1,34 @@
import { readFile } from "fs/promises";
import { REPO_DOCS_CACHE, TS_DOCS_CACHE } from "~/constants";
import { ConsolidatedMapper, IConsolidatedModel } from "~/mappers/ConsolidatedMapper";
import {
ConsolidatedModel,
IApiModel,
IMonitoredTask,
IProseModel,
IRepoModel,
} from "..";
import { proseDocsCacheFile } from "./refreshProse";
import { ConsolidatedMapper } from "~/mappers/ConsolidatedMapper";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { ConsolidatedModel, IConsolidatedModel } from "~/models";
import { IMonitoredTask } from "~/types";
export async function pushConsolidatedDocs(repo: string, branch: string) {
// gather documents
const ts: IConsolidatedModel[] = (
JSON.parse(await readFile(TS_DOCS_CACHE, "utf-8")) as IApiModel[]
).map((c) => ConsolidatedMapper(c));
// TODO: add in Rust API docs
const prose: IConsolidatedModel[] = (
JSON.parse(await readFile(proseDocsCacheFile(repo, branch), "utf-8")) as IProseModel[]
).map((i) => ConsolidatedMapper(i));
const repos: IConsolidatedModel[] = (
JSON.parse(await readFile(REPO_DOCS_CACHE, "utf-8")) as IRepoModel[]
).map((i) => ConsolidatedMapper(i));
export async function pushConsolidatedDocs(options: Partial<IEnv> = {}) {
const o = { ...getEnv(), ...options };
const docs: IConsolidatedModel[] = [
...(await getCache(CacheKind.typescriptDocs, {
...o,
branch: "feat/generate-js-ast",
}).then((c) => c.cache.map((c) => ConsolidatedMapper(c)))),
...(await getCache(CacheKind.proseDocs, o).then((c) =>
c.cache.map((c) => ConsolidatedMapper(c))
)),
...(await getCache(CacheKind.repoDocs, o).then((c) =>
c.cache.map((c) => ConsolidatedMapper(c))
)),
];
// push into MeiliSearch task queue
const errors: IConsolidatedModel[] = [];
const tasks: IMonitoredTask[] = [];
const docs = [...ts, ...prose, ...repos];
for (const doc of docs) {
const res = await ConsolidatedModel.query.addOrReplaceDocuments(doc);
if (res.status !== "enqueued") {
process.stdout.write("x");
errors.push(doc);
} else {
process.stdout.write(".");
tasks.push({ docId: doc.docId, taskId: res.uid });
tasks.push({ docId: doc.objectID, taskId: res.uid });
}
}
return { docs, tasks, errors };


@@ -1,14 +1,15 @@
import { readFile } from "fs/promises";
import { IProseModel, ProseModel } from "~/models/ProseModel";
import { ProseModel } from "~/models/ProseModel";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { IMonitoredTask } from "..";
import { proseDocsCacheFile } from "./refreshProse";
/**
* Pushes the cached prose documents into the MeiliSearch "prose" index
*/
export async function pushProseDocs(repo: string, branch: string) {
const filename = proseDocsCacheFile(repo, branch);
const cache = JSON.parse(await readFile(filename, "utf-8")) as IProseModel[];
export async function pushProseDocs(options: Partial<IEnv> = {}) {
const o = { ...getEnv(), ...options };
const { cache } = await getCache(CacheKind.proseDocs, o);
const tasks: IMonitoredTask[] = [];
for (const doc of cache) {


@@ -1,27 +1,25 @@
import { readFile } from "fs/promises";
import { REPO_DOCS_CACHE } from "~/constants";
import { IRepoModel, RepoModel } from "~/models";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { IMonitoredTask } from "..";
/**
* Pushes the cached REPO documents into the MeiliSearch "repo" index
*/
export async function pushRepoDocs() {
const docs = JSON.parse(await readFile(REPO_DOCS_CACHE, "utf-8")) as IRepoModel[];
export async function pushRepoDocs(options: Partial<IEnv> = {}) {
const o = { ...getEnv(), ...options };
const { cache: docs, cacheFile } = await getCache(CacheKind.repoDocs, o);
const errors: IRepoModel[] = [];
const tasks: IMonitoredTask[] = [];
process.stdout.write(" ");
for (const doc of docs) {
const res = await RepoModel.query.addOrReplaceDocuments(doc);
if (res.status !== "enqueued") {
process.stdout.write("x");
errors.push(doc);
} else {
process.stdout.write(".");
tasks.push({ docId: doc.id, taskId: res.uid });
}
}
return { docs, errors, tasks };
return { docs, errors, tasks, cacheFile };
}


@@ -1,28 +1,32 @@
import { readFile } from "fs/promises";
import { TS_DOCS_CACHE } from "~/constants";
import { ApiModel, IApiModel } from "~/models";
import { IMonitoredTask } from "~/types";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { refreshTypescript } from ".";
/**
* Iterates over each Typescript module and all of the
* modules symbols and uses the `addOrUpdate` call to ensure
* the index is fully up-to-date.
*/
export async function pushTypescriptDocs() {
const docs = JSON.parse(await readFile(TS_DOCS_CACHE, "utf-8")) as IApiModel[];
export async function pushTypescriptDocs(options: Partial<IEnv> = {}) {
const o = { ...getEnv(), ...options };
// eslint-disable-next-line prefer-const
let { cache: docs, cacheFile } = await getCache(CacheKind.typescriptDocs, o);
if (docs.length === 0) {
docs = (await refreshTypescript(o)).docs;
}
const errors: IApiModel[] = [];
const tasks: IMonitoredTask[] = [];
for (const doc of docs) {
const res = await ApiModel.query.addOrReplaceDocuments(doc);
if (res.status !== "enqueued") {
process.stdout.write("x");
errors.push(doc);
} else {
process.stdout.write(".");
tasks.push({ docId: doc.id, taskId: res.uid });
}
}
return { docs, tasks, errors };
return { docs, tasks, errors, cacheFile };
}


@@ -1,14 +1,16 @@
import { getEnv } from "~/utils/getEnv";
import { refreshProse, refreshRepos, refreshTypescript } from ".";
import { refreshSitemap } from "./refreshSitemap";
export async function rebuildCaches() {
const { repo, branch } = getEnv();
await refreshSitemap();
let prose: [string, number] = ["", 0];
let repos: [string, number] = ["", 0];
let typescript: [string, number] = ["", 0];
await Promise.all([
refreshProse(repo, branch),
refreshRepos(),
refreshTypescript(repo, branch),
refreshProse().then((c) => (prose = [c.cacheFile as string, c.docs?.length || 0])),
refreshRepos().then((c) => (repos = [c.cacheFile as string, c.docs?.length || 0])),
refreshTypescript({ branch: "feat/generate-js-ast" }).then(
(c) => (typescript = [c.cacheFile as string, c.docs?.length || 0])
),
]);
return { prose, repos, typescript };
}


@@ -1,31 +1,19 @@
import axios from "axios";
import { existsSync, mkdirSync } from "fs";
import { readFile, writeFile } from "fs/promises";
import path, { join } from "node:path";
import { join } from "node:path";
import { parseMarkdown } from "~/ast/parseMarkdown";
import { ProseMapper } from "~/mappers";
import { IProseModel } from "~/models/ProseModel";
import { flattenSitemap, sitemapDictionary } from "~/utils/convertSitemap";
import { IDocsSitemap, refreshSitemap } from "./refreshSitemap";
/* eslint-disable no-console */
export interface IRefreshProseOptions {
force?: boolean;
}
import { flattenSitemap } from "~/utils/convertSitemap";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { writeCacheFile } from "~/utils/writeCacheFile";
import { refreshSitemap } from "./refreshSitemap";
function jsonFileFromMarkdown(file: string, repo: string, branch: string) {
return join(`src/generated/ast/prose/${repo}_${branch}/`, file.replace(".md", ".json"));
}
/** writes file to local path, ensuring directory exists */
async function write(file: string, data: string) {
const dir = path.dirname(file);
if (!existsSync(dir)) {
mkdirSync(dir, { recursive: true });
}
await writeFile(file, data, "utf-8");
}
export function proseDocsCacheFile(repo: string, branch: string) {
const dir = `src/generated/ast/prose/${repo}_${branch}`;
if (!existsSync(dir)) {
@@ -38,103 +26,64 @@ async function cacheMarkdownAst(file: string, url: string, repo: string, branch:
const jsonFile = jsonFileFromMarkdown(file, repo, branch);
const content = (await axios.get(url)).data;
const ast = await parseMarkdown({ file, content });
await write(jsonFile, JSON.stringify(ast));
await writeCacheFile(jsonFile, JSON.stringify(ast));
return ast;
}
export async function refreshProse(
repo: string,
branch: string,
options: IRefreshProseOptions = {}
) {
const sitemapFile = `src/generated/sitemap-${repo}-${branch}.json`;
const existingSitemap = existsSync(sitemapFile);
if (existingSitemap) {
console.log(`- existing sitemap found [${sitemapFile}]`);
console.log(`- will use to detect changes in prose`);
} else {
console.log(
`- no existing sitemap for ${repo}@${branch}; all markdown content will be pulled down`
/**
* Refreshes both the sitemap and then the prose itself based on
* changes found in sitemap
*/
export async function refreshProse(options: Partial<IEnv> = {}) {
const { repo, branch, force } = { ...getEnv(), ...options };
const sm = await refreshSitemap();
const setChanges = [...sm.changes.added, ...sm.changes.changed];
const removals = sm.changes.removed;
if (setChanges.length + removals.length === 0) {
return { changes: false };
}
const {
/** prose cache; will be an empty array if the file hadn't previously existed */
cache: currentDocs,
cacheFile,
} = await getCache(CacheKind.proseDocs);
// reduce to just added/changed docs unless FORCE is set
const docsToUpdate =
force || currentDocs.length === 0
? flattenSitemap(sm.sitemap)
: flattenSitemap(sm.sitemap).filter((i) => setChanges.includes(i.filepath));
const docsPromise: Promise<IProseModel>[] = [];
// convert markdown files to AST and then again to IProseModel
for (const file of docsToUpdate) {
docsPromise.push(
cacheMarkdownAst(file.filepath, file.download_url, repo, branch).then((i) =>
ProseMapper(i)
)
);
}
const currentSitemap = existingSitemap
? sitemapDictionary(JSON.parse(await readFile(sitemapFile, "utf-8")) as IDocsSitemap)
: {};
const newSitemap = await refreshSitemap({ repo, ref: branch });
const flatmap = flattenSitemap(newSitemap);
const documents: IProseModel[] = [];
const unchangedDocuments: IProseModel[] = [];
const unchanged: string[] = [];
const changed: string[] = [];
for (const file of flatmap) {
const cache = currentSitemap[file.filepath];
if (cache && cache.sha === file.sha) {
unchanged.push(file.filepath);
if (
options.force ||
!existsSync(jsonFileFromMarkdown(file.filepath, repo, branch))
) {
documents.push(
ProseMapper(
await cacheMarkdownAst(file.filepath, file.download_url, repo, branch)
)
);
} else {
unchangedDocuments.push(
ProseMapper(
await cacheMarkdownAst(file.filepath, file.download_url, repo, branch)
)
);
}
} else {
changed.push(file.filepath);
// console.log(`- change in "${file.filepath}" detected`);
documents.push(
ProseMapper(
await cacheMarkdownAst(file.filepath, file.download_url, repo, branch)
)
);
}
}
if (changed.length === 0 && !options.force) {
console.log(`- all AST cache files remain valid; nothing new written to cache`);
if (!existsSync(proseDocsCacheFile(repo, branch))) {
console.log(
`- while AST files exist, the documents cache was missing and will be refreshed`
);
await writeFile(
proseDocsCacheFile(repo, branch),
JSON.stringify(unchangedDocuments)
);
}
} else {
console.log(
`- finished writing markdown AST files [ ${changed.length} changed, ${unchanged.length} unchanged]`
);
await write(proseDocsCacheFile(repo, branch), JSON.stringify(documents));
console.log(`- wrote Meilisearch documents to "${proseDocsCacheFile(repo, branch)}"`);
}
const updatedDocs = await Promise.all(docsPromise);
const updatedKeys = updatedDocs.map((i) => i.id);
if (currentSitemap) {
// look for files which have been removed, since last time
// const current = flattenSitemap(JSON.parse(await readFile(sitemapFile, "utf-8")));
// const lookup = sitemapDictionary(newSitemap);
// const removed = current.filter((c) => !lookup[c.filepath]).map((i) => i.filepath);
// if (removed.length > 0) {
// console.log(
// `- detected ${removed.length} files which no longer exist: ${removed.join(", ")}`
// );
// for (const file of removed) {
// await rm(jsonFileFromMarkdown(file, repo, branch));
// }
// }
}
const docs =
force || currentDocs.length === 0
? updatedDocs
: currentDocs
.filter((i) => !removals.includes(i.id))
.map((i) => {
return updatedKeys.includes(i.id)
? (updatedDocs.find((f) => f.id === i.id) as IProseModel)
: i;
});
const sitemap = `src/generated/sitemap-${repo}-${branch}.json`;
await writeCacheFile(cacheFile, docs);
await writeFile(sitemap, JSON.stringify(currentSitemap), "utf-8");
console.log(`- wrote Repo Sitemap to: ${sitemap}`);
return { };
return { docs, changes: sm.hasDeltaInfo ? sm.changes : undefined, force, cacheFile };
}
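A short consumption sketch of the new return shape (names taken from the function above; the async wrapper is illustrative):

```ts
import { refreshProse } from "~/pipelines/refreshProse";

(async () => {
  const result = await refreshProse();
  if (result.changes === false) {
    // sitemap reported no adds/changes/removals, so the cache was left alone
    console.log("- prose cache already up to date");
    return;
  }
  console.log(`- ${result.docs?.length ?? 0} prose docs now cached at ${result.cacheFile}`);
})();
```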

View File

@@ -1,11 +1,12 @@
/* eslint-disable no-console */
import { getRepo } from "~/utils/github/getRepo";
import { getRepoReadme } from "~/utils/github/getRepoReadme";
import { REPOS, REPO_DOCS_CACHE } from "~/constants";
import { REPOS } from "~/constants";
import { GithubMapper } from "~/mappers";
import { GithubRepoResp } from "~/types";
import { IRepoModel } from "~/models";
import { writeFile } from "fs/promises";
import { CacheKind, getCache } from "~/utils/getCache";
import { writeCacheFile } from "~/utils/writeCacheFile";
/**
* Responsible for iterating through each of the designated repos
@@ -13,6 +14,8 @@ import { writeFile } from "fs/promises";
* indexes.
*/
export async function refreshRepos() {
const { cacheFile } = await getCache(CacheKind.repoDocs);
const repoPromise: Promise<GithubRepoResp>[] = [];
const readmePromise: Promise<[string, string | undefined]>[] = [];
for (const repo of REPOS) {
@@ -25,13 +28,11 @@ export async function refreshRepos() {
}, {} as Record<string, string | undefined>);
const repos = await Promise.all(repoPromise);
console.log(`- all repos' meta info has been retrieved from the Github API`);
const docs: IRepoModel[] = [];
for (const r of repos) {
docs.push(GithubMapper({ ...r, text: readmes[r.full_name] }));
}
await writeFile(REPO_DOCS_CACHE, JSON.stringify(docs), "utf-8");
console.log(`- repo documents have been written to cache: ${REPO_DOCS_CACHE} `);
await writeCacheFile(cacheFile, docs);
return REPOS;
return { cacheFile, docs };
}


@@ -1,15 +1,10 @@
import axios from "axios";
import { join } from "node:path";
import { GITHUB_API_BASE } from "~/constants";
import { GithubContentsReq, GithubContentsResp } from "~/types";
import { getEnv } from "~/utils/getEnv";
const DEFAULT: GithubContentsReq = {
owner: "tauri-apps",
path: "docs",
repo: "tauri",
ref: "dev",
};
import { join } from "path";
import { GithubContentsResp } from "~/types";
import { flattenSitemap, IFlatSitemap, sitemapDictionary } from "~/utils/convertSitemap";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { getDirectory } from "~/utils/github/getDirectory";
import { writeCacheFile } from "~/utils/writeCacheFile";
export interface IDocsSitemapFile {
name: string;
@@ -24,33 +19,24 @@ export interface IDocsSitemap {
children: IDocsSitemap[];
}
async function getDirectory(o: GithubContentsReq) {
const { github_token, github_user } = getEnv();
const url = `${GITHUB_API_BASE}/repos/${o.owner}/${o.repo}/contents/${o.path}?ref=${o.ref}`;
try {
const res = await axios.get<GithubContentsResp>(url, {
httpAgent: "Tauri Search",
...(github_token && github_user
? { auth: { username: github_user, password: github_token } }
: {}),
});
if (res.status < 299) {
return res;
} else {
throw new Error(
`The attempt to call Github's "contents" API failed [${res.status}, ${url}]: ${res.statusText}`
);
}
} catch (err) {
throw new Error(
`The attempt to call Github's "contents" API failed [${url}]: ${
(err as Error).message
}`
);
}
}
/**
 * A type utility which adds delta-context to a recently created sitemap
*/
export type Sitemap<T extends IDocsSitemap> = {
hasDeltaInfo: boolean;
cacheFile: string;
sitemap: T;
count: number;
changes: {
added: string[];
changed: string[];
removed: string[];
};
};
/**
* reduces the Github output to just core properties
*/
function reduceClutter(
dir: string,
resp: GithubContentsResp
@@ -72,25 +58,27 @@ function reduceClutter(
return [files, children];
}
/**
* Uses Github API to build a sitemap of markdown files for a given repo
* and will also report on changes since last sitemap if a prior sitemap
* existed
*/
export async function refreshSitemap(options: Partial<GithubContentsReq> = DEFAULT) {
const o = { ...DEFAULT, ...options };
const [files, children] = reduceClutter(o.path, (await getDirectory(o)).data);
async function getStructure(o: IEnv) {
// RECURSE INTO REPO STARTING at PATH
const [files, children] = reduceClutter(o.docsPath, (await getDirectory(o)).data);
const sitemap: IDocsSitemap = {
dir: o.path,
dir: o.docsPath,
files,
children: [],
};
if (children.length > 0) {
const waitFor: Promise<IDocsSitemap>[] = [];
for (const child of children) {
const p = join(o.path, `/${child}`);
const mo = { ...o, path: p };
waitFor.push(refreshSitemap(mo));
if (child.startsWith("_")) {
// eslint-disable-next-line no-console
console.log(`- skipping the "${child}" directory due to leading underscore`);
} else {
const p = join(o.docsPath, `/${child}`);
const mo: IEnv = { ...o, docsPath: p };
waitFor.push(getStructure(mo));
}
}
const resolved = await Promise.all(waitFor);
sitemap.children = resolved;
@@ -98,3 +86,66 @@ export async function refreshSitemap(options: Partial<GithubContentsReq> = DEFAU
return sitemap;
}
/**
* Uses Github API to build a sitemap of markdown files for a given repo.
*
* Note: if a sitemap already exists, it will compare the hash values from
* the cached sitemap and return `added`, `removed`, and `changed` arrays
* to help downstream consumers only update what is necessary.
*/
export async function refreshSitemap(
options: Partial<IEnv> = {}
): Promise<Sitemap<IDocsSitemap>> {
const o = { ...getEnv(), ...options };
const sitemap = await getStructure(o);
/** flattened version of the just-created sitemap */
const flatSitemap = flattenSitemap(sitemap) as IFlatSitemap[];
const { cacheFile, cache } = await getCache(CacheKind.sitemap);
const existingSitemap = sitemapDictionary(cache);
const existingFlatmap = flattenSitemap(cache);
// DELTAs
const changed: string[] = [];
const added: string[] = [];
const removed: string[] = [];
if (existingSitemap) {
for (const doc of flatSitemap) {
if (
existingSitemap[doc.filepath as any]?.sha &&
existingSitemap[doc.filepath as any]?.sha !== doc?.sha
) {
changed.push(doc.filepath);
} else if (
!existingSitemap[doc.filepath as any]?.filepath &&
doc?.sha !== existingSitemap[doc.filepath as any]?.sha
) {
added.push(doc.filepath);
}
}
}
if (existingFlatmap) {
const newSitemap = sitemapDictionary(sitemap);
for (const doc of existingFlatmap) {
if (!newSitemap[doc.filepath as any]) {
removed.push(doc.filepath);
}
}
}
// write new sitemap
await writeCacheFile(cacheFile, sitemap);
return {
sitemap,
hasDeltaInfo: existingFlatmap ? true : false,
changes: { added, changed, removed },
count: flatSitemap?.length || 0,
cacheFile: cacheFile,
};
}
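And a small usage sketch of the `Sitemap<IDocsSitemap>` return value above (the async wrapper is illustrative):

```ts
import { refreshSitemap } from "~/pipelines/refreshSitemap";

(async () => {
  const sm = await refreshSitemap();
  if (sm.hasDeltaInfo) {
    // a prior cached sitemap existed, so only the deltas need re-processing
    const { added, changed, removed } = sm.changes;
    console.log(`- deltas: +${added.length} ~${changed.length} -${removed.length}`);
  } else {
    console.log(`- no prior sitemap cache; treating all ${sm.count} files as new`);
  }
})();
```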


@@ -1,25 +1,31 @@
import { config } from "dotenv";
import { writeFile } from "fs/promises";
import { parseTypescriptAst } from "~/ast/parseTypescriptAst";
import { TS_AST_CACHE, TS_DOCS_CACHE } from "~/constants";
import { TypescriptMapper } from "~/mappers";
import { IApiModel } from "..";
import { CacheKind, getCache } from "~/utils/getCache";
import { getEnv, IEnv } from "~/utils/getEnv";
import { getRepoFile } from "~/utils/github/getRepoFile";
import { writeCacheFile } from "~/utils/writeCacheFile";
import { IApiModel, TypescriptBlock } from "..";
export async function refreshTypescript(repo: string, branch: string) {
const prod = { repo, branch, filepath: "ts-api.json" };
config();
/**
* Refreshes the document cache
*/
export async function refreshTypescript(options: Partial<IEnv> = {}) {
const { org, repo, branch } = { ...getEnv(), ...options };
const { cacheFile } = await getCache(CacheKind.typescriptDocs, {
...getEnv(),
...options,
});
const ast = (await getRepoFile(
`${org}/${repo}`,
"docs/api/js/js-api.json",
branch
)) as TypescriptBlock;
const ast =
process.env.NODE_ENV === "production"
? await parseTypescriptAst(prod)
: await parseTypescriptAst();
const simplified = await parseTypescriptAst(ast);
await writeFile(TS_AST_CACHE, JSON.stringify(ast));
const docs: IApiModel[] = [];
for (const i of ast.symbols) {
docs.push(TypescriptMapper(i));
}
await writeFile(TS_DOCS_CACHE, JSON.stringify(docs));
const docs: IApiModel[] = simplified.symbols.map((i) => TypescriptMapper(i));
return docs;
await writeCacheFile(cacheFile, docs);
return { docs, cacheFile, repo: `${org}/${repo}@${branch}` };
}


@@ -15,13 +15,14 @@ export type ISitemapDictionary = Record<string, Omit<IFlatSitemap, "filepath">>;
/**
* Flattens the hierarchical structure of a sitemap into an easily iterable array
*/
export function flattenSitemap<S extends IDocsSitemap | undefined>(sm: S): S extends IDocsSitemap ? IFlatSitemap[] : undefined {
export function flattenSitemap<S extends IDocsSitemap | undefined>(
sm: S
): S extends IDocsSitemap ? IFlatSitemap[] : undefined {
let flat: IFlatSitemap[] = [];
if(!sm) {
if (!sm) {
return undefined as S extends IDocsSitemap ? IFlatSitemap[] : undefined;
} else {
for (const f of sm.files) {
for (const f of sm?.files || []) {
const filepath = join(sm.dir, f.name);
flat.push({ filepath, sha: f.sha, size: f.size, download_url: f.download_url });
}
@@ -30,14 +31,16 @@ export function flattenSitemap<S extends IDocsSitemap | undefined>(sm: S): S ext
flat = flat.concat(...flattenSitemap(child));
}
}
}
return flat as S extends IDocsSitemap ? IFlatSitemap[] : undefined;
}
export function sitemapDictionary<S extends IDocsSitemap | undefined>(sm: S) {
return (sm ? flattenSitemap(sm as IDocsSitemap).reduce((acc, i) => {
return { ...acc, [i.filepath]: { ...omit(i, "filepath") } };
}, {} as ISitemapDictionary) : undefined) as S extends IDocsSitemap ? IFlatSitemap[] : undefined;
return (
sm
? flattenSitemap(sm as IDocsSitemap).reduce((acc, i) => {
return { ...acc, [i.filepath]: { ...omit(i, "filepath") } };
}, {} as ISitemapDictionary)
: undefined
) as S extends IDocsSitemap ? IFlatSitemap[] : undefined;
}
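For orientation, how the two helpers complement each other — `changedFiles` is illustrative only, and the cast works around the helper's loosely declared return type:

```ts
import {
  flattenSitemap,
  ISitemapDictionary,
  sitemapDictionary,
} from "~/utils/convertSitemap";
import { IDocsSitemap } from "~/pipelines";

// flattenSitemap yields one entry per file; sitemapDictionary keys the same
// data by filepath so sha comparisons become O(1) lookups.
function changedFiles(next: IDocsSitemap, prior: IDocsSitemap): string[] {
  const priorByPath = sitemapDictionary(prior) as unknown as ISitemapDictionary;
  return flattenSitemap(next)
    .filter((f) => priorByPath[f.filepath]?.sha !== f.sha)
    .map((f) => f.filepath);
}
```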


@@ -1,3 +1,4 @@
/* eslint-disable no-console */
import { readFile } from "fs/promises";
import { IDocsSitemap } from "~/pipelines";
import { IApiModel, IProseModel, IRepoModel } from "..";
@@ -8,50 +9,91 @@ export enum CacheKind {
proseDocs = "Prose/Markdown docs",
repoDocs = "Repo docs",
typescriptDocs = "Typescript API docs",
rustDocs = "Rust API docs"
rustDocs = "Rust API docs",
}
export interface GetCacheOptions {
repo?:string;
repo?: string;
branch?: string;
}
export async function getCache<K extends CacheKind>(kind: K, options: GetCacheOptions = {}) {
const {repo, branch} = {...getEnv(), ...options};
let content;
export async function getCache<K extends CacheKind>(
kind: K,
options: GetCacheOptions = {}
) {
const { repo, branch } = { ...getEnv(), ...options };
let cache;
let cacheFile;
try {
switch(kind) {
case CacheKind.sitemap:
content = await readFile(`src/generated/sitemap-${repo}-${branch}.json`, "utf-8").then(c => JSON.parse(c) as IDocsSitemap);
break;
switch (kind) {
case CacheKind.sitemap:
cacheFile = `src/generated/sitemap-${repo}-${branch}.json`;
try {
cache = await readFile(cacheFile, "utf-8").then(
(c) => JSON.parse(c) as IDocsSitemap
);
} catch (err) {
cache = undefined;
console.warn(`- no cache file found at: ${cacheFile}`);
}
break;
case CacheKind.proseDocs:
content = await readFile(`src/generated/prose/${repo}_${branch}/documents.json`, "utf-8").then(c => JSON.parse(c) as IProseModel[]);
break;
case CacheKind.proseDocs:
cacheFile = `src/generated/prose/${repo}_${branch}/documents.json`;
try {
cache = await readFile(cacheFile, "utf-8").then(
(c) => JSON.parse(c) as IProseModel[]
);
} catch (err) {
console.warn(`- no Prose cache file found at: ${cacheFile}`);
cache = [];
}
break;
case CacheKind.repoDocs:
content = await readFile(`src/generated/repos/documents.json`, "utf-8").then(c => JSON.parse(c) as IRepoModel[]);
break;
case CacheKind.repoDocs:
cacheFile = `src/generated/repos/documents.json`;
try {
cache = await readFile(cacheFile, "utf-8").then(
(c) => JSON.parse(c) as IRepoModel[]
);
} catch (err) {
console.warn(`- no Repo cache file found at: ${cacheFile}`);
cache = [];
}
break;
case CacheKind.typescriptDocs:
content = await readFile(`src/generated/api/${repo}_${branch}/ts-documents.json`, "utf-8").then(c => JSON.parse(c) as IApiModel[]);
break;
case CacheKind.typescriptDocs:
cacheFile = `src/generated/api/${repo}_${branch}/ts-documents.json`;
try {
cache = await readFile(cacheFile, "utf-8").then(
(c) => JSON.parse(c) as IApiModel[]
);
} catch (err) {
console.warn(`- no Typescript docs cache found at: ${cacheFile}`);
cache = [];
}
break;
case CacheKind.rustDocs:
content = await readFile(`src/generated/api/${repo}_${branch}/rs-documents.json`, "utf-8").then(c => JSON.parse(c) as IApiModel[]);
break;
default:
content = undefined;
}
case CacheKind.rustDocs:
cacheFile = `src/generated/api/${repo}_${branch}/rs-documents.json`;
cache = await readFile(cacheFile, "utf-8").then(
(c) => JSON.parse(c) as IApiModel[]
);
break;
return content as K extends CacheKind.sitemap ? IDocsSitemap | undefined : K extends CacheKind.proseDocs | undefined? IProseModel[] : K extends CacheKind.repoDocs ? IRepoModel[]| undefined : K extends CacheKind.typescriptDocs | undefined ? IApiModel[] : undefined;
} catch (e) {
console.warn(e);
// throw new Error(`Problem loading "${kind}" generated cache file: ${(e as Error).message}`);
return undefined;
default:
cache = undefined;
}
}
type Content = K extends CacheKind.sitemap
? IDocsSitemap | undefined
: K extends CacheKind.proseDocs | undefined
? IProseModel[]
: K extends CacheKind.repoDocs
? IRepoModel[]
: K extends CacheKind.typescriptDocs | undefined
? IApiModel[]
: undefined;
return { cache, cacheFile } as { cache: Content; cacheFile: string };
}
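Usage now looks like this everywhere a cache is read (a sketch; per the code above, the `proseDocs` case returns `[]` when no file exists, so `length` is safe):

```ts
import { CacheKind, getCache } from "~/utils/getCache";

(async () => {
  // every cache read returns the parsed documents plus the file they came from
  const { cache, cacheFile } = await getCache(CacheKind.proseDocs);
  console.log(`- loaded ${cache.length} prose docs from ${cacheFile}`);
})();
```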


@@ -1,10 +1,27 @@
import { config } from "dotenv";
export function getEnv() {
export type Stage = "production" | "staging" | "local" | undefined;
export interface IEnv {
org: string;
repo: string;
branch: string;
stage: Stage;
docsPath: string;
github_token?: string;
github_user?: string;
force?: boolean;
}
export function getEnv(): IEnv {
config();
return {
org: process.env.ORG || "tauri-apps",
repo: process.env.REPO || "tauri-docs",
branch: process.env.BRANCH || "dev",
stage: process.env.NODE_ENV as Stage,
docsPath: process.env.DOCS_PATH || "docs",
github_token: process.env.GH_TOKEN || process.env.GITHUB_TOKEN || undefined,
github_user: process.env.GH_USER || undefined,
force: process.env.FORCE ? Boolean(process.env.FORCE) : false,
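
This is what enables the `{ ...getEnv(), ...options }` override pattern seen throughout the refactored pipelines — a sketch, with `resolveEnv` being an illustrative name only:

```ts
import { getEnv, IEnv } from "~/utils/getEnv";

// ENV (and .env) supply the defaults; explicit options always win.
function resolveEnv(options: Partial<IEnv> = {}): IEnv {
  return { ...getEnv(), ...options };
}

const env = resolveEnv({ branch: "next" }); // everything else comes from ENV defaults
```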


@@ -0,0 +1,34 @@
import { GITHUB_API_BASE } from "~/constants";
import { getEnv, IEnv } from "../getEnv";
import { GithubContentsResp } from "~/types";
import axios from "axios";
/**
* Returns the markdown files and sub-directories from a given repo (and path)
*/
export async function getDirectory(o: IEnv) {
const { github_token, github_user } = getEnv();
const url = `${GITHUB_API_BASE}/repos/${o.org}/${o.repo}/contents/${o.docsPath}?ref=${o.branch}`;
try {
const res = await axios.get<GithubContentsResp>(url, {
httpAgent: "Tauri Search",
...(github_token && github_user
? { auth: { username: github_user, password: github_token } }
: {}),
});
if (res.status < 299) {
return res;
} else {
throw new Error(
`The attempt to call Github's "contents" API failed [${res.status}, ${url}]: ${res.statusText}`
);
}
} catch (err) {
throw new Error(
`The attempt to call Github's "contents" API failed [${url}]: ${
(err as Error).message
}`
);
}
}


@@ -0,0 +1,22 @@
import { mkdir, writeFile } from "fs/promises";
import { dirname, join } from "path";
/**
* Writes a "generated file" to disk while ensuring that the recursive path to the file
* exists.
*/
export async function writeCacheFile(file: string, data: any) {
const content = typeof data === "string" ? data : JSON.stringify(data);
const dir = join(process.cwd(), dirname(file));
try {
await mkdir(dir, { recursive: true });
return writeFile(file, content, "utf-8");
} catch (err) {
throw new Error(
`Problem creating directory "${dir}" so that a generated file could be placed into this directory: ${
(err as Error).message
}`
);
}
}
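Callers can hand it either a pre-serialized string or a plain value (a sketch; the path and payload are placeholders):

```ts
import { writeCacheFile } from "~/utils/writeCacheFile";

(async () => {
  // objects are JSON.stringify'd; parent directories are created on demand
  await writeCacheFile("src/generated/repos/documents.json", [{ id: "tauri" }]);
})();
```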


@@ -1,15 +0,0 @@
import { mkdir, writeFile } from "fs/promises";
import { dirname } from "path";
export async function writeGeneratedFile(file: string, data: any) {
const content = typeof data === "string" ? data : JSON.stringify(data);
const dir = dirname(file);
try {
await mkdir(dir, { recursive: true });
} catch(err) {
throw new Error(`Problem creating directory "${dir}" so that a generated file could be placed into this driectory: ${(err as Error).message}`);
}
return writeFile(file, content, "utf-8");
}

pnpm-lock.yaml generated

@@ -26,6 +26,7 @@ importers:
critters: ^0.0.16
cross-env: ^7.0.3
date-fns: ^2.28.0
dotenv: ^14.3.2
eslint: ^8.8.0
eslint-plugin-cypress: ^2.12.1
floating-vue: ^2.0.0-beta.5
@@ -85,6 +86,7 @@ importers:
'@vue/test-utils': 2.0.0-rc.18_vue@3.2.29
critters: 0.0.16
cross-env: 7.0.3
dotenv: 14.3.2
eslint: 8.8.0
eslint-plugin-cypress: 2.12.1_eslint@8.8.0
https-localhost: 4.7.0
@@ -3077,7 +3079,6 @@ packages:
/dotenv/14.3.2:
resolution: {integrity: sha512-vwEppIphpFdvaMCaHfCEv9IgwcxMljMw2TnAQBB4VWPvzXQLTb82jwmdOKzlEVUL3gNFT4l4TPKO+Bn+sqcrVQ==}
engines: {node: '>=12'}
dev: false
/ee-first/1.1.1:
resolution: {integrity: sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=}