feat(desktop): cloud for orgs support - phase 3 (#6043)
This commit is contained in:
parent
f690d5969a
commit
8aac537c7a
36 changed files with 1160 additions and 130 deletions
85
devenv.lock
85
devenv.lock
|
|
@ -3,10 +3,11 @@
|
|||
"devenv": {
|
||||
"locked": {
|
||||
"dir": "src/modules",
|
||||
"lastModified": 1764669403,
|
||||
"lastModified": 1774428097,
|
||||
"narHash": "sha256-yQAutPgbsVHsN/SygZDyzMRxQn6Im53PJkrI377N8Sg=",
|
||||
"owner": "cachix",
|
||||
"repo": "devenv",
|
||||
"rev": "3f2d25e7af748127da0571266054575dd8fec5ab",
|
||||
"rev": "957d63f663f230dc8ac3b85f950690e56fe8b1e0",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
|
@ -24,10 +25,11 @@
|
|||
"rust-analyzer-src": "rust-analyzer-src"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1764658058,
|
||||
"lastModified": 1774423251,
|
||||
"narHash": "sha256-g/PP8G9WcP4vtZVOBNYwfGxLnwLQoTERHnef8irAMeQ=",
|
||||
"owner": "nix-community",
|
||||
"repo": "fenix",
|
||||
"rev": "12bd9c7bcbeb949741b3ad0ca2b3506d0718cf4d",
|
||||
"rev": "b70d7535088cd8a9e4322c372a475f66ffa18adf",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
|
@ -36,68 +38,13 @@
|
|||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-compat": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1761588595,
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"git-hooks": {
|
||||
"inputs": {
|
||||
"flake-compat": "flake-compat",
|
||||
"gitignore": "gitignore",
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1763988335,
|
||||
"owner": "cachix",
|
||||
"repo": "git-hooks.nix",
|
||||
"rev": "50b9238891e388c9fdc6a5c49e49c42533a1b5ce",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "cachix",
|
||||
"repo": "git-hooks.nix",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"gitignore": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"git-hooks",
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1762808025,
|
||||
"owner": "hercules-ci",
|
||||
"repo": "gitignore.nix",
|
||||
"rev": "cb5e3fdca1de58ccbc3ef53de65bd372b48f567c",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "hercules-ci",
|
||||
"repo": "gitignore.nix",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1764611609,
|
||||
"lastModified": 1774273680,
|
||||
"narHash": "sha256-a++tZ1RQsDb1I0NHrFwdGuRlR5TORvCEUksM459wKUA=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "8c29968b3a942f2903f90797f9623737c215737c",
|
||||
"rev": "fdc7b8f7b30fdbedec91b71ed82f36e1637483ed",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
|
@ -111,21 +58,18 @@
|
|||
"inputs": {
|
||||
"devenv": "devenv",
|
||||
"fenix": "fenix",
|
||||
"git-hooks": "git-hooks",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"pre-commit-hooks": [
|
||||
"git-hooks"
|
||||
],
|
||||
"rust-overlay": "rust-overlay"
|
||||
}
|
||||
},
|
||||
"rust-analyzer-src": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1764603480,
|
||||
"lastModified": 1774376228,
|
||||
"narHash": "sha256-7oA0u4aghFjjIcIDKZ26NUpXH7hVXGPC0sI1OfK7NUk=",
|
||||
"owner": "rust-lang",
|
||||
"repo": "rust-analyzer",
|
||||
"rev": "f25db5500baa047106d74962fe361ea59ce6f91e",
|
||||
"rev": "eabb84b771420b8396ab4bb4747694302d9be277",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
|
@ -142,10 +86,11 @@
|
|||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1764643237,
|
||||
"lastModified": 1774408260,
|
||||
"narHash": "sha256-Jn9d9r85dmf3gTMnSRt6t+DP2nQ5uJns/MMXg2FpzfM=",
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"rev": "e66d6b924ac59e6c722f69332f6540ea57c69233",
|
||||
"rev": "d6471ee5a8f470251e6e5b83a20a182eb6c46c9b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
|
|
|||
|
|
@ -11,6 +11,8 @@ let
|
|||
];
|
||||
|
||||
linuxPackages = with pkgs; [
|
||||
nodePackages.prisma
|
||||
prisma-engines
|
||||
libsoup_3
|
||||
webkitgtk_4_1
|
||||
librsvg
|
||||
|
|
@ -31,8 +33,6 @@ in {
|
|||
nodejs_22
|
||||
nodePackages.typescript-language-server
|
||||
nodePackages."@volar/vue-language-server"
|
||||
nodePackages.prisma
|
||||
prisma-engines
|
||||
cargo-edit
|
||||
cargo-tauri
|
||||
] ++ lib.optionals pkgs.stdenv.isDarwin darwinPackages
|
||||
|
|
@ -169,7 +169,7 @@ in {
|
|||
};
|
||||
go = {
|
||||
enable = true;
|
||||
package = pkgs.go_1_24;
|
||||
package = pkgs.go_1_25;
|
||||
};
|
||||
rust = {
|
||||
enable = true;
|
||||
|
|
|
|||
|
|
@ -41,7 +41,7 @@
|
|||
>
|
||||
<component
|
||||
:is="
|
||||
platform.organization.customOrganizationSwitcherComponent
|
||||
platform.organization?.customOrganizationSwitcherComponent
|
||||
"
|
||||
v-if="
|
||||
platform.organization?.customOrganizationSwitcherComponent
|
||||
|
|
@ -401,6 +401,7 @@ const t = useI18n()
|
|||
const toast = useToast()
|
||||
const kernelMode = getKernelMode()
|
||||
|
||||
const headerRef = ref<HTMLElement | null>(null)
|
||||
const downloadableLinksRef =
|
||||
kernelMode === "web" ? ref<any | null>(null) : ref(null)
|
||||
const switcherRef = ref<HTMLElement | null>(null)
|
||||
|
|
|
|||
|
|
@ -291,11 +291,23 @@ const isInstanceSwitchingEnabled = computed(() => {
|
|||
return platform.instance?.instanceSwitchingEnabled ?? false
|
||||
})
|
||||
|
||||
// Whether the org switcher is handling the default instance entry. When it is,
|
||||
// the vendored instance should not appear here since the "Hoppscotch Cloud"
|
||||
// entry in the org section already covers switching back to the default state.
|
||||
// Showing both "Hoppscotch Cloud" (org section) and "Hoppscotch Desktop"
|
||||
// (instance section) is confusing because they represent the same thing from
|
||||
// the user's perspective.
|
||||
const orgSwitcherHandlesDefault = computed(
|
||||
() => !!platform.organization?.customOrganizationSwitcherComponent
|
||||
)
|
||||
|
||||
const connectedInstance = computed(() => {
|
||||
if (!isConnectedState(connectionState.value)) return null
|
||||
const instance = currentInstance.value
|
||||
// cloud and cloud-org instances belong in the org section, not here
|
||||
if (instance?.kind === "cloud" || instance?.kind === "cloud-org") return null
|
||||
if (instance?.kind === "vendored" && orgSwitcherHandlesDefault.value)
|
||||
return null
|
||||
return instance
|
||||
})
|
||||
|
||||
|
|
@ -305,7 +317,8 @@ const recentInstances = computed(() => {
|
|||
instance.serverUrl !== currentInstance.value?.serverUrl &&
|
||||
// cloud and cloud-org instances are accessed via the dedicated cloud entry
|
||||
instance.kind !== "cloud" &&
|
||||
instance.kind !== "cloud-org"
|
||||
instance.kind !== "cloud-org" &&
|
||||
!(instance.kind === "vendored" && orgSwitcherHandlesDefault.value)
|
||||
)
|
||||
})
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { getKernelMode, initKernel } from "@hoppscotch/kernel"
|
||||
import { Log } from "./kernel/log"
|
||||
import { HOPP_MODULES } from "@modules/."
|
||||
import { createApp } from "vue"
|
||||
|
||||
|
|
@ -25,6 +26,13 @@ export async function createHoppApp(
|
|||
platformDef: PlatformDef
|
||||
) {
|
||||
initKernel(getKernelMode())
|
||||
|
||||
// initialize the kernel log module (opens the log file on desktop,
|
||||
// no-op on web). also drains any `diag()` entries that were buffered
|
||||
// during module evaluation (before `initKernel()` ran) so they make
|
||||
// it to the log file on disk
|
||||
Log.init().catch((err) => console.warn("[kernel-log] init failed:", err))
|
||||
|
||||
setPlatformDef(platformDef)
|
||||
|
||||
const app = createApp(App)
|
||||
|
|
|
|||
|
|
@ -4,6 +4,11 @@ export { Io } from "./io"
|
|||
export { Relay } from "./relay"
|
||||
export { Store } from "./store"
|
||||
|
||||
// Log and diag are intentionally not re-exported here. log.ts imports
|
||||
// `getModule` from this file, and re-exporting from log.ts would create
|
||||
// a circular dependency that causes a TDZ error at bundle time.
|
||||
// consumers import directly from "~/kernel/log" instead
|
||||
|
||||
export const getModule = <K extends keyof KernelAPI>(
|
||||
name: K
|
||||
): NonNullable<KernelAPI[K]> => {
|
||||
|
|
|
|||
140
packages/hoppscotch-common/src/kernel/log.ts
Normal file
140
packages/hoppscotch-common/src/kernel/log.ts
Normal file
|
|
@ -0,0 +1,140 @@
|
|||
// kernel log wrapper. same setup as store.ts: delegates to the kernel
|
||||
// log module, which on web writes to console and on desktop writes to
|
||||
// console and a log file on disk.
|
||||
//
|
||||
// usage:
|
||||
// import { diag } from "~/kernel/log"
|
||||
// diag("store", "STORE_PATH resolved to", storePath)
|
||||
//
|
||||
// import { Log } from "~/kernel/log"
|
||||
// Log.info("persistence", "settings loaded", { theme: "dark" })
|
||||
// Log.error("auth", "token refresh failed", error.message)
|
||||
//
|
||||
// NOTE: this file intentionally does not import from "." (kernel/index.ts)
|
||||
// or from "./store". store.ts imports `diag` from this file and calls it
|
||||
// at module evaluation time. any static import back into the barrel
|
||||
// (index.ts) would create a circular dependency that causes a TDZ error
|
||||
// at bundle time. instead we access `window.__KERNEL__` directly
|
||||
|
||||
const LOG_FILE_NAME = "io.hoppscotch.desktop.diag.log"
|
||||
|
||||
// in-memory buffer that works even before the kernel is initialized.
|
||||
// the kernel log impls maintain their own buffers on `window.__DIAG_LOGS__`
|
||||
// once they take over, but early entries (before `initKernel()`) land here.
|
||||
//
|
||||
// the window assignment is an intentional debugging hatch for DevTools
|
||||
// inspection, same as the kernel impls (see kernel/log/impl/web/v/1.ts
|
||||
// for full rationale). will be internalized once the kernel exposes
|
||||
// proper retrieval APIs (getLogs(), getLogsByTag())
|
||||
const earlyBuffer: string[] = []
|
||||
|
||||
if (typeof window !== "undefined") {
|
||||
;(window as any).__DIAG_LOGS__ = earlyBuffer
|
||||
;(window as any).__dumpDiagLogs__ = () => earlyBuffer.join("\n")
|
||||
}
|
||||
|
||||
// the log "path" passed through the kernel API is just the filename.
|
||||
// on desktop, the Rust `append_log` command joins it with `logs_dir()`
|
||||
// (from path.rs) so the JS side never needs to resolve directories.
|
||||
// on web the filename is unused but the API requires one
|
||||
const getLogPath = (): string => LOG_FILE_NAME
|
||||
|
||||
// access the kernel log module directly from `window.__KERNEL__` instead
|
||||
// of importing `getModule` from the barrel. returns null if the kernel
|
||||
// hasn't been initialized yet (normal during module evaluation, since
|
||||
// `diag()` is called at top level in store.ts before `initKernel()`)
|
||||
const tryModule = () => {
|
||||
if (typeof window === "undefined") return null
|
||||
const kernel = window.__KERNEL__
|
||||
return kernel?.log ?? null
|
||||
}
|
||||
|
||||
type LogLevel = "debug" | "info" | "warn" | "error"
|
||||
|
||||
const formatLine = (level: string, tag: string, ...args: unknown[]): string => {
|
||||
const ts = new Date().toISOString()
|
||||
const parts = args.map((a) => {
|
||||
if (typeof a === "string") return a
|
||||
try {
|
||||
return JSON.stringify(a)
|
||||
} catch {
|
||||
return String(a)
|
||||
}
|
||||
})
|
||||
return `[${ts}] [${level.toUpperCase()}] [${tag}] ${parts.join(" ")}`
|
||||
}
|
||||
|
||||
const logAtLevel = async (
|
||||
level: LogLevel,
|
||||
tag: string,
|
||||
...args: unknown[]
|
||||
): Promise<void> => {
|
||||
const mod = tryModule()
|
||||
|
||||
if (mod) {
|
||||
// kernel is ready, delegate to it (console + file on desktop)
|
||||
const logPath = getLogPath()
|
||||
const message = args
|
||||
.map((a) => {
|
||||
if (typeof a === "string") return a
|
||||
try {
|
||||
return JSON.stringify(a)
|
||||
} catch {
|
||||
return String(a)
|
||||
}
|
||||
})
|
||||
.join(" ")
|
||||
await mod.log(logPath, level, tag, message)
|
||||
} else {
|
||||
// kernel not ready yet (module eval time). write to console and
|
||||
// buffer directly so nothing is lost
|
||||
const line = formatLine(level, tag, ...args)
|
||||
if (level === "debug") console.debug(line)
|
||||
else if (level === "warn") console.warn(line)
|
||||
else if (level === "error") console.error(line)
|
||||
else console.log(line)
|
||||
earlyBuffer.push(line)
|
||||
if (earlyBuffer.length > 5000)
|
||||
earlyBuffer.splice(0, earlyBuffer.length - 5000)
|
||||
}
|
||||
}
|
||||
|
||||
// on web: writes to console + in-memory buffer.
|
||||
// on desktop: writes to console + in-memory buffer + log file on disk
|
||||
export const Log = {
|
||||
// initialize the file logger (desktop only). call once during app
|
||||
// startup, after `initKernel()`. flushes any entries that were buffered
|
||||
// before the kernel was ready (module-eval-time `diag()` calls) so they
|
||||
// make it to the log file on disk. on web the init is a no-op but the
|
||||
// drain still feeds the kernel's in-memory buffer
|
||||
init: async () => {
|
||||
const mod = tryModule()
|
||||
if (!mod) throw new Error("Log.init() called before initKernel()")
|
||||
const logPath = getLogPath()
|
||||
const result = await mod.init(logPath)
|
||||
|
||||
// drain early buffer entries through the kernel log module so they
|
||||
// end up in the log file on desktop (they were only in console before)
|
||||
if (earlyBuffer.length > 0) {
|
||||
for (const line of earlyBuffer) {
|
||||
await mod.log(logPath, "info", "early", line)
|
||||
}
|
||||
earlyBuffer.length = 0
|
||||
}
|
||||
|
||||
return result
|
||||
},
|
||||
|
||||
debug: (tag: string, ...args: unknown[]) => logAtLevel("debug", tag, ...args),
|
||||
info: (tag: string, ...args: unknown[]) => logAtLevel("info", tag, ...args),
|
||||
warn: (tag: string, ...args: unknown[]) => logAtLevel("warn", tag, ...args),
|
||||
error: (tag: string, ...args: unknown[]) => logAtLevel("error", tag, ...args),
|
||||
} as const
|
||||
|
||||
// convenience alias for `Log.info`. drop-in replacement for the old
|
||||
// `diag()` function so existing call sites only need to change the
|
||||
// import path, not the function name
|
||||
export function diag(tag: string, ...args: unknown[]): void {
|
||||
// fire-and-forget: diag should never block the caller
|
||||
logAtLevel("info", tag, ...args).catch(() => {})
|
||||
}
|
||||
|
|
@ -3,12 +3,37 @@ import type {
|
|||
StoreError,
|
||||
StoreEvents,
|
||||
StoreEventEmitter,
|
||||
ScopedStore,
|
||||
} from "@hoppscotch/kernel"
|
||||
import { extendStore } from "@hoppscotch/kernel"
|
||||
import * as E from "fp-ts/Either"
|
||||
import { getModule } from "."
|
||||
import { getKernelMode } from "@hoppscotch/kernel"
|
||||
import { diag } from "./log"
|
||||
|
||||
const STORE_PATH = `${window.location.host}.hoppscotch.store`
|
||||
// on desktop, org webviews share the same app:// origin as the main webview
|
||||
// (to keep Tauri IPC working). the org context is passed as a query param
|
||||
// (?org=test-org.hoppscotch.io) instead. we include it in the store path so
|
||||
// each org gets its own store file on disk, preserving per-org isolation for
|
||||
// auth tokens, settings, collections, etc.
|
||||
//
|
||||
// the org param is the raw host (e.g. "test-org.hoppscotch.io") so we
|
||||
// sanitize it the same way Tauri sanitizes window labels: replace all
|
||||
// non-alphanumeric chars with underscores. this produces the same filename
|
||||
// as the old per-hostname approach (test_org_hoppscotch_io.hoppscotch.store)
|
||||
// the ?org= query param is preserved across Vue Router navigations by
|
||||
// a beforeEach guard in modules/router.ts, and survives full-page reloads
|
||||
// because Tauri sets it on the initial webview URL
|
||||
const orgParam = new URLSearchParams(window.location.search).get("org")
|
||||
const STORE_PATH = orgParam
|
||||
? `${orgParam.replace(/[^a-zA-Z0-9]/g, "_")}.hoppscotch.store`
|
||||
: `${window.location.host}.hoppscotch.store`
|
||||
|
||||
diag("store", "--- COMMON store.ts module evaluated ---")
|
||||
diag("store", "orgParam:", orgParam ?? "(none)")
|
||||
diag("store", "STORE_PATH:", STORE_PATH)
|
||||
diag("store", "window.location.host:", window.location.host)
|
||||
diag("store", "window.location.href:", window.location.href)
|
||||
|
||||
let cachedStorePath: string | undefined
|
||||
|
||||
|
|
@ -71,8 +96,17 @@ export const getInstanceDir = async (): Promise<string> => {
|
|||
return await invoke<string>("get_instance_dir")
|
||||
}
|
||||
|
||||
export const getLogsDir = async (): Promise<string> => {
|
||||
await isInitd()
|
||||
if (!invoke) throw new Error("getLogsDir is only available in desktop mode")
|
||||
return await invoke<string>("get_logs_dir")
|
||||
}
|
||||
|
||||
const getStorePath = async (): Promise<string> => {
|
||||
if (cachedStorePath) return cachedStorePath
|
||||
if (cachedStorePath) {
|
||||
diag("store", "getStorePath: returning cached:", cachedStorePath)
|
||||
return cachedStorePath
|
||||
}
|
||||
|
||||
if (getKernelMode() === "desktop") {
|
||||
await isInitd()
|
||||
|
|
@ -80,14 +114,24 @@ const getStorePath = async (): Promise<string> => {
|
|||
try {
|
||||
const storeDir = await getStoreDir()
|
||||
cachedStorePath = await join(storeDir, STORE_PATH)
|
||||
diag(
|
||||
"store",
|
||||
"getStorePath: resolved desktop path:",
|
||||
cachedStorePath,
|
||||
"(STORE_PATH:",
|
||||
STORE_PATH,
|
||||
")"
|
||||
)
|
||||
return cachedStorePath
|
||||
} catch (error) {
|
||||
diag("store", "getStorePath: failed to get store dir:", String(error))
|
||||
console.error("Failed to get store directory:", error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cachedStorePath = STORE_PATH
|
||||
diag("store", "getStorePath: using fallback STORE_PATH:", cachedStorePath)
|
||||
return cachedStorePath
|
||||
}
|
||||
|
||||
|
|
@ -99,7 +143,10 @@ export const Store = (() => {
|
|||
|
||||
init: async () => {
|
||||
const storePath = await getStorePath()
|
||||
return module().init(storePath)
|
||||
diag("store", "Store.init() called with path:", storePath)
|
||||
const result = await module().init(storePath)
|
||||
diag("store", "Store.init() completed for path:", storePath)
|
||||
return result
|
||||
},
|
||||
|
||||
set: async (
|
||||
|
|
@ -109,6 +156,7 @@ export const Store = (() => {
|
|||
options?: StorageOptions
|
||||
): Promise<E.Either<StoreError, void>> => {
|
||||
const storePath = await getStorePath()
|
||||
diag("store", `Store.set(${namespace}, ${key}) on path:`, storePath)
|
||||
return module().set(storePath, namespace, key, value, options)
|
||||
},
|
||||
|
||||
|
|
@ -117,7 +165,23 @@ export const Store = (() => {
|
|||
key: string
|
||||
): Promise<E.Either<StoreError, T | undefined>> => {
|
||||
const storePath = await getStorePath()
|
||||
return module().get<T>(storePath, namespace, key)
|
||||
diag("store", `Store.get(${namespace}, ${key}) on path:`, storePath)
|
||||
const result = await module().get<T>(storePath, namespace, key)
|
||||
if (E.isRight(result)) {
|
||||
const val = result.right
|
||||
const shape =
|
||||
val === undefined
|
||||
? "undefined"
|
||||
: val === null
|
||||
? "null"
|
||||
: typeof val === "object"
|
||||
? `object(${Object.keys(val as Record<string, unknown>).length} keys)`
|
||||
: typeof val
|
||||
diag("store", `Store.get(${namespace}, ${key}) => Right(${shape})`)
|
||||
} else {
|
||||
diag("store", `Store.get(${namespace}, ${key}) => Left:`, result.left)
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
remove: async (
|
||||
|
|
@ -160,5 +224,10 @@ export const Store = (() => {
|
|||
const storePath = await getStorePath()
|
||||
return module().watch(storePath, namespace, key)
|
||||
},
|
||||
|
||||
extend: async (namespace: string): Promise<ScopedStore> => {
|
||||
const storePath = await getStorePath()
|
||||
return extendStore(module(), storePath, namespace)
|
||||
},
|
||||
} as const
|
||||
})()
|
||||
|
|
|
|||
|
|
@ -47,7 +47,31 @@ export default <HoppModule>{
|
|||
routes,
|
||||
})
|
||||
|
||||
// on desktop, org webviews carry their context as ?org= in the URL
|
||||
// (e.g. app://hoppscotch/?org=test-org.hoppscotch.io). Vue Router
|
||||
// strips query params during internal navigation, so we capture the
|
||||
// initial ?org= value and re-inject it into every subsequent route.
|
||||
// this is the single source of truth for org context on desktop,
|
||||
// replacing the previous window.__HOPPSCOTCH_ORG__ global approach.
|
||||
const initialOrgParam = new URLSearchParams(window.location.search).get(
|
||||
"org"
|
||||
)
|
||||
|
||||
router.beforeEach(async (to, from) => {
|
||||
// preserve the ?org= query param across all route transitions.
|
||||
// the param originates from the Rust load command which sets it
|
||||
// on the initial webview URL. without this guard, navigating to
|
||||
// /orgs/login-required and then calling window.location.reload()
|
||||
// would lose the org context.
|
||||
if (initialOrgParam && !to.query.org) {
|
||||
return {
|
||||
path: to.path,
|
||||
hash: to.hash,
|
||||
params: to.params,
|
||||
query: { ...to.query, org: initialOrgParam },
|
||||
}
|
||||
}
|
||||
|
||||
_isLoadingInitialRoute.value = isInitialRoute(from)
|
||||
|
||||
const onBeforeRouteChangePromises: Promise<any>[] = []
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ import { NativeKernelInterceptorService } from "~/platform/std/kernel-intercepto
|
|||
import { performMigrations } from "~/helpers/migrations"
|
||||
import { initBackendGQLClient } from "~/helpers/backend/GQLClient"
|
||||
import { getKernelMode } from "@hoppscotch/kernel"
|
||||
import { diag } from "~/kernel/log"
|
||||
|
||||
type InitEvent =
|
||||
| { type: "STORE_READY" }
|
||||
|
|
@ -146,25 +147,37 @@ export class InitializationService extends Service<InitEvent> {
|
|||
}
|
||||
|
||||
public async initPre() {
|
||||
diag("init", "initPre() start")
|
||||
await this.initStore()
|
||||
diag("init", "initPre() store done")
|
||||
await this.initPersistenceFirst()
|
||||
diag("init", "initPre() persistenceFirst done")
|
||||
|
||||
if (getKernelMode() === "desktop") {
|
||||
await this.initNativeKernelNetworking()
|
||||
diag("init", "initPre() nativeKernelNetworking done")
|
||||
}
|
||||
|
||||
await this.initBackendClient()
|
||||
diag("init", "initPre() backendClient done")
|
||||
await this.initTabs()
|
||||
diag("init", "initPre() tabs done, initPre complete")
|
||||
}
|
||||
|
||||
public async initAuthAndSync() {
|
||||
diag("init", "initAuthAndSync() start")
|
||||
await this.initAuth()
|
||||
diag("init", "initAuthAndSync() auth done")
|
||||
await this.initSync()
|
||||
diag("init", "initAuthAndSync() sync done, initAuthAndSync complete")
|
||||
}
|
||||
|
||||
public async initPost() {
|
||||
diag("init", "initPost() start")
|
||||
await this.initPersistenceLater()
|
||||
diag("init", "initPost() persistenceLater done")
|
||||
performMigrations()
|
||||
diag("init", "initPost() migrations done, initPost complete")
|
||||
}
|
||||
|
||||
public isInitialized() {
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ import {
|
|||
import { StoreError } from "@hoppscotch/kernel"
|
||||
|
||||
import { Store } from "~/kernel/store"
|
||||
import { diag } from "~/kernel/log"
|
||||
import { GQLTabService } from "~/services/tab/graphql"
|
||||
import { RESTTabService } from "~/services/tab/rest"
|
||||
import {
|
||||
|
|
@ -128,6 +129,8 @@ export const STORE_KEYS = {
|
|||
CURRENT_ENVIRONMENT_VALUE: "currentEnvironmentValue",
|
||||
CURRENT_SORT_VALUES: "currentSortValues",
|
||||
SCHEMA_VERSION: "schema_version",
|
||||
LOGIN_STATE: "login_state",
|
||||
EMAIL_FOR_SIGN_IN: "emailForSignIn",
|
||||
} as const
|
||||
|
||||
interface Migration {
|
||||
|
|
@ -204,14 +207,24 @@ export class PersistenceService extends Service {
|
|||
}
|
||||
|
||||
async init(): Promise<E.Either<StoreError, void>> {
|
||||
diag(
|
||||
"persistence",
|
||||
"PersistenceService.init() called, about to Store.init()"
|
||||
)
|
||||
const initResult = await Store.init()
|
||||
if (E.isLeft(initResult)) {
|
||||
diag(
|
||||
"persistence",
|
||||
"PersistenceService Store.init() FAILED:",
|
||||
initResult.left
|
||||
)
|
||||
console.error(
|
||||
"[PersistenceService] Failed to initialize store:",
|
||||
initResult.left
|
||||
)
|
||||
return initResult
|
||||
}
|
||||
diag("persistence", "PersistenceService Store.init() succeeded")
|
||||
return initResult
|
||||
}
|
||||
|
||||
|
|
@ -364,6 +377,10 @@ export class PersistenceService extends Service {
|
|||
}
|
||||
|
||||
private async setupSettingsPersistence() {
|
||||
diag(
|
||||
"persistence",
|
||||
"setupSettingsPersistence() loading settings from store"
|
||||
)
|
||||
const loadResult = await Store.get<any>(
|
||||
STORE_NAMESPACE,
|
||||
STORE_KEYS.SETTINGS
|
||||
|
|
@ -372,12 +389,37 @@ export class PersistenceService extends Service {
|
|||
try {
|
||||
if (E.isRight(loadResult)) {
|
||||
const data = loadResult.right ?? getDefaultSettings()
|
||||
diag(
|
||||
"persistence",
|
||||
"settings loaded, BG_COLOR:",
|
||||
data?.BG_COLOR,
|
||||
"THEME_COLOR:",
|
||||
data?.THEME_COLOR
|
||||
)
|
||||
diag(
|
||||
"persistence",
|
||||
"settings keys:",
|
||||
data ? Object.keys(data).join(", ") : "(null/default)"
|
||||
)
|
||||
const result = SETTINGS_SCHEMA.safeParse(data)
|
||||
|
||||
if (result.success) {
|
||||
const migratedSettings = performSettingsDataMigrations(result.data)
|
||||
diag(
|
||||
"persistence",
|
||||
"settings migrated, BG_COLOR:",
|
||||
migratedSettings?.BG_COLOR,
|
||||
"THEME_COLOR:",
|
||||
migratedSettings?.THEME_COLOR
|
||||
)
|
||||
bulkApplySettings(migratedSettings)
|
||||
diag("persistence", "settings applied via bulkApplySettings")
|
||||
} else {
|
||||
diag(
|
||||
"persistence",
|
||||
"settings schema validation FAILED:",
|
||||
result.error?.message
|
||||
)
|
||||
this.showErrorToast(STORE_KEYS.SETTINGS)
|
||||
await Store.set(
|
||||
STORE_NAMESPACE,
|
||||
|
|
@ -385,8 +427,15 @@ export class PersistenceService extends Service {
|
|||
data
|
||||
)
|
||||
}
|
||||
} else {
|
||||
diag(
|
||||
"persistence",
|
||||
"settings load returned Left (error):",
|
||||
loadResult.left
|
||||
)
|
||||
}
|
||||
} catch (_e) {
|
||||
diag("persistence", "settings parse error:", String(_e))
|
||||
console.error(`Failed parsing persisted SETTINGS:`, loadResult)
|
||||
}
|
||||
|
||||
|
|
@ -460,6 +509,10 @@ export class PersistenceService extends Service {
|
|||
}
|
||||
|
||||
private async setupRESTCollectionsPersistence() {
|
||||
diag(
|
||||
"persistence",
|
||||
"setupRESTCollectionsPersistence() loading REST collections"
|
||||
)
|
||||
const restLoadResult = await Store.get<any>(
|
||||
STORE_NAMESPACE,
|
||||
STORE_KEYS.REST_COLLECTIONS
|
||||
|
|
@ -468,11 +521,22 @@ export class PersistenceService extends Service {
|
|||
try {
|
||||
if (E.isRight(restLoadResult)) {
|
||||
const data = restLoadResult.right ?? []
|
||||
diag(
|
||||
"persistence",
|
||||
"REST collections loaded, count:",
|
||||
Array.isArray(data) ? data.length : "(not array)",
|
||||
"first name:",
|
||||
Array.isArray(data) && data[0]?.name ? data[0].name : "(none)"
|
||||
)
|
||||
const result = z.array(REST_COLLECTION_SCHEMA).safeParse(data)
|
||||
|
||||
if (result.success) {
|
||||
const translatedData = result.data.map(translateToNewRESTCollection)
|
||||
|
||||
diag(
|
||||
"persistence",
|
||||
"REST collections translated, count:",
|
||||
translatedData.length
|
||||
)
|
||||
setRESTCollections(translatedData)
|
||||
} else {
|
||||
console.error(`Failed with `, result.error, data)
|
||||
|
|
@ -1012,12 +1076,16 @@ export class PersistenceService extends Service {
|
|||
}
|
||||
|
||||
public async setupFirst() {
|
||||
diag("persistence", "setupFirst() start")
|
||||
await this.init()
|
||||
diag("persistence", "setupFirst() init done, running migrations")
|
||||
await this.runMigrations()
|
||||
await this.checkAndMigrateOldSettings()
|
||||
diag("persistence", "setupFirst() complete")
|
||||
}
|
||||
|
||||
public async setupLater() {
|
||||
diag("persistence", "setupLater() start - loading all persisted data")
|
||||
await Promise.all([
|
||||
this.setupLocalStatePersistence(),
|
||||
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@
|
|||
"@fontsource-variable/roboto-mono": "5.2.8",
|
||||
"@hoppscotch/common": "workspace:^",
|
||||
"@hoppscotch/kernel": "workspace:^",
|
||||
"@hoppscotch/plugin-appload": "github:CuriousCorrelation/tauri-plugin-appload#168ff9533258a56de184fb69ad32f8a7f61bae0d",
|
||||
"@hoppscotch/plugin-appload": "github:CuriousCorrelation/tauri-plugin-appload#0d58d53be2bc75aeb5916bd0d77794fd209426af",
|
||||
"@hoppscotch/ui": "0.2.5",
|
||||
"@tauri-apps/api": "2.1.1",
|
||||
"@tauri-apps/plugin-fs": "2.0.2",
|
||||
|
|
|
|||
|
|
@ -17,6 +17,29 @@ use crate::{
|
|||
ui, RemoveOptions, RemoveResponse, Result,
|
||||
};
|
||||
|
||||
/// Writes a line to appload.diag.log for debugging window lifecycle events.
|
||||
/// This runs at the Rust level so it captures events even when JS logging
|
||||
/// fails (e.g. webview destroyed before JS can write). Best-effort: silently
|
||||
/// ignores any IO errors.
|
||||
///
|
||||
/// The log directory comes from `Config::log_dir`, set by the host app during
|
||||
/// plugin initialization. If no log_dir was configured, this is a no-op.
|
||||
fn diag_log(msg: &str) {
|
||||
let Some(dir) = crate::DIAG_LOG_DIR.get() else {
|
||||
return;
|
||||
};
|
||||
let _ = std::fs::create_dir_all(dir);
|
||||
let path = dir.join("appload.diag.log");
|
||||
let _ = std::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(&path)
|
||||
.and_then(|mut f| {
|
||||
use std::io::Write;
|
||||
writeln!(f, "[{}] [RUST] {}", chrono::Utc::now().to_rfc3339(), msg)
|
||||
});
|
||||
}
|
||||
|
||||
/// Maximum length for window labels/hosts
|
||||
const MAX_HOST_LENGTH: usize = 255;
|
||||
|
||||
|
|
@ -88,53 +111,92 @@ pub async fn load<R: Runtime>(app: AppHandle<R>, options: LoadOptions) -> Result
|
|||
let current_label = format!("{}-curr", base_label);
|
||||
let alternate_label = format!("{}-next", base_label);
|
||||
|
||||
let label = if app.get_webview_window(¤t_label).is_some() {
|
||||
alternate_label
|
||||
let has_curr = app.get_webview_window(¤t_label).is_some();
|
||||
let has_next = app.get_webview_window(&alternate_label).is_some();
|
||||
let label = if has_curr {
|
||||
alternate_label.clone()
|
||||
} else {
|
||||
current_label
|
||||
current_label.clone()
|
||||
};
|
||||
|
||||
// Determine the webview host:
|
||||
// - If `host` is provided, use it (for cloud-for-orgs support)
|
||||
// - Otherwise, use the bundle name
|
||||
let window_host = options
|
||||
.host
|
||||
.clone()
|
||||
.unwrap_or_else(|| options.bundle_name.clone());
|
||||
let sanitized_host = sanitize_window_label(&window_host)?;
|
||||
// All webviews use the bundle name as the URL host so they share the same
|
||||
// origin (app://{bundle_name}/). This is critical because Tauri v2's IPC
|
||||
// validates the webview origin at runtime and rejects origins it doesn't
|
||||
// recognize. Using different hosts per org (e.g. app://test_org_hoppscotch_io)
|
||||
// would break all IPC communication in the org webview.
|
||||
//
|
||||
// For cloud-for-orgs, the org host is passed as a query parameter instead.
|
||||
// The JS side reads window.location.search to get the org context, and the
|
||||
// kernel store uses the query param to maintain per-org file isolation.
|
||||
let sanitized_bundle = sanitize_window_label(&options.bundle_name)?;
|
||||
|
||||
let url = match &options.host {
|
||||
Some(host) => {
|
||||
// pass the original host value as-is in the query param so the JS
|
||||
// side can extract the org domain without reversing sanitization.
|
||||
// URL query values don't need the same restrictions as hostnames
|
||||
format!(
|
||||
"app://{}/?org={}",
|
||||
sanitized_bundle.to_lowercase(),
|
||||
host.to_lowercase()
|
||||
)
|
||||
}
|
||||
None => format!("app://{}/", sanitized_bundle.to_lowercase()),
|
||||
};
|
||||
|
||||
// list all existing webview windows so the diag log shows the full picture
|
||||
let existing_windows: Vec<String> = app
|
||||
.webview_windows()
|
||||
.keys()
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
diag_log(&format!(
|
||||
"LOAD called: bundle={}, host={:?}, title={}, url={}, label={}, has_curr={}, has_next={}, existing_windows={:?}",
|
||||
options.bundle_name,
|
||||
options.host,
|
||||
options.window.title,
|
||||
url,
|
||||
label,
|
||||
has_curr,
|
||||
has_next,
|
||||
existing_windows
|
||||
));
|
||||
|
||||
tracing::info!(
|
||||
?options,
|
||||
bundle = %options.bundle_name,
|
||||
host = %sanitized_host,
|
||||
%url,
|
||||
window_label = %label,
|
||||
"Loading bundle"
|
||||
);
|
||||
|
||||
let url = format!("app://{}/", sanitized_host.to_lowercase());
|
||||
tracing::debug!(%url, "Generated app URL");
|
||||
|
||||
let host_mapper = app.state::<Arc<HostMapper>>();
|
||||
host_mapper.register(
|
||||
&sanitized_host.to_lowercase(),
|
||||
&sanitized_bundle.to_lowercase(),
|
||||
&options.bundle_name.to_lowercase(),
|
||||
);
|
||||
tracing::debug!(
|
||||
host = %sanitized_host.to_lowercase(),
|
||||
host = %sanitized_bundle.to_lowercase(),
|
||||
bundle = %options.bundle_name.to_lowercase(),
|
||||
"Registered host mapping"
|
||||
);
|
||||
|
||||
let sanitized_title = sanitize_window_label(&options.window.title)?;
|
||||
|
||||
let window =
|
||||
match WebviewWindowBuilder::new(&app, &label, WebviewUrl::App(url.parse().unwrap()))
|
||||
// Build the webview with the kernel init script. Org context is carried
|
||||
// via the ?org= query param in the URL (set above) and preserved across
|
||||
// Vue Router navigations by a beforeEach guard in modules/router.ts.
|
||||
let builder =
|
||||
WebviewWindowBuilder::new(&app, &label, WebviewUrl::App(url.parse().unwrap()))
|
||||
.initialization_script(crate::KERNEL_JS)
|
||||
.title(sanitized_title)
|
||||
.inner_size(options.window.width, options.window.height)
|
||||
.resizable(options.window.resizable)
|
||||
.disable_drag_drop_handler()
|
||||
.build()
|
||||
.disable_drag_drop_handler();
|
||||
|
||||
let window = match builder.build()
|
||||
{
|
||||
Ok(window) => window,
|
||||
Err(e) => {
|
||||
|
|
@ -143,7 +205,7 @@ pub async fn load<R: Runtime>(app: AppHandle<R>, options: LoadOptions) -> Result
|
|||
?label,
|
||||
"Failed to create window, cleaning up host mapping"
|
||||
);
|
||||
host_mapper.unregister(&sanitized_host.to_lowercase());
|
||||
host_mapper.unregister(&sanitized_bundle.to_lowercase());
|
||||
return Err(e.into());
|
||||
}
|
||||
};
|
||||
|
|
@ -164,11 +226,17 @@ pub async fn load<R: Runtime>(app: AppHandle<R>, options: LoadOptions) -> Result
|
|||
})?;
|
||||
}
|
||||
|
||||
let is_visible = window.is_visible().unwrap_or(false);
|
||||
let response = LoadResponse {
|
||||
success: window.is_visible().unwrap_or(false),
|
||||
window_label: label,
|
||||
success: is_visible,
|
||||
window_label: label.clone(),
|
||||
};
|
||||
|
||||
diag_log(&format!(
|
||||
"LOAD complete: label={}, visible={}, success={}",
|
||||
label, is_visible, response.success
|
||||
));
|
||||
|
||||
tracing::info!(?response, "Bundle loaded successfully");
|
||||
Ok(response)
|
||||
}
|
||||
|
|
@ -177,16 +245,48 @@ pub async fn load<R: Runtime>(app: AppHandle<R>, options: LoadOptions) -> Result
|
|||
pub async fn close<R: Runtime>(app: AppHandle<R>, options: CloseOptions) -> Result<CloseResponse> {
|
||||
tracing::info!(?options, "Starting window close process");
|
||||
|
||||
let existing_windows: Vec<String> = app
|
||||
.webview_windows()
|
||||
.keys()
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
diag_log(&format!(
|
||||
"CLOSE called: window_label={}, existing_windows={:?}",
|
||||
options.window_label,
|
||||
existing_windows
|
||||
));
|
||||
|
||||
let Some(window) = app.get_webview_window(&options.window_label) else {
|
||||
diag_log(&format!(
|
||||
"CLOSE: window {} not found or already closed",
|
||||
options.window_label
|
||||
));
|
||||
tracing::info!(window_label = %options.window_label, "Window not found or already closed");
|
||||
return Ok(CloseResponse { success: true });
|
||||
};
|
||||
|
||||
window.close().map_err(|e| {
|
||||
diag_log(&format!(
|
||||
"CLOSE: failed to close window {}: {:?}",
|
||||
options.window_label, e
|
||||
));
|
||||
tracing::error!(?e, window_label = %options.window_label, "Failed to close window");
|
||||
e
|
||||
})?;
|
||||
|
||||
let remaining_windows: Vec<String> = app
|
||||
.webview_windows()
|
||||
.keys()
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
diag_log(&format!(
|
||||
"CLOSE complete: closed={}, remaining_windows={:?}",
|
||||
options.window_label,
|
||||
remaining_windows
|
||||
));
|
||||
|
||||
let response = CloseResponse { success: true };
|
||||
|
||||
tracing::info!(?response, "Window close process completed");
|
||||
|
|
|
|||
|
|
@ -16,6 +16,13 @@ pub struct Config {
|
|||
pub storage: StorageConfig,
|
||||
#[serde(skip)]
|
||||
pub vendor: VendorConfig,
|
||||
// optional log directory for diagnostic logging from the plugin layer.
|
||||
// when set, the plugin writes best-effort diag lines (window lifecycle
|
||||
// events, etc.) to `appload.diag.log` inside this directory. the host
|
||||
// app is responsible for passing its own log directory here so the
|
||||
// plugin doesn't need to know about app-specific path conventions
|
||||
#[serde(skip)]
|
||||
pub log_dir: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
|
|
|
|||
|
|
@ -1,4 +1,40 @@
|
|||
;(() => {
|
||||
console.log("Setting desktop kernel mode")
|
||||
window.__KERNEL_MODE__ = "desktop"
|
||||
|
||||
// write bundle identity to the log file on disk so we can trace which
|
||||
// webview is which across webkit relaunches (console logs get wiped).
|
||||
// runs before any JS modules load, so we use the raw Tauri IPC channel
|
||||
// instead of @tauri-apps/api.
|
||||
//
|
||||
// log webview identity to disk so we can trace which webview is which
|
||||
// across webkit relaunches (console logs get wiped)
|
||||
Promise.resolve().then(function () {
|
||||
var params = new URLSearchParams(window.location.search)
|
||||
var orgParam = params.get("org")
|
||||
var tag = orgParam ? "org(" + orgParam + ")" : "vendored"
|
||||
|
||||
var line = [
|
||||
"",
|
||||
"========================================================================",
|
||||
"WEBVIEW INIT " + new Date().toISOString(),
|
||||
" tag : " + tag,
|
||||
" ?org= : " + (orgParam || "(not set)"),
|
||||
" href : " + window.location.href,
|
||||
" hostname : " + window.location.hostname,
|
||||
" origin : " + window.location.origin,
|
||||
"========================================================================",
|
||||
"",
|
||||
].join("\n")
|
||||
|
||||
// __TAURI_INTERNALS__ is always present before initialization_scripts run
|
||||
if (window.__TAURI_INTERNALS__) {
|
||||
window.__TAURI_INTERNALS__.invoke("append_log", {
|
||||
filename: "appload.diag.log",
|
||||
content: line,
|
||||
}).catch(function (err) {
|
||||
console.warn("[kernel.js] Failed to write init log:", err)
|
||||
})
|
||||
}
|
||||
})
|
||||
})()
|
||||
|
|
|
|||
|
|
@ -11,12 +11,19 @@
|
|||
html_favicon_url = "https://github.com/<Placeholder>/<Placeholder>/raw/main/packages/app/public/favicon.ico"
|
||||
)]
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::{Arc, OnceLock};
|
||||
use tauri::{
|
||||
plugin::{Builder, Plugin, TauriPlugin},
|
||||
Manager, Runtime,
|
||||
};
|
||||
|
||||
// log directory for the plugin-level diagnostic logger (commands::diag_log).
|
||||
// set once during plugin init from Config::log_dir. a OnceLock because
|
||||
// diag_log is a free function called from command handlers that don't
|
||||
// carry the config around
|
||||
static DIAG_LOG_DIR: OnceLock<PathBuf> = OnceLock::new();
|
||||
|
||||
pub use config::Config;
|
||||
pub use config::{ApiConfig, CacheConfig, StorageConfig};
|
||||
pub use models::*;
|
||||
|
|
@ -56,6 +63,10 @@ pub fn init<R: Runtime>(config: Config) -> TauriPlugin<R> {
|
|||
.setup(move |app, api| {
|
||||
tracing::info!("Initializing appload plugin");
|
||||
|
||||
if let Some(log_dir) = &config.log_dir {
|
||||
let _ = DIAG_LOG_DIR.set(log_dir.clone());
|
||||
}
|
||||
|
||||
tracing::debug!("Using provided configuration settings.");
|
||||
let storage_root = config.storage.root_dir.clone();
|
||||
|
||||
|
|
|
|||
|
|
@ -53,11 +53,30 @@ impl UriHandler {
|
|||
|
||||
tracing::debug!(host = %host, path = %path, "Handling request");
|
||||
|
||||
match self.fetch_content(host, path).await {
|
||||
Ok(content) => {
|
||||
// Try to fetch the requested path. If it fails and the path looks
|
||||
// like a SPA route (no file extension), fall back to index.html so
|
||||
// the client-side router can handle it.
|
||||
let fetch_result = match self.fetch_content(host, path).await {
|
||||
Ok(content) => Ok((content, path)),
|
||||
Err(e) if !path.is_empty() && !path.contains('.') => {
|
||||
tracing::info!(
|
||||
host = %host,
|
||||
path = %path,
|
||||
"Path not found and has no extension, falling back to index.html for SPA routing"
|
||||
);
|
||||
self.fetch_content(host, "")
|
||||
.await
|
||||
.map(|content| (content, ""))
|
||||
.map_err(|_| e)
|
||||
}
|
||||
Err(e) => Err(e),
|
||||
};
|
||||
|
||||
match fetch_result {
|
||||
Ok((content, resolved_path)) => {
|
||||
tracing::info!(host = %host, path = %path, content_length = %content.len(), "Successfully retrieved file content");
|
||||
|
||||
let mime_type = Self::determine_mime(path);
|
||||
let mime_type = Self::determine_mime(resolved_path);
|
||||
let csp = match self.config.app.security.csp.as_ref() {
|
||||
Some(csp) => {
|
||||
tracing::debug!("Using configured CSP");
|
||||
|
|
|
|||
2
packages/hoppscotch-desktop/src-tauri/Cargo.lock
generated
2
packages/hoppscotch-desktop/src-tauri/Cargo.lock
generated
|
|
@ -5541,7 +5541,7 @@ dependencies = [
|
|||
[[package]]
|
||||
name = "tauri-plugin-appload"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/CuriousCorrelation/tauri-plugin-appload?rev=168ff9533258a56de184fb69ad32f8a7f61bae0d#168ff9533258a56de184fb69ad32f8a7f61bae0d"
|
||||
source = "git+https://github.com/CuriousCorrelation/tauri-plugin-appload?rev=0d58d53be2bc75aeb5916bd0d77794fd209426af#0d58d53be2bc75aeb5916bd0d77794fd209426af"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"blake3",
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ tauri-plugin-store = "2.2.0"
|
|||
tauri-plugin-dialog = "2.2.0"
|
||||
tauri-plugin-fs = "2.2.0"
|
||||
tauri-plugin-deep-link = "2.2.0"
|
||||
tauri-plugin-appload = { git = "https://github.com/CuriousCorrelation/tauri-plugin-appload", rev = "168ff9533258a56de184fb69ad32f8a7f61bae0d" }
|
||||
tauri-plugin-appload = { git = "https://github.com/CuriousCorrelation/tauri-plugin-appload", rev = "0d58d53be2bc75aeb5916bd0d77794fd209426af" }
|
||||
tauri-plugin-relay = { git = "https://github.com/CuriousCorrelation/tauri-plugin-relay", rev = "7cf09c1ad31e228758738c2f4e1c8fe9cc141291" }
|
||||
axum = "0.8.1"
|
||||
tower-http = { version = "0.6.2", features = ["cors"] }
|
||||
|
|
|
|||
|
|
@ -59,6 +59,9 @@ impl HoppApploadConfig {
|
|||
bundle_path: self.bundle_path.clone(),
|
||||
manifest_path: self.manifest_path.clone(),
|
||||
})
|
||||
.log_dir(
|
||||
path::logs_dir().unwrap_or_else(|_| std::env::temp_dir()),
|
||||
)
|
||||
.build()
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -260,6 +260,8 @@ pub fn run() {
|
|||
path::get_store_dir,
|
||||
path::get_backup_dir,
|
||||
path::get_logs_dir,
|
||||
logger::append_log,
|
||||
path::get_appload_registry,
|
||||
])
|
||||
.run(tauri::generate_context!());
|
||||
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use file_rotate::{compression::Compression, suffix::AppendCount, ContentLimit, FileRotate};
|
||||
|
|
@ -50,3 +51,27 @@ pub fn setup(log_dir: &PathBuf) -> Result<LogGuard, Box<dyn std::error::Error>>
|
|||
|
||||
Ok(LogGuard(guard))
|
||||
}
|
||||
|
||||
// appends content to a log file inside `logs_dir()`. bypasses the Tauri
|
||||
// fs plugin so the write isn't subject to the scope in capabilities.json.
|
||||
// the filename is caller-controlled but confined to `logs_dir()` to
|
||||
// prevent arbitrary file writes
|
||||
#[tauri::command]
|
||||
pub fn append_log(filename: String, content: String) -> Result<(), String> {
|
||||
let dir = path::logs_dir().map_err(|e| e.to_string())?;
|
||||
let path = dir.join(&filename);
|
||||
|
||||
// safety: reject any path traversal attempts
|
||||
if path.parent() != Some(&dir) {
|
||||
return Err("invalid log filename".to_string());
|
||||
}
|
||||
|
||||
let mut file = std::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(&path)
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
file.write_all(content.as_bytes())
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
|
|
|||
|
|
@ -90,6 +90,28 @@ pub fn get_logs_dir() -> Result<String, String> {
|
|||
.map_err(|err| err.to_string())
|
||||
}
|
||||
|
||||
// exposes the appload storage registry so the JS side can match the current
|
||||
// webview's hostname back to the original server URL. this is needed because
|
||||
// the app:// URL encoding is lossy (generate_bundle_name in appload replaces
|
||||
// both dots and hyphens with underscores, so "test-org" and "test_org" would
|
||||
// produce the same bundle name). returns an empty registry on fresh installs.
|
||||
//
|
||||
// FE-1140: the lossy encoding means two distinct org domains that differ only
|
||||
// by hyphens vs underscores would collide at the bundle name level. DNS rules
|
||||
// make this unlikely in practice but the encoding should be hardened later
|
||||
#[tauri::command]
|
||||
pub fn get_appload_registry() -> Result<String, String> {
|
||||
let registry_path = config_dir()
|
||||
.map_err(|err| err.to_string())?
|
||||
.join("registry.json");
|
||||
|
||||
if !registry_path.exists() {
|
||||
return Ok(r#"{"version":1,"servers":{}}"#.to_string());
|
||||
}
|
||||
|
||||
std::fs::read_to_string(®istry_path).map_err(|err| err.to_string())
|
||||
}
|
||||
|
||||
pub fn log_file_path() -> PathBuf {
|
||||
platform_logs_dir().join(format!("{}.log", APP_ID))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -11,6 +11,19 @@ import type {
|
|||
} from "@hoppscotch/common/platform/instance"
|
||||
import { VENDORED_INSTANCE_CONFIG } from "@hoppscotch/common/platform/instance"
|
||||
|
||||
// simple diag logger for the main window (runs before kernel log module is available)
|
||||
function mainDiag(msg: string) {
|
||||
const line = `[${new Date().toISOString()}] [MAIN] ${msg}\n`
|
||||
if ((window as any).__TAURI_INTERNALS__) {
|
||||
;(window as any).__TAURI_INTERNALS__
|
||||
.invoke("append_log", {
|
||||
filename: "io.hoppscotch.desktop.diag.log",
|
||||
content: line,
|
||||
})
|
||||
.catch(() => {})
|
||||
}
|
||||
}
|
||||
|
||||
export enum AppState {
|
||||
LOADING = "loading",
|
||||
UPDATE_AVAILABLE = "update_available",
|
||||
|
|
@ -60,17 +73,23 @@ export function useAppInitialization() {
|
|||
instance: VENDORED_INSTANCE_CONFIG,
|
||||
})
|
||||
|
||||
mainDiag("loadVendoredInstance: calling load(bundleName=Hoppscotch)")
|
||||
console.log("Loading vendored app...")
|
||||
|
||||
const loadResp = await load({
|
||||
bundleName: VENDORED_INSTANCE_CONFIG.bundleName!,
|
||||
window: { title: "Hoppscotch" },
|
||||
})
|
||||
|
||||
mainDiag(
|
||||
`loadVendoredInstance: load result success=${loadResp.success}, label=${loadResp.windowLabel}`
|
||||
)
|
||||
if (!loadResp.success) {
|
||||
throw new Error("Failed to load Hoppscotch Vendored")
|
||||
}
|
||||
|
||||
console.log("Vendored app loaded successfully")
|
||||
mainDiag("loadVendoredInstance: closing main window")
|
||||
close({ windowLabel: "main" })
|
||||
} catch (err) {
|
||||
const errorMessage = err instanceof Error ? err.message : String(err)
|
||||
|
|
@ -88,12 +107,27 @@ export function useAppInitialization() {
|
|||
}
|
||||
|
||||
const loadVendoredIfMatches = async (instance: Instance) => {
|
||||
if (
|
||||
instance.kind === "vendored" ||
|
||||
instance.bundleName === VENDORED_INSTANCE_CONFIG.bundleName
|
||||
) {
|
||||
mainDiag(
|
||||
`loadVendoredIfMatches: kind=${instance.kind}, displayName=${instance.displayName}, bundleName=${instance.bundleName}`
|
||||
)
|
||||
|
||||
// cloud-org instances share the same bundleName as vendored ("Hoppscotch")
|
||||
// because they use the same app bundle, just loaded with a different org
|
||||
// context via the host parameter. we must check kind, not bundleName, to
|
||||
// distinguish them. without this, restarting the app after connecting to an
|
||||
// org would incorrectly load vendored (no host param = no org context).
|
||||
// "cloud" (default cloud, e.g. hoppscotch.io) also uses the vendored bundle
|
||||
// and doesn't need a download step.
|
||||
if (instance.kind === "vendored" || instance.kind === "cloud") {
|
||||
mainDiag(
|
||||
"loadVendoredIfMatches: matched vendored, calling loadVendoredInstance"
|
||||
)
|
||||
await loadVendoredInstance()
|
||||
} else {
|
||||
} else if (instance.kind === "cloud-org") {
|
||||
// cloud-org: uses the vendored bundle but needs the host parameter so the
|
||||
// webview gets the org context (?org= query param). skip the download
|
||||
// step since cloud-org shares the vendored bundle which is already
|
||||
// available locally.
|
||||
try {
|
||||
statusMessage.value = `Loading ${instance.displayName}...`
|
||||
|
||||
|
|
@ -102,20 +136,18 @@ export function useAppInitialization() {
|
|||
target: instance.serverUrl,
|
||||
})
|
||||
|
||||
await download({ serverUrl: instance.serverUrl })
|
||||
|
||||
// cloud-org instances pass serverUrl as host so window.location.hostname reflects the
|
||||
// org subdomain (like acme.hoppscotch.io). This becomes the source of truth for org
|
||||
// context throughout the app instead of needing to pass state through multiple layers.
|
||||
const host =
|
||||
instance.kind === "cloud-org" ? instance.serverUrl : undefined
|
||||
|
||||
mainDiag(
|
||||
`loadVendoredIfMatches: loading cloud-org instance, bundle=${instance.bundleName}, host=${instance.serverUrl}`
|
||||
)
|
||||
const loadResp = await load({
|
||||
bundleName: instance.bundleName!,
|
||||
host,
|
||||
host: instance.serverUrl,
|
||||
window: { title: "Hoppscotch" },
|
||||
})
|
||||
|
||||
mainDiag(
|
||||
`loadVendoredIfMatches: load result success=${loadResp.success}, label=${loadResp.windowLabel}`
|
||||
)
|
||||
if (!loadResp.success) {
|
||||
throw new Error(`Failed to load ${instance.displayName}`)
|
||||
}
|
||||
|
|
@ -126,6 +158,62 @@ export function useAppInitialization() {
|
|||
})
|
||||
|
||||
console.log(`Successfully loaded instance: ${instance.displayName}`)
|
||||
mainDiag("loadVendoredIfMatches: closing main window")
|
||||
close({ windowLabel: "main" })
|
||||
} catch (err) {
|
||||
const errorMessage = err instanceof Error ? err.message : String(err)
|
||||
console.error(
|
||||
`Failed to load cloud-org instance ${instance.displayName}:`,
|
||||
errorMessage
|
||||
)
|
||||
|
||||
await saveConnectionState({
|
||||
status: "error",
|
||||
target: instance.serverUrl,
|
||||
message: errorMessage,
|
||||
})
|
||||
|
||||
mainDiag(
|
||||
`loadVendoredIfMatches: FAILED to load cloud-org ${instance.displayName}, falling back to vendored. error=${errorMessage}`
|
||||
)
|
||||
console.log("Falling back to vendored instance")
|
||||
await loadVendoredInstance()
|
||||
}
|
||||
} else {
|
||||
// self-hosted or other non-vendored instances: need to download the
|
||||
// bundle from the server before loading
|
||||
try {
|
||||
statusMessage.value = `Loading ${instance.displayName}...`
|
||||
|
||||
await saveConnectionState({
|
||||
status: "connecting",
|
||||
target: instance.serverUrl,
|
||||
})
|
||||
|
||||
await download({ serverUrl: instance.serverUrl })
|
||||
|
||||
mainDiag(
|
||||
`loadVendoredIfMatches: loading non-vendored instance, bundle=${instance.bundleName}`
|
||||
)
|
||||
const loadResp = await load({
|
||||
bundleName: instance.bundleName!,
|
||||
window: { title: "Hoppscotch" },
|
||||
})
|
||||
|
||||
mainDiag(
|
||||
`loadVendoredIfMatches: load result success=${loadResp.success}, label=${loadResp.windowLabel}`
|
||||
)
|
||||
if (!loadResp.success) {
|
||||
throw new Error(`Failed to load ${instance.displayName}`)
|
||||
}
|
||||
|
||||
await saveConnectionState({
|
||||
status: "connected",
|
||||
instance: instance,
|
||||
})
|
||||
|
||||
console.log(`Successfully loaded instance: ${instance.displayName}`)
|
||||
mainDiag("loadVendoredIfMatches: closing main window")
|
||||
close({ windowLabel: "main" })
|
||||
} catch (err) {
|
||||
const errorMessage = err instanceof Error ? err.message : String(err)
|
||||
|
|
@ -140,6 +228,9 @@ export function useAppInitialization() {
|
|||
message: errorMessage,
|
||||
})
|
||||
|
||||
mainDiag(
|
||||
`loadVendoredIfMatches: FAILED to load ${instance.displayName}, falling back to vendored. error=${errorMessage}`
|
||||
)
|
||||
console.log("Falling back to vendored instance")
|
||||
await loadVendoredInstance()
|
||||
}
|
||||
|
|
@ -150,9 +241,19 @@ export function useAppInitialization() {
|
|||
try {
|
||||
statusMessage.value = "Loading application..."
|
||||
|
||||
// Both the main window and the vendored webview's InstanceService
|
||||
// share hoppscotch-unified.store for connection state and recent
|
||||
// instances. The InstanceService's detectCurrentInstanceFromHostname
|
||||
// persists the detected instance (including cloud-org) to this store,
|
||||
// so on restart the main window can resume the correct instance.
|
||||
const connectionState = await persistence.getConnectionState()
|
||||
const recentInstances = await persistence.getRecentInstances()
|
||||
|
||||
mainDiag(`loadRecent: connectionState=${JSON.stringify(connectionState)}`)
|
||||
mainDiag(
|
||||
`loadRecent: connectionState.status=${connectionState?.status ?? "(null)"}, instance.kind=${connectionState?.status === "connected" ? connectionState.instance?.kind : "(n/a)"}, instance.displayName=${connectionState?.status === "connected" ? connectionState.instance?.displayName : "(n/a)"}, recentInstances.length=${recentInstances.length}`
|
||||
)
|
||||
mainDiag(`loadRecent: recentInstances=${JSON.stringify(recentInstances)}`)
|
||||
console.log("Current connection state:", connectionState)
|
||||
console.log("Recent instances:", recentInstances)
|
||||
|
||||
|
|
@ -160,6 +261,9 @@ export function useAppInitialization() {
|
|||
switch (connectionState.status) {
|
||||
case "connected":
|
||||
if (connectionState.instance) {
|
||||
mainDiag(
|
||||
`loadRecent: resuming connected instance: kind=${connectionState.instance.kind}, displayName=${connectionState.instance.displayName}`
|
||||
)
|
||||
statusMessage.value = `Connecting to ${connectionState.instance.displayName}...`
|
||||
try {
|
||||
await loadVendoredIfMatches(connectionState.instance)
|
||||
|
|
|
|||
|
|
@ -12,6 +12,10 @@ import { VERSIONS as STORE_VERSIONS } from "./store"
|
|||
import { STORE_IMPLS as WEB_STORE_IMPLS } from "./store/impl/web"
|
||||
import { STORE_IMPLS as DESKTOP_STORE_IMPLS } from "./store/impl/desktop"
|
||||
|
||||
import { VERSIONS as LOG_VERSIONS } from "./log"
|
||||
import { LOG_IMPLS as WEB_LOG_IMPLS } from "./log/impl/web"
|
||||
import { LOG_IMPLS as DESKTOP_LOG_IMPLS } from "./log/impl/desktop"
|
||||
|
||||
export interface KernelInfo {
|
||||
name: string
|
||||
version: Version
|
||||
|
|
@ -23,6 +27,7 @@ export interface KernelAPI {
|
|||
io: typeof IO_VERSIONS.v1.api
|
||||
relay: typeof RELAY_VERSIONS.v1.api
|
||||
store: typeof STORE_VERSIONS.v1.api
|
||||
log: typeof LOG_VERSIONS.v1.api
|
||||
}
|
||||
|
||||
export type KernelMode = "web" | "desktop"
|
||||
|
|
@ -49,6 +54,7 @@ export function initKernel(mode?: KernelMode): KernelAPI {
|
|||
io: DESKTOP_IO_IMPLS.v1.api,
|
||||
relay: DESKTOP_RELAY_IMPLS.v1.api,
|
||||
store: DESKTOP_STORE_IMPLS.v1.api,
|
||||
log: DESKTOP_LOG_IMPLS.v1.api,
|
||||
}
|
||||
|
||||
window.__KERNEL__ = kernel
|
||||
|
|
@ -63,6 +69,7 @@ export function initKernel(mode?: KernelMode): KernelAPI {
|
|||
io: WEB_IO_IMPLS.v1.api,
|
||||
relay: WEB_RELAY_IMPLS.v1.api,
|
||||
store: WEB_STORE_IMPLS.v1.api,
|
||||
log: WEB_LOG_IMPLS.v1.api,
|
||||
}
|
||||
|
||||
window.__KERNEL__ = kernel
|
||||
|
|
@ -120,4 +127,9 @@ export type {
|
|||
StoredData,
|
||||
StoreEventEmitter,
|
||||
StoreV1,
|
||||
ScopedStore,
|
||||
} from "@store/v/1"
|
||||
|
||||
export { extend as extendStore } from "@store/v/1"
|
||||
|
||||
export type { LogLevel, LogCapability, LogError, LogV1 } from "@log/v/1"
|
||||
|
|
|
|||
5
packages/hoppscotch-kernel/src/log/impl/desktop/index.ts
Normal file
5
packages/hoppscotch-kernel/src/log/impl/desktop/index.ts
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
import { implementation as logV1 } from "./v/1"
|
||||
|
||||
export const LOG_IMPLS = {
|
||||
v1: logV1,
|
||||
} as const
|
||||
193
packages/hoppscotch-kernel/src/log/impl/desktop/v/1.ts
Normal file
193
packages/hoppscotch-kernel/src/log/impl/desktop/v/1.ts
Normal file
|
|
@ -0,0 +1,193 @@
|
|||
import * as E from "fp-ts/Either"
|
||||
|
||||
import type { VersionedAPI } from "@type/versioning"
|
||||
import type { LogV1, LogLevel } from "@log/v/1"
|
||||
|
||||
// in-memory buffer backing the "buffer" capability (same as web impl).
|
||||
// see impl/web/v/1.ts for the full rationale. on desktop the buffer
|
||||
// supplements the disk log: disk gives persistence, buffer gives
|
||||
// programmatic retrieval for future kernel APIs (getLogs(), getLogsByTag())
|
||||
//
|
||||
// window assignment is an intentional debugging hatch for DevTools
|
||||
// inspection. will be internalized once the kernel retrieval API lands
|
||||
const buffer: string[] = []
|
||||
|
||||
if (typeof window !== "undefined") {
|
||||
;(window as any).__DIAG_LOGS__ = buffer
|
||||
;(window as any).__dumpDiagLogs__ = () => buffer.join("\n")
|
||||
}
|
||||
|
||||
const FLUSH_INTERVAL_MS = 500
|
||||
|
||||
// lazy-loaded Tauri invoke. loaded once, shared across all instances
|
||||
let invoke:
|
||||
| (<T>(cmd: string, args?: Record<string, unknown>) => Promise<T>)
|
||||
| null = null
|
||||
let invokePromise: Promise<void> | null = null
|
||||
|
||||
const ensureInvoke = async () => {
|
||||
if (invoke) return
|
||||
if (!invokePromise) {
|
||||
invokePromise = import("@tauri-apps/api/core").then((m) => {
|
||||
invoke = m.invoke
|
||||
})
|
||||
}
|
||||
await invokePromise
|
||||
}
|
||||
|
||||
class TauriLogManager {
|
||||
private static instances: Map<string, TauriLogManager> = new Map()
|
||||
|
||||
// the filename (not full path) passed to the Rust `append_log` command.
|
||||
// the Rust side joins this with `logs_dir()` so writes are always
|
||||
// confined to the correct directory regardless of build type
|
||||
private filename: string
|
||||
private initialized = false
|
||||
private pendingWrites: string[] = []
|
||||
private flushTimer: ReturnType<typeof setTimeout> | null = null
|
||||
|
||||
private constructor(filename: string) {
|
||||
this.filename = filename
|
||||
}
|
||||
|
||||
static new(filename: string): TauriLogManager {
|
||||
if (TauriLogManager.instances.has(filename)) {
|
||||
return TauriLogManager.instances.get(filename)!
|
||||
}
|
||||
const instance = new TauriLogManager(filename)
|
||||
TauriLogManager.instances.set(filename, instance)
|
||||
return instance
|
||||
}
|
||||
|
||||
async init(): Promise<void> {
|
||||
if (this.initialized) return
|
||||
|
||||
try {
|
||||
await ensureInvoke()
|
||||
|
||||
// write a session header so we know which webview this came from
|
||||
const orgCtx =
|
||||
new URLSearchParams(window.location.search).get("org") ?? "(none)"
|
||||
const header = [
|
||||
"",
|
||||
"=".repeat(72),
|
||||
`LOG SESSION START ${new Date().toISOString()}`,
|
||||
` org context : ${orgCtx}`,
|
||||
` href : ${window.location.href}`,
|
||||
` host : ${window.location.host}`,
|
||||
` __KERNEL__ : ${window.__KERNEL__ ? "present" : "MISSING"}`,
|
||||
"=".repeat(72),
|
||||
"",
|
||||
].join("\n")
|
||||
|
||||
await invoke!("append_log", {
|
||||
filename: this.filename,
|
||||
content: header,
|
||||
})
|
||||
|
||||
this.initialized = true
|
||||
|
||||
// flush any writes that accumulated before init completed
|
||||
if (this.pendingWrites.length > 0) {
|
||||
this.scheduleFlush()
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn("[kernel-log] Failed to initialize file logger:", err)
|
||||
}
|
||||
}
|
||||
|
||||
private async flush(): Promise<void> {
|
||||
if (!invoke || this.pendingWrites.length === 0) return
|
||||
|
||||
const batch = this.pendingWrites.join("\n") + "\n"
|
||||
const snapshot = this.pendingWrites
|
||||
this.pendingWrites = []
|
||||
|
||||
try {
|
||||
await invoke("append_log", {
|
||||
filename: this.filename,
|
||||
content: batch,
|
||||
})
|
||||
} catch (err) {
|
||||
// re-queue failed entries (prepend before any new writes that
|
||||
// accumulated during the await) so they're retried on next flush
|
||||
this.pendingWrites = snapshot.concat(this.pendingWrites)
|
||||
console.warn("[kernel-log] Failed to flush logs to disk:", err)
|
||||
this.scheduleFlush()
|
||||
}
|
||||
}
|
||||
|
||||
private scheduleFlush(): void {
|
||||
if (this.flushTimer) return
|
||||
this.flushTimer = setTimeout(() => {
|
||||
this.flushTimer = null
|
||||
this.flush()
|
||||
}, FLUSH_INTERVAL_MS)
|
||||
}
|
||||
|
||||
log(level: string, tag: string, message: string, data?: unknown): void {
|
||||
const ts = new Date().toISOString()
|
||||
const dataPart =
|
||||
data !== undefined
|
||||
? ` ${
|
||||
typeof data === "string"
|
||||
? data
|
||||
: (() => {
|
||||
try {
|
||||
return JSON.stringify(data)
|
||||
} catch {
|
||||
return String(data)
|
||||
}
|
||||
})()
|
||||
}`
|
||||
: ""
|
||||
const line = `[${ts}] [${level.toUpperCase()}] [${tag}] ${message}${dataPart}`
|
||||
|
||||
// 1. console (same as web)
|
||||
if (level === "debug") console.debug(line)
|
||||
else if (level === "warn") console.warn(line)
|
||||
else if (level === "error") console.error(line)
|
||||
else console.log(line)
|
||||
|
||||
// 2. in-memory buffer
|
||||
buffer.push(line)
|
||||
if (buffer.length > 5000) buffer.splice(0, buffer.length - 5000)
|
||||
|
||||
// 3. file (batched)
|
||||
this.pendingWrites.push(line)
|
||||
this.scheduleFlush()
|
||||
}
|
||||
}
|
||||
|
||||
export const implementation: VersionedAPI<LogV1> = {
|
||||
version: { major: 1, minor: 0, patch: 0 },
|
||||
api: {
|
||||
id: "tauri-log",
|
||||
capabilities: new Set(["console", "file", "buffer"]),
|
||||
|
||||
async init(logPath: string) {
|
||||
try {
|
||||
const manager = TauriLogManager.new(logPath)
|
||||
await manager.init()
|
||||
return E.right(undefined)
|
||||
} catch (error) {
|
||||
return E.left({
|
||||
kind: "init",
|
||||
message: error instanceof Error ? error.message : "Unknown error",
|
||||
cause: error,
|
||||
})
|
||||
}
|
||||
},
|
||||
|
||||
async log(
|
||||
logPath: string,
|
||||
level: LogLevel,
|
||||
tag: string,
|
||||
message: string,
|
||||
data?: unknown
|
||||
) {
|
||||
const manager = TauriLogManager.new(logPath)
|
||||
manager.log(level, tag, message, data)
|
||||
},
|
||||
},
|
||||
}
|
||||
5
packages/hoppscotch-kernel/src/log/impl/web/index.ts
Normal file
5
packages/hoppscotch-kernel/src/log/impl/web/index.ts
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
import { implementation as logV1 } from "./v/1"

// registry of web log implementations, keyed by API version
export const LOG_IMPLS = {
  v1: logV1,
} as const
|
||||
96
packages/hoppscotch-kernel/src/log/impl/web/v/1.ts
Normal file
96
packages/hoppscotch-kernel/src/log/impl/web/v/1.ts
Normal file
|
|
@ -0,0 +1,96 @@
|
|||
import * as E from "fp-ts/Either"
|
||||
|
||||
import type { VersionedAPI } from "@type/versioning"
|
||||
import type { LogV1, LogLevel } from "@log/v/1"
|
||||
|
||||
// in-memory buffer backing the "buffer" capability (see LogCapability).
|
||||
// console.log is fire-and-forget with no retrieval path (no console.getAll()),
|
||||
// so this buffer exists for log introspection: future kernel log iterations
|
||||
// will expose retrieval APIs (getLogs(), getLogsByTag()) for in-app
|
||||
// diagnostics, "send logs to support" flows, and test assertions.
|
||||
//
|
||||
// the window assignment below is an intentional debugging hatch so the
|
||||
// buffer can be inspected from DevTools. it will be internalized once the
|
||||
// proper kernel retrieval API lands, but the buffer itself stays as a
|
||||
// declared capability in v1
|
||||
// capped in-memory log buffer (trimmed to 5000 lines by the log path below)
const buffer: string[] = []

if (typeof window !== "undefined") {
  // DevTools debugging hatch: inspect the raw buffer, or dump it as one
  // newline-joined string, straight from the browser console
  ;(window as any).__DIAG_LOGS__ = buffer
  ;(window as any).__dumpDiagLogs__ = () => buffer.join("\n")
}
|
||||
|
||||
class BrowserLogManager {
|
||||
private static instance: BrowserLogManager
|
||||
|
||||
private constructor() {}
|
||||
|
||||
static new(): BrowserLogManager {
|
||||
if (!BrowserLogManager.instance) {
|
||||
BrowserLogManager.instance = new BrowserLogManager()
|
||||
}
|
||||
return BrowserLogManager.instance
|
||||
}
|
||||
|
||||
private format(
|
||||
level: string,
|
||||
tag: string,
|
||||
message: string,
|
||||
data?: unknown
|
||||
): string {
|
||||
const ts = new Date().toISOString()
|
||||
const dataPart =
|
||||
data !== undefined
|
||||
? ` ${
|
||||
typeof data === "string"
|
||||
? data
|
||||
: (() => {
|
||||
try {
|
||||
return JSON.stringify(data)
|
||||
} catch {
|
||||
return String(data)
|
||||
}
|
||||
})()
|
||||
}`
|
||||
: ""
|
||||
return `[${ts}] [${level.toUpperCase()}] [${tag}] ${message}${dataPart}`
|
||||
}
|
||||
|
||||
log(level: string, tag: string, message: string, data?: unknown): void {
|
||||
const line = this.format(level, tag, message, data)
|
||||
|
||||
// write to appropriate console method
|
||||
if (level === "debug") console.debug(line)
|
||||
else if (level === "warn") console.warn(line)
|
||||
else if (level === "error") console.error(line)
|
||||
else console.log(line)
|
||||
|
||||
// push to in-memory buffer
|
||||
buffer.push(line)
|
||||
if (buffer.length > 5000) buffer.splice(0, buffer.length - 5000)
|
||||
}
|
||||
}
|
||||
|
||||
export const implementation: VersionedAPI<LogV1> = {
|
||||
version: { major: 1, minor: 0, patch: 0 },
|
||||
api: {
|
||||
id: "browser-log",
|
||||
capabilities: new Set(["console", "buffer"]),
|
||||
|
||||
// web doesn't need file init, but the API is consistent
|
||||
async init(_logPath: string) {
|
||||
return E.right(undefined)
|
||||
},
|
||||
|
||||
async log(
|
||||
_logPath: string,
|
||||
level: LogLevel,
|
||||
tag: string,
|
||||
message: string,
|
||||
data?: unknown
|
||||
) {
|
||||
const manager = BrowserLogManager.new()
|
||||
manager.log(level, tag, message, data)
|
||||
},
|
||||
},
|
||||
}
|
||||
9
packages/hoppscotch-kernel/src/log/index.ts
Normal file
9
packages/hoppscotch-kernel/src/log/index.ts
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
import { v1 } from "./v/1"

export type { LogV1, LogLevel, LogCapability, LogError } from "./v/1"

// all published log API versions
export const VERSIONS = {
  v1,
} as const

// alias for the most recent version
export const latest = v1
|
||||
46
packages/hoppscotch-kernel/src/log/v/1.ts
Normal file
46
packages/hoppscotch-kernel/src/log/v/1.ts
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
import type { VersionedAPI } from "@type/versioning"
|
||||
import * as E from "fp-ts/Either"
|
||||
|
||||
export type LogLevel = "debug" | "info" | "warn" | "error"

// "console": writes to the browser console (fire-and-forget, no retrieval)
// "file": writes to disk via Tauri append_log (desktop only)
// "buffer": in-memory circular buffer for log introspection. backs future
//   retrieval APIs (getLogs(), getLogsByTag()) for in-app diagnostics,
//   "send logs to support" flows, and test assertions. web declares
//   ["console", "buffer"], desktop declares all three
export type LogCapability = "console" | "file" | "buffer"

// "init": failure opening/creating the log file. "write": failure
// appending to it. both carry the underlying cause when available
export type LogError =
  | { kind: "init"; message: string; cause?: unknown }
  | { kind: "write"; message: string; cause?: unknown }

export interface LogV1 {
  // implementation identifier (e.g. "tauri-log", "browser-log")
  readonly id: string
  // which of console/file/buffer this implementation supports
  readonly capabilities: Set<LogCapability>

  // on web this is a no-op. on desktop it opens/creates the log file
  // at `logPath` for persistent logging
  init(logPath: string): Promise<E.Either<LogError, void>>

  // fire-and-forget: logging should never block the caller.
  // on web writes to console only. on desktop writes to console and file
  log(
    logPath: string,
    level: LogLevel,
    tag: string,
    message: string,
    data?: unknown
  ): Promise<void>
}
|
||||
|
||||
// placeholder default implementation: init fails loudly so a missing
// platform impl is detected at startup, while log stays a silent no-op
// (logging must never throw or block the caller)
export const v1: VersionedAPI<LogV1> = {
  version: { major: 1, minor: 0, patch: 0 },
  api: {
    id: "default",
    capabilities: new Set(),

    init: async () => E.left({ kind: "init", message: "Not implemented" }),
    log: async () => {},
  },
}
|
||||
|
|
@ -128,6 +128,45 @@ export interface StoreV1 {
|
|||
): Promise<StoreEventEmitter<StoreEvents>>
|
||||
}
|
||||
|
||||
// key-value view of a StoreV1 bound to a fixed store path and namespace.
// errors surface as exceptions (set/remove) or null (get) instead of Either
export interface ScopedStore {
  // true when the underlying store initializes successfully
  isAvailable(): Promise<boolean>
  set(key: string, value: unknown): Promise<void>
  get<T>(key: string): Promise<T | null>
  remove(key: string): Promise<void>
}
|
||||
|
||||
export function extend(
|
||||
store: StoreV1,
|
||||
storePath: string,
|
||||
namespace: string
|
||||
): ScopedStore {
|
||||
return {
|
||||
async isAvailable(): Promise<boolean> {
|
||||
try {
|
||||
return E.isRight(await store.init(storePath))
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
},
|
||||
|
||||
async set(key: string, value: unknown): Promise<void> {
|
||||
const result = await store.set(storePath, namespace, key, value)
|
||||
if (E.isLeft(result)) throw new Error(result.left.message)
|
||||
},
|
||||
|
||||
async get<T>(key: string): Promise<T | null> {
|
||||
const result = await store.get<T>(storePath, namespace, key)
|
||||
if (E.isLeft(result)) return null
|
||||
return result.right ?? null
|
||||
},
|
||||
|
||||
async remove(key: string): Promise<void> {
|
||||
const result = await store.remove(storePath, namespace, key)
|
||||
if (E.isLeft(result)) throw new Error(result.left.message)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export const v1: VersionedAPI<StoreV1> = {
|
||||
version: { major: 1, minor: 0, patch: 0 },
|
||||
api: {
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@
|
|||
"@io/*": ["src/io/*"],
|
||||
"@relay/*": ["src/relay/*"],
|
||||
"@store/*": ["src/store/*"],
|
||||
"@log/*": ["src/log/*"],
|
||||
"@type/*": ["src/type/*"],
|
||||
"@util/*": ["src/util/*"]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -29,6 +29,7 @@ export default defineConfig({
|
|||
'@io': resolve(__dirname, './src/io'),
|
||||
'@relay': resolve(__dirname, './src/relay'),
|
||||
'@store': resolve(__dirname, './src/store'),
|
||||
'@log': resolve(__dirname, './src/log'),
|
||||
'@type': resolve(__dirname, './src/type'),
|
||||
'@util': resolve(__dirname, './src/util')
|
||||
}
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@
|
|||
"@hoppscotch/common": "workspace:^",
|
||||
"@hoppscotch/data": "workspace:^",
|
||||
"@hoppscotch/kernel": "workspace:^",
|
||||
"@hoppscotch/plugin-appload": "github:CuriousCorrelation/tauri-plugin-appload#168ff9533258a56de184fb69ad32f8a7f61bae0d",
|
||||
"@hoppscotch/plugin-appload": "github:CuriousCorrelation/tauri-plugin-appload#0d58d53be2bc75aeb5916bd0d77794fd209426af",
|
||||
"@hoppscotch/ui": "0.2.5",
|
||||
"@import-meta-env/unplugin": "0.6.3",
|
||||
"@tauri-apps/api": "2.1.1",
|
||||
|
|
|
|||
|
|
@ -1067,8 +1067,8 @@ importers:
|
|||
specifier: workspace:^
|
||||
version: link:../hoppscotch-kernel
|
||||
'@hoppscotch/plugin-appload':
|
||||
specifier: github:CuriousCorrelation/tauri-plugin-appload#168ff9533258a56de184fb69ad32f8a7f61bae0d
|
||||
version: '@CuriousCorrelation/plugin-appload@https://codeload.github.com/CuriousCorrelation/tauri-plugin-appload/tar.gz/168ff9533258a56de184fb69ad32f8a7f61bae0d'
|
||||
specifier: github:CuriousCorrelation/tauri-plugin-appload#0d58d53be2bc75aeb5916bd0d77794fd209426af
|
||||
version: '@CuriousCorrelation/plugin-appload@https://codeload.github.com/CuriousCorrelation/tauri-plugin-appload/tar.gz/0d58d53be2bc75aeb5916bd0d77794fd209426af'
|
||||
'@hoppscotch/ui':
|
||||
specifier: 0.2.5
|
||||
version: 0.2.5(eslint@9.39.2(jiti@2.6.1))(terser@5.44.1)(typescript@5.9.3)(vite@7.3.1(@types/node@25.5.0)(jiti@2.6.1)(sass@1.98.0)(terser@5.44.1)(yaml@2.8.2))(vue@3.5.30(typescript@5.9.3))
|
||||
|
|
@ -1406,8 +1406,8 @@ importers:
|
|||
specifier: workspace:^
|
||||
version: link:../hoppscotch-kernel
|
||||
'@hoppscotch/plugin-appload':
|
||||
specifier: github:CuriousCorrelation/tauri-plugin-appload#168ff9533258a56de184fb69ad32f8a7f61bae0d
|
||||
version: '@CuriousCorrelation/plugin-appload@https://codeload.github.com/CuriousCorrelation/tauri-plugin-appload/tar.gz/168ff9533258a56de184fb69ad32f8a7f61bae0d'
|
||||
specifier: github:CuriousCorrelation/tauri-plugin-appload#0d58d53be2bc75aeb5916bd0d77794fd209426af
|
||||
version: '@CuriousCorrelation/plugin-appload@https://codeload.github.com/CuriousCorrelation/tauri-plugin-appload/tar.gz/0d58d53be2bc75aeb5916bd0d77794fd209426af'
|
||||
'@hoppscotch/ui':
|
||||
specifier: 0.2.5
|
||||
version: 0.2.5(eslint@9.39.2(jiti@2.6.1))(terser@5.44.1)(typescript@5.9.3)(vite@7.3.1(@types/node@25.5.0)(jiti@2.6.1)(sass@1.98.0)(terser@5.44.1)(yaml@2.8.2))(vue@3.5.30(typescript@5.9.3))
|
||||
|
|
@ -1781,6 +1781,10 @@ packages:
|
|||
graphql:
|
||||
optional: true
|
||||
|
||||
'@CuriousCorrelation/plugin-appload@https://codeload.github.com/CuriousCorrelation/tauri-plugin-appload/tar.gz/0d58d53be2bc75aeb5916bd0d77794fd209426af':
|
||||
resolution: {tarball: https://codeload.github.com/CuriousCorrelation/tauri-plugin-appload/tar.gz/0d58d53be2bc75aeb5916bd0d77794fd209426af}
|
||||
version: 0.1.0
|
||||
|
||||
'@CuriousCorrelation/plugin-appload@https://codeload.github.com/CuriousCorrelation/tauri-plugin-appload/tar.gz/168ff9533258a56de184fb69ad32f8a7f61bae0d':
|
||||
resolution: {tarball: https://codeload.github.com/CuriousCorrelation/tauri-plugin-appload/tar.gz/168ff9533258a56de184fb69ad32f8a7f61bae0d}
|
||||
version: 0.1.0
|
||||
|
|
@ -13034,6 +13038,10 @@ snapshots:
|
|||
optionalDependencies:
|
||||
graphql: 16.13.1
|
||||
|
||||
'@CuriousCorrelation/plugin-appload@https://codeload.github.com/CuriousCorrelation/tauri-plugin-appload/tar.gz/0d58d53be2bc75aeb5916bd0d77794fd209426af':
|
||||
dependencies:
|
||||
'@tauri-apps/api': 2.9.1
|
||||
|
||||
'@CuriousCorrelation/plugin-appload@https://codeload.github.com/CuriousCorrelation/tauri-plugin-appload/tar.gz/168ff9533258a56de184fb69ad32f8a7f61bae0d':
|
||||
dependencies:
|
||||
'@tauri-apps/api': 2.9.1
|
||||
|
|
|
|||
Loading…
Reference in a new issue