Binary file modified: web-console/bun.lockb (binary content not shown)
2 changes: 1 addition & 1 deletion web-console/package.json
@@ -37,7 +37,7 @@
"@skeletonlabs/skeleton": "^3.0.0-next.10",
"@skeletonlabs/skeleton-svelte": "^1.0.0-next.18",
"@square/svelte-store": "^1.0.18",
"@streamparser/json": "^0.0.21",
"@streamparser/json": "^0.0.22",
"@svelte-bin/clipboard": "^0.1.4",
"@sveltejs/adapter-auto": "^3.3.1",
"@sveltejs/adapter-static": "^3.0.8",
@@ -13,8 +13,8 @@
import { isPipelineInteractive } from '$lib/functions/pipelines/status'
import type { SQLValueJS } from '$lib/types/sql.ts'
import {
CustomJSONParserTransformStream,
parseCancellable
BatchingWritableStream,
CustomJSONParserTransformStream
} from '$lib/functions/pipelines/changeStream'
import invariant from 'tiny-invariant'
import WarningBanner from '$lib/components/pipelines/editor/WarningBanner.svelte'
@@ -104,44 +104,47 @@
}
}
}
const { cancel } = parseCancellable(
result,
{
pushChanges,
onBytesSkipped: (skippedBytes) => {
if (!adhocQueries[pipelineName].queries[i]?.result) {
return
}
adhocQueries[pipelineName].queries[i].result.totalSkippedBytes += skippedBytes
},
onParseEnded: () => {
if (!adhocQueries[pipelineName].queries[i]) {
return
}
// Add field for the next query if the last query did not yield an error right away
if (
adhocQueries[pipelineName].queries.length === i + 1 &&
((row) => !row || isDataRow(row))(
adhocQueries[pipelineName].queries[i].result?.rows().at(0)
)
) {
adhocQueries[pipelineName].queries.push({ query: '' })
}
adhocQueries[pipelineName].queries[i].progress = false
},
onNetworkError(e, injectValue) {
injectValue({ error: e.message })
const sink = new BatchingWritableStream({
pushChanges,
onParseEnded: () => {
if (!adhocQueries[pipelineName].queries[i]) {
return
}
// Add field for the next query if the last query did not yield an error right away
if (
adhocQueries[pipelineName].queries.length === i + 1 &&
((row) => !row || isDataRow(row))(
adhocQueries[pipelineName].queries[i].result?.rows().at(0)
)
) {
adhocQueries[pipelineName].queries.push({ query: '' })
}
adhocQueries[pipelineName].queries[i].progress = false
},
new CustomJSONParserTransformStream<Record<string, SQLValueJS>>({
paths: ['$'],
separator: ''
}),
{
bufferSize: 8 * 1024 * 1024
onNetworkError(e, injectValue) {
injectValue({ error: e.message })
}
)
adhocQueries[pipelineName].queries[i].result.endResultStream = cancel
})
result.stream
.pipeThrough(
new CustomJSONParserTransformStream<Record<string, SQLValueJS>>({
paths: ['$'],
separator: '',
onBytesSkipped: (skippedBytes) => {
if (!adhocQueries[pipelineName].queries[i]?.result) {
return
}
adhocQueries[pipelineName].queries[i].result.totalSkippedBytes += skippedBytes
}
})
)
.pipeTo(sink)
adhocQueries[pipelineName].queries[i].result.endResultStream = () => {
try {
result.cancel()
} catch {}
sink.abort()
}
}
</script>

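The hunks above, from the ad-hoc query editor component, replace the callback-driven `parseCancellable` helper with an explicit Web Streams pipeline: the HTTP result stream is piped through `CustomJSONParserTransformStream` into a `BatchingWritableStream` sink, and teardown now cancels the source and aborts the sink instead of invoking a returned `cancel` function. A minimal, self-contained sketch of that pattern using only standard Web Streams APIs follows; the batching sink, the naive JSON transform, and the AbortController-based cancellation are illustrative stand-ins, not the project's helpers.

```ts
// Illustrative stand-in for a batching sink: buffers parsed rows and flushes
// them to a UI callback in groups, so the page is not updated per row.
function makeBatchingSink<T>(pushChanges: (rows: T[]) => void, batchSize = 64): WritableStream<T> {
  let batch: T[] = []
  const flush = () => {
    if (batch.length > 0) {
      pushChanges(batch)
      batch = []
    }
  }
  return new WritableStream<T>({
    write(chunk) {
      batch.push(chunk)
      if (batch.length >= batchSize) flush()
    },
    close: flush,
    abort: flush
  })
}

// Illustrative stand-in for the JSON transform: assumes one JSON value per
// chunk (the real CustomJSONParserTransformStream parses incrementally and
// reports malformed input via onBytesSkipped).
function makeJsonTransform<T>(): TransformStream<string, T> {
  return new TransformStream<string, T>({
    transform(chunk, controller) {
      controller.enqueue(JSON.parse(chunk) as T)
    }
  })
}

// Wiring and cancellation mirroring the diff: pipeThrough the parser,
// pipeTo the sink, and return a function that tears the pipeline down.
function consume<T>(source: ReadableStream<string>, pushChanges: (rows: T[]) => void) {
  const controller = new AbortController()
  const done = source
    .pipeThrough(makeJsonTransform<T>())
    .pipeTo(makeBatchingSink(pushChanges), { signal: controller.signal })
    .catch(() => {}) // an aborted pipe rejects; swallow the rejection here
  return async () => {
    controller.abort() // cancels the source and aborts the sink
    await done
  }
}
```

The real helpers additionally thread the `onParseEnded`, `onNetworkError`, and `onBytesSkipped` callbacks through these stages, as the diff shows.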
144 changes: 83 additions & 61 deletions web-console/src/lib/components/pipelines/editor/TabChangeStream.svelte
@@ -54,72 +54,94 @@
pipelinesRelations[pipelineName][relationName].cancelStream = undefined
return undefined
}
const { cancel } = parseCancellable(
result,
{
pushChanges: (rows: XgressEntry[]) => {
const initialLen = changeStream[pipelineName].rows.length
const lastRelationName = ((headerIdx) =>
headerIdx !== undefined
? ((header) => (header && 'relationName' in header ? header.relationName : null))(
changeStream[pipelineName].rows[headerIdx]
)
: null)(changeStream[pipelineName].headers.at(-1))
const offset = pushAsCircularBuffer(
() => changeStream[pipelineName].rows,
bufferSize,
(v: Row) => v
)(
[
...(relationName !== lastRelationName
? ([
{
relationName,
columns: Object.keys(
((row) => ('insert' in row ? row.insert : row.delete))(rows[0])
).map((name) => {
return pipelinesRelations[pipelineName][relationName].fields[
normalizeCaseIndependentName({ name })
]
})
}
] as Row[])
: [])
].concat(rows)

const sink = new BatchingWritableStream({
pushChanges: (rows: XgressEntry[]) => {
const initialLen = changeStream[pipelineName].rows.length
const lastRelationName = ((headerIdx) =>
headerIdx !== undefined
? ((header) => (header && 'relationName' in header ? header.relationName : null))(
changeStream[pipelineName].rows[headerIdx]
)
: null)(changeStream[pipelineName].headers.at(-1))
const push = pushAsCircularBuffer(
() => changeStream[pipelineName].rows,
bufferSize,
(v: Row) => v
)(
[
...(relationName !== lastRelationName
? ([
{
relationName,
columns: Object.keys(
((row) => ('insert' in row ? row.insert : row.delete))(rows[0])
).map((name) => {
return pipelinesRelations[pipelineName][relationName].fields[
normalizeCaseIndependentName({ name })
]
})
}
] as Row[])
: [])
].concat(rows)
)
let pushOffset = push.offset
let replacementHeader: Row[] = []
if (push.offset > 0) {
// Re-insert the header of the beginning of the list, if needed
const firstHeaderIndex = Math.max(
changeStream[pipelineName].headers.findIndex((v) => v > push.offset) - 1,
0
)
const row = changeStream[pipelineName].rows.at(
changeStream[pipelineName].headers[firstHeaderIndex]
)
if (relationName !== lastRelationName) {
changeStream[pipelineName].headers.push(initialLen)
const firstNewRow = changeStream[pipelineName].rows.at(push.offset)
if (row && 'relationName' in row && firstNewRow && !('relationName' in firstNewRow)) {
replacementHeader = [row]
--pushOffset
changeStream[pipelineName].headers.splice(firstHeaderIndex + 1, 0, pushOffset)
}
changeStream[pipelineName].headers = changeStream[pipelineName].headers
.map((i) => i - offset)
.filter((i) => i >= 0)
},
onBytesSkipped: (skippedBytes) => {
pushAsCircularBuffer(
() => changeStream[pipelineName].rows,
bufferSize,
(v) => v
)([{ relationName, skippedBytes }])
changeStream[pipelineName].totalSkippedBytes += skippedBytes
},
onParseEnded: () =>
(pipelinesRelations[pipelineName][relationName].cancelStream = undefined)
}
push(replacementHeader)
if (relationName !== lastRelationName) {
changeStream[pipelineName].headers.push(initialLen)
}
changeStream[pipelineName].headers = changeStream[pipelineName].headers
.map((i) => i - pushOffset)
.filter((i) => i >= 0)
},
new CustomJSONParserTransformStream<XgressEntry>({
paths: ['$.json_data.*'],
separator: ''
}),
{
bufferSize: 8 * 1024 * 1024
}
)
return () => {
cancel()
onParseEnded: () =>
(pipelinesRelations[pipelineName][relationName].cancelStream = undefined)
})

result.stream
.pipeThrough(
new CustomJSONParserTransformStream<XgressEntry>({
paths: ['$.json_data.*'],
separator: '',
onBytesSkipped: (skippedBytes) => {
pushAsCircularBuffer(
() => changeStream[pipelineName].rows,
bufferSize,
(v) => v
)([{ relationName, skippedBytes }])
changeStream[pipelineName].totalSkippedBytes += skippedBytes
}
})
)
.pipeTo(sink)
return async () => {
try {
result.cancel()
} catch {}
await sink.abort()
}
})
return () => {
request.then((cancel) => {
cancel?.()
request.then(async (cancel) => {
await cancel?.()
pipelinesRelations[pipelineName][relationName].cancelStream = undefined
;({ rows: changeStream[pipelineName].rows, headers: changeStream[pipelineName].headers } =
filterOutRows(
@@ -173,8 +195,8 @@
import { Pane, PaneGroup, PaneResizer } from 'paneforge'
import type { Field, Relation } from '$lib/services/manager'
import {
BatchingWritableStream,
CustomJSONParserTransformStream,
parseCancellable,
pushAsCircularBuffer
} from '$lib/functions/pipelines/changeStream'
import JSONbig from 'true-json-bigint'
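TabChangeStream.svelte keeps received rows in a bounded circular buffer; the value returned by `pushAsCircularBuffer` carries the offset by which previously stored header indices must be shifted (and dropped once negative), which is what the re-based `headers` arrays above implement. An illustrative stand-in for that bounded-push behaviour, not the project's implementation:

```ts
// Illustrative bounded push: appends items, evicts from the front when the
// capacity is exceeded, and reports how many items were evicted so that
// stored indices (such as header positions) can be re-based by the caller.
function pushBounded<T>(buffer: T[], capacity: number, items: T[]): number {
  buffer.push(...items)
  const evicted = Math.max(buffer.length - capacity, 0)
  if (evicted > 0) {
    buffer.splice(0, evicted)
  }
  return evicted
}

// Usage mirroring the header re-basing in the diff (hypothetical variables):
// const offset = pushBounded(rows, bufferSize, newRows)
// headers = headers.map((i) => i - offset).filter((i) => i >= 0)
```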
56 changes: 29 additions & 27 deletions web-console/src/lib/components/pipelines/editor/TabLogs.svelte
@@ -26,7 +26,7 @@
import LogsStreamList from '$lib/components/pipelines/editor/LogsStreamList.svelte'

import {
parseCancellable,
BatchingWritableStream,
pushAsCircularBuffer,
SplitNewlineTransformStream
} from '$lib/functions/pipelines/changeStream'
@@ -113,36 +113,38 @@
tryRestartStream(pipelineName, 5000)
return
}
const { cancel } = parseCancellable(
result,
{
pushChanges: (changes: string[]) => {
const droppedNum = pushAsCircularBuffer(
() => streams[pipelineName].rows,
bufferSize,
(v: string) => v
)(changes)
streams[pipelineName].firstRowIndex += droppedNum
},
onParseEnded: (reason) => {
streams[pipelineName].stream = { closed: {} }
if (reason === 'cancelled' || !areLogsExpected(pipelineStatusName)) {
return
}
tryRestartStream(pipelineName, 5000)
},
onBytesSkipped(bytes) {
streams[pipelineName].totalSkippedBytes += bytes
}
const sink = new BatchingWritableStream({
pushChanges: (changes: string[]) => {
const droppedNum = pushAsCircularBuffer(
() => streams[pipelineName].rows,
bufferSize,
(v: string) => v
)(changes)
streams[pipelineName].firstRowIndex += droppedNum.offset
},
new SplitNewlineTransformStream(),
{
bufferSize: 16 * 1024 * 1024
onParseEnded: (reason) => {
streams[pipelineName].stream = { closed: {} }
if (reason === 'cancelled' || !areLogsExpected(pipelineStatusName)) {
return
}
tryRestartStream(pipelineName, 5000)
}
)
// onBytesSkipped(bytes) {
// streams[pipelineName].totalSkippedBytes += bytes
// }
})
result.stream.pipeThrough(new SplitNewlineTransformStream()).pipeTo(sink)
streams[pipelineName] = {
firstRowIndex: 0,
stream: { open: result.stream, stop: cancel },
stream: {
open: result.stream,
stop: () => {
try {
result.cancel()
} catch {}
sink.abort()
}
},
rows: [],
rowBoundaries: [],
totalSkippedBytes: 0
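TabLogs.svelte now pipes the log response through `SplitNewlineTransformStream` before handing lines to the batching sink. That splitter is a project-local helper; a typical newline-splitting `TransformStream`, shown here purely as an illustration, buffers the trailing partial line between chunks:

```ts
// Illustrative newline splitter: emits one complete line per output chunk and
// keeps any partial trailing line until the next chunk (or flush) arrives.
function makeLineSplitter(): TransformStream<string, string> {
  let tail = ''
  return new TransformStream<string, string>({
    transform(chunk, controller) {
      const parts = (tail + chunk).split('\n')
      tail = parts.pop() ?? ''
      for (const line of parts) controller.enqueue(line)
    },
    flush(controller) {
      if (tail) controller.enqueue(tail)
    }
  })
}

// Usage with a hypothetical fetch response body:
// response.body.pipeThrough(new TextDecoderStream()).pipeThrough(makeLineSplitter())
```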