Compare commits

..

No commits in common. "e027bc234ef59c757ca490d70e38625e092ee908" and "e9af92c00fd48d5ec2bbf9d6cfb1720a0784197a" have entirely different histories.

6 changed files with 40 additions and 80 deletions

View file

@@ -1,5 +1,4 @@
import { IterLines } from "./util.ts";
import { writeLiveRecord } from "./write-live.ts";
type PlcOperation = unknown;
@@ -16,8 +15,6 @@ const sleep = (timeout: number) => new Promise((r) => setTimeout(r, timeout));
export class DirectoryTailer {
public abort = new AbortController();
lastBatchCIDs = new Set<string>();
latestDate: string | undefined;
saveRaw: boolean = true; // set to false in production so you don't double-store plc data
@@ -37,7 +34,13 @@ export class DirectoryTailer {
)
);
await writeLiveRecord(entry, raw);
const didplc = "did:plc:".length;
const prefix = entry.did.substring(didplc, didplc + 2);
const out = "./data/plc/live/" + prefix;
await Deno.writeTextFile(out, raw + "\n", {
append: true,
});
}
async fetchExports() {
@@ -69,19 +72,13 @@ export class DirectoryTailer {
let entry: ExportEntry | undefined;
const promises = [];
const cids = new Set<string>();
for (const line of new IterLines(text)) {
entry = JSON.parse(line) as unknown as ExportEntry;
if (this.lastBatchCIDs.has(entry.cid)) continue;
this.latestDate = entry.createdAt;
cids.add(entry.cid);
promises.push(this.processRecord(entry, line));
}
await Promise.all(promises);
this.lastBatchCIDs = cids;
if (entry) {
this.latestDate = entry.createdAt;
const write = Deno.writeTextFile("./data/latest-date", this.latestDate);

View file

@@ -1,28 +1,8 @@
import { TextLineStream } from "jsr:@std/streams@1/text-line-stream";
import { ExportEntry } from "./directory-tailer.ts";
import "./write-compacted.ts";
import "./write-live.ts";
const addOperations = async (
stream: ReadableStream<Uint8Array>,
did: string,
operations: ExportEntry[]
) => {
const lines = stream
.pipeThrough(new TextDecoderStream())
.pipeThrough(new TextLineStream());
for await (const line of lines.values()) {
if (!line.startsWith(did)) continue;
const [_did, _createdAt, _cid, rawEntry] = line.split("\u001f", 4);
const entry = JSON.parse(rawEntry) as unknown as ExportEntry;
operations.push(entry);
}
};
export const getOperations = async (did: string) => {
const operations: ExportEntry[] = [];
const operations = [];
const didplc = "did:plc:".length;
const prefix = did.substring(didplc, didplc + 2);
@@ -34,11 +14,6 @@ export const getOperations = async (did: string) => {
a.name < b.name ? -1 : a.name > b.name ? 1 : 0
);
for (const entry of compactedEntries) {
// TODO: if we assume that compacted files are *sorted*, we get ordering by did and createdAt,
// which gives us a complete op log for each did. we can store a little size prefix for a block
// and seek over dids we don't care about, giving us whole contiguous op logs in a compacted file.
// but for now we just un-zstd it and skip individual lines we don't care about the same as a live file
const process = new Deno.Command("zstd", {
args: [
"-d",
@@ -51,17 +26,31 @@ export const getOperations = async (did: string) => {
stderr: "piped",
}).spawn();
await addOperations(process.stdout, did, operations);
const lines = process.stdout
.pipeThrough(new TextDecoderStream())
.pipeThrough(new TextLineStream());
for await (const line of lines.values()) {
const entry = JSON.parse(line) as unknown as ExportEntry;
if (entry.did !== did) continue;
operations.push(entry);
}
await process.status;
}
for (const dir of ["compacting", "live"]) {
try {
const f = await Deno.open(`./data/plc/${dir}/${prefix}`, { read: true });
await addOperations(f.readable, did, operations);
} catch (_err) {
// ignore
try {
const f = await Deno.open(`./data/plc/live/${prefix}`, { read: true });
const lines = f.readable
.pipeThrough(new TextDecoderStream())
.pipeThrough(new TextLineStream());
for await (const line of lines.values()) {
const entry = JSON.parse(line) as unknown as ExportEntry;
if (entry.did !== did) continue;
operations.push(entry);
}
} catch (_err) {
// ignore
}
return operations;

View file

@@ -1,3 +1,7 @@
import { ensureDir } from "jsr:@std/fs@1";
import { DirectoryTailer } from "./directory-tailer.ts";
// Make sure the on-disk layout exists before the tailer starts writing.
// "compacted" holds zstd-compressed historical batches; "live" holds the
// append-only files the tailer writes as new PLC operations arrive.
await ensureDir("./data/plc/compacted");
await ensureDir("./data/plc/live");
// Singleton tailer instance shared by the rest of the app.
export const tailer = new DirectoryTailer();

View file

@@ -17,15 +17,14 @@ export const catchUp = async () => {
lineReader.releaseLock();
}
tailer.lastBatchCIDs.clear();
let lastLine: string | undefined;
for await (const line of lineStream.values()) {
try {
const entry = JSON.parse(line) as unknown as ExportEntry;
tailer.latestDate = entry.createdAt;
tailer.lastBatchCIDs.add(entry.cid);
} catch (_err) {
// ignore
}
lastLine = line;
}
if (lastLine) {
const entry = JSON.parse(lastLine) as unknown as ExportEntry;
tailer.latestDate = entry.createdAt;
}
};

View file

@@ -1,10 +0,0 @@
import { ensureDir } from "jsr:@std/fs@1";
// TODO: automate compaction here. Planned pipeline:
// 1. take note of latest-date
// 2. move data/plc/live/* to data/plc/compacting/*
// 3. zstd data/plc/compacting/*
// 4. move data/plc/compacting/*.zstd to data/plc/compacted/<date>/*.zstd
// For now this module only guarantees the target directories exist.
await ensureDir("./data/plc/compacted");
await ensureDir("./data/plc/compacting");

View file

@@ -1,19 +0,0 @@
import { ensureDir } from "jsr:@std/fs@1";
import { ExportEntry } from "./directory-tailer.ts";
// Ensure the live-record directory exists before any writeLiveRecord call.
await ensureDir("./data/plc/live");
/**
 * Appends one PLC operation to the live log file for its DID bucket.
 *
 * Records are bucketed by the two characters that follow the "did:plc:"
 * prefix of the DID, so each bucket file collects operations for DIDs
 * sharing that two-character prefix. Each line is the unit-separator
 * (U+001F) join of did, createdAt, cid, and the raw JSON line.
 *
 * @param entry parsed export entry (provides did/createdAt/cid)
 * @param raw   the original raw JSON line for this entry
 */
export async function writeLiveRecord(entry: ExportEntry, raw: string) {
  const start = "did:plc:".length;
  // Two-character shard key taken from just past the method prefix.
  const bucket = entry.did.substring(start, start + 2);
  const line = [entry.did, entry.createdAt, entry.cid, raw].join("\u001f") + "\n";
  // Append-only: concurrent runs extend the bucket file rather than clobber it.
  await Deno.writeTextFile("./data/plc/live/" + bucket, line, { append: true });
}