Basic Custom Logger
Pass a custom logger to the source to configure logging for your entire pipeline.
import { createTarget } from "@subsquid/pipes";
import { evmPortalSource } from "@subsquid/pipes/evm";
import pino from "pino";

async function main() {
  // Pretty-print logs to the console during development
  const transport = pino.transport({
    target: "pino-pretty",
    options: {
      colorize: true,
      translateTime: "HH:MM:ss",
    },
  });

  const source = evmPortalSource({
    portal: "https://portal.sqd.dev/datasets/ethereum-mainnet",
    logger: pino(transport),
  });

  const target = createTarget({
    write: async ({ logger, read }) => {
      for await (const { data } of read()) {
        logger.info({ count: data.length }, "Processed batch");
      }
    },
  });

  await source.pipeTo(target);
}

void main();
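Both pino and pino-pretty ship as separate npm packages, so install them alongside @subsquid/pipes (for example, npm i pino pino-pretty) before running this example.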
Integration with Cloud Services
You can use any Pino transport to send logs to cloud services; just pass the configured logger to the source. The examples below cover GCP Cloud Logging, Sentry, and combining multiple transports.

GCP Cloud Logging
import { createTarget } from "@subsquid/pipes";
import { evmPortalSource } from "@subsquid/pipes/evm";
import pino from "pino";

async function main() {
  // Ship logs to Google Cloud Logging
  const transport = pino.transport({
    target: "@google-cloud/logging-pino",
    options: {
      projectId: "your-project-id",
      logName: "pipes-indexer",
    },
  });

  const source = evmPortalSource({
    portal: "https://portal.sqd.dev/datasets/ethereum-mainnet",
    logger: pino(transport),
  });

  const target = createTarget({
    write: async ({ logger, read }) => {
      for await (const { data } of read()) {
        // The shape of `data` depends on the queries attached to the source
        logger.info(
          {
            blocksProcessed: data.blocks?.length,
            eventsCount: data.transfer?.length,
          },
          "Batch processed"
        );
      }
    },
  });

  await source.pipeTo(target);
}

void main();
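Like other Google Cloud client libraries, the transport typically authenticates via Application Default Credentials, so make sure the environment running the indexer is authorized against the target project.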
Sentry
import { createTarget } from "@subsquid/pipes";
import { evmPortalSource } from "@subsquid/pipes/evm";
import pino from "pino";

async function main() {
  const transport = pino.transport({
    target: "pino-sentry-transport",
    options: {
      sentry: {
        dsn: process.env.SENTRY_DSN,
        environment: "production",
      },
      level: "error", // Only send errors to Sentry
    },
  });

  const source = evmPortalSource({
    portal: "https://portal.sqd.dev/datasets/ethereum-mainnet",
    logger: pino(transport),
  });

  const target = createTarget({
    write: async ({ logger, read }) => {
      for await (const { data } of read()) {
        try {
          // processData is a placeholder for your own batch-processing logic
          await processData(data);
          logger.info({ count: data.length }, "Batch processed");
        } catch (error) {
          logger.error({ error, data }, "Failed to process batch");
        }
      }
    },
  });

  await source.pipeTo(target);
}

void main();
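Because the transport is filtered to the error level, only the logger.error call in the catch block reaches Sentry; the routine logger.info entries are dropped. If you also want informational output, combine this transport with a console transport as shown in the next example.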
Multiple Transports
import { createTarget } from "@subsquid/pipes";
import { evmPortalSource } from "@subsquid/pipes/evm";
import pino from "pino";

async function main() {
  const transport = pino.transport({
    targets: [
      {
        // Human-readable console output
        target: "pino-pretty",
        options: { colorize: true },
        level: "info",
      },
      {
        // Structured logs in GCP Cloud Logging
        target: "@google-cloud/logging-pino",
        options: { projectId: "your-project-id" },
        level: "info",
      },
      {
        // Errors only in Sentry
        target: "pino-sentry-transport",
        options: { sentry: { dsn: process.env.SENTRY_DSN } },
        level: "error",
      },
    ],
  });

  const source = evmPortalSource({
    portal: "https://portal.sqd.dev/datasets/ethereum-mainnet",
    logger: pino(transport),
  });

  const target = createTarget({
    write: async ({ logger, read }) => {
      for await (const { data } of read()) {
        logger.info({ count: data.length }, "Processed batch");
      }
    },
  });

  await source.pipeTo(target);
}

void main();
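Each target only receives records at or above its own level, so routine info logs reach the console and GCP Cloud Logging while Sentry sees errors alone.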
The ctx.logger in transformers and targets is the same logger instance passed to the source. Configure logging at the source level, then use ctx.logger throughout your pipeline for consistent logging.
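For example, a stage can derive a namespaced child logger from the logger it receives. This is a minimal sketch building on the pino-pretty setup from the first example; it assumes the pipeline logger exposes Pino's child() method, which holds when you pass a Pino instance to the source as shown above:

const target = createTarget({
  write: async ({ logger, read }) => {
    // child() binds extra fields to every record this stage emits
    const writeLogger = logger.child({ stage: "target" });
    for await (const { data } of read()) {
      writeLogger.info({ count: data.length }, "Processed batch");
    }
  },
});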
