From a046031e65c42b071019df25ee37e318b43a48d8 Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Sun, 2 Jan 2022 22:37:50 -0700 Subject: [PATCH 01/14] Complete rewrite of the prior half of the pipeline to use getConfirmedSignatures --- .vscode/launch.json | 96 +++++++--- architecture.puml | 28 ++- docker-compose.yml | 100 ++++++---- minio-data/solana-blocks/.keep | 0 src/event-transformer/index.ts | 37 ++-- .../transformers/InstructionTransformer.ts | 8 +- .../transformers/Transformer.ts | 4 +- .../transformers/anchorProgram.ts | 4 +- .../transformers/programSpec.ts | 4 +- .../transformers/tokenAccounts.ts | 4 +- src/kafka-s3-block-uploader.ts | 172 ------------------ src/kafka-s3-slot-identifier.ts | 129 ------------- src/kafka-signature-collector.ts | 108 +++++++++++ src/kafka-signature-identifier.ts | 151 +++++++++++++++ src/kafka-signature-processor.ts | 93 ++++++++++ src/setup/kafka.ts | 2 +- strata-compose/docker-compose.yml | 25 +++ 17 files changed, 551 insertions(+), 414 deletions(-) delete mode 100644 minio-data/solana-blocks/.keep delete mode 100644 src/kafka-s3-block-uploader.ts delete mode 100644 src/kafka-s3-slot-identifier.ts create mode 100644 src/kafka-signature-collector.ts create mode 100644 src/kafka-signature-identifier.ts create mode 100644 src/kafka-signature-processor.ts diff --git a/.vscode/launch.json b/.vscode/launch.json index af84e40..ce1e9a5 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -25,11 +25,11 @@ { "type": "pwa-node", "request": "launch", - "name": "Identify Slots", + "name": "Signatures", "skipFiles": [ "/**" ], - "program": "${workspaceFolder}/src/kafka-s3-slot-identifier.ts", + "program": "${workspaceFolder}/src/kafka-signature-identifier.ts", "preLaunchTask": "tsc: build - tsconfig.json", "outFiles": [ "${workspaceFolder}/dist/lib/**/*.js" @@ -37,37 +37,92 @@ "env": { "SOLANA_URL": "http://127.0.0.1:8899", "KAFKA_BOOTSTRAP_SERVERS": "localhost:29092", - "KAFKA_TOPIC": "json.solana.slots", + "KAFKA_TOPIC": 
"json.solana.signatures", + "ADDRESS": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" // "ACCOUNTS": "ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL,TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA,4K8fnycnTESeyad4DqfXPF8TbkuyscPK4EjAwY35emyW,AiYPQudWgXerJ1BdKfH5HkEamnPXSHAfPK2ThhKFkkDw" } }, { "type": "pwa-node", "request": "launch", - "name": "Process Blocks", + "name": "Mainnet Signatures", "skipFiles": [ "/**" ], - "program": "${workspaceFolder}/src/kafka-s3-block-uploader.ts", + "program": "${workspaceFolder}/src/kafka-signature-identifier.ts", "preLaunchTask": "tsc: build - tsconfig.json", "outFiles": [ "${workspaceFolder}/dist/lib/**/*.js" ], "env": { - "SOLANA_URL": "http://127.0.0.1:8899", - "KAFKA_GROUP_ID": "kafka-s3-block-uploader", - "S3_ENDPOINT": "http://localhost:9000", - "S3_ACCESS_KEY_ID": "minioadmin", - "S3_SECRET_ACCESS_KEY": "minioadmin", - "S3_PREFIX": "blocks", - "S3_BUCKET": "solana-blocks", + "SOLANA_URL": "https://wumbo.genesysgo.net", "KAFKA_BOOTSTRAP_SERVERS": "localhost:29092", - "KAFKA_INPUT_TOPIC": "json.solana.slots", - "KAFKA_TOPIC": "json.solana.blocks", - "ACCOUNTS": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA,TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA,namesLPneVptA9Z5rqUDD9tMTWEJwofgaYwp8cawRkX,TBondz6ZwSM5fs4v2GpnVBMuwoncPkFLFR9S422ghhN,TCo1sP6RwuCuyHPHjxgzcrq4dX4BKf9oRQ3aJMcdFry" + "KAFKA_TOPIC": "json.solana.signatures", + "ADDRESS": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" // "ACCOUNTS": "ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL,TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA,4K8fnycnTESeyad4DqfXPF8TbkuyscPK4EjAwY35emyW,AiYPQudWgXerJ1BdKfH5HkEamnPXSHAfPK2ThhKFkkDw" } }, + { + "type": "pwa-node", + "request": "launch", + "name": "Mainnet Process Signatures", + "skipFiles": [ + "/**" + ], + "program": "${workspaceFolder}/src/kafka-signature-processor.ts", + "preLaunchTask": "tsc: build - tsconfig.json", + "outFiles": [ + "${workspaceFolder}/dist/lib/**/*.js" + ], + "env": { + "SOLANA_URL": "https://wumbo.genesysgo.net", + 
"KAFKA_GROUP_ID": "kafka-signature-processor", + "KAFKA_BOOTSTRAP_SERVERS": "localhost:29092", + "KAFKA_INPUT_TOPIC": "json.solana.signatures", + "KAFKA_TOPIC": "json.solana.transactions" + } + }, + { + "type": "pwa-node", + "request": "launch", + "name": "Process Signatures", + "skipFiles": [ + "/**" + ], + "program": "${workspaceFolder}/src/kafka-signature-processor.ts", + "preLaunchTask": "tsc: build - tsconfig.json", + "outFiles": [ + "${workspaceFolder}/dist/lib/**/*.js" + ], + "env": { + "SOLANA_URL": "http://127.0.0.1:8899", + "KAFKA_GROUP_ID": "kafka-signature-processor", + "KAFKA_BOOTSTRAP_SERVERS": "localhost:29092", + "KAFKA_INPUT_TOPIC": "json.solana.signatures", + "KAFKA_TOPIC": "json.solana.transactions", + } + }, + { + "type": "pwa-node", + "request": "launch", + "name": "Collect Signatures", + "skipFiles": [ + "/**" + ], + "program": "${workspaceFolder}/src/kafka-signature-collector.ts", + "preLaunchTask": "tsc: build - tsconfig.json", + "outFiles": [ + "${workspaceFolder}/dist/lib/**/*.js" + ], + "env": { + "SOLANA_URL": "http://127.0.0.1:8899", + "KAFKA_GROUP_ID": "kafka-signature-collector", + "KAFKA_BOOTSTRAP_SERVERS": "localhost:29092", + "KAFKA_INPUT_TOPIC": "json.solana.signatures", + "KAFKA_TOPIC": "json.solana.collected-signatures", + "KAFKA_OFFSET_RESET": "earliest" + } + }, { "type": "pwa-node", "request": "launch", @@ -82,16 +137,11 @@ ], "env": { "SOLANA_URL": "http://127.0.0.1:8899", - "ANCHOR_IDLS": "TBondz6ZwSM5fs4v2GpnVBMuwoncPkFLFR9S422ghhN,TCo1sP6RwuCuyHPHjxgzcrq4dX4BKf9oRQ3aJMcdFry", - "S3_ENDPOINT": "http://localhost:9000", - "S3_ACCESS_KEY_ID": "minioadmin", - "S3_SECRET_ACCESS_KEY": "minioadmin", - "S3_PREFIX": "blocks", - "S3_BUCKET": "solana-blocks", + "ANCHOR_IDLS": "TBondmkCYxaPCKG4CHYfVTcwQ8on31xnJrPzk8F8WsS,TCo1sfSr2nCudbeJPykbif64rG9K1JNMGzrtzvPmp3y", "KAFKA_BOOTSTRAP_SERVERS": "localhost:29092", - "KAFKA_INPUT_TOPIC": "json.solana.blocks", + "KAFKA_INPUT_TOPIC": "json.solana.transactions", "KAFKA_OUTPUT_TOPIC": 
"json.solana.events", - "KAFKA_OFFSET_RESET": "earliest", + "KAFKA_OFFSET_RESET": "latest", "KAFKA_GROUP_ID": "solana-event-transformer", } }, diff --git a/architecture.puml b/architecture.puml index 555faa5..95c9850 100644 --- a/architecture.puml +++ b/architecture.puml @@ -1,8 +1,9 @@ @startuml architecture package "Kafka + KSQL" as k { - queue blocks - queue slots + queue signatures_star + queue signatures + queue transactions queue token_account_balance_changes queue bonding_token_account_balance_changes queue reserve_token_account_balance_changes @@ -10,8 +11,6 @@ package "Kafka + KSQL" as k { queue token_bonding_base_storage #fff queue token_bonding_target_mint #fff queue events - queue token_bonding_supply_changes - events --> token_bonding_initializes @@ -24,28 +23,27 @@ package "Kafka + KSQL" as k { token_bonding_base_storage --> reserve_token_account_balance_changes } -agent KafkaS3BlockUploader +agent KafkaSignatureIdentifier agent AccountLeaderboard agent TopTokensLeaderboard agent collectiveAPI -agent SlotIdentiier +agent KafkaSignatureCollector +agent KafkaSignatureProcessor cloud Solana -cloud S3 database Redis person UI -Solana -> SlotIdentiier -SlotIdentiier -> slots -Solana --> KafkaS3BlockUploader -slots --> KafkaS3BlockUploader -KafkaS3BlockUploader -> blocks -KafkaS3BlockUploader -> S3 +Solana -> KafkaSignatureIdentifier +KafkaSignatureIdentifier --> signatures_star +signatures_star --> KafkaSignatureCollector +KafkaSignatureCollector -> signatures +signatures -> KafkaSignatureProcessor +KafkaSignatureProcessor --> transactions agent EventTransformer -S3 -> EventTransformer -blocks --> EventTransformer +transactions --> EventTransformer EventTransformer -> events TopTokensLeaderboard --> Redis diff --git a/docker-compose.yml b/docker-compose.yml index 7d549b2..667425b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -27,6 +27,7 @@ services: KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 
KAFKA_TRANSACTION_STATE_LOG_NUM_PARTITIONS: 1 + kowl: image: quay.io/cloudhut/kowl:master ports: @@ -47,17 +48,6 @@ services: - zookeeper environment: ZK_HOSTS: zookeeper:2181 - minio: - image: minio/minio:latest - command: "server /data --console-address \":9001\"" - volumes: - - ./minio-data:/data - environment: - MINIO_ROOT_USER: minioadmin - MINIO_ROOT_PASSWORD: minioadmin - ports: - - 9000:9000 - - 9001:9001 redis: image: redis:latest @@ -100,44 +90,83 @@ services: entrypoint: /bin/sh tty: true - slot-identifier: + token-signature-identifier: extra_hosts: - "host.docker.internal:host-gateway" restart: always image: data-pipelines:latest - container_name: slot-identifier - command: node dist/lib/kafka-s3-slot-identifier.js + command: node dist/lib/kafka-signature-identifier.js depends_on: - kafka environment: SOLANA_URL: "http://host.docker.internal:8899" KAFKA_BOOTSTRAP_SERVERS: "kafka:9092" - KAFKA_TOPIC: json.solana.slots - - block-processor: + KAFKA_TOPIC: json.solana.signatures.token + ADDRESS: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + + atoken-signature-identifier: + extra_hosts: + - "host.docker.internal:host-gateway" + restart: always + image: data-pipelines:latest + command: node dist/lib/kafka-signature-identifier.js + depends_on: + - kafka + environment: + SOLANA_URL: "http://host.docker.internal:8899" + KAFKA_BOOTSTRAP_SERVERS: "kafka:9092" + KAFKA_TOPIC: json.solana.signatures.atoken + ADDRESS: ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL + + name-signature-identifier: extra_hosts: - "host.docker.internal:host-gateway" restart: always image: data-pipelines:latest - container_name: block-processor - command: node dist/lib/kafka-s3-block-uploader.js + command: node dist/lib/kafka-signature-identifier.js depends_on: - - slot-identifier - kafka - - minio environment: SOLANA_URL: "http://host.docker.internal:8899" - KAFKA_GROUP_ID: "kafka-s3-block-uploader" - S3_ENDPOINT: "http://minio:9000" - S3_ACCESS_KEY_ID: "minioadmin" - S3_SECRET_ACCESS_KEY: 
"minioadmin" - S3_PREFIX: "blocks" - S3_BUCKET: "solana-blocks" KAFKA_BOOTSTRAP_SERVERS: "kafka:9092" - KAFKA_INPUT_TOPIC: "json.solana.slots" - KAFKA_TOPIC: "json.solana.blocks" - ACCOUNTS: "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA,TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA,namesLPneVptA9Z5rqUDD9tMTWEJwofgaYwp8cawRkX,TBondz6ZwSM5fs4v2GpnVBMuwoncPkFLFR9S422ghhN,TCo1sP6RwuCuyHPHjxgzcrq4dX4BKf9oRQ3aJMcdFry" + KAFKA_TOPIC: json.solana.signatures.name + ADDRESS: namesLPneVptA9Z5rqUDD9tMTWEJwofgaYwp8cawRkX + signature-collector: + extra_hosts: + - "host.docker.internal:host-gateway" + restart: always + image: data-pipelines:latest + command: node dist/lib/kafka-signature-collector.js + depends_on: + - kafka + - name-signature-identifier + - atoken-signature-identifier + - token-signature-identifier + + environment: + KAFKA_BOOTSTRAP_SERVERS: "kafka:9092" + KAFKA_INPUT_TOPIC: json.solana.signatures..* + KAFKA_TOPIC: json.solana.signatures + KAFKA_GROUP_ID: kafka-signature-collector + KAFKA_OFFSET_RESET: earliest + + signature-processor: + extra_hosts: + - "host.docker.internal:host-gateway" + restart: always + image: data-pipelines:latest + command: node dist/lib/kafka-signature-processor.js + depends_on: + - kafka + environment: + KAFKA_BOOTSTRAP_SERVERS: "kafka:9092" + KAFKA_INPUT_TOPIC: json.solana.signatures + KAFKA_TOPIC: json.solana.transactions + SOLANA_URL: "http://host.docker.internal:8899" + KAFKA_GROUP_ID: kafka-signature-processor-1 + KAFKA_OFFSET_RESET: earliest + event-transformer: extra_hosts: - "host.docker.internal:host-gateway" @@ -145,20 +174,13 @@ services: image: data-pipelines:latest container_name: event-transformer depends_on: - - block-processor - kafka - - minio command: node dist/lib/event-transformer environment: SOLANA_URL: "http://host.docker.internal:8899" - ANCHOR_IDLS: "TBondz6ZwSM5fs4v2GpnVBMuwoncPkFLFR9S422ghhN,TCo1sP6RwuCuyHPHjxgzcrq4dX4BKf9oRQ3aJMcdFry" - S3_ENDPOINT: "http://minio:9000" - S3_ACCESS_KEY_ID: "minioadmin" - 
S3_SECRET_ACCESS_KEY: "minioadmin" - S3_PREFIX: "blocks" - S3_BUCKET: "solana-blocks" + ANCHOR_IDLS: "TBondmkCYxaPCKG4CHYfVTcwQ8on31xnJrPzk8F8WsS,TCo1sfSr2nCudbeJPykbif64rG9K1JNMGzrtzvPmp3y" KAFKA_BOOTSTRAP_SERVERS: "kafka:9092" - KAFKA_INPUT_TOPIC: "json.solana.blocks" + KAFKA_INPUT_TOPIC: "json.solana.transactions" KAFKA_OUTPUT_TOPIC: "json.solana.events" KAFKA_OFFSET_RESET: "earliest" KAFKA_GROUP_ID: "solana-event-transformer" diff --git a/minio-data/solana-blocks/.keep b/minio-data/solana-blocks/.keep deleted file mode 100644 index e69de29..0000000 diff --git a/src/event-transformer/index.ts b/src/event-transformer/index.ts index 8c52af8..59528b9 100644 --- a/src/event-transformer/index.ts +++ b/src/event-transformer/index.ts @@ -1,10 +1,9 @@ import "./borsh"; import { Program, Provider, Wallet as NodeWallet } from "@project-serum/anchor"; -import { BlockResponse, Keypair, PublicKey } from "@solana/web3.js"; +import { BlockResponse, ConfirmedTransaction, Keypair, PublicKey, Transaction } from "@solana/web3.js"; import BN from "bn.js"; import { Message as KafkaMessage, Producer, TopicMessages } from "kafkajs"; import { kafka } from "../setup/kafka"; -import { s3 } from "../setup/s3"; import { connection } from "../setup/solana"; import "../utils/borshWithPubkeys"; import AnchorProgramTransformer from "./transformers/anchorProgram"; @@ -28,8 +27,8 @@ function hasIntersect(set1: Set, set2: Set): boolean { return [...set1].some(x => set2.has(x)); } -function processTxn(transformers: Transformer[], block: BlockResponse & { slot: number }, txn: BlockTransaction): KafkaMessage[] { - const accounts = txn.transaction.message.accountKeys.map((key) => ( +function processTxn(transformers: Transformer[], txn: ConfirmedTransaction): KafkaMessage[] { + const accounts = txn.transaction.compileMessage().accountKeys.map((key) => ( // @ts-ignore new PublicKey(new BN(key._bn, 'hex')) )); @@ -43,16 +42,15 @@ function processTxn(transformers: Transformer[], block: BlockResponse & { 
slot: return { type, payload, - slot: block.slot, - recentBlockhash: txn.transaction.message.recentBlockhash, - blockTime: block.blockTime, - blockhash: block.blockhash + slot: txn.slot, + recentBlockhash: txn.transaction.recentBlockhash, + blockTime: txn.blockTime } }) .map((item: any) => ({ - key: block.slot.toString(), + key: item.slot.toString(), value: JSON.stringify(item), - timestamp: ((block.blockTime || 0) * 1000).toString() + timestamp: ((item.blockTime || 0) * 1000).toString() })) } @@ -124,19 +122,12 @@ async function run() { const results = (await Promise.all( messages .map((message: any) => JSON.parse(message.value!.toString())) - .filter(item => !item.skipped) - .map(async (value: any) => { - const s3Resp = (await s3.getObject({ - Key: value.Key, - Bucket: value.Bucket - }).promise()) - const block: BlockResponse & { slot: number } = JSON.parse(s3Resp.Body!.toString()); - const ret: KafkaMessage[] = block.transactions - .filter(txn => !txn.meta?.err) - .flatMap((txn: BlockTransaction) => processTxn(transformers, block, txn)) - - return ret; - }) + .map(txn => ({ + ...txn, + transaction: Transaction.from(txn.transaction) + })) + .filter((txn: ConfirmedTransaction) => !txn.meta?.err) + .flatMap((txn: ConfirmedTransaction) => processTxn(transformers, txn)) )).flat() console.log(`Sending batch of ${results.length} events`) await publishFixedBatches(producer, { diff --git a/src/event-transformer/transformers/InstructionTransformer.ts b/src/event-transformer/transformers/InstructionTransformer.ts index 9abd8e0..bd77df1 100644 --- a/src/event-transformer/transformers/InstructionTransformer.ts +++ b/src/event-transformer/transformers/InstructionTransformer.ts @@ -1,11 +1,11 @@ -import { CompiledInstruction, ConfirmedTransactionMeta, Message, PublicKey, Transaction } from "@solana/web3.js"; +import { CompiledInstruction, ConfirmedTransaction, ConfirmedTransactionMeta, Message, PublicKey, Transaction, TransactionInstruction } from "@solana/web3.js"; import { 
BlockTransaction, TransformedMessage, Transformer } from "./Transformer"; export abstract class InstructionTransformer implements Transformer { abstract get relevantKeys(): Set; - transform(accountKeys: PublicKey[], transaction: BlockTransaction): TransformedMessage[] { - const indexedNormalInstrs = transaction.transaction.message.instructions + transform(accountKeys: PublicKey[], transaction: ConfirmedTransaction): TransformedMessage[] { + const indexedNormalInstrs = transaction.transaction.compileMessage().instructions .map((instruction, index) => ({ instruction, instructionIndex: index, innerInstructionIndex: null })) const indexedInnerInstrs = (transaction.meta?.innerInstructions || []) .flatMap((innerInstruction) => @@ -23,5 +23,5 @@ export abstract class InstructionTransformer implements Transformer { ) } - abstract transformInstruction(accountKeys: PublicKey[], transaction: BlockTransaction, instruction: CompiledInstruction): TransformedMessage[] + abstract transformInstruction(accountKeys: PublicKey[], transaction: ConfirmedTransaction, instruction: CompiledInstruction): TransformedMessage[] } \ No newline at end of file diff --git a/src/event-transformer/transformers/Transformer.ts b/src/event-transformer/transformers/Transformer.ts index 5694bac..19a4d10 100644 --- a/src/event-transformer/transformers/Transformer.ts +++ b/src/event-transformer/transformers/Transformer.ts @@ -1,4 +1,4 @@ -import { ConfirmedTransactionMeta, Message, PublicKey } from "@solana/web3.js"; +import { ConfirmedTransaction, ConfirmedTransactionMeta, Message, PublicKey } from "@solana/web3.js"; export type BlockTransaction = { transaction: { @@ -14,5 +14,5 @@ export interface TransformedMessage { export interface Transformer { get relevantKeys(): Set - transform(accountKeys: PublicKey[], transaction: BlockTransaction): TransformedMessage[] + transform(accountKeys: PublicKey[], transaction: ConfirmedTransaction): TransformedMessage[] } \ No newline at end of file diff --git 
a/src/event-transformer/transformers/anchorProgram.ts b/src/event-transformer/transformers/anchorProgram.ts index a821987..451e2d0 100644 --- a/src/event-transformer/transformers/anchorProgram.ts +++ b/src/event-transformer/transformers/anchorProgram.ts @@ -1,4 +1,4 @@ -import { Message, PublicKey, TokenBalance, MessageArgs, CompiledInstruction } from "@solana/web3.js"; +import { Message, PublicKey, TokenBalance, MessageArgs, CompiledInstruction, ConfirmedTransaction } from "@solana/web3.js"; import { BlockTransaction, Transformer } from "./Transformer"; import { BinaryReader, deserializeUnchecked, baseDecode } from "borsh"; import BN from "bn.js"; @@ -70,7 +70,7 @@ export default class AnchorProgramTransformer extends InstructionTransformer { }, {} as Record) } - transformInstruction(accountKeys: PublicKey[], transaction: BlockTransaction, instruction: CompiledInstruction): any[] { + transformInstruction(accountKeys: PublicKey[], transaction: ConfirmedTransaction, instruction: CompiledInstruction): any[] { const programId = accountKeys[instruction.programIdIndex].toBase58(); const ixData = bs58.decode(instruction.data); let codedInstruction = this.coder.instruction.decode(ixData); diff --git a/src/event-transformer/transformers/programSpec.ts b/src/event-transformer/transformers/programSpec.ts index b7eb3ed..3b197bc 100644 --- a/src/event-transformer/transformers/programSpec.ts +++ b/src/event-transformer/transformers/programSpec.ts @@ -1,4 +1,4 @@ -import { CompiledInstruction, PublicKey, TokenBalance } from "@solana/web3.js"; +import { CompiledInstruction, ConfirmedTransaction, PublicKey, TokenBalance } from "@solana/web3.js"; import { BlockTransaction, Transformer } from "./Transformer"; import { BinaryReader, deserializeUnchecked, baseDecode } from "borsh"; import BN from "bn.js"; @@ -60,7 +60,7 @@ export default class ProgramSpecTransformer extends InstructionTransformer { return pids; } - transformInstruction(accountKeys: PublicKey[], 
transaction: BlockTransaction, instruction: CompiledInstruction): any[] { + transformInstruction(accountKeys: PublicKey[], transaction: ConfirmedTransaction, instruction: CompiledInstruction): any[] { const index = instruction.data.length == 0 ? 0 : new BinaryReader(baseDecode(instruction.data)).readU8(); const programId = accountKeys[instruction.programIdIndex].toBase58() const command = this.programIdAndIndexToCommand.get(programId)?.get(index) diff --git a/src/event-transformer/transformers/tokenAccounts.ts b/src/event-transformer/transformers/tokenAccounts.ts index fe3a8f2..38a7db1 100644 --- a/src/event-transformer/transformers/tokenAccounts.ts +++ b/src/event-transformer/transformers/tokenAccounts.ts @@ -1,4 +1,4 @@ -import { PublicKey, TokenBalance } from "@solana/web3.js"; +import { ConfirmedTransaction, PublicKey, TokenBalance } from "@solana/web3.js"; import { BlockTransaction, Transformer } from "./Transformer"; import BN from "bn.js"; @@ -21,7 +21,7 @@ export default class TokenAccountTransformer implements Transformer { return new Set(["TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA"]) } - transform(accountKeys: PublicKey[], transaction: BlockTransaction): any[] { + transform(accountKeys: PublicKey[], transaction: ConfirmedTransaction): any[] { function toPubkeyAmount({ accountIndex, mint, uiTokenAmount: { decimals, amount } }: TokenBalance) { return { mint, diff --git a/src/kafka-s3-block-uploader.ts b/src/kafka-s3-block-uploader.ts deleted file mode 100644 index 31023a1..0000000 --- a/src/kafka-s3-block-uploader.ts +++ /dev/null @@ -1,172 +0,0 @@ -import { Finality } from "@solana/web3.js"; -import { Message as KafkaMessage } from "kafkajs"; -import { kafka } from "./setup/kafka"; -import { s3 } from "./setup/s3"; -import { connection } from "./setup/solana"; -import { v4 as uuidv4 } from "uuid"; - -const START_SLOT = process.env["START_SLOT"]; -const FINALITY: Finality = (process.env["FINALITY"] || 'finalized') as Finality; -const S3_BUCKET = 
process.env["S3_BUCKET"]! -const S3_PREFIX = process.env["S3_PREFIX"]! -const ACCOUNTS = new Set(process.env["ACCOUNTS"]!.split(",")) -const { KAFKA_TOPIC, KAFKA_INPUT_TOPIC, KAFKA_GROUP_ID } = process.env - -const producer = kafka.producer() - -/** - * Format bytes as human-readable text. - * - * @param bytes Number of bytes. - * @param si True to use metric (SI) units, aka powers of 1000. False to use - * binary (IEC), aka powers of 1024. - * @param dp Number of decimal places to display. - * - * @return Formatted string. - */ - function humanFileSize(bytes: number, si=false, dp=1) { - const thresh = si ? 1000 : 1024; - - if (Math.abs(bytes) < thresh) { - return bytes + ' B'; - } - - const units = si - ? ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'] - : ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB']; - let u = -1; - const r = 10**dp; - - do { - bytes /= thresh; - ++u; - } while (Math.round(Math.abs(bytes) * r) / r >= thresh && u < units.length - 1); - - - return bytes.toFixed(dp) + ' ' + units[u]; -} - -async function processSlot(slot: number) { - let outputMsg: KafkaMessage; - try { - const block = await connection.getBlock(slot, { commitment: FINALITY }); - const fileInfo = { - Bucket: S3_BUCKET, - Key: `${S3_PREFIX}/${slot.toString().padStart(16, '0')}.json.gz`, - } - const filteredBlock = { - ...block, - slot, - transactions: block?.transactions.filter(transaction => transaction.transaction.message.accountKeys.some(key => ACCOUNTS.has(key.toBase58()))) - } - - if ((filteredBlock.transactions?.length || 0) > 0) { - await s3.putObject({ - ...fileInfo, - Body: JSON.stringify(filteredBlock), - ContentType: 'application/json; charset=utf-8', - ContentEncoding: 'gzip' - }).promise() - - const contentLength = (await s3.headObject(fileInfo).promise()).ContentLength; - const { parentSlot, blockTime, previousBlockhash, blockhash } = block || {}; - - const result = { - parentSlot, - blockTime, - previousBlockhash, - blockhash, - slot, - contentLength, - 
...fileInfo - } - console.log(`Processed slot ${slot}, size: ${humanFileSize(contentLength || 0)}`) - - outputMsg = { - key: slot.toString(), - value: JSON.stringify(result), - timestamp: ((block?.blockTime || 0) * 1000).toString() - }; - } else { - console.log(`No transactions in slot ${slot}`) - outputMsg = { - key: slot.toString(), - value: JSON.stringify({ - slot, - skipped: true - }), - timestamp: ((block?.blockTime || 0) * 1000).toString() - }; - } - } catch (e) { - console.error(e); - outputMsg = { - key: slot.toString(), - value: JSON.stringify({ - slot, - skipped: true, - error: e.message, - }) - }; - } - - await producer.send({ - topic: KAFKA_TOPIC!, - messages: [outputMsg] - }) -} - -type PromFunc = () => Promise; -async function linearPromiseAll(funcs: PromFunc[]): Promise { - const results = []; - for(let func of funcs) { - results.push(await func()); - } - - return results; -} - -async function run() { - const consumer = kafka.consumer({ - groupId: KAFKA_GROUP_ID!, - maxBytes: 200 - }); - const admin = kafka.admin(); - await admin.connect(); - // Force failure if topic doesn't exist - await admin.fetchTopicMetadata({ topics: [KAFKA_INPUT_TOPIC!] 
}) - await admin.disconnect(); - - await producer.connect(); - await consumer.connect(); - await consumer.subscribe({ - topic: KAFKA_INPUT_TOPIC!, - fromBeginning: process.env["KAFKA_OFFSET_RESET"] === "earliest" - }); - - return new Promise((resolve, reject) => { - consumer.run({ - eachBatchAutoResolve: false, - autoCommitThreshold: 1, - eachBatch: async ({ batch: { messages }, resolveOffset, heartbeat }) => { - try { - await linearPromiseAll( - messages.map(({ value, offset }) => ({ ...JSON.parse(value!.toString()), offset })).map(({ slot, offset }) => async () => { - await processSlot(slot) - await resolveOffset(offset) - await heartbeat() - }) - ) - } catch (e) { - reject(e); - } - } - }) - }); -} - -run().catch(e => { - console.error(e); - console.error(e.stack); - process.exit(1); -}) \ No newline at end of file diff --git a/src/kafka-s3-slot-identifier.ts b/src/kafka-s3-slot-identifier.ts deleted file mode 100644 index 6945181..0000000 --- a/src/kafka-s3-slot-identifier.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { Finality } from "@solana/web3.js"; -import { kafka } from "./setup/kafka"; -import { connection } from "./setup/solana"; -import { v4 as uuidv4 } from "uuid"; - -const START_SLOT = process.env["START_SLOT"]; -const FINALITY: Finality = (process.env["FINALITY"] || 'finalized') as Finality; -const { KAFKA_TOPIC } = process.env - -const producer = kafka.producer() - -async function getKafkaSlot(): Promise { - console.log("Searching for last max block...") - const consumer = kafka.consumer({ - groupId: `kafka-s3-block-identifier-${uuidv4()}` - }); - - await consumer.connect(); - await consumer.subscribe({ - topic: KAFKA_TOPIC!, - fromBeginning: false - }); - const admin = kafka.admin(); - await admin.connect(); - - let maxSlot: number | null = null - consumer.run({ - eachBatchAutoResolve: false, - eachBatch: async ({ batch: { messages } }) => { - messages.forEach(message => { - const msgSlot = JSON.parse(message.value!.toString()).slot - if (msgSlot > 
(maxSlot || 0)) { - maxSlot = msgSlot - } - }) - } - }) - - const offsets = await admin.fetchTopicOffsets(KAFKA_TOPIC!) - await Promise.all( - offsets.map(async offset => { - await consumer.seek({ - topic: KAFKA_TOPIC!, - partition: offset.partition, - offset: (Number(offset.high) - 1).toString() - }) - }) - ); - - return new Promise((resolve, reject) => { - setTimeout(async () => { - try { - await admin.disconnect(); - await consumer.disconnect() - } catch (e) { - reject(e) - } - console.log(`Found slot ${maxSlot} in kafka`) - resolve(maxSlot ? maxSlot + 1 : maxSlot) - }, 10 * 1000) - }) -} - -function sleep(ms: number) { - return new Promise((resolve) => { - setTimeout(() => resolve(null), ms); - }) -} - -async function run() { - const admin = kafka.admin(); - await admin.connect() - try { - await admin.fetchTopicMetadata({ topics: [KAFKA_TOPIC!] }) - } catch (e) { - const created = await admin.createTopics({ - waitForLeaders: true, - topics: [{ - topic: KAFKA_TOPIC!, - numPartitions: process.env.NUM_PARTITIONS ? 
Number(process.env.NUM_PARTITIONS) : 6 - }] - }) - if (!created) { - console.log("Could not create topic", KAFKA_TOPIC); - throw new Error("Failed to create topic") - } else { - console.log("Created topic", KAFKA_TOPIC); - } - } - await admin.disconnect() - - const startSlot = (START_SLOT && Number(START_SLOT)) || await getKafkaSlot() || await connection.getSlot(FINALITY); - await producer.connect(); - let currentSlot = startSlot; - let maxSlot = await connection.getSlot(FINALITY); - - let msgs = []; - let batchSize = 100; - while (true) { - if (currentSlot <= maxSlot) { - msgs.push({ - topic: KAFKA_TOPIC!, - messages: [{ - key: `${currentSlot}`, - value: JSON.stringify({ - slot: currentSlot - }) - }] - }) - currentSlot += 1 - } - if (currentSlot > maxSlot || msgs.length > batchSize) { - await producer.sendBatch({ - acks: 1, - topicMessages: msgs - }); - msgs = []; - console.log("Caught up, fetching current slot") - maxSlot = await connection.getSlot(FINALITY) - await sleep(2000) // If you set this too low, get too many requests - } - } -} - -run().catch(e => { - console.error(e); - console.error(e.stack); - process.exit(1); -}) \ No newline at end of file diff --git a/src/kafka-signature-collector.ts b/src/kafka-signature-collector.ts new file mode 100644 index 0000000..0b85575 --- /dev/null +++ b/src/kafka-signature-collector.ts @@ -0,0 +1,108 @@ +// Gets the signatures and filters for unique sigs, combining them into one topic + +import { ConfirmedSignatureInfo, Finality } from "@solana/web3.js"; +import { kafka } from "./setup/kafka"; + +const { KAFKA_TOPIC, KAFKA_INPUT_TOPIC, KAFKA_GROUP_ID } = process.env + +const producer = kafka.producer() + +class LRU { + max: number; + cache: Map; + + constructor(max = 10) { + this.max = max; + this.cache = new Map(); + } + + get(key: A): B | undefined { + let item = this.cache.get(key); + if (item) { + // refresh key + this.cache.delete(key); + this.cache.set(key, item); + } + return item; + } + + set(key: A, val: B) { + 
// refresh key + if (this.cache.has(key)) this.cache.delete(key); + // evict oldest + else if (this.cache.size == this.max) this.cache.delete(this.first()); + this.cache.set(key, val); + } + + first() { + return this.cache.keys().next().value; + } +} + +async function run() { + const admin = kafka.admin(); + await admin.connect() + try { + await admin.fetchTopicMetadata({ topics: [KAFKA_TOPIC!] }) + } catch (e) { + const created = await admin.createTopics({ + waitForLeaders: true, + topics: [{ + topic: KAFKA_TOPIC!, + numPartitions: process.env.NUM_PARTITIONS ? Number(process.env.NUM_PARTITIONS) : 6 + }] + }) + if (!created) { + console.log("Could not create topic", KAFKA_TOPIC); + throw new Error("Failed to create topic") + } else { + console.log("Created topic", KAFKA_TOPIC); + } + } + await admin.disconnect() + + const consumer = kafka.consumer({ + groupId: KAFKA_GROUP_ID!, + maxBytes: process.env.MAX_BYTES ? Number(process.env.MAX_BYTES) : undefined + }); + + await producer.connect(); + await consumer.connect(); + await consumer.subscribe({ + topic: new RegExp(KAFKA_INPUT_TOPIC!), + fromBeginning: process.env["KAFKA_OFFSET_RESET"] === "earliest", + }); + + const lru = new LRU(process.env.LRU_MAX ? Number(process.env.LRU_MAX) : 100000); + + return new Promise((resolve, reject) => { + consumer.run({ + eachBatchAutoResolve: true, + autoCommitThreshold: process.env.AUTO_COMMIT_THRESHOLD ? 
Number(process.env.AUTO_COMMIT_THRESHOLD) : 20, + eachBatch: async ({ batch: { messages } }) => { + try { + const sendable = messages.filter(m => + !lru.get(m.key) + ).map(m => { + lru.set(m.key, m.key); + return m; + }); + if (sendable.length > 0) { + await producer.send({ + topic: KAFKA_TOPIC!, + messages: sendable + }) + } + } catch (e) { + reject(e); + } + } + }) + }); +} + +run().catch(e => { + console.error(e); + console.error(e.stack); + process.exit(1); +}) \ No newline at end of file diff --git a/src/kafka-signature-identifier.ts b/src/kafka-signature-identifier.ts new file mode 100644 index 0000000..0e0e16e --- /dev/null +++ b/src/kafka-signature-identifier.ts @@ -0,0 +1,151 @@ +import { ConfirmedSignatureInfo, Connection, Finality, PublicKey } from "@solana/web3.js"; +import { kafka } from "./setup/kafka"; +import { connection } from "./setup/solana"; +import { v4 as uuidv4 } from "uuid"; + +const START_SIGNATURE = process.env["START_SIGNATURE"]; +const FINALITY: Finality = (process.env["FINALITY"] || 'finalized') as Finality; +const { KAFKA_TOPIC } = process.env +const ADDRESS = new PublicKey(process.env["ADDRESS"]!); +const SLEEP_TIME = process.env.SLEEP_TIME ? Number(process.env.SLEEP_TIME) : 2000; + +const producer = kafka.producer() + +async function getKafkaSignature(): Promise { + console.log("Searching for last max block...") + const consumer = kafka.consumer({ + groupId: `kafka-s3-signature-identifier-${uuidv4()}`, + maxBytes: 20 + }); + + await consumer.connect(); + await consumer.subscribe({ + topic: KAFKA_TOPIC!, + fromBeginning: false + }); + const admin = kafka.admin(); + await admin.connect(); + + let signature: string | null = null + consumer.run({ + eachBatchAutoResolve: false, + eachBatch: async ({ batch: { messages } }) => { + const message = messages[0]; + if (message) { + signature = JSON.parse(message.value!.toString()).signature + } + } + }) + + const offsets = await admin.fetchTopicOffsets(KAFKA_TOPIC!) 
+ await Promise.all( + offsets.map(async offset => { + await consumer.seek({ + topic: KAFKA_TOPIC!, + partition: offset.partition, + offset: (Number(offset.high) - 1).toString() + }) + }) + ); + + return new Promise((resolve, reject) => { + setTimeout(async () => { + try { + await admin.disconnect(); + await consumer.disconnect() + } catch (e) { + reject(e) + } + console.log(`Found signature ${signature} in kafka`) + resolve(signature) + }, 10 * 1000) + }) +} + +function sleep(ms: number) { + return new Promise((resolve) => { + setTimeout(() => resolve(null), ms); + }) +} + +async function getAllSignatures(connection: Connection, until?: string, before?: string): Promise { + const signatures = await connection.getConfirmedSignaturesForAddress2( + ADDRESS, + { + until, + before + }, + FINALITY + ); + + if (signatures.length === 1000) { + return [...signatures, ...await getAllSignatures(connection, until, signatures[signatures.length - 1].signature)] + } + + return signatures; +} + +async function run() { + const admin = kafka.admin(); + await admin.connect() + try { + await admin.fetchTopicMetadata({ topics: [KAFKA_TOPIC!] }) + } catch (e) { + const created = await admin.createTopics({ + waitForLeaders: true, + topics: [{ + topic: KAFKA_TOPIC!, + numPartitions: process.env.NUM_PARTITIONS ? 
Number(process.env.NUM_PARTITIONS) : 1 + }] + }) + if (!created) { + console.log("Could not create topic", KAFKA_TOPIC); + throw new Error("Failed to create topic") + } else { + console.log("Created topic", KAFKA_TOPIC); + } + } + await admin.disconnect() + + const startSignature = START_SIGNATURE || await getKafkaSignature() || await getSolanaRecentSignature() + + await producer.connect(); + let currentSignature = startSignature; + while (true) { + const signatures = await getAllSignatures(connection, currentSignature || undefined) + if (signatures.length > 0) { + console.log(`Fetched ${signatures.length} signatures from ${currentSignature}`); + currentSignature = signatures[0].signature + await producer.sendBatch({ + acks: 1, + topicMessages: [{ + topic: KAFKA_TOPIC!, + messages: signatures.map(sig => ({ + value: JSON.stringify(sig), + key: sig.signature.toString(), + timestamp: ((sig.blockTime || 0) * 1000).toString() + })) + }] + }); + } else { + await sleep(SLEEP_TIME); + } + } +} + +run().catch(e => { + console.error(e); + console.error(e.stack); + process.exit(1); +}) + +async function getSolanaRecentSignature(): Promise { + const curr = (await connection.getSignaturesForAddress(ADDRESS, { + limit: 1 + }, FINALITY)) + if (curr.length > 0) { + return curr[0].signature; + } + + return null; +} diff --git a/src/kafka-signature-processor.ts b/src/kafka-signature-processor.ts new file mode 100644 index 0000000..edf1cf6 --- /dev/null +++ b/src/kafka-signature-processor.ts @@ -0,0 +1,93 @@ +import { ConfirmedSignatureInfo, Finality } from "@solana/web3.js"; +import { kafka } from "./setup/kafka"; +import { connection } from "./setup/solana"; + +const FINALITY: Finality = (process.env["FINALITY"] || 'finalized') as Finality; +const { KAFKA_TOPIC, KAFKA_INPUT_TOPIC, KAFKA_GROUP_ID } = process.env + +const producer = kafka.producer() + + +async function processSignature(signature: ConfirmedSignatureInfo) { + const txn = await 
connection.getConfirmedTransaction(signature.signature, FINALITY); + const outputMsg = { + key: signature.signature.toString(), + value: JSON.stringify({ + ...txn, + transaction: txn?.transaction.serialize({ + requireAllSignatures: false, + verifySignatures: false + }).toJSON().data + }), + timestamp: ((signature?.blockTime || 0) * 1000).toString() + }; + + await producer.send({ + topic: KAFKA_TOPIC!, + messages: [outputMsg] + }) +} + +function groupByN(n: number, data: T[]): T[][] { + let result = []; + for (let i = 0; i < data.length; i += n) result.push(data.slice(i, i + n)); + return result; +}; + +type PromFunc = () => Promise; +async function promiseAllGrouped(size: number, funcs: PromFunc[]): Promise { + const results: A[] = []; + const grouped = groupByN(size, funcs); + for(let funcs of grouped) { + await Promise.all(funcs.map(async func => { + results.push(await func()); + })) + } + + return results; +} + +async function run() { + const consumer = kafka.consumer({ + groupId: KAFKA_GROUP_ID!, + maxBytes: process.env.MAX_BYTES ? Number(process.env.MAX_BYTES) : undefined + }); + const admin = kafka.admin(); + await admin.connect(); + // Force failure if topic doesn't exist + await admin.fetchTopicMetadata({ topics: [KAFKA_INPUT_TOPIC!] }) + await admin.disconnect(); + + await producer.connect(); + await consumer.connect(); + await consumer.subscribe({ + topic: KAFKA_INPUT_TOPIC!, + fromBeginning: process.env["KAFKA_OFFSET_RESET"] === "earliest", + }); + + return new Promise((resolve, reject) => { + consumer.run({ + eachBatchAutoResolve: true, + autoCommitThreshold: process.env.AUTO_COMMIT_THRESHOLD ? Number(process.env.AUTO_COMMIT_THRESHOLD) : 20, + eachBatch: async ({ batch: { messages }, heartbeat, commitOffsetsIfNecessary }) => { + try { + await promiseAllGrouped( + process.env.GROUP_SIZE ? 
Number(process.env.GROUP_SIZE) : 5, + messages.map(({ value, offset }) => ({ ...JSON.parse(value!.toString()), offset })).map((confirmedSignatureInfo) => async () => { + await processSignature(confirmedSignatureInfo) + await heartbeat() + }) + ) + } catch (e) { + reject(e); + } + } + }) + }); +} + +run().catch(e => { + console.error(e); + console.error(e.stack); + process.exit(1); +}) \ No newline at end of file diff --git a/src/setup/kafka.ts b/src/setup/kafka.ts index e328b81..9846404 100644 --- a/src/setup/kafka.ts +++ b/src/setup/kafka.ts @@ -6,7 +6,7 @@ const ssl = !!KAFKA_SSL_ENABLED // This creates a client instance that is configured to connect to the Kafka broker provided by // the environment variable KAFKA_BOOTSTRAP_SERVER export const kafka = new Kafka({ - clientId: "kafka-s3-block-uploader", + clientId: process.env.GROUP_ID || "no-client-id", brokers: process.env.KAFKA_BOOTSTRAP_SERVERS!.split(","), ssl, sasl diff --git a/strata-compose/docker-compose.yml b/strata-compose/docker-compose.yml index 9dd7173..27a030b 100644 --- a/strata-compose/docker-compose.yml +++ b/strata-compose/docker-compose.yml @@ -1,5 +1,30 @@ version: '2' services: + collective-signature-identifier: + extra_hosts: + - "host.docker.internal:host-gateway" + restart: always + image: data-pipelines:latest + command: node dist/lib/kafka-signature-identifier.js + environment: + SOLANA_URL: "http://host.docker.internal:8899" + KAFKA_BOOTSTRAP_SERVERS: "host.docker.internal:39092" + KAFKA_TOPIC: json.solana.signatures.collective + ADDRESS: TCo1sfSr2nCudbeJPykbif64rG9K1JNMGzrtzvPmp3y + + bonding-signature-identifier: + extra_hosts: + - "host.docker.internal:host-gateway" + restart: always + image: data-pipelines:latest + command: node dist/lib/kafka-signature-identifier.js + environment: + SOLANA_URL: "http://host.docker.internal:8899" + KAFKA_BOOTSTRAP_SERVERS: "host.docker.internal:39092" + KAFKA_TOPIC: json.solana.signatures.bonding + ADDRESS: 
TBondmkCYxaPCKG4CHYfVTcwQ8on31xnJrPzk8F8WsS + + ksql-bootstrap-1: extra_hosts: - "host.docker.internal:host-gateway" From 6117dd5a11aafece1e21c7dd5a02e3e7ed383875 Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Mon, 3 Jan 2022 18:40:39 -0700 Subject: [PATCH 02/14] Fixes --- src/event-transformer/index.ts | 1 + .../transformers/programSpec.ts | 47 ++++++++++--------- src/kafka-signature-collector.ts | 3 +- 3 files changed, 29 insertions(+), 22 deletions(-) diff --git a/src/event-transformer/index.ts b/src/event-transformer/index.ts index 59528b9..4de9b0c 100644 --- a/src/event-transformer/index.ts +++ b/src/event-transformer/index.ts @@ -122,6 +122,7 @@ async function run() { const results = (await Promise.all( messages .map((message: any) => JSON.parse(message.value!.toString())) + .filter(txn => txn.transaction) .map(txn => ({ ...txn, transaction: Transaction.from(txn.transaction) diff --git a/src/event-transformer/transformers/programSpec.ts b/src/event-transformer/transformers/programSpec.ts index 3b197bc..493f86a 100644 --- a/src/event-transformer/transformers/programSpec.ts +++ b/src/event-transformer/transformers/programSpec.ts @@ -61,27 +61,32 @@ export default class ProgramSpecTransformer extends InstructionTransformer { } transformInstruction(accountKeys: PublicKey[], transaction: ConfirmedTransaction, instruction: CompiledInstruction): any[] { - const index = instruction.data.length == 0 ? 
0 : new BinaryReader(baseDecode(instruction.data)).readU8(); - const programId = accountKeys[instruction.programIdIndex].toBase58() - const command = this.programIdAndIndexToCommand.get(programId)?.get(index) - const schema = this.programIdToSchema.get(programId); - if (command) { - const accounts = instruction.accounts.reduce((acc, account, index) => { - const instrAccount = command.accounts[index] - if (instrAccount) { - acc.set(instrAccount, accountKeys[account].toBase58()); - } - - return acc; - }, new Map()); - - const args = command.args && schema && deserializeUnchecked(schema, command.args, baseDecode(instruction.data)); - return [{ - type: command.name, - programId, - accounts: Object.fromEntries(accounts), - data: (command.args && Object.fromEntries(transformBN(args))) - }] + try { + const index = instruction.data.length == 0 ? 0 : new BinaryReader(baseDecode(instruction.data)).readU8(); + const programId = accountKeys[instruction.programIdIndex].toBase58() + const command = this.programIdAndIndexToCommand.get(programId)?.get(index) + const schema = this.programIdToSchema.get(programId); + if (command) { + const accounts = instruction.accounts.reduce((acc, account, index) => { + const instrAccount = command.accounts[index] + if (instrAccount) { + acc.set(instrAccount, accountKeys[account].toBase58()); + } + + return acc; + }, new Map()); + + const args = command.args && schema && deserializeUnchecked(schema, command.args, baseDecode(instruction.data)); + return [{ + type: command.name, + programId, + accounts: Object.fromEntries(accounts), + data: (command.args && Object.fromEntries(transformBN(args))) + }] + } + } catch (e: any) { + console.log(`Failed to process ${transaction.transaction.signature}`); + console.error(e); } return []; diff --git a/src/kafka-signature-collector.ts b/src/kafka-signature-collector.ts index 0b85575..831977e 100644 --- a/src/kafka-signature-collector.ts +++ b/src/kafka-signature-collector.ts @@ -49,7 +49,8 @@ async function 
run() { waitForLeaders: true, topics: [{ topic: KAFKA_TOPIC!, - numPartitions: process.env.NUM_PARTITIONS ? Number(process.env.NUM_PARTITIONS) : 6 + numPartitions: process.env.NUM_PARTITIONS ? Number(process.env.NUM_PARTITIONS) : 6, + replicationFactor: process.env.REPLICATION ? Number(process.env.REPLICATION) : 1 }] }) if (!created) { From 3916ba057db3d1c8423820f4f7051db95e1cd782 Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Sun, 9 Jan 2022 16:48:44 -0600 Subject: [PATCH 03/14] Add name events and es --- docker-compose.yml | 46 +++++++++++++++++++++++++++++++++++++++ ksql/0001-events.sql | 51 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 97 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index 667425b..6577b98 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -184,3 +184,49 @@ services: KAFKA_OUTPUT_TOPIC: "json.solana.events" KAFKA_OFFSET_RESET: "earliest" KAFKA_GROUP_ID: "solana-event-transformer" + + elasticsearch: + image: elasticsearch:7.16.2 + ports: + - 29200:9200 + environment: + xpack.security.enabled: "false" + ES_JAVA_OPTS: "-Xms1g -Xmx1g" + discovery.type: "single-node" + + kibana: + image: docker.elastic.co/kibana/kibana:7.16.2 + ports: + - 5601:5601 + environment: + ELASTICSEARCH_URL: http://elasticsearch:9200 + ELASTICSEARCH_HOSTS: '["http://elasticsearch:9200"]' + + kafka-connect: + image: confluentinc/cp-kafka-connect:5.4.6 + depends_on: + - zookeeper + - kafka + ports: + - 8085:8085 + environment: + CONNECT_BOOTSTRAP_SERVERS: "kafka:9092" + CONNECT_REST_PORT: 8085 + CONNECT_GROUP_ID: kafka-connect + CONNECT_CONFIG_STORAGE_TOPIC: _kafka-connect-configs + CONNECT_OFFSET_STORAGE_TOPIC: _kafka-connect-offsets + CONNECT_STATUS_STORAGE_TOPIC: _kafka-connect-status + CONNECT_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" + CONNECT_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" + CONNECT_KEY_CONVERTER_SCHEMAS_ENABLE: "false" + CONNECT_VALUE_CONVERTER_SCHEMAS_ENABLE: "false" + 
CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" + CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" + CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect-01" + CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO" + CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR" + CONNECT_LOG4J_APPENDER_STDOUT_LAYOUT_CONVERSIONPATTERN: "[%d] %p %X{connector.context}%m (%c:%L)%n" + CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1" + CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1" + CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1" + CONNECT_PLUGIN_PATH: /usr/share/java,/usr/share/confluent-hub-components \ No newline at end of file diff --git a/ksql/0001-events.sql b/ksql/0001-events.sql index 01153e7..af3ec10 100644 --- a/ksql/0001-events.sql +++ b/ksql/0001-events.sql @@ -40,6 +40,19 @@ FROM solana_events WHERE EXTRACTJSONFIELD("payload", '$.programId') = 'metaqbxxUerdq28cj1RbAWkYQm3ybzjb6a8bt518x1s' EMIT CHANGES; +CREATE OR REPLACE STREAM spl_name_service_events +WITH (kafka_topic='json.solana.spl_name_service_events', value_format='json', partitions=1, replicas=1) +AS SELECT + "slot" AS "slot", + "blockhash" AS "blockhash", + "recentBlockhash" AS "recentBlockhash", + "blockTime" AS "blockTime", + "payload" AS "payload", + "type" as "type" +FROM solana_events +WHERE EXTRACTJSONFIELD("payload", '$.programId') = 'namesLPneVptA9Z5rqUDD9tMTWEJwofgaYwp8cawRkX' +EMIT CHANGES; + CREATE OR REPLACE STREAM create_metadata_events WITH (kafka_topic='json.solana.create_metadata_events', value_format='json', partitions=1, replicas=1) AS SELECT @@ -216,3 +229,41 @@ AS SELECT FROM solana_events WHERE "type" = 'InitializeTokenAccount' EMIT CHANGES; + +CREATE OR REPLACE STREAM create_name_events +WITH (kafka_topic='json.solana.create_name_events', value_format='json', partitions=1, replicas=1) +AS SELECT + "slot" AS "slot", + "blockhash" AS "blockhash", + "recentBlockhash" AS "recentBlockhash", + "blockTime" AS 
"blockTime", + EXTRACTJSONFIELD("payload", '$.instructionIndex') AS "instructionIndex", + EXTRACTJSONFIELD("payload", '$.innerIndex') AS "innerIndex", + EXTRACTJSONFIELD("payload", '$.data.hashedName') AS "hashedName", + EXTRACTJSONFIELD("payload", '$.data.lamports') AS "lamports", + EXTRACTJSONFIELD("payload", '$.data.space') AS "space", + EXTRACTJSONFIELD("payload", '$.accounts.name') AS "name", + EXTRACTJSONFIELD("payload", '$.accounts.owner') AS "owner", + EXTRACTJSONFIELD("payload", '$.accounts.class') AS "class", + EXTRACTJSONFIELD("payload", '$.accounts.parent') AS "parent", + EXTRACTJSONFIELD("payload", '$.accounts.parentOwner') AS "parentOwner" +FROM spl_name_service_events +WHERE "type" = 'CreateNameServiceName' +EMIT CHANGES; + +CREATE OR REPLACE STREAM update_name_events +WITH (kafka_topic='json.solana.update_name_events', value_format='json', partitions=1, replicas=1) +AS SELECT + "slot" AS "slot", + "blockhash" AS "blockhash", + "recentBlockhash" AS "recentBlockhash", + "blockTime" AS "blockTime", + EXTRACTJSONFIELD("payload", '$.instructionIndex') AS "instructionIndex", + EXTRACTJSONFIELD("payload", '$.innerIndex') AS "innerIndex", + EXTRACTJSONFIELD("payload", '$.data.offset') AS "offset", + EXTRACTJSONFIELD("payload", '$.data.data') AS "data", + EXTRACTJSONFIELD("payload", '$.accounts.name') AS "name", + EXTRACTJSONFIELD("payload", '$.accounts.ownerOrClass') AS "ownerOrClass" +FROM spl_name_service_events +WHERE "type" = 'UpdateNameServiceName' +EMIT CHANGES; \ No newline at end of file From f9e2c11745db6d2a459455cd2eedec077727378d Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Mon, 10 Jan 2022 18:42:27 -0600 Subject: [PATCH 04/14] Implement opt out message bot --- .vscode/launch.json | 68 +++++- package.json | 2 + src/kafka-signature-collector.ts | 33 +-- src/kafka-signature-identifier.ts | 2 +- src/twitter-bot/messages-retriever/index.ts | 147 ++++++++++++ src/twitter-bot/opt-out-response/index.ts | 73 ++++++ src/twitter-bot/opt-out/index.ts 
| 174 ++++++++++++++ .../unclaimed-create-bot}/index.ts | 4 +- src/utils/group.ts | 18 ++ src/utils/lru.ts | 31 +++ yarn.lock | 218 +++++++++++++++++- 11 files changed, 727 insertions(+), 43 deletions(-) create mode 100644 src/twitter-bot/messages-retriever/index.ts create mode 100644 src/twitter-bot/opt-out-response/index.ts create mode 100644 src/twitter-bot/opt-out/index.ts rename src/{twitter-unclaimed-create-bot => twitter-bot/unclaimed-create-bot}/index.ts (94%) create mode 100644 src/utils/group.ts create mode 100644 src/utils/lru.ts diff --git a/.vscode/launch.json b/.vscode/launch.json index ce1e9a5..95e844f 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -259,7 +259,7 @@ "skipFiles": [ "/**" ], - "program": "${workspaceFolder}/src/twitter-unclaimed-create-bot/index.ts", + "program": "${workspaceFolder}/src/twitter-bot/unclaimed-create-bot/index.ts", "preLaunchTask": "tsc: build - tsconfig.json", "outFiles": [ "${workspaceFolder}/dist/lib/**/*.js" @@ -276,6 +276,72 @@ "COLLECTIVE": "", "MESSAGE": "a social token was created for you on Wum.bo. You can now monetize your social media efforts! For more info and to claim your token, visit wum.bo/claim! You can also simply ignore this message. 
To shut off the token, message us OPT OUT" } + }, { + "type": "pwa-node", + "request": "launch", + "name": "Twitter Messages", + "skipFiles": [ + "/**" + ], + "program": "${workspaceFolder}/src/twitter-bot/messages-retriever/index.ts", + "preLaunchTask": "tsc: build - tsconfig.json", + "outFiles": [ + "${workspaceFolder}/dist/lib/**/*.js" + ], + "env": { + "KAFKA_BOOTSTRAP_SERVERS": "localhost:29092", + "KAFKA_TOPIC": "json.solana.twitter_bot_messages", + "TWITTER_CONSUMER_KEY": "", + "TWITTER_CONSUMER_SECRET": "", + "TWITTER_ACCESS_TOKEN_KEY": "", + "TWITTER_ACCESS_TOKEN_SECRET": "", + } + }, { + "type": "pwa-node", + "request": "launch", + "name": "Opt Out", + "skipFiles": [ + "/**" + ], + "program": "${workspaceFolder}/src/twitter-bot/opt-out/index.ts", + "preLaunchTask": "tsc: build - tsconfig.json", + "outFiles": [ + "${workspaceFolder}/dist/lib/**/*.js" + ], + "env": { + "KAFKA_BOOTSTRAP_SERVERS": "localhost:29092", + "KAFKA_TOPIC": "json.solana.opt_outs", + "KAFKA_INPUT_TOPIC": "json.solana.twitter_bot_messages", + "KAFKA_GROUP_ID": "opt-out-5", + "KAFKA_OFFSET_RESET": "earliest", + "SOLANA_URL": "https://api.devnet.solana.com", + "TWITTER_TLD": "EEbZHaBD4mreYS6enRqytXvXfmRESLWXXrXbtZLWyd6X", + "TWITTER_SERVICE_ACCOUNT": "[65,132,47,88,190,203,121,144,128,74,168,72,223,142,99,217,37,69,160,251,149,35,244,207,84,215,60,50,97,177,113,194,233,135,171,110,133,84,123,5,221,78,104,240,67,217,2,28,6,229,231,56,141,138,249,55,23,239,192,197,165,117,249,85]", + "PAYER_SERVICE_ACCOUNT": "[65,132,47,88,190,203,121,144,128,74,168,72,223,142,99,217,37,69,160,251,149,35,244,207,84,215,60,50,97,177,113,194,233,135,171,110,133,84,123,5,221,78,104,240,67,217,2,28,6,229,231,56,141,138,249,55,23,239,192,197,165,117,249,85]", + "INCORRECT_MESSAGE_RESPONSE": "I'm sorry, I didn't understand that message. You can say \"OPT OUT\" to opt out." 
+ } + }, { + "type": "pwa-node", + "request": "launch", + "name": "Opt Out Response", + "skipFiles": [ + "/**" + ], + "program": "${workspaceFolder}/src/twitter-bot/opt-out-response/index.ts", + "preLaunchTask": "tsc: build - tsconfig.json", + "outFiles": [ + "${workspaceFolder}/dist/lib/**/*.js" + ], + "env": { + "KAFKA_BOOTSTRAP_SERVERS": "localhost:29092", + "KAFKA_INPUT_TOPIC": "json.solana.opt_outs", + "KAFKA_GROUP_ID": "opt-out-respond", + "KAFKA_OFFSET_RESET": "earliest", + "TWITTER_CONSUMER_KEY": "", + "TWITTER_CONSUMER_SECRET": "", + "TWITTER_ACCESS_TOKEN_KEY": "", + "TWITTER_ACCESS_TOKEN_SECRET": "", + } } ] } \ No newline at end of file diff --git a/package.json b/package.json index 2bffa62..8bfe1b7 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,9 @@ "dependencies": { "@project-serum/anchor": "^0.11.1", "@project-serum/common": "^0.0.1-beta.3", + "@solana/spl-name-service": "^0.1.3", "@solana/web3.js": "^1.22.0", + "@strata-foundation/spl-token-collective": "^2.0.6", "@strata-foundation/spl-utils": "^1.1.1", "@types/bs58": "^4.0.1", "@types/uuid": "^8.3.1", diff --git a/src/kafka-signature-collector.ts b/src/kafka-signature-collector.ts index 831977e..b132eaa 100644 --- a/src/kafka-signature-collector.ts +++ b/src/kafka-signature-collector.ts @@ -2,43 +2,12 @@ import { ConfirmedSignatureInfo, Finality } from "@solana/web3.js"; import { kafka } from "./setup/kafka"; +import { LRU } from "./utils/lru" const { KAFKA_TOPIC, KAFKA_INPUT_TOPIC, KAFKA_GROUP_ID } = process.env const producer = kafka.producer() -class LRU { - max: number; - cache: Map; - - constructor(max = 10) { - this.max = max; - this.cache = new Map(); - } - - get(key: A): B | undefined { - let item = this.cache.get(key); - if (item) { - // refresh key - this.cache.delete(key); - this.cache.set(key, item); - } - return item; - } - - set(key: A, val: B) { - // refresh key - if (this.cache.has(key)) this.cache.delete(key); - // evict oldest - else if (this.cache.size == this.max) 
this.cache.delete(this.first()); - this.cache.set(key, val); - } - - first() { - return this.cache.keys().next().value; - } -} - async function run() { const admin = kafka.admin(); await admin.connect() diff --git a/src/kafka-signature-identifier.ts b/src/kafka-signature-identifier.ts index 0e0e16e..d2a3a3d 100644 --- a/src/kafka-signature-identifier.ts +++ b/src/kafka-signature-identifier.ts @@ -58,7 +58,7 @@ async function getKafkaSignature(): Promise { } console.log(`Found signature ${signature} in kafka`) resolve(signature) - }, 10 * 1000) + }, 60 * 1000) }) } diff --git a/src/twitter-bot/messages-retriever/index.ts b/src/twitter-bot/messages-retriever/index.ts new file mode 100644 index 0000000..d22fee9 --- /dev/null +++ b/src/twitter-bot/messages-retriever/index.ts @@ -0,0 +1,147 @@ +import { LRU } from "../../utils/lru"; +import { kafka } from "../../setup/kafka"; +import { twitterClient } from "../../setup/twitter"; + +const { KAFKA_TOPIC, POLL_DURATION = 60 * 1000 } = process.env + +export async function sleep(ts: number) { + return new Promise((resolve) => { + setTimeout(resolve, ts); + }) +} + +const FETCH_SIZE = 20 +const lru = new LRU(FETCH_SIZE); + +async function populateLRU(): Promise { + console.log("Searching for last messages...") + const consumer = kafka.consumer({ + groupId: `twitter-messages-retriver` + }); + + await consumer.connect(); + await consumer.subscribe({ + topic: KAFKA_TOPIC!, + fromBeginning: false + }); + const admin = kafka.admin(); + await admin.connect(); + + consumer.run({ + eachBatchAutoResolve: false, + eachBatch: async ({ batch: { messages } }) => { + messages.forEach(message => { + const msg = JSON.parse(message.value!.toString()) + lru.set(msg.id, msg); + }) + } + }) + + const offsets = await admin.fetchTopicOffsets(KAFKA_TOPIC!) 
+ await Promise.all( + offsets.map(async offset => { + await consumer.seek({ + topic: KAFKA_TOPIC!, + partition: offset.partition, + offset: Math.max(Number(offset.high) - (FETCH_SIZE + 5), 0).toString() + }) + }) + ); + + return new Promise((resolve, reject) => { + setTimeout(async () => { + try { + await admin.disconnect(); + await consumer.disconnect() + } catch (e) { + reject(e) + } + resolve() + }, 60 * 1000) + }) +} + + +async function getRecentMessages(cursor?: string): Promise { + const messages = await twitterClient.v1.get("/direct_messages/events/list.json", { + cursor + }) + const filtered = messages.events + .filter((message: any) => !lru.get(message.id)); + + filtered.forEach((m: any) => lru.set(m.id, m)); + + // If none of these messages were in our lru, fetch more + if (messages.next_cursor && filtered.length == messages.events.length) { + console.log(`More than ${FETCH_SIZE} new messages, fetching more...`) + await sleep(2000) + return [...filtered, ...await getRecentMessages(messages.next_cursor)] + } + + return filtered; +} + +const userInfoCache = new LRU(1000); + +async function getUserInfo(userId: string): Promise<{ handle: string, name: string }> { + if (userInfoCache.get(userId)) { + return userInfoCache.get(userId)!; + } + + const user = await twitterClient.v1.user({ + user_id: userId + }); + + const ret = { + handle: user.screen_name, + name: user.name, + } + userInfoCache.set(userId, ret); + + return ret; +} + +export type Truthy = T extends false | "" | 0 | null | undefined ? 
never : T; // from lodash + +export const truthy = (value: T): value is Truthy => !!value; + +async function run() { + const producer = kafka.producer() + await producer.connect(); + await populateLRU(); + + while (true) { + console.log("Fetching messages...") + const messages = await getRecentMessages(); + const messagesWithHandles = (await Promise.all(messages.map(async (message: any) => { + if (message.message_create) { + const userId = message.message_create.sender_id; + + return { + ...await getUserInfo(userId), + body: message.message_create.message_data.text, + id: message.id, + userId, + createdTimestamp: message.created_timestamp + } + } + }))).filter(truthy); + + await producer.send({ + topic: KAFKA_TOPIC!, + messages: messagesWithHandles.map(message => ({ + key: message.id, + value: JSON.stringify(message), + timestamp: message.createdTimestamp as string + })) + }) + await sleep(Number(POLL_DURATION)) + } + +} + +run().catch(e => { + console.error(e); + console.error(e.stack); + process.exit(1); +}) diff --git a/src/twitter-bot/opt-out-response/index.ts b/src/twitter-bot/opt-out-response/index.ts new file mode 100644 index 0000000..ebe431c --- /dev/null +++ b/src/twitter-bot/opt-out-response/index.ts @@ -0,0 +1,73 @@ +import { twitterClient } from "../../setup/twitter"; +import { promiseAllGrouped } from "../../utils/group"; +import { kafka } from "../../setup/kafka"; + +const { KAFKA_INPUT_TOPIC, KAFKA_GROUP_ID } = process.env + +async function run() { + const consumer = kafka.consumer({ + groupId: KAFKA_GROUP_ID!, + maxBytes: process.env.MAX_BYTES ? Number(process.env.MAX_BYTES) : undefined + }); + const admin = kafka.admin(); + await admin.connect(); + // Force failure if topic doesn't exist + await admin.fetchTopicMetadata({ topics: [KAFKA_INPUT_TOPIC!] 
}) + await admin.disconnect(); + + await consumer.connect(); + await consumer.subscribe({ + topic: KAFKA_INPUT_TOPIC!, + fromBeginning: process.env["KAFKA_OFFSET_RESET"] === "earliest", + }); + + return new Promise((resolve, reject) => { + consumer.run({ + eachBatchAutoResolve: true, + autoCommitThreshold: process.env.AUTO_COMMIT_THRESHOLD ? Number(process.env.AUTO_COMMIT_THRESHOLD) : 20, + eachBatch: async ({ batch: { messages }, heartbeat, commitOffsetsIfNecessary }) => { + try { + await promiseAllGrouped( + process.env.GROUP_SIZE ? Number(process.env.GROUP_SIZE) : 5, + messages.map(({ value, offset }) => ({ ...JSON.parse(value!.toString()), offset })).map((message) => async () => { + const userId = message.userId; + let text; + if (message.error) { + text = message.error; + } else if (message.optedOut) { + text = "Opt Out Successful! We're sorry to see you go." + } + if (userId) { + await twitterClient.v1.post("/direct_messages/events/new.json", { + event: { + type: "message_create", + message_create: { + target: { + recipient_id: userId + }, + message_data: { + text + } + } + } + }, { + forceBodyMode: "json" + }); + } + + await heartbeat(); + }) + ) + } catch (e) { + reject(e); + } + } + }) + }); +} + +run().catch(e => { + console.error(e); + console.error(e.stack); + process.exit(1); +}) \ No newline at end of file diff --git a/src/twitter-bot/opt-out/index.ts b/src/twitter-bot/opt-out/index.ts new file mode 100644 index 0000000..86f19b0 --- /dev/null +++ b/src/twitter-bot/opt-out/index.ts @@ -0,0 +1,174 @@ +import { Provider } from "@project-serum/anchor"; +import { getHashedName, getNameAccountKey, NameRegistryState } from "@solana/spl-name-service"; +import { ConfirmedSignatureInfo, Finality, Keypair, PublicKey } from "@solana/web3.js"; +import { SplTokenCollective } from "@strata-foundation/spl-token-collective"; +import { deserializeUnchecked } from "borsh"; +import { promiseAllGrouped } from "../../utils/group"; +import { kafka } from 
"../../setup/kafka"; +import { connection } from "../../setup/solana"; +import { Wallet as NodeWallet } from "@project-serum/anchor"; + +const { KAFKA_TOPIC, KAFKA_INPUT_TOPIC, KAFKA_GROUP_ID } = process.env + +const twitterServiceAccount = Keypair.fromSecretKey( + new Uint8Array(JSON.parse(process.env.TWITTER_SERVICE_ACCOUNT!)) +); +const payerServiceAccount = Keypair.fromSecretKey( + new Uint8Array(JSON.parse(process.env.PAYER_SERVICE_ACCOUNT!)) +); + +const TLD = new PublicKey(process.env["TWITTER_TLD"]!); + +const producer = kafka.producer() + +async function getTwitterRegistryKey( + handle: string, + twitterRootParentRegistryKey?: PublicKey +): Promise { + const hashedTwitterHandle = await getHashedName(handle); + const twitterHandleRegistryKey = await getNameAccountKey( + hashedTwitterHandle, + undefined, + twitterRootParentRegistryKey + ); + + return twitterHandleRegistryKey; +} + +async function getOwnerForName( + handle: string | undefined, + tld: PublicKey | undefined +): Promise { + const key = handle && await getTwitterRegistryKey(handle, tld); + if (key) { + const registryRaw = await connection.getAccountInfo(key); + const registry = registryRaw && deserializeUnchecked( + NameRegistryState.schema, + NameRegistryState, + registryRaw.data + ); + + return registry?.owner; + } +} + +export async function getClaimedTokenRefKeyForName( + handle: string, + mint: PublicKey | undefined | null = undefined, + tld: PublicKey +): Promise { + const owner = await getOwnerForName(handle, tld); + if (owner) { + return ( + await SplTokenCollective.ownerTokenRefKey({ + owner, + mint, + }) + )[0]; + } +} +export async function getUnclaimedTokenRefKeyForName( + handle: string, + mint: PublicKey | undefined | null, + tld: PublicKey | undefined +): Promise { + const name = await getTwitterRegistryKey(handle, tld); + + return ( + await SplTokenCollective.ownerTokenRefKey({ + name, + mint: mint || SplTokenCollective.OPEN_COLLECTIVE_MINT_ID, + }) + )[0]; +} + +async function 
run() { + const consumer = kafka.consumer({ + groupId: KAFKA_GROUP_ID!, + maxBytes: process.env.MAX_BYTES ? Number(process.env.MAX_BYTES) : undefined + }); + const admin = kafka.admin(); + await admin.connect(); + // Force failure if topic doesn't exist + await admin.fetchTopicMetadata({ topics: [KAFKA_INPUT_TOPIC!] }) + await admin.disconnect(); + + await producer.connect(); + await consumer.connect(); + await consumer.subscribe({ + topic: KAFKA_INPUT_TOPIC!, + fromBeginning: process.env["KAFKA_OFFSET_RESET"] === "earliest", + }); + + const tokenCollectiveSdk = await SplTokenCollective.init(new Provider(connection, new NodeWallet(payerServiceAccount), {})); + + return new Promise((resolve, reject) => { + consumer.run({ + eachBatchAutoResolve: true, + autoCommitThreshold: process.env.AUTO_COMMIT_THRESHOLD ? Number(process.env.AUTO_COMMIT_THRESHOLD) : 20, + eachBatch: async ({ batch: { messages }, heartbeat, commitOffsetsIfNecessary }) => { + try { + const toProduceMessages = await promiseAllGrouped( + process.env.GROUP_SIZE ? 
Number(process.env.GROUP_SIZE) : 5, + messages.map(({ value, offset }) => ({ ...JSON.parse(value!.toString()), offset })).filter(m => m.handle == "redacted_noah" && !m.body.includes("I'm")).map((message) => async () => { + const handle = message.handle; + const dm = message.body as string | undefined; + if (dm?.trim().toUpperCase() === "OPT OUT") { + const unclaimedRefKey = await getUnclaimedTokenRefKeyForName(handle, null, TLD); + const unclaimedTokenRef = await tokenCollectiveSdk.getTokenRef(unclaimedRefKey); + await heartbeat() + + if (!unclaimedTokenRef) { + return { + ...message, + error: `No social token was found for ${handle}` + } + } else if (unclaimedTokenRef?.isOptedOut) { + return { + ...message, + error: `${handle} has already been opted out` + } + } + + if (unclaimedTokenRef) { + const { instructions, signers } = await tokenCollectiveSdk.optOutInstructions({ + tokenRef: unclaimedRefKey!, + handle, + nameParent: TLD + }); + await tokenCollectiveSdk.sendInstructions(instructions, [...signers, twitterServiceAccount], payerServiceAccount.publicKey); + } + + return { + ...message, + optedOut: true, + tokenRef: unclaimedRefKey + } + } else { + return { + ...message, + error: process.env.INCORRECT_MESSAGE_RESPONSE + } + } + }) + ) + await producer.send({ + topic: KAFKA_TOPIC!, + messages: toProduceMessages.map(message => ({ + key: message.handle, + value: JSON.stringify(message) + })) + }) + } catch (e) { + reject(e); + } + } + }) + }); +} + +run().catch(e => { + console.error(e); + console.error(e.stack); + process.exit(1); +}) \ No newline at end of file diff --git a/src/twitter-unclaimed-create-bot/index.ts b/src/twitter-bot/unclaimed-create-bot/index.ts similarity index 94% rename from src/twitter-unclaimed-create-bot/index.ts rename to src/twitter-bot/unclaimed-create-bot/index.ts index 8cbae10..d82a15a 100644 --- a/src/twitter-unclaimed-create-bot/index.ts +++ b/src/twitter-bot/unclaimed-create-bot/index.ts @@ -1,5 +1,5 @@ -import { kafka } from 
"../setup/kafka"; -import { twitterClient } from "../setup/twitter"; +import { kafka } from "../../setup/kafka"; +import { twitterClient } from "../../setup/twitter"; const { KAFKA_GROUP_ID, KAFKA_INPUT_TOPIC, MESSAGE, COLLECTIVE } = process.env diff --git a/src/utils/group.ts b/src/utils/group.ts new file mode 100644 index 0000000..5e68809 --- /dev/null +++ b/src/utils/group.ts @@ -0,0 +1,18 @@ +export function groupByN(n: number, data: T[]): T[][] { + let result = []; + for (let i = 0; i < data.length; i += n) result.push(data.slice(i, i + n)); + return result; +}; + +type PromFunc = () => Promise; +export async function promiseAllGrouped(size: number, funcs: PromFunc[]): Promise { + const results: A[] = []; + const grouped = groupByN(size, funcs); + for(let funcs of grouped) { + await Promise.all(funcs.map(async func => { + results.push(await func()); + })) + } + + return results; +} \ No newline at end of file diff --git a/src/utils/lru.ts b/src/utils/lru.ts new file mode 100644 index 0000000..acdeb75 --- /dev/null +++ b/src/utils/lru.ts @@ -0,0 +1,31 @@ +export class LRU { + max: number; + cache: Map; + + constructor(max = 10) { + this.max = max; + this.cache = new Map(); + } + + get(key: A): B | undefined { + let item = this.cache.get(key); + if (item) { + // refresh key + this.cache.delete(key); + this.cache.set(key, item); + } + return item; + } + + set(key: A, val: B) { + // refresh key + if (this.cache.has(key)) this.cache.delete(key); + // evict oldest + else if (this.cache.size == this.max) this.cache.delete(this.first()); + this.cache.set(key, val); + } + + first() { + return this.cache.keys().next().value; + } +} diff --git a/yarn.lock b/yarn.lock index b3d49ce..c219b79 100644 --- a/yarn.lock +++ b/yarn.lock @@ -18,6 +18,25 @@ dependencies: regenerator-runtime "^0.13.4" +"@bonfida/spl-name-service@^0.1.22": + version "0.1.24" + resolved 
"https://registry.yarnpkg.com/@bonfida/spl-name-service/-/spl-name-service-0.1.24.tgz#2403bbff2ff6ea6fd07f91efc74b59926e58f3ed" + integrity sha512-9+iMNPIiTC8BbdzMgqYo7UjGgi7pONv86lOMkO0yxu12/sYw2ogrhap/r+d+BWRahwL4X7jWwDpXo8im2WcyjQ== + dependencies: + "@project-serum/sol-wallet-adapter" "^0.1.5" + "@solana/spl-token" "0.1.3" + "@solana/web3.js" "^1.29.2" + bip32 "^2.0.6" + bn.js "^5.1.3" + borsh "^0.6.0" + bs58 "4.0.1" + buffer-layout "^1.2.0" + core-util-is "^1.0.2" + csv-parser "^3.0.0" + fs "^0.0.1-security" + tweetnacl "^1.0.3" + webpack-dev-server "^3.11.2" + "@ethersproject/bytes@^5.5.0": version "5.5.0" resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.5.0.tgz#cb11c526de657e7b45d2e0f0246fb3b9d29a601c" @@ -84,6 +103,26 @@ snake-case "^3.0.4" toml "^3.0.0" +"@project-serum/anchor@^0.18.0": + version "0.18.2" + resolved "https://registry.yarnpkg.com/@project-serum/anchor/-/anchor-0.18.2.tgz#0f13b5c2046446b7c24cf28763eec90febb28485" + integrity sha512-uyjiN/3Ipp+4hrZRm/hG18HzGLZyvP790LXrCsGO3IWxSl28YRhiGEpKnZycfMW94R7nxdUoE3wY67V+ZHSQBQ== + dependencies: + "@project-serum/borsh" "^0.2.2" + "@solana/web3.js" "^1.17.0" + base64-js "^1.5.1" + bn.js "^5.1.2" + bs58 "^4.0.1" + buffer-layout "^1.2.0" + camelcase "^5.3.1" + crypto-hash "^1.3.0" + eventemitter3 "^4.0.7" + find "^0.3.0" + js-sha256 "^0.9.0" + pako "^2.0.3" + snake-case "^3.0.4" + toml "^3.0.0" + "@project-serum/borsh@^0.2.2": version "0.2.2" resolved "https://registry.yarnpkg.com/@project-serum/borsh/-/borsh-0.2.2.tgz#63e558f2d6eb6ab79086bf499dea94da3182498f" @@ -112,6 +151,14 @@ bn.js "^5.1.2" buffer-layout "^1.2.0" +"@project-serum/sol-wallet-adapter@^0.1.5": + version "0.1.8" + resolved "https://registry.yarnpkg.com/@project-serum/sol-wallet-adapter/-/sol-wallet-adapter-0.1.8.tgz#90c6c1da793d32ed4ba3c67c5702a5bc804ef197" + integrity sha512-lKMgp7bsKpkrtBtIaEjtGuUMke0GUqFUL39Z7cjqsQpTVhkU5Ez4zHyjhXqAEORRGLFbwx/+H6HLpwppxpUDMQ== + dependencies: + bs58 "^4.0.1" + eventemitter3 
"^4.0.4" + "@solana/buffer-layout@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@solana/buffer-layout/-/buffer-layout-3.0.0.tgz#b9353caeb9a1589cb77a1b145bcb1a9a93114326" @@ -138,6 +185,18 @@ tweetnacl "^1.0.3" webpack-dev-server "^3.11.2" +"@solana/spl-token@0.1.3": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@solana/spl-token/-/spl-token-0.1.3.tgz#6bf7c1a74cd95dabe8b8164e4c13b987db5be3bd" + integrity sha512-M251on5RDz8VQXoKoQPeLANEyI4qhThKLZBeUiLbFZ93KRgouGfmV5D/bUZXkLF75PlLcARIzU9ptoHOlZ6SbQ== + dependencies: + "@babel/runtime" "^7.10.5" + "@solana/web3.js" "^1.2.2" + bn.js "^5.1.0" + buffer "6.0.3" + buffer-layout "^1.2.0" + dotenv "8.2.0" + "@solana/spl-token@0.1.6": version "0.1.6" resolved "https://registry.yarnpkg.com/@solana/spl-token/-/spl-token-0.1.6.tgz#fa136b0a3db84f07a99bc0e54cf4e91f2d6da2e8" @@ -162,7 +221,7 @@ buffer-layout "^1.2.0" dotenv "10.0.0" -"@solana/web3.js@^1.12.0", "@solana/web3.js@^1.30.2": +"@solana/web3.js@^1.12.0", "@solana/web3.js@^1.2.2", "@solana/web3.js@^1.29.2", "@solana/web3.js@^1.30.2": version "1.31.0" resolved "https://registry.yarnpkg.com/@solana/web3.js/-/web3.js-1.31.0.tgz#7a313d4c1a90b77f27ddbfe845a10d6883e06452" integrity sha512-7nHHx1JNFnrt15e9y8m38I/EJCbaB+bFC3KZVM1+QhybCikFxGMtGA5r7PDC3GEL1R2RZA8yKoLkDKo3vzzqnw== @@ -242,6 +301,44 @@ superstruct "^0.14.2" tweetnacl "^1.0.0" +"@strata-foundation/spl-token-bonding@^2.0.6": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@strata-foundation/spl-token-bonding/-/spl-token-bonding-2.0.6.tgz#916da40954d57e12dd0fa28fbd410518ad293471" + integrity sha512-8Kr3Wog5wcgcdib9ous4R9l/SJO4YDcksC3O9Khk+4P0ji6H4hSA8oz4NIk35RfoPxOZ5G3JV+MmITlCoSOFew== + dependencies: + "@project-serum/anchor" "^0.18.0" + "@project-serum/common" "^0.0.1-beta.3" + "@solana/web3.js" "^1.29.2" + "@strata-foundation/spl-utils" "^2.0.0" + bn.js "^5.2.0" + copyfiles "^2.4.1" + +"@strata-foundation/spl-token-collective@^2.0.6": + version "2.0.6" + resolved 
"https://registry.yarnpkg.com/@strata-foundation/spl-token-collective/-/spl-token-collective-2.0.6.tgz#3908c5ebcd21f6edfde0106cfb714479c5137fec" + integrity sha512-wjSsxeAjBolaU8rqDELt7Cd/nTSEAzNgx2JUvQLy1FRLMz25N/i97ASyd6Oq4gbgfscgk8lHmpdH++lE/brAQg== + dependencies: + "@bonfida/spl-name-service" "^0.1.22" + "@project-serum/anchor" "^0.18.0" + "@project-serum/common" "^0.0.1-beta.3" + "@solana/web3.js" "^1.29.2" + "@strata-foundation/spl-token-bonding" "^2.0.6" + "@strata-foundation/spl-token-staking" "^2.0.6" + "@strata-foundation/spl-utils" "^2.0.0" + bn.js "^5.2.0" + copyfiles "^2.4.1" + +"@strata-foundation/spl-token-staking@^2.0.6": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@strata-foundation/spl-token-staking/-/spl-token-staking-2.0.6.tgz#66ea3fee9911d42cb49ce2d7935f1ce7f97f01c7" + integrity sha512-+wv55r/e/ytVpvp9vsCNvdwvyXtFYP786yru8IjpnKZnxA3zbpKEBr90Bh7oD6xdiycZOXtqmyz89ywAcvpfuA== + dependencies: + "@project-serum/common" "^0.0.1-beta.3" + "@solana/web3.js" "^1.29.2" + "@strata-foundation/spl-utils" "^2.0.0" + bn.js "^5.2.0" + semver "^7.3.5" + "@strata-foundation/spl-utils@^1.1.1": version "1.1.1" resolved "https://registry.yarnpkg.com/@strata-foundation/spl-utils/-/spl-utils-1.1.1.tgz#abbbdb7fe41d4f96c228c9281c6c3e36ad40ae50" @@ -254,6 +351,18 @@ bs58 "^4.0.1" tweetnacl "^1.0.3" +"@strata-foundation/spl-utils@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@strata-foundation/spl-utils/-/spl-utils-2.0.0.tgz#35ea2596e1a0f95d7eaed1ac60a1c137a1bb267f" + integrity sha512-e/WYg5eA1eJQRspKi5CB3ViulSbEwX5CotQHHMA97bljFMcUCN4sJce+HkaVCiX36gCA/NxW4OBwQ6PBEDpXGw== + dependencies: + "@metaplex/arweave-cost" "^2.0.0" + "@metaplex/js" "^4.3.0" + "@solana/spl-name-service" "^0.1.3" + "@solana/web3.js" "^1.30.2" + bs58 "^4.0.1" + tweetnacl "^1.0.3" + "@tsconfig/node10@^1.0.7": version "1.0.8" resolved "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.8.tgz" @@ -1121,6 +1230,19 @@ copy-descriptor@^0.1.0: resolved 
"https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= +copyfiles@^2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/copyfiles/-/copyfiles-2.4.1.tgz#d2dcff60aaad1015f09d0b66e7f0f1c5cd3c5da5" + integrity sha512-fereAvAvxDrQDOXybk3Qu3dPbOoKoysFMWtkY3mv5BsL8//OSZVL5DCLYqgRfY5cWirgRzlC+WSrxp6Bo3eNZg== + dependencies: + glob "^7.0.5" + minimatch "^3.0.3" + mkdirp "^1.0.4" + noms "0.0.0" + through2 "^2.0.1" + untildify "^4.0.0" + yargs "^16.1.0" + core-util-is@^1.0.2, core-util-is@~1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" @@ -1189,6 +1311,13 @@ crypto@^1.0.1: resolved "https://registry.yarnpkg.com/crypto/-/crypto-1.0.1.tgz#2af1b7cad8175d24c8a1b0778255794a21803037" integrity sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig== +csv-parser@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/csv-parser/-/csv-parser-3.0.0.tgz#b88a6256d79e090a97a1b56451f9327b01d710e7" + integrity sha512-s6OYSXAK3IdKqYO33y09jhypG/bSDHPuyCme/IdEHfWpLf/jKcpitVFyOC6UemgGk8v7Q5u2XE0vvwmanxhGlQ== + dependencies: + minimist "^1.2.0" + debug@2.6.9, debug@^2.2.0, debug@^2.3.3: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" @@ -1386,6 +1515,11 @@ dotenv@10.0.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== +dotenv@8.2.0: + version "8.2.0" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.2.0.tgz#97e619259ada750eea3e4ea3e26bceea5424b16a" + integrity sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw== + ee-first@1.1.1: version "1.1.1" 
resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" @@ -1465,7 +1599,7 @@ etag@~1.8.1: resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= -eventemitter3@^4.0.0, eventemitter3@^4.0.7: +eventemitter3@^4.0.0, eventemitter3@^4.0.4, eventemitter3@^4.0.7: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== @@ -1784,7 +1918,7 @@ glob@7.1.6: once "^1.3.0" path-is-absolute "^1.0.0" -glob@^7.0.3, glob@^7.1.3: +glob@^7.0.3, glob@^7.0.5, glob@^7.1.3: version "7.2.0" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== @@ -2022,7 +2156,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -2264,6 +2398,11 @@ is-wsl@^1.1.0: resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= +isarray@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= + isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" @@ -2441,6 +2580,13 @@ lower-case@^2.0.2: dependencies: tslib "^2.0.3" +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + make-error@^1.1.1: version "1.3.6" resolved "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz" @@ -2541,7 +2687,7 @@ minimalistic-crypto-utils@^1.0.1: resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= -minimatch@3.0.4, minimatch@^3.0.4: +minimatch@3.0.4, minimatch@^3.0.3, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== @@ -2568,6 +2714,11 @@ mkdirp@^0.5.1, mkdirp@^0.5.5: dependencies: minimist "^1.2.5" +mkdirp@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" + integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== + mocha@^8.1.0: version "8.4.0" resolved "https://registry.yarnpkg.com/mocha/-/mocha-8.4.0.tgz#677be88bf15980a3cae03a73e10a0fc3997f0cff" @@ -2697,6 +2848,14 @@ node-gyp-build@^4.2.0: resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.2.3.tgz#ce6277f853835f718829efb47db20f3e4d9c4739" integrity sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg== +noms@0.0.0: + version "0.0.0" + resolved 
"https://registry.yarnpkg.com/noms/-/noms-0.0.0.tgz#da8ebd9f3af9d6760919b27d9cdc8092a7332859" + integrity sha1-2o69nzr51nYJGbJ9nNyAkqczKFk= + dependencies: + inherits "^2.0.1" + readable-stream "~1.0.31" + normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" @@ -3021,7 +3180,7 @@ raw-body@2.4.0: iconv-lite "0.4.24" unpipe "1.0.0" -readable-stream@^2.0.1, readable-stream@^2.0.2: +readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@~2.3.6: version "2.3.7" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== @@ -3043,6 +3202,16 @@ readable-stream@^3.0.6, readable-stream@^3.6.0: string_decoder "^1.1.1" util-deprecate "^1.0.1" +readable-stream@~1.0.31: + version "1.0.34" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" + integrity sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw= + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + readdirp@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" @@ -3266,6 +3435,13 @@ semver@^6.3.0: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== +semver@^7.3.5: + version "7.3.5" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" + integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ== + dependencies: + lru-cache "^6.0.0" + send@0.17.1: version "0.17.1" resolved 
"https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" @@ -3534,6 +3710,11 @@ string_decoder@^1.1.1: dependencies: safe-buffer "~5.2.0" +string_decoder@~0.10.x: + version "0.10.31" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" + integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ= + string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" @@ -3623,6 +3804,14 @@ text-encoding-utf-8@^1.0.2: resolved "https://registry.npmjs.org/text-encoding-utf-8/-/text-encoding-utf-8-1.0.2.tgz" integrity sha512-8bw4MY9WjdsD2aMtO0OzOCY3pXGYNx2d2FfHRVUKkiCPDWjKuOlhLVASS+pD7VkLTVjW268LYJHwsnPFlBpbAg== +through2@^2.0.1: + version "2.0.5" + resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" + integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== + dependencies: + readable-stream "~2.3.6" + xtend "~4.0.1" + "through@>=2.2.7 <3": version "2.3.8" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" @@ -3816,6 +4005,11 @@ unset-value@^1.0.0: has-value "^0.3.1" isobject "^3.0.0" +untildify@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b" + integrity sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw== + upath@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" @@ -4064,6 +4258,11 @@ xmlbuilder@~9.0.1: resolved "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz" integrity sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0= +xtend@~4.0.1: + version "4.0.2" + resolved 
"https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + y18n@^4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.3.tgz#b5f259c82cd6e336921efd7bfd8bf560de9eeedf" @@ -4074,6 +4273,11 @@ y18n@^5.0.5: resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + yargs-parser@20.2.4: version "20.2.4" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.4.tgz#b42890f14566796f85ae8e3a25290d205f154a54" @@ -4102,7 +4306,7 @@ yargs-unparser@2.0.0: flat "^5.0.2" is-plain-obj "^2.1.0" -yargs@16.2.0: +yargs@16.2.0, yargs@^16.1.0: version "16.2.0" resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== From 1b27c7e6e922be83f8f4f106050979759b763fb5 Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Mon, 10 Jan 2022 19:03:22 -0600 Subject: [PATCH 05/14] Fix to use generic handle to avoid opting on messaged from self --- .vscode/launch.json | 3 ++- src/twitter-bot/opt-out/index.ts | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 95e844f..74496d9 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -318,7 +318,8 @@ "TWITTER_TLD": "EEbZHaBD4mreYS6enRqytXvXfmRESLWXXrXbtZLWyd6X", "TWITTER_SERVICE_ACCOUNT": 
"[65,132,47,88,190,203,121,144,128,74,168,72,223,142,99,217,37,69,160,251,149,35,244,207,84,215,60,50,97,177,113,194,233,135,171,110,133,84,123,5,221,78,104,240,67,217,2,28,6,229,231,56,141,138,249,55,23,239,192,197,165,117,249,85]", "PAYER_SERVICE_ACCOUNT": "[65,132,47,88,190,203,121,144,128,74,168,72,223,142,99,217,37,69,160,251,149,35,244,207,84,215,60,50,97,177,113,194,233,135,171,110,133,84,123,5,221,78,104,240,67,217,2,28,6,229,231,56,141,138,249,55,23,239,192,197,165,117,249,85]", - "INCORRECT_MESSAGE_RESPONSE": "I'm sorry, I didn't understand that message. You can say \"OPT OUT\" to opt out." + "INCORRECT_MESSAGE_RESPONSE": "I'm sorry, I didn't understand that message. You can say \"OPT OUT\" to opt out.", + "HANDLE": "redacted_noah" } }, { "type": "pwa-node", diff --git a/src/twitter-bot/opt-out/index.ts b/src/twitter-bot/opt-out/index.ts index 86f19b0..502938f 100644 --- a/src/twitter-bot/opt-out/index.ts +++ b/src/twitter-bot/opt-out/index.ts @@ -110,7 +110,7 @@ async function run() { try { const toProduceMessages = await promiseAllGrouped( process.env.GROUP_SIZE ? 
Number(process.env.GROUP_SIZE) : 5, - messages.map(({ value, offset }) => ({ ...JSON.parse(value!.toString()), offset })).filter(m => m.handle == "redacted_noah" && !m.body.includes("I'm")).map((message) => async () => { + messages.map(({ value, offset }) => ({ ...JSON.parse(value!.toString()), offset })).filter(m => m.handle != process.env.HANDLE).map((message) => async () => { const handle = message.handle; const dm = message.body as string | undefined; if (dm?.trim().toUpperCase() === "OPT OUT") { From 1d6521ca616b4c2f39099505934b743a0474e5c7 Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Tue, 11 Jan 2022 19:36:06 -0600 Subject: [PATCH 06/14] Switch ordering of start slot --- src/kafka-signature-identifier.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/kafka-signature-identifier.ts b/src/kafka-signature-identifier.ts index d2a3a3d..47b994c 100644 --- a/src/kafka-signature-identifier.ts +++ b/src/kafka-signature-identifier.ts @@ -107,7 +107,7 @@ async function run() { } await admin.disconnect() - const startSignature = START_SIGNATURE || await getKafkaSignature() || await getSolanaRecentSignature() + const startSignature = await getKafkaSignature() || START_SIGNATURE || await getSolanaRecentSignature() await producer.connect(); let currentSignature = startSignature; From 175a868a9dac20b9f36a0ef5baeb75d6785acfb4 Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Tue, 11 Jan 2022 23:28:03 -0600 Subject: [PATCH 07/14] Better handling under load --- src/kafka-signature-identifier.ts | 29 +++++----- src/kafka-signature-processor.ts | 60 +++++++++++++-------- src/twitter-bot/messages-retriever/index.ts | 5 +- 3 files changed, 54 insertions(+), 40 deletions(-) diff --git a/src/kafka-signature-identifier.ts b/src/kafka-signature-identifier.ts index 47b994c..fa1e2cd 100644 --- a/src/kafka-signature-identifier.ts +++ b/src/kafka-signature-identifier.ts @@ -68,7 +68,7 @@ function sleep(ms: number) { }) } -async function 
getAllSignatures(connection: Connection, until?: string, before?: string): Promise { +async function sendAllSignatures(connection: Connection, until?: string, before?: string): Promise { const signatures = await connection.getConfirmedSignaturesForAddress2( ADDRESS, { @@ -78,8 +78,20 @@ async function getAllSignatures(connection: Connection, until?: string, before?: FINALITY ); + await producer.sendBatch({ + acks: 1, + topicMessages: [{ + topic: KAFKA_TOPIC!, + messages: signatures.map(sig => ({ + value: JSON.stringify(sig), + key: sig.signature.toString(), + timestamp: ((sig.blockTime || 0) * 1000).toString() + })) + }] + }); + if (signatures.length === 1000) { - return [...signatures, ...await getAllSignatures(connection, until, signatures[signatures.length - 1].signature)] + await sendAllSignatures(connection, until, signatures[signatures.length - 1].signature) } return signatures; @@ -112,21 +124,10 @@ async function run() { await producer.connect(); let currentSignature = startSignature; while (true) { - const signatures = await getAllSignatures(connection, currentSignature || undefined) + const signatures = await sendAllSignatures(connection, currentSignature || undefined) if (signatures.length > 0) { console.log(`Fetched ${signatures.length} signatures from ${currentSignature}`); currentSignature = signatures[0].signature - await producer.sendBatch({ - acks: 1, - topicMessages: [{ - topic: KAFKA_TOPIC!, - messages: signatures.map(sig => ({ - value: JSON.stringify(sig), - key: sig.signature.toString(), - timestamp: ((sig.blockTime || 0) * 1000).toString() - })) - }] - }); } else { await sleep(SLEEP_TIME); } diff --git a/src/kafka-signature-processor.ts b/src/kafka-signature-processor.ts index edf1cf6..da01db9 100644 --- a/src/kafka-signature-processor.ts +++ b/src/kafka-signature-processor.ts @@ -1,4 +1,5 @@ import { ConfirmedSignatureInfo, Finality } from "@solana/web3.js"; +import { truthy } from "./utils/truthy"; import { kafka } from "./setup/kafka"; 
import { connection } from "./setup/solana"; @@ -7,25 +8,27 @@ const { KAFKA_TOPIC, KAFKA_INPUT_TOPIC, KAFKA_GROUP_ID } = process.env const producer = kafka.producer() - -async function processSignature(signature: ConfirmedSignatureInfo) { +async function processSignature(signature: ConfirmedSignatureInfo): Promise { const txn = await connection.getConfirmedTransaction(signature.signature, FINALITY); - const outputMsg = { + const data = txn?.transaction.serialize({ + requireAllSignatures: false, + verifySignatures: false + }).toJSON().data + const value = JSON.stringify({ + ...txn, + transaction: data + }) + const size = Buffer.byteLength(value); + if (size > 500000) { + console.log("Skipping large message at", signature) + return null; + } + + return { key: signature.signature.toString(), - value: JSON.stringify({ - ...txn, - transaction: txn?.transaction.serialize({ - requireAllSignatures: false, - verifySignatures: false - }).toJSON().data - }), + value, timestamp: ((signature?.blockTime || 0) * 1000).toString() }; - - await producer.send({ - topic: KAFKA_TOPIC!, - messages: [outputMsg] - }) } function groupByN(n: number, data: T[]): T[][] { @@ -35,13 +38,14 @@ function groupByN(n: number, data: T[]): T[][] { }; type PromFunc = () => Promise; -async function promiseAllGrouped(size: number, funcs: PromFunc[]): Promise { +async function promiseAllGrouped(size: number, funcs: PromFunc[], groupFinish: () => Promise): Promise { const results: A[] = []; const grouped = groupByN(size, funcs); for(let funcs of grouped) { await Promise.all(funcs.map(async func => { results.push(await func()); })) + await groupFinish(); } return results; @@ -71,13 +75,25 @@ async function run() { autoCommitThreshold: process.env.AUTO_COMMIT_THRESHOLD ? Number(process.env.AUTO_COMMIT_THRESHOLD) : 20, eachBatch: async ({ batch: { messages }, heartbeat, commitOffsetsIfNecessary }) => { try { - await promiseAllGrouped( - process.env.GROUP_SIZE ? 
Number(process.env.GROUP_SIZE) : 5, + const groupSize = process.env.GROUP_SIZE ? Number(process.env.GROUP_SIZE) : 5; + const toSend = (await promiseAllGrouped( + groupSize, messages.map(({ value, offset }) => ({ ...JSON.parse(value!.toString()), offset })).map((confirmedSignatureInfo) => async () => { - await processSignature(confirmedSignatureInfo) - await heartbeat() - }) - ) + const msg = await processSignature(confirmedSignatureInfo) + return msg; + }), + heartbeat + )).filter(truthy) + + console.log(`Sending ${toSend.length} transactions`) + const producerGroupSize = process.env.PRODUCER_GROUP_SIZE ? Number(process.env.PRODUCER_GROUP_SIZE) : 20; + await Promise.all(groupByN(producerGroupSize, toSend).map(async (messages) => { + await producer.send({ + topic: KAFKA_TOPIC!, + messages + }); + await heartbeat(); + })); } catch (e) { reject(e); } diff --git a/src/twitter-bot/messages-retriever/index.ts b/src/twitter-bot/messages-retriever/index.ts index d22fee9..ffbb153 100644 --- a/src/twitter-bot/messages-retriever/index.ts +++ b/src/twitter-bot/messages-retriever/index.ts @@ -1,6 +1,7 @@ import { LRU } from "../../utils/lru"; import { kafka } from "../../setup/kafka"; import { twitterClient } from "../../setup/twitter"; +import { truthy } from "../../utils/truthy"; const { KAFKA_TOPIC, POLL_DURATION = 60 * 1000 } = process.env @@ -101,10 +102,6 @@ async function getUserInfo(userId: string): Promise<{ handle: string, name: stri return ret; } -export type Truthy = T extends false | "" | 0 | null | undefined ? 
never : T; // from lodash - -export const truthy = (value: T): value is Truthy => !!value; - async function run() { const producer = kafka.producer() await producer.connect(); From 47874ed62177bd9c242609b1154f8575b6d05673 Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Tue, 18 Jan 2022 21:59:05 -0600 Subject: [PATCH 08/14] Bugfixes and delete zero values --- src/kafka-signature-processor.ts | 39 ++++++++++++++++++-------------- src/leaderboard/index.ts | 19 ++++++++++++---- 2 files changed, 36 insertions(+), 22 deletions(-) diff --git a/src/kafka-signature-processor.ts b/src/kafka-signature-processor.ts index da01db9..7bdf3aa 100644 --- a/src/kafka-signature-processor.ts +++ b/src/kafka-signature-processor.ts @@ -10,25 +10,30 @@ const producer = kafka.producer() async function processSignature(signature: ConfirmedSignatureInfo): Promise { const txn = await connection.getConfirmedTransaction(signature.signature, FINALITY); - const data = txn?.transaction.serialize({ - requireAllSignatures: false, - verifySignatures: false - }).toJSON().data - const value = JSON.stringify({ - ...txn, - transaction: data - }) - const size = Buffer.byteLength(value); - if (size > 500000) { - console.log("Skipping large message at", signature) + try { + const data = txn?.transaction.serialize({ + requireAllSignatures: false, + verifySignatures: false + }).toJSON().data + const value = JSON.stringify({ + ...txn, + transaction: data + }) + const size = Buffer.byteLength(value); + if (size > 500000) { + console.log("Skipping large message at", signature) + return null; + } + + return { + key: signature.signature.toString(), + value, + timestamp: ((signature?.blockTime || 0) * 1000).toString() + }; + } catch (e: any) { + console.error(e); return null; } - - return { - key: signature.signature.toString(), - value, - timestamp: ((signature?.blockTime || 0) * 1000).toString() - }; } function groupByN(n: number, data: T[]): T[][] { diff --git a/src/leaderboard/index.ts 
b/src/leaderboard/index.ts index 2e85da9..55e45b4 100644 --- a/src/leaderboard/index.ts +++ b/src/leaderboard/index.ts @@ -39,11 +39,15 @@ async function accountPlugin(payload: EachBatchPayload) { .forEach((keyAndValue: any) => { const tokenBonding: string = keyAndValue[0]; const balanceChanges: any[] = keyAndValue[1]; - const scoresAndValues = balanceChanges.flatMap((balanceChange: any) => { + const zeroes = balanceChanges.filter(change => change.tokenAmount === 0) + const positives = balanceChanges.filter(change => change.tokenAmount !== 0) + const scoresAndValues = positives.flatMap((balanceChange: any) => { return [Number(balanceChange.tokenAmount), balanceChange.account] }) // @ts-ignore - batch.zadd(`accounts-by-balance-${tokenBonding}`, 'CH', ...scoresAndValues) + const key = `accounts-by-balance-${tokenBonding}`; + batch.zadd(key, 'CH', ...scoresAndValues); + batch.zrem(key, ...zeroes.map(z => z.account)); }); const result = await promisify(batch.exec).bind(batch)(); const numChanged = result.reduce((a, b) => a + b, 0); @@ -67,11 +71,16 @@ async function topTokens(payload: EachBatchPayload) { .forEach((keyAndValue: any) => { const mint: string = keyAndValue[0]; const balanceChanges: any[] = keyAndValue[1]; - const scoresAndValues = balanceChanges.flatMap((balanceChange: any) => { + const zeroes = balanceChanges.filter(change => change.tokenAmount === 0) + const positives = balanceChanges.filter(change => change.tokenAmount !== 0) + const scoresAndValues = positives.flatMap((balanceChange: any) => { return [Number(balanceChange.tokenAmount), balanceChange.tokenBonding] - }) + }); + + const key = `bonding-by-tvl-${mint}`; // @ts-ignore - batch.zadd(`bonding-by-tvl-${mint}`, 'CH', ...scoresAndValues) + batch.zadd(key, 'CH', ...scoresAndValues); + batch.zrem(key, ...zeroes.map(z => z.tokenBonding)); }); const result = await promisify(batch.exec).bind(batch)(); const numChanged = result.reduce((a, b) => a + b, 0); From 469aa33e7adf8d7b513c6b9af1722a8582898fd1 
Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Thu, 20 Jan 2022 08:55:34 -0600 Subject: [PATCH 09/14] better message --- src/twitter-bot/unclaimed-create-bot/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/twitter-bot/unclaimed-create-bot/index.ts b/src/twitter-bot/unclaimed-create-bot/index.ts index d82a15a..3312f2c 100644 --- a/src/twitter-bot/unclaimed-create-bot/index.ts +++ b/src/twitter-bot/unclaimed-create-bot/index.ts @@ -35,7 +35,7 @@ async function run() { const handle = message.tokenMetadataName; console.log("Sending req"); - const response = await twitterClient.v2.tweet(`@${handle} ${MESSAGE}`); + const response = await twitterClient.v2.tweet(MESSAGE!.replaceAll("{handle}", handle)); if (response.errors) { console.error(JSON.stringify(response, null, 2)); throw new Error("Failed to post tweet") From 9311fb6e2b28981a700059484f02b1007560b291 Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Wed, 26 Jan 2022 12:23:15 -0600 Subject: [PATCH 10/14] Metaplex v2 --- .vscode/launch.json | 4 +- package.json | 1 + .../transformers/specs/tokenMetadata.ts | 36 +++++++++++++++--- strata-compose/docker-compose.yml | 11 ++++++ yarn.lock | 37 +++++++++++++++++++ 5 files changed, 81 insertions(+), 8 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 74496d9..f2aa9d4 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -141,8 +141,8 @@ "KAFKA_BOOTSTRAP_SERVERS": "localhost:29092", "KAFKA_INPUT_TOPIC": "json.solana.transactions", "KAFKA_OUTPUT_TOPIC": "json.solana.events", - "KAFKA_OFFSET_RESET": "latest", - "KAFKA_GROUP_ID": "solana-event-transformer", + "KAFKA_OFFSET_RESET": "earliest", + "KAFKA_GROUP_ID": "solana-event-transformer1", } }, { diff --git a/package.json b/package.json index 8bfe1b7..b00b690 100644 --- a/package.json +++ b/package.json @@ -20,6 +20,7 @@ "typescript": "^4.3.5" }, "dependencies": { + "@metaplex-foundation/mpl-token-metadata": "^1.1.0", "@project-serum/anchor": "^0.11.1", 
"@project-serum/common": "^0.0.1-beta.3", "@solana/spl-name-service": "^0.1.3", diff --git a/src/event-transformer/transformers/specs/tokenMetadata.ts b/src/event-transformer/transformers/specs/tokenMetadata.ts index eddb94a..dc7adc0 100644 --- a/src/event-transformer/transformers/specs/tokenMetadata.ts +++ b/src/event-transformer/transformers/specs/tokenMetadata.ts @@ -1,10 +1,15 @@ -import { METADATA_SCHEMA } from "@strata-foundation/spl-utils"; +import { CreateMetadataArgs, UpdateMetadataArgs, CreateMetadataV2Args, UpdateMetadataV2Args, MetadataData } from "@metaplex-foundation/mpl-token-metadata"; -const schemaKeys = [...METADATA_SCHEMA.keys()]; export default { programId: "metaqbxxUerdq28cj1RbAWkYQm3ybzjb6a8bt518x1s", - schema: METADATA_SCHEMA, + schema: new Map([ + ...MetadataData.SCHEMA, + ...CreateMetadataV2Args.SCHEMA, + ...CreateMetadataArgs.SCHEMA, + ...UpdateMetadataV2Args.SCHEMA, + ...UpdateMetadataArgs.SCHEMA + ]), commands: [{ name: "CreateMetadata", index: 0, @@ -15,14 +20,33 @@ export default { "payer", "updateAuthority" ], - args: schemaKeys[0] - },{ + args: CreateMetadataArgs + }, { name: "UpdateMetadata", index: 1, accounts: [ "tokenMetadata", "updateAuthority", ], - args: schemaKeys[1] + args: UpdateMetadataArgs + }, { + name: "CreateMetadataV2", + index: 16, + accounts: [ + "tokenMetadata", + "mint", + "mintAuthority", + "payer", + "updateAuthority" + ], + args: CreateMetadataV2Args + }, { + name: "UpdateMetadataV2", + index: 15, + accounts: [ + "tokenMetadata", + "updateAuthority", + ], + args: UpdateMetadataV2Args }] } diff --git a/strata-compose/docker-compose.yml b/strata-compose/docker-compose.yml index 27a030b..947e63d 100644 --- a/strata-compose/docker-compose.yml +++ b/strata-compose/docker-compose.yml @@ -24,6 +24,17 @@ services: KAFKA_TOPIC: json.solana.signatures.bonding ADDRESS: TBondmkCYxaPCKG4CHYfVTcwQ8on31xnJrPzk8F8WsS + metadata-signature-identifier: + extra_hosts: + - "host.docker.internal:host-gateway" + restart: always + 
image: data-pipelines:latest + command: node dist/lib/kafka-signature-identifier.js + environment: + SOLANA_URL: "http://host.docker.internal:8899" + KAFKA_BOOTSTRAP_SERVERS: "host.docker.internal:39092" + KAFKA_TOPIC: json.solana.signatures.metadata + ADDRESS: metaqbxxUerdq28cj1RbAWkYQm3ybzjb6a8bt518x1s ksql-bootstrap-1: extra_hosts: diff --git a/yarn.lock b/yarn.lock index c219b79..2838ef5 100644 --- a/yarn.lock +++ b/yarn.lock @@ -58,6 +58,23 @@ "@ethersproject/logger" "^5.5.0" hash.js "1.1.7" +"@metaplex-foundation/mpl-core@^0.0.2": + version "0.0.2" + resolved "https://registry.yarnpkg.com/@metaplex-foundation/mpl-core/-/mpl-core-0.0.2.tgz#17ee2cc216e17629d6df1dbba75964625ebbd603" + integrity sha512-UUJ4BlYiWdDegAWmjsNQiNehwYU3QfSFWs3sv4VX0J6/ZrQ28zqosGhQ+I2ZCTEy216finJ82sZWNjuwSWCYyQ== + dependencies: + "@solana/web3.js" "^1.31.0" + bs58 "^4.0.1" + +"@metaplex-foundation/mpl-token-metadata@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@metaplex-foundation/mpl-token-metadata/-/mpl-token-metadata-1.1.0.tgz#1d5dd89d8b0a0e32d060c8d01056e4a5cbedd499" + integrity sha512-4tF+hO5H6eYJ49H72nvuID2nrD54X4yCxqKhbWLxqAI7v5vHSCH2QFVUnqbj3+P4ydxrNyof9MQm3qlzY8KU3g== + dependencies: + "@metaplex-foundation/mpl-core" "^0.0.2" + "@solana/spl-token" "^0.1.8" + "@solana/web3.js" "^1.31.0" + "@metaplex/arweave-cost@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@metaplex/arweave-cost/-/arweave-cost-2.0.0.tgz#74d4dbb5b39e89f81fbc1a50220d1837fe0efdfd" @@ -301,6 +318,26 @@ superstruct "^0.14.2" tweetnacl "^1.0.0" +"@solana/web3.js@^1.31.0": + version "1.32.0" + resolved "https://registry.yarnpkg.com/@solana/web3.js/-/web3.js-1.32.0.tgz#b9821de52d0e773c363516c3dcef9be701295d82" + integrity sha512-jquZ/VBvM3zXAaTJvdWd9mlP0WiZaZqjji0vw5UAsb5IKIossrLhHtyUqMfo41Qkdwu1aVwf7YWG748i4XIJnw== + dependencies: + "@babel/runtime" "^7.12.5" + "@ethersproject/sha2" "^5.5.0" + "@solana/buffer-layout" "^3.0.0" + bn.js "^5.0.0" + borsh "^0.4.0" + bs58 "^4.0.1" + 
buffer "6.0.1" + cross-fetch "^3.1.4" + jayson "^3.4.4" + js-sha3 "^0.8.0" + rpc-websockets "^7.4.2" + secp256k1 "^4.0.2" + superstruct "^0.14.2" + tweetnacl "^1.0.0" + "@strata-foundation/spl-token-bonding@^2.0.6": version "2.0.6" resolved "https://registry.yarnpkg.com/@strata-foundation/spl-token-bonding/-/spl-token-bonding-2.0.6.tgz#916da40954d57e12dd0fa28fbd410518ad293471" From e54f5e83f1fe1bf62203b83a44fbf54d6558a825 Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Wed, 26 Jan 2022 12:24:40 -0600 Subject: [PATCH 11/14] Bump web3 --- package.json | 2 +- yarn.lock | 22 +--------------------- 2 files changed, 2 insertions(+), 22 deletions(-) diff --git a/package.json b/package.json index b00b690..b91373f 100644 --- a/package.json +++ b/package.json @@ -24,7 +24,7 @@ "@project-serum/anchor": "^0.11.1", "@project-serum/common": "^0.0.1-beta.3", "@solana/spl-name-service": "^0.1.3", - "@solana/web3.js": "^1.22.0", + "@solana/web3.js": "^1.32.0", "@strata-foundation/spl-token-collective": "^2.0.6", "@strata-foundation/spl-utils": "^1.1.1", "@types/bs58": "^4.0.1", diff --git a/yarn.lock b/yarn.lock index 2838ef5..c0168fc 100644 --- a/yarn.lock +++ b/yarn.lock @@ -298,27 +298,7 @@ superstruct "^0.14.2" tweetnacl "^1.0.0" -"@solana/web3.js@^1.22.0": - version "1.22.0" - resolved "https://registry.npmjs.org/@solana/web3.js/-/web3.js-1.22.0.tgz" - integrity sha512-7BQUiR1AIj2L8KJ8LYsI31iPRLytgF8T4hz7xLlvvBfalpUK7qD2om7frlNpXl8ROUpvruNf83QaectJdZJW1w== - dependencies: - "@babel/runtime" "^7.12.5" - "@solana/buffer-layout" "^3.0.0" - bn.js "^5.0.0" - borsh "^0.4.0" - bs58 "^4.0.1" - buffer "6.0.1" - crypto-hash "^1.2.2" - jayson "^3.4.4" - js-sha3 "^0.8.0" - node-fetch "^2.6.1" - rpc-websockets "^7.4.2" - secp256k1 "^4.0.2" - superstruct "^0.14.2" - tweetnacl "^1.0.0" - -"@solana/web3.js@^1.31.0": +"@solana/web3.js@^1.31.0", "@solana/web3.js@^1.32.0": version "1.32.0" resolved 
"https://registry.yarnpkg.com/@solana/web3.js/-/web3.js-1.32.0.tgz#b9821de52d0e773c363516c3dcef9be701295d82" integrity sha512-jquZ/VBvM3zXAaTJvdWd9mlP0WiZaZqjji0vw5UAsb5IKIossrLhHtyUqMfo41Qkdwu1aVwf7YWG748i4XIJnw== From b816cfae510f327de28cab6b523607fdacbe12a0 Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Wed, 26 Jan 2022 17:50:15 -0600 Subject: [PATCH 12/14] Bump versions --- package.json | 5 +- src/utils/truthy.ts | 3 + yarn.lock | 130 ++++++++++++++------------------------------ 3 files changed, 47 insertions(+), 91 deletions(-) create mode 100644 src/utils/truthy.ts diff --git a/package.json b/package.json index b91373f..5011cc6 100644 --- a/package.json +++ b/package.json @@ -22,11 +22,10 @@ "dependencies": { "@metaplex-foundation/mpl-token-metadata": "^1.1.0", "@project-serum/anchor": "^0.11.1", - "@project-serum/common": "^0.0.1-beta.3", "@solana/spl-name-service": "^0.1.3", "@solana/web3.js": "^1.32.0", - "@strata-foundation/spl-token-collective": "^2.0.6", - "@strata-foundation/spl-utils": "^1.1.1", + "@strata-foundation/spl-token-collective": "^3.0.0", + "@strata-foundation/spl-utils": "^3.0.0", "@types/bs58": "^4.0.1", "@types/uuid": "^8.3.1", "add": "^2.0.6", diff --git a/src/utils/truthy.ts b/src/utils/truthy.ts new file mode 100644 index 0000000..167f6ed --- /dev/null +++ b/src/utils/truthy.ts @@ -0,0 +1,3 @@ +export type Truthy = T extends false | "" | 0 | null | undefined ? 
never : T; // from lodash + +export const truthy = (value: T): value is Truthy => !!value; diff --git a/yarn.lock b/yarn.lock index c0168fc..3983bb3 100644 --- a/yarn.lock +++ b/yarn.lock @@ -75,10 +75,10 @@ "@solana/spl-token" "^0.1.8" "@solana/web3.js" "^1.31.0" -"@metaplex/arweave-cost@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@metaplex/arweave-cost/-/arweave-cost-2.0.0.tgz#74d4dbb5b39e89f81fbc1a50220d1837fe0efdfd" - integrity sha512-i2FTLtg7Zz9sVJxKHlr3Ek1ibcG4l/we3r5ZQYlCtNFforBj+w6OnyGwdqSaFIkxSyYXqAlo9BY7cc/WEuTccA== +"@metaplex/arweave-cost@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@metaplex/arweave-cost/-/arweave-cost-1.0.4.tgz#35d5d10dfc855463fc7fe18594e175506a4a35d4" + integrity sha512-bJ7knj9bacarfoIgkomDUOIRURYBAIYUg1oJZh1MfAl9w28x1gfMkt2e7H5zK0HDKkS2lmJQ5dpcsR+FGP9zMA== dependencies: axios "^0.24.0" debug "^4.3.2" @@ -120,17 +120,17 @@ snake-case "^3.0.4" toml "^3.0.0" -"@project-serum/anchor@^0.18.0": - version "0.18.2" - resolved "https://registry.yarnpkg.com/@project-serum/anchor/-/anchor-0.18.2.tgz#0f13b5c2046446b7c24cf28763eec90febb28485" - integrity sha512-uyjiN/3Ipp+4hrZRm/hG18HzGLZyvP790LXrCsGO3IWxSl28YRhiGEpKnZycfMW94R7nxdUoE3wY67V+ZHSQBQ== +"@project-serum/anchor@^0.20.1": + version "0.20.1" + resolved "https://registry.yarnpkg.com/@project-serum/anchor/-/anchor-0.20.1.tgz#0937807e807e8332aa708cfef4bcb6cbb88b4129" + integrity sha512-2TuBmGUn9qeYz6sJINJlElrBuPsaUAtYyUsJ3XplEBf1pczrANAgs5ceJUFzdiqGEWLn+84ObSdBeChT/AXYFA== dependencies: "@project-serum/borsh" "^0.2.2" "@solana/web3.js" "^1.17.0" base64-js "^1.5.1" bn.js "^5.1.2" bs58 "^4.0.1" - buffer-layout "^1.2.0" + buffer-layout "^1.2.2" camelcase "^5.3.1" crypto-hash "^1.3.0" eventemitter3 "^4.0.7" @@ -148,26 +148,6 @@ bn.js "^5.1.2" buffer-layout "^1.2.0" -"@project-serum/common@^0.0.1-beta.3": - version "0.0.1-beta.3" - resolved 
"https://registry.yarnpkg.com/@project-serum/common/-/common-0.0.1-beta.3.tgz#53586eaff9d9fd7e8938b1e12080c935b8b6ad07" - integrity sha512-gnQE/eUydTtto5okCgLWj1M97R9RRPJqnhKklikYI7jP/pnNhDmngSXC/dmfzED2GXSJEIKNIlxVw1k+E2Aw3w== - dependencies: - "@project-serum/serum" "^0.13.21" - bn.js "^5.1.2" - superstruct "0.8.3" - -"@project-serum/serum@^0.13.21": - version "0.13.58" - resolved "https://registry.yarnpkg.com/@project-serum/serum/-/serum-0.13.58.tgz#0f4aaa98f28c1220f8131052cd33b36d43430266" - integrity sha512-g8PNsFiJ3qTGu5B1qHdGIfmXovka3DpC9cMhyYzdKKX3WqdpzHrwKYO/ZuqH+JBp+dGfjYTxRLQ3dNTxYWohHA== - dependencies: - "@project-serum/anchor" "^0.11.1" - "@solana/spl-token" "^0.1.6" - "@solana/web3.js" "^1.21.0" - bn.js "^5.1.2" - buffer-layout "^1.2.0" - "@project-serum/sol-wallet-adapter@^0.1.5": version "0.1.8" resolved "https://registry.yarnpkg.com/@project-serum/sol-wallet-adapter/-/sol-wallet-adapter-0.1.8.tgz#90c6c1da793d32ed4ba3c67c5702a5bc804ef197" @@ -226,7 +206,7 @@ buffer-layout "^1.2.0" dotenv "10.0.0" -"@solana/spl-token@^0.1.6", "@solana/spl-token@^0.1.8": +"@solana/spl-token@^0.1.8": version "0.1.8" resolved "https://registry.yarnpkg.com/@solana/spl-token/-/spl-token-0.1.8.tgz#f06e746341ef8d04165e21fc7f555492a2a0faa6" integrity sha512-LZmYCKcPQDtJgecvWOgT/cnoIQPWjdH+QVyzPcFvyDUiT0DiRjZaam4aqNUyvchLFhzgunv3d9xOoyE34ofdoQ== @@ -318,66 +298,53 @@ superstruct "^0.14.2" tweetnacl "^1.0.0" -"@strata-foundation/spl-token-bonding@^2.0.6": - version "2.0.6" - resolved "https://registry.yarnpkg.com/@strata-foundation/spl-token-bonding/-/spl-token-bonding-2.0.6.tgz#916da40954d57e12dd0fa28fbd410518ad293471" - integrity sha512-8Kr3Wog5wcgcdib9ous4R9l/SJO4YDcksC3O9Khk+4P0ji6H4hSA8oz4NIk35RfoPxOZ5G3JV+MmITlCoSOFew== +"@strata-foundation/spl-token-bonding@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@strata-foundation/spl-token-bonding/-/spl-token-bonding-3.0.0.tgz#a80f34319513f3d0df39d9ba44fba266c4391ae7" + integrity 
sha512-6EnFGtGvr/260cENuWBG6iXnlUbsTlocUBPQ+ix/MrfOORmJ97f4q2oNb9alVffVBVRY1B+Ny6T9bOIs2X6GQg== dependencies: - "@project-serum/anchor" "^0.18.0" - "@project-serum/common" "^0.0.1-beta.3" - "@solana/web3.js" "^1.29.2" - "@strata-foundation/spl-utils" "^2.0.0" + "@project-serum/anchor" "^0.20.1" + "@solana/web3.js" "^1.32.0" + "@strata-foundation/spl-utils" "^3.0.0" bn.js "^5.2.0" copyfiles "^2.4.1" -"@strata-foundation/spl-token-collective@^2.0.6": - version "2.0.6" - resolved "https://registry.yarnpkg.com/@strata-foundation/spl-token-collective/-/spl-token-collective-2.0.6.tgz#3908c5ebcd21f6edfde0106cfb714479c5137fec" - integrity sha512-wjSsxeAjBolaU8rqDELt7Cd/nTSEAzNgx2JUvQLy1FRLMz25N/i97ASyd6Oq4gbgfscgk8lHmpdH++lE/brAQg== +"@strata-foundation/spl-token-collective@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@strata-foundation/spl-token-collective/-/spl-token-collective-3.0.0.tgz#2f7c44d2ca2f4e5084a5a845cbb8da75e67b89e2" + integrity sha512-vvZo/5y35pWsVmUYOLHeFHBsUHPMPITN5BIyA246VN1fK/MSTx/3NmGTPSt3GC7aflJhoUvqr9vOOL5AN8yrTQ== dependencies: "@bonfida/spl-name-service" "^0.1.22" - "@project-serum/anchor" "^0.18.0" - "@project-serum/common" "^0.0.1-beta.3" - "@solana/web3.js" "^1.29.2" - "@strata-foundation/spl-token-bonding" "^2.0.6" - "@strata-foundation/spl-token-staking" "^2.0.6" - "@strata-foundation/spl-utils" "^2.0.0" + "@project-serum/anchor" "^0.20.1" + "@solana/web3.js" "^1.32.0" + "@strata-foundation/spl-token-bonding" "^3.0.0" + "@strata-foundation/spl-token-staking" "^3.0.0" + "@strata-foundation/spl-utils" "^3.0.0" bn.js "^5.2.0" copyfiles "^2.4.1" -"@strata-foundation/spl-token-staking@^2.0.6": - version "2.0.6" - resolved "https://registry.yarnpkg.com/@strata-foundation/spl-token-staking/-/spl-token-staking-2.0.6.tgz#66ea3fee9911d42cb49ce2d7935f1ce7f97f01c7" - integrity sha512-+wv55r/e/ytVpvp9vsCNvdwvyXtFYP786yru8IjpnKZnxA3zbpKEBr90Bh7oD6xdiycZOXtqmyz89ywAcvpfuA== +"@strata-foundation/spl-token-staking@^3.0.0": + version 
"3.0.0" + resolved "https://registry.yarnpkg.com/@strata-foundation/spl-token-staking/-/spl-token-staking-3.0.0.tgz#90f85c97cc7f043d079ec946bc747f00874330bd" + integrity sha512-d+GTpOim1dNpuGH0cgy6PIJ9BiUTUtq61stnb0UxPrSHJ5RpvLtp4OK+4gWJEKHs6rqRDuJenvoqiZOeBDTzEQ== dependencies: - "@project-serum/common" "^0.0.1-beta.3" - "@solana/web3.js" "^1.29.2" - "@strata-foundation/spl-utils" "^2.0.0" + "@solana/web3.js" "^1.32.0" + "@strata-foundation/spl-utils" "^3.0.0" bn.js "^5.2.0" semver "^7.3.5" -"@strata-foundation/spl-utils@^1.1.1": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@strata-foundation/spl-utils/-/spl-utils-1.1.1.tgz#abbbdb7fe41d4f96c228c9281c6c3e36ad40ae50" - integrity sha512-8lrugxEHoK/+0prKaSl1a7/cRtAI/zmhr4Xq8TJZtSoaRbAYcI/Bk+Xqs89mQWBe+CAl7v1/TzB96T96UUJR4w== - dependencies: - "@metaplex/arweave-cost" "^2.0.0" - "@metaplex/js" "^4.3.0" - "@solana/spl-name-service" "^0.1.3" - "@solana/web3.js" "^1.30.2" - bs58 "^4.0.1" - tweetnacl "^1.0.3" - -"@strata-foundation/spl-utils@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@strata-foundation/spl-utils/-/spl-utils-2.0.0.tgz#35ea2596e1a0f95d7eaed1ac60a1c137a1bb267f" - integrity sha512-e/WYg5eA1eJQRspKi5CB3ViulSbEwX5CotQHHMA97bljFMcUCN4sJce+HkaVCiX36gCA/NxW4OBwQ6PBEDpXGw== +"@strata-foundation/spl-utils@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@strata-foundation/spl-utils/-/spl-utils-3.0.0.tgz#fd1c3eb219c6e05c391b90058ddcdd8b431e2d71" + integrity sha512-QnTqFKrbauMjSYPuOL7FtE2mltBQiKz7SgtEuFYWez6nEWotaZAa5B8KGn+8dmINmVvqTZCcrdBalsHKRN3LTQ== dependencies: - "@metaplex/arweave-cost" "^2.0.0" + "@metaplex-foundation/mpl-token-metadata" "^1.1.0" + "@metaplex/arweave-cost" "1.0.4" "@metaplex/js" "^4.3.0" "@solana/spl-name-service" "^0.1.3" - "@solana/web3.js" "^1.30.2" + "@solana/web3.js" "^1.32.0" bs58 "^4.0.1" + superstruct "0.14.2" tweetnacl "^1.0.3" "@tsconfig/node10@^1.0.7": @@ -944,7 +911,7 @@ buffer-indexof@^1.0.0: resolved 
"https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== -buffer-layout@^1.2.0: +buffer-layout@^1.2.0, buffer-layout@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/buffer-layout/-/buffer-layout-1.2.2.tgz#b9814e7c7235783085f9ca4966a0cfff112259d5" integrity sha512-kWSuLN694+KTk8SrYvCqwP2WcgQjoRCiF5b4QDvkkz8EmgD+aWAIceGFKMIAdmF/pH+vpgNV3d3kAKorcdAmWA== @@ -3782,15 +3749,7 @@ strip-json-comments@3.1.1: resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== -superstruct@0.8.3: - version "0.8.3" - resolved "https://registry.yarnpkg.com/superstruct/-/superstruct-0.8.3.tgz#fb4d8901aca3bf9f79afab1bbab7a7f335cc4ef2" - integrity sha512-LbtbFpktW1FcwxVIJlxdk7bCyBq/GzOx2FSFLRLTUhWIA1gHkYPIl3aXRG5mBdGZtnPNT6t+4eEcLDCMOuBHww== - dependencies: - kind-of "^6.0.2" - tiny-invariant "^1.0.6" - -superstruct@^0.14.2: +superstruct@0.14.2, superstruct@^0.14.2: version "0.14.2" resolved "https://registry.yarnpkg.com/superstruct/-/superstruct-0.14.2.tgz#0dbcdf3d83676588828f1cf5ed35cda02f59025b" integrity sha512-nPewA6m9mR3d6k7WkZ8N8zpTWfenFH3q9pA2PkuiZxINr9DKB2+40wEQf0ixn8VaGuJ78AB6iWOtStI+/4FKZQ== @@ -3839,11 +3798,6 @@ thunky@^1.0.2: resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== -tiny-invariant@^1.0.6: - version "1.1.0" - resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.1.0.tgz#634c5f8efdc27714b7f386c35e6760991d230875" - integrity sha512-ytxQvrb1cPc9WBEI/HSeYYoGD0kWnGEOR8RY6KomWLBVhqz0RgTwVO9dLrGz7dC+nN9llyI7OKAgRq8Vq4ZBSw== - 
tiny-secp256k1@^1.1.3: version "1.1.6" resolved "https://registry.yarnpkg.com/tiny-secp256k1/-/tiny-secp256k1-1.1.6.tgz#7e224d2bee8ab8283f284e40e6b4acb74ffe047c" From 188642e59e3687de249176e44c28f4833d675d34 Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Thu, 27 Jan 2022 10:57:15 -0600 Subject: [PATCH 13/14] Don't use confirmed transaction because it has serialization inconsistencies --- src/event-transformer/index.ts | 24 ++++++--------- .../transformers/InstructionTransformer.ts | 8 ++--- .../transformers/Transformer.ts | 4 +-- .../transformers/anchorProgram.ts | 4 +-- .../transformers/programSpec.ts | 6 ++-- .../transformers/tokenAccounts.ts | 29 ++++++++++++------- src/kafka-signature-processor.ts | 8 ++--- .../event-transformer/transformers/anchor.ts | 6 ++-- 8 files changed, 43 insertions(+), 46 deletions(-) diff --git a/src/event-transformer/index.ts b/src/event-transformer/index.ts index 4de9b0c..8205628 100644 --- a/src/event-transformer/index.ts +++ b/src/event-transformer/index.ts @@ -1,6 +1,6 @@ import "./borsh"; import { Program, Provider, Wallet as NodeWallet } from "@project-serum/anchor"; -import { BlockResponse, ConfirmedTransaction, Keypair, PublicKey, Transaction } from "@solana/web3.js"; +import { BlockResponse, ConfirmedTransaction, Keypair, PublicKey, Transaction, TransactionResponse } from "@solana/web3.js"; import BN from "bn.js"; import { Message as KafkaMessage, Producer, TopicMessages } from "kafkajs"; import { kafka } from "../setup/kafka"; @@ -27,11 +27,8 @@ function hasIntersect(set1: Set, set2: Set): boolean { return [...set1].some(x => set2.has(x)); } -function processTxn(transformers: Transformer[], txn: ConfirmedTransaction): KafkaMessage[] { - const accounts = txn.transaction.compileMessage().accountKeys.map((key) => ( - // @ts-ignore - new PublicKey(new BN(key._bn, 'hex')) - )); +function processTxn(transformers: Transformer[], txn: TransactionResponse & { signature: string }): KafkaMessage[] { + const accounts = 
txn.transaction.message.accountKeys.map(k => new PublicKey(k)); const accountsSet = new Set(accounts.map(a => a.toBase58())); return transformers @@ -43,8 +40,9 @@ function processTxn(transformers: Transformer[], txn: ConfirmedTransaction): Kaf type, payload, slot: txn.slot, - recentBlockhash: txn.transaction.recentBlockhash, - blockTime: txn.blockTime + recentBlockhash: txn.transaction.message.recentBlockhash, + blockTime: txn.blockTime, + signatures: txn.signature } }) .map((item: any) => ({ @@ -122,13 +120,9 @@ async function run() { const results = (await Promise.all( messages .map((message: any) => JSON.parse(message.value!.toString())) - .filter(txn => txn.transaction) - .map(txn => ({ - ...txn, - transaction: Transaction.from(txn.transaction) - })) - .filter((txn: ConfirmedTransaction) => !txn.meta?.err) - .flatMap((txn: ConfirmedTransaction) => processTxn(transformers, txn)) + .filter(txn => txn as TransactionResponse & { signature: string }) + .filter((txn: TransactionResponse & { signature: string }) => !txn.meta?.err) + .flatMap((txn: TransactionResponse & { signature: string }) => processTxn(transformers, txn)) )).flat() console.log(`Sending batch of ${results.length} events`) await publishFixedBatches(producer, { diff --git a/src/event-transformer/transformers/InstructionTransformer.ts b/src/event-transformer/transformers/InstructionTransformer.ts index bd77df1..7446a54 100644 --- a/src/event-transformer/transformers/InstructionTransformer.ts +++ b/src/event-transformer/transformers/InstructionTransformer.ts @@ -1,11 +1,11 @@ -import { CompiledInstruction, ConfirmedTransaction, ConfirmedTransactionMeta, Message, PublicKey, Transaction, TransactionInstruction } from "@solana/web3.js"; +import { CompiledInstruction, ConfirmedTransaction, ConfirmedTransactionMeta, Message, PublicKey, Transaction, TransactionInstruction, TransactionResponse } from "@solana/web3.js"; import { BlockTransaction, TransformedMessage, Transformer } from "./Transformer"; export 
abstract class InstructionTransformer implements Transformer { abstract get relevantKeys(): Set; - transform(accountKeys: PublicKey[], transaction: ConfirmedTransaction): TransformedMessage[] { - const indexedNormalInstrs = transaction.transaction.compileMessage().instructions + transform(accountKeys: PublicKey[], transaction: TransactionResponse & { signature: string }): TransformedMessage[] { + const indexedNormalInstrs = transaction.transaction.message.instructions .map((instruction, index) => ({ instruction, instructionIndex: index, innerInstructionIndex: null })) const indexedInnerInstrs = (transaction.meta?.innerInstructions || []) .flatMap((innerInstruction) => @@ -23,5 +23,5 @@ export abstract class InstructionTransformer implements Transformer { ) } - abstract transformInstruction(accountKeys: PublicKey[], transaction: ConfirmedTransaction, instruction: CompiledInstruction): TransformedMessage[] + abstract transformInstruction(accountKeys: PublicKey[], transaction: TransactionResponse & { signature: string }, instruction: CompiledInstruction): TransformedMessage[] } \ No newline at end of file diff --git a/src/event-transformer/transformers/Transformer.ts b/src/event-transformer/transformers/Transformer.ts index 19a4d10..281ccbf 100644 --- a/src/event-transformer/transformers/Transformer.ts +++ b/src/event-transformer/transformers/Transformer.ts @@ -1,4 +1,4 @@ -import { ConfirmedTransaction, ConfirmedTransactionMeta, Message, PublicKey } from "@solana/web3.js"; +import { ConfirmedTransaction, ConfirmedTransactionMeta, Message, PublicKey, TransactionResponse } from "@solana/web3.js"; export type BlockTransaction = { transaction: { @@ -14,5 +14,5 @@ export interface TransformedMessage { export interface Transformer { get relevantKeys(): Set - transform(accountKeys: PublicKey[], transaction: ConfirmedTransaction): TransformedMessage[] + transform(accountKeys: PublicKey[], transaction: TransactionResponse & { signature: string }): TransformedMessage[] } \ No 
newline at end of file diff --git a/src/event-transformer/transformers/anchorProgram.ts b/src/event-transformer/transformers/anchorProgram.ts index 451e2d0..fcdda64 100644 --- a/src/event-transformer/transformers/anchorProgram.ts +++ b/src/event-transformer/transformers/anchorProgram.ts @@ -1,4 +1,4 @@ -import { Message, PublicKey, TokenBalance, MessageArgs, CompiledInstruction, ConfirmedTransaction } from "@solana/web3.js"; +import { Message, PublicKey, TokenBalance, MessageArgs, CompiledInstruction, ConfirmedTransaction, TransactionResponse } from "@solana/web3.js"; import { BlockTransaction, Transformer } from "./Transformer"; import { BinaryReader, deserializeUnchecked, baseDecode } from "borsh"; import BN from "bn.js"; @@ -70,7 +70,7 @@ export default class AnchorProgramTransformer extends InstructionTransformer { }, {} as Record) } - transformInstruction(accountKeys: PublicKey[], transaction: ConfirmedTransaction, instruction: CompiledInstruction | CompiledInstruction): any[] { + transformInstruction(accountKeys: PublicKey[], transaction: TransactionResponse & { signature: string }, instruction: CompiledInstruction | CompiledInstruction): any[] { const programId = accountKeys[instruction.programIdIndex].toBase58(); const ixData = bs58.decode(instruction.data); let codedInstruction = this.coder.instruction.decode(ixData); diff --git a/src/event-transformer/transformers/programSpec.ts b/src/event-transformer/transformers/programSpec.ts index 493f86a..eb281f6 100644 --- a/src/event-transformer/transformers/programSpec.ts +++ b/src/event-transformer/transformers/programSpec.ts @@ -1,4 +1,4 @@ -import { CompiledInstruction, ConfirmedTransaction, PublicKey, TokenBalance } from "@solana/web3.js"; +import { CompiledInstruction, ConfirmedTransaction, PublicKey, TokenBalance, TransactionResponse } from "@solana/web3.js"; import { BlockTransaction, Transformer } from "./Transformer"; import { BinaryReader, deserializeUnchecked, baseDecode } from "borsh"; import BN from 
"bn.js"; @@ -60,7 +60,7 @@ export default class ProgramSpecTransformer extends InstructionTransformer { return pids; } - transformInstruction(accountKeys: PublicKey[], transaction: ConfirmedTransaction, instruction: CompiledInstruction): any[] { + transformInstruction(accountKeys: PublicKey[], transaction: TransactionResponse & { signature: string }, instruction: CompiledInstruction): any[] { try { const index = instruction.data.length == 0 ? 0 : new BinaryReader(baseDecode(instruction.data)).readU8(); const programId = accountKeys[instruction.programIdIndex].toBase58() @@ -85,7 +85,7 @@ export default class ProgramSpecTransformer extends InstructionTransformer { }] } } catch (e: any) { - console.log(`Failed to process ${transaction.transaction.signature}`); + console.log(`Failed to process ${transaction.signature}`); console.error(e); } diff --git a/src/event-transformer/transformers/tokenAccounts.ts b/src/event-transformer/transformers/tokenAccounts.ts index 38a7db1..3871cec 100644 --- a/src/event-transformer/transformers/tokenAccounts.ts +++ b/src/event-transformer/transformers/tokenAccounts.ts @@ -1,6 +1,5 @@ -import { ConfirmedTransaction, PublicKey, TokenBalance } from "@solana/web3.js"; -import { BlockTransaction, Transformer } from "./Transformer"; -import BN from "bn.js"; +import { PublicKey, TokenBalance, TransactionResponse } from "@solana/web3.js"; +import { Transformer } from "./Transformer"; // Zip two arrays by some key. 
Output array of length 2 arrays that are each object with its pair (or undefined) function zipBy(a: A[], b: A[], getKey: (a: A) => B): (A | undefined)[][] { @@ -16,13 +15,21 @@ function zipBy(a: A[], b: A[], getKey: (a: A) => B): (A | undefined)[][] { return [...keys].map(key => [aMap.get(key), bMap.get(key)]) } +type PubkeyAmount = { mint: string, amount: string, pubkey: PublicKey, decimals: number }; + +function groupByPubkey(acc: Record, record: PubkeyAmount): Record { + acc[record.pubkey.toBase58()] = record; + + return acc; +} + export default class TokenAccountTransformer implements Transformer { get relevantKeys(): Set { return new Set(["TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA"]) } - transform(accountKeys: PublicKey[], transaction: ConfirmedTransaction): any[] { - function toPubkeyAmount({ accountIndex, mint, uiTokenAmount: { decimals, amount } }: TokenBalance) { + transform(accountKeys: PublicKey[], transaction: TransactionResponse & { signature: string }): any[] { + function toPubkeyAmount({ accountIndex, mint, uiTokenAmount: { decimals, amount } }: TokenBalance): PubkeyAmount { return { mint, pubkey: accountKeys[accountIndex], @@ -30,15 +37,17 @@ export default class TokenAccountTransformer implements Transformer { decimals } }; - const preBalances = transaction.meta?.preTokenBalances?.map(toPubkeyAmount); - const postBalances = transaction.meta?.postTokenBalances?.map(toPubkeyAmount); + const preBalances = transaction.meta?.preTokenBalances?.map(toPubkeyAmount)?.reduce(groupByPubkey, {} as Record); + const postBalances = transaction.meta?.postTokenBalances?.map(toPubkeyAmount)?.reduce(groupByPubkey, {} as Record); + const keys = new Set([...Object.keys(preBalances || {}), ...Object.keys(postBalances || {})]) const emptyItem = { pubkey: null, amount: null, mint: null, decimals: null } - const zipped = zipBy(preBalances || [], postBalances || [], i => i.pubkey.toBase58()) - const rawEvents = zipped.map(([preItem = emptyItem, postItem = emptyItem]) => { 
+ const rawEvents = [...keys].map((key) => { + const preItem = (preBalances && preBalances[key]) || emptyItem; + const postItem = (postBalances && postBalances[key]) || emptyItem; return { type: "TokenAccountBalanceChange", // @ts-ignore - pubkey: new PublicKey(new BN((preItem.pubkey || postItem.pubkey)!._bn, 'hex')).toBase58(), + pubkey: (preItem.pubkey || postItem.pubkey).toBase58(), preAmount: preItem.amount || 0, mint: preItem.mint || postItem.mint, postAmount: postItem.amount, diff --git a/src/kafka-signature-processor.ts b/src/kafka-signature-processor.ts index 7bdf3aa..5b82442 100644 --- a/src/kafka-signature-processor.ts +++ b/src/kafka-signature-processor.ts @@ -9,15 +9,11 @@ const { KAFKA_TOPIC, KAFKA_INPUT_TOPIC, KAFKA_GROUP_ID } = process.env const producer = kafka.producer() async function processSignature(signature: ConfirmedSignatureInfo): Promise { - const txn = await connection.getConfirmedTransaction(signature.signature, FINALITY); + const txn = await connection.getTransaction(signature.signature, { commitment: FINALITY }); try { - const data = txn?.transaction.serialize({ - requireAllSignatures: false, - verifySignatures: false - }).toJSON().data const value = JSON.stringify({ ...txn, - transaction: data + signature: signature.signature.toString() }) const size = Buffer.byteLength(value); if (size > 500000) { diff --git a/tests/event-transformer/transformers/anchor.ts b/tests/event-transformer/transformers/anchor.ts index 31deaae..8187efc 100644 --- a/tests/event-transformer/transformers/anchor.ts +++ b/tests/event-transformer/transformers/anchor.ts @@ -13,8 +13,7 @@ describe("anchor-transformer", () => { const block: BlockResponse & { slot: number } = blockResp as any; const mapped: any = block.transactions.flatMap(txn => { const accounts = txn.transaction.message.accountKeys.map((key) => ( - // @ts-ignore - new PublicKey(new BN(key._bn, 'hex')) + new PublicKey(key) )); return transformer.transform(accounts, txn) @@ -34,8 +33,7 @@ 
describe("anchor-transformer", () => { const block: BlockResponse & { slot: number } = wumboBlockResp as any; const mapped: any = block.transactions.flatMap(txn => { const accounts = txn.transaction.message.accountKeys.map((key) => ( - // @ts-ignore - new PublicKey(new BN(key._bn, 'hex')) + new PublicKey(key) )); return transformer.transform(accounts, txn) From 5d19654b02e9a82cb59ff35d889e45c8f5d6acfa Mon Sep 17 00:00:00 2001 From: Chewing Glass Date: Thu, 27 Jan 2022 15:55:40 -0600 Subject: [PATCH 14/14] More fixes --- src/event-transformer/index.ts | 4 ++++ src/leaderboard/index.ts | 18 ++++++++++++------ 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/src/event-transformer/index.ts b/src/event-transformer/index.ts index 8205628..b1c7e2d 100644 --- a/src/event-transformer/index.ts +++ b/src/event-transformer/index.ts @@ -28,6 +28,10 @@ function hasIntersect(set1: Set, set2: Set): boolean { } function processTxn(transformers: Transformer[], txn: TransactionResponse & { signature: string }): KafkaMessage[] { + if (!txn.transaction || !txn.transaction.message) { + return [] + } + const accounts = txn.transaction.message.accountKeys.map(k => new PublicKey(k)); const accountsSet = new Set(accounts.map(a => a.toBase58())); diff --git a/src/leaderboard/index.ts b/src/leaderboard/index.ts index 55e45b4..aa1829e 100644 --- a/src/leaderboard/index.ts +++ b/src/leaderboard/index.ts @@ -16,7 +16,7 @@ async function totalWumNetWorthPlugin(payload: EachBatchPayload) { async function accountPlugin(payload: EachBatchPayload) { const { batch: { messages } } = payload; - const batch = redisClient.batch() + let batch = redisClient.batch() const balanceChangeMessages = messages .map(m => ({ ...JSON.parse(m.value!.toString()), account: m.key })) @@ -41,13 +41,19 @@ async function accountPlugin(payload: EachBatchPayload) { const balanceChanges: any[] = keyAndValue[1]; const zeroes = balanceChanges.filter(change => change.tokenAmount === 0) const positives = 
balanceChanges.filter(change => change.tokenAmount !== 0) - const scoresAndValues = positives.flatMap((balanceChange: any) => { - return [Number(balanceChange.tokenAmount), balanceChange.account] - }) + const scoresAndValues = positives + .flatMap((balanceChange: any) => { + return [Number(balanceChange.tokenAmount), balanceChange.account] + }) // @ts-ignore const key = `accounts-by-balance-${tokenBonding}`; - batch.zadd(key, 'CH', ...scoresAndValues); - batch.zrem(key, ...zeroes.map(z => z.account)); + if (scoresAndValues.length > 0) { + batch = batch.zadd(key, 'CH', ...scoresAndValues); + } + if (zeroes.length > 0) { + console.log(`Removing ${zeroes.length} zeroes`) + batch = batch.zrem(key, ...zeroes.map(z => z.account)); + } }); const result = await promisify(batch.exec).bind(batch)(); const numChanged = result.reduce((a, b) => a + b, 0);