create filecoin-pin empty datasets for all approved SPs
#!/usr/bin/env tsx

/**
 * Create empty datasets for each approved provider on the given network
 *
 * Usage:
 *   PRIVATE_KEY=0x... tsx debug/create-datasets-for-all-providers.ts [--mainnet]
 *
 * This script will:
 * 1. Connect to the network (default: Calibration testnet, use --mainnet for mainnet)
 * 2. Get all approved providers from the storage service
 * 3. Create an empty dataset for each provider using the PDP API directly
 * 4. Output the dataset ID and provider information for each
 *
 * Datasets are created on-chain immediately with the correct metadata:
 * - withIPFSIndexing: '' (enables IPFS indexing)
 * - source: 'filecoin-pin' (identifies the source application)
 * - erc8004Files: '' (ERC-8004 metadata)
 */
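// Illustration only (not part of the original script): given the metadata keys described
// above, the object built later via `{ ...DEFAULT_DATA_SET_METADATA, erc8004Files: '' }`
// is expected to resolve to roughly the following. The actual default values live in
// ../src/core/synapse/constants.js and may differ:
//
//   {
//     withIPFSIndexing: '',
//     source: 'filecoin-pin',
//     erc8004Files: '',
//   }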
import { calibration, mainnet } from '@filoz/synapse-core/chains'
import * as warmStorage from '@filoz/synapse-core/warm-storage'
import * as SP from '@filoz/synapse-core/sp'
import { RPC_URLS } from '@filoz/synapse-sdk'
import { createWalletClient, http, type Address } from 'viem'
import { privateKeyToAccount } from 'viem/accounts'
import pino from 'pino'
import { cleanupSynapseService, initializeSynapse } from '../src/core/synapse/index.js'
import { DEFAULT_DATA_SET_METADATA } from '../src/core/synapse/constants.js'

const logger = pino({
  level: 'info',
})

async function main() {
  // Parse command line arguments
  const args = process.argv.slice(2)
  const isMainnet = args.includes('--mainnet')

  // Determine network and RPC URL
  const network = isMainnet ? 'mainnet' : 'calibration'
  const rpcUrl = isMainnet ? RPC_URLS.mainnet.websocket : RPC_URLS.calibration.websocket

  // Get private key from environment
  const privateKey = process.env.PRIVATE_KEY
  if (!privateKey) {
    logger.error('Missing required environment variable:')
    logger.error(' PRIVATE_KEY - Your wallet private key')
    process.exit(1)
  }

  logger.info(`Starting dataset creation for all approved providers on ${network}...`)
  logger.info({ network, rpcUrl }, 'Configuration')

  let synapse: Awaited<ReturnType<typeof initializeSynapse>> | null = null

  try {
    // Step 1: Initialize Synapse
    logger.info('Step 1: Initializing Synapse...')
    synapse = await initializeSynapse(
      {
        privateKey,
        rpcUrl,
      },
      logger
    )
    const networkName = synapse.getNetwork()
    logger.info({ network: networkName }, '✓ Synapse initialized successfully')

    // Step 2: Get all approved providers
    logger.info('Step 2: Fetching approved providers...')
    const storageInfo = await synapse.storage.getStorageInfo()
    const providers = storageInfo.providers ?? []

    if (providers.length === 0) {
      logger.error('No approved providers found on this network')
      process.exit(1)
    }

    logger.info({ providerCount: providers.length }, `✓ Found ${providers.length} approved provider(s)`)

    // Prepare metadata: merge DEFAULT_DATA_SET_METADATA with erc8004Files
    const metadata = {
      ...DEFAULT_DATA_SET_METADATA,
      erc8004Files: '',
    }
    logger.info({ metadata }, 'Dataset metadata prepared')
    logger.info(
      { metadataKeys: Object.keys(metadata) },
      `Metadata keys: ${Object.keys(metadata).join(', ')}`
    )

    // Get payer address from synapse
    const client = synapse.getClient()
    const payer = (await client.getAddress()) as Address

    // Create viem wallet client from private key
    // Use the chain's HTTP RPC URL (warm-storage works with HTTP, not WebSocket)
    const chain = isMainnet ? mainnet : calibration
    const account = privateKeyToAccount(privateKey as `0x${string}`)

    // Get HTTP RPC URL from chain (fallback to converting websocket URL if needed)
    const httpRpcUrl = chain.rpcUrls.default.http[0] ?? rpcUrl.replace(/^wss?:\/\//, 'https://')

    const walletClient = createWalletClient({
      account,
      chain,
      transport: http(httpRpcUrl),
    })

    logger.info({ payer, chainId: chain.id, network: chain.name, rpcUrl: httpRpcUrl }, 'Client configuration')

    // Step 3: Create empty dataset for each provider using PDP API directly
    logger.info('Step 3: Creating empty datasets for each provider using PDP API...')

    const results: Array<{
      providerId: number
      providerName: string
      providerAddress: string
      dataSetId: number | undefined
      success: boolean
      error?: string
    }> = []
    for (let i = 0; i < providers.length; i++) {
      const provider = providers[i]

      logger.info(
        { providerId: provider.id, providerName: provider.name, index: i + 1, total: providers.length },
        `Creating empty dataset for provider: ${provider.name || provider.serviceProvider}`
      )

      try {
        // Check if provider has PDP service URL
        const pdpServiceURL = provider.products?.PDP?.data?.serviceURL
        if (!pdpServiceURL) {
          throw new Error(`Provider ${provider.id} does not have a PDP service URL`)
        }

        const payee = (provider.payee || provider.serviceProvider) as Address

        logger.debug({ pdpServiceURL, payee }, 'Provider PDP service configuration')

        // Create empty dataset using warm-storage createDataSet
        // This creates the dataset on-chain immediately with the specified metadata
        logger.debug('Calling warm-storage createDataSet...')
        const createResult = await warmStorage.createDataSet(walletClient, {
          payee,
          payer,
          endpoint: pdpServiceURL,
          cdn: false,
          metadata,
        })

        logger.info(
          { txHash: createResult.txHash, statusUrl: createResult.statusUrl },
          'Dataset creation transaction submitted'
        )

        // Poll for dataset creation status
        logger.debug('Polling for dataset creation status...')
        const statusResult = await SP.pollForDataSetCreationStatus({
          statusUrl: createResult.statusUrl,
        })

        if (!statusResult.dataSetCreated || statusResult.dataSetId == null) {
          throw new Error(
            `Dataset creation failed or not confirmed. Status: ${statusResult.txStatus}, OK: ${statusResult.ok}`
          )
        }

        const dataSetId = statusResult.dataSetId

        logger.info(
          {
            providerId: provider.id,
            providerName: provider.name || provider.serviceProvider,
            dataSetId,
            txHash: createResult.txHash,
          },
          `✓ Empty dataset created for provider: ${provider.name || provider.serviceProvider} (ID: ${dataSetId})`
        )

        results.push({
          providerId: provider.id,
          providerName: provider.name || provider.serviceProvider,
          providerAddress: provider.serviceProvider,
          dataSetId,
          success: true,
        })
      } catch (error) {
        const errorMessage = error instanceof Error ? error.message : String(error)
        logger.error(
          {
            providerId: provider.id,
            providerName: provider.name || provider.serviceProvider,
            error: errorMessage,
          },
          `✗ Failed to create dataset for provider: ${provider.name || provider.serviceProvider}`
        )

        results.push({
          providerId: provider.id,
          providerName: provider.name || provider.serviceProvider,
          providerAddress: provider.serviceProvider,
          dataSetId: undefined,
          success: false,
          error: errorMessage,
        })
      }
    }
    // Step 4: Print summary
    logger.info('')
    logger.info('━━━ Summary ━━━')

    const successful = results.filter((r) => r.success)
    const failed = results.filter((r) => !r.success)

    logger.info({ total: results.length, successful: successful.length, failed: failed.length }, 'Results')

    logger.info('')
    logger.info('Successfully created datasets:')
    for (const result of successful) {
      logger.info(
        {
          providerId: result.providerId,
          providerName: result.providerName,
          dataSetId: result.dataSetId,
        },
        ` ✓ Provider ${result.providerId} (${result.providerName}): Dataset #${result.dataSetId}`
      )
    }

    if (failed.length > 0) {
      logger.info('')
      logger.info('Failed datasets:')
      for (const result of failed) {
        logger.info(
          {
            providerId: result.providerId,
            providerName: result.providerName,
            error: result.error,
          },
          ` ✗ Provider ${result.providerId} (${result.providerName}): ${result.error}`
        )
      }
    }

    logger.info('')
    logger.info('━━━ Complete ━━━')

    process.exit(failed.length > 0 ? 1 : 0)
  } catch (error) {
    logger.error({ error: error instanceof Error ? error.message : String(error) }, 'Failed to create datasets')
    if (error instanceof Error && error.stack) {
      logger.error(error.stack)
    }
    process.exit(1)
  } finally {
    // Clean up resources
    if (synapse) {
      await cleanupSynapseService()
    }
  }
}

main()