diff --git a/src/@types/C2D.ts b/src/@types/C2D.ts new file mode 100644 index 000000000..ba6364e20 --- /dev/null +++ b/src/@types/C2D.ts @@ -0,0 +1,32 @@ +import type { MetadataAlgorithm } from './DDO/Metadata.js' +import type { Command } from './commands.js' +export interface Compute { + env: string // with hash + validUntil: number +} + +export interface ComputeAsset { + url?: string + documentId: string + serviceId: string + transferTxId?: string + userdata?: { [key: string]: any } +} + +export interface ComputeAlgorithm { + documentId?: string + serviceId?: string + url?: string + meta?: MetadataAlgorithm + transferTxId?: string + algocustomdata?: { [key: string]: any } + userdata?: { [key: string]: any } +} + +export interface InitializeComputeCommand extends Command { + datasets: [ComputeAsset] + algorithm: ComputeAlgorithm + compute: Compute + consumerAddress: string + chainId: number +}
diff --git a/src/@types/DDO/Metadata.ts b/src/@types/DDO/Metadata.ts index 49571a834..1eb513558 100644 --- a/src/@types/DDO/Metadata.ts +++ b/src/@types/DDO/Metadata.ts @@ -23,7 +23,7 @@ export interface MetadataAlgorithm { * Object describing the Docker container image. * @type {Object} */ - container: { + container?: { /** * The command to execute, or script to run inside the Docker image. * @type {string}
diff --git a/src/components/core/compute.ts b/src/components/core/compute.ts index 3ad31ae1a..6edb615fc 100644 --- a/src/components/core/compute.ts +++ b/src/components/core/compute.ts @@ -3,14 +3,18 @@ import { P2PCommandResponse } from '../../@types' import { CORE_LOGGER } from '../../utils/logging/common.js' import { Handler } from './handler.js' import { GetEnvironmentsCommand } from '../../@types/commands.js' +import { InitializeComputeCommand } from '../../@types/C2D.js' import { getConfiguration } from '../../utils/config.js' +import { PROTOCOL_COMMANDS } from '../../utils/constants.js' +import { streamToString } from '../../utils/util.js' import axios from 'axios' +import { validateProviderFeesForDatasets } from './utils/initializeCompute.js' export class GetEnvironmentsHandler extends Handler { async handle(task: GetEnvironmentsCommand): Promise<P2PCommandResponse> { try { CORE_LOGGER.logMessage( - 'File Info Request recieved with arguments: ' + JSON.stringify(task, null, 2), + 'Get C2D Envs Request received with arguments: ' + JSON.stringify(task, null, 2), true ) const response: any[] = [] @@ -53,3 +57,85 @@ export class GetEnvironmentsHandler extends Handler { } } } + +export class InitializeComputeHandler extends Handler { + validateTimestamp(value: number) { + // value is a millisecond timestamp; compare in seconds + const timestampNow = new Date().getTime() / 1000 + const validUntil = new Date(value).getTime() / 1000 + + return validUntil > timestampNow + } + + checksC2DEnv(computeEnv: string, c2dEnvsWithHash: any[]): boolean { + for (const c of c2dEnvsWithHash) { + if (c.id === computeEnv) { + return true + } + } + return false + } + + async handle(task: InitializeComputeCommand): Promise<P2PCommandResponse> { + try { + CORE_LOGGER.logMessage( + 'Initialize Compute Request received with arguments: ' + + JSON.stringify(task, null, 2), + true + ) + + const { validUntil } = task.compute + if (!this.validateTimestamp(validUntil)) { + const errorMsg = `Error validating validUntil ${validUntil}. 
It is not in the future.` + CORE_LOGGER.error(errorMsg) + return { + stream: null, + status: { + httpStatus: 400, + error: errorMsg + } + } + } + + const c2dEnvTask: GetEnvironmentsCommand = { + chainId: task.chainId, + command: PROTOCOL_COMMANDS.GET_COMPUTE_ENVIRONMENTS + } + + const req = await new GetEnvironmentsHandler(this.getOceanNode()).handle(c2dEnvTask) + + const resp = await streamToString(req.stream as Readable) + const c2dEnvs = JSON.parse(resp) + + if (!this.checksC2DEnv(task.compute.env, c2dEnvs)) { + const errorMsg = `Compute env was not found.` + CORE_LOGGER.error(errorMsg) + return { + stream: null, + status: { + httpStatus: 400, + error: errorMsg + } + } + } + return await validateProviderFeesForDatasets( + this.getOceanNode(), + task.datasets, + task.algorithm, + task.chainId, + task.compute.env, + task.compute.validUntil, + task.consumerAddress + ) + } catch (error) { + CORE_LOGGER.error(error.message) + return { + stream: null, + status: { + httpStatus: 500, + error: error.message + } + } + } + } +} diff --git a/src/components/core/coreHandlersRegistry.ts b/src/components/core/coreHandlersRegistry.ts index b5feb4663..bc9a5ca1e 100644 --- a/src/components/core/coreHandlersRegistry.ts +++ b/src/components/core/coreHandlersRegistry.ts @@ -18,7 +18,7 @@ import { StatusHandler } from './statusHandler.js' import { ReindexHandler } from './reindexHandler.js' import { OceanNode } from '../../OceanNode.js' import { Command } from '../../@types/commands.js' -import { GetEnvironmentsHandler } from './compute.js' +import { GetEnvironmentsHandler, InitializeComputeHandler } from './compute.js' export type HandlerRegistry = { handlerName: string // name of the handler @@ -74,6 +74,10 @@ export class CoreHandlersRegistry { PROTOCOL_COMMANDS.GET_COMPUTE_ENVIRONMENTS, new GetEnvironmentsHandler(node) ) + this.registerCoreHandler( + PROTOCOL_COMMANDS.INITIALIZE_COMPUTE, + new InitializeComputeHandler(node) + ) } public static getInstance(node: OceanNode): CoreHandlersRegistry { diff --git a/src/components/core/utils/feesHandler.ts b/src/components/core/utils/feesHandler.ts index 6557d0f26..4aa7606f4 100644 --- a/src/components/core/utils/feesHandler.ts +++ b/src/components/core/utils/feesHandler.ts @@ -14,11 +14,11 @@ import { verifyMessage } from '../../../utils/blockchain.js' import { getConfiguration } from '../../../utils/config.js' import { CORE_LOGGER } from '../../../utils/logging/common.js' import { LOG_LEVELS_STR } from '../../../utils/logging/Logger.js' -import { findEventByKey } from '../../Indexer/utils.js' import axios from 'axios' import { getOceanArtifactsAdresses } from '../../../utils/address.js' import ERC20Template from '@oceanprotocol/contracts/artifacts/contracts/templates/ERC20TemplateEnterprise.sol/ERC20TemplateEnterprise.json' assert { type: 'json' } import { C2DClusterInfo } from '../../../@types' +import { verifyComputeProviderFees } from '../validateTransaction.js' export async function getC2DEnvs(asset: DDO): Promise> { try { @@ -159,56 +159,47 @@ export async function calculateComputeProviderFee( return providerFee } -async function getEventData( - provider: JsonRpcApiProvider, - transactionHash: string, - abi: any -): Promise { - const iface = new ethers.Interface(abi) - const receipt = await provider.getTransactionReceipt(transactionHash) - const eventObj = { - topics: receipt.logs[0].topics as string[], - data: receipt.logs[0].data - } - return iface.parseLog(eventObj) -} -// It will be used in #230 for initializeCompute export async function 
validateComputeProviderFee( provider: JsonRpcApiProvider, tx: string, computeEnv: string, // with hash asset: DDO, service: Service, - validUntil: number + validUntil: number, + userAddress: string ): Promise<[boolean, ProviderFeeData | {}]> { - const txReceipt = await provider.getTransactionReceipt(tx) - const { logs } = txReceipt - const timestampNow = new Date().getTime() / 1000 - for (const log of logs) { - const event = findEventByKey(log.topics[0]) - if (event && event.type === 'ProviderFee') { - const decodedEventData = await getEventData(provider, tx, ERC20Template.abi) - const validUntilContract = parseInt(decodedEventData.args[7].toString()) - if (timestampNow >= validUntilContract) { - // provider fee expired -> reuse order - CORE_LOGGER.log( - LOG_LEVELS_STR.LEVEL_INFO, - `Provider fees for this env have expired -> reuse order.`, - true - ) - const envId = computeEnv.split('-')[1] - const newProviderFee = await calculateComputeProviderFee( - asset, - validUntil, - envId, - service, - provider - ) - return [false, newProviderFee] - } else { - return [true, {}] - } + try { + const timestampNow = new Date().getTime() / 1000 + const validationResult = await verifyComputeProviderFees( + tx, + userAddress, + provider, + timestampNow, + service.timeout + ) + + if (validationResult.isValid === false) { + // provider fee expired or tx id is not provided -> reuse order + CORE_LOGGER.log( + LOG_LEVELS_STR.LEVEL_INFO, + `${validationResult.message} -> create new provider fees.`, + true + ) + const regex = /[^-]*-(ocean-[^-]*)/ + const envId = computeEnv.match(regex)[1] + const newProviderFee = await calculateComputeProviderFee( + asset, + validUntil, + envId, + service, + provider + ) + return [false, newProviderFee] + } else { + return [true, validationResult.message] } + } catch (err) { + CORE_LOGGER.logMessage(`Validation for compute provider fees failed due to: ${err}`) } } // equiv to get_provider_fees
diff --git a/src/components/core/utils/initializeCompute.ts b/src/components/core/utils/initializeCompute.ts new file mode 100644 index 000000000..99dfd8771 --- /dev/null +++ b/src/components/core/utils/initializeCompute.ts @@ -0,0 +1,92 @@ +import { P2PCommandResponse } from '../../../@types' +import { ComputeAlgorithm, ComputeAsset } from '../../../@types/C2D.js' +import { CORE_LOGGER } from '../../../utils/logging/common.js' +import { DDO } from '../../../@types/DDO/DDO.js' +import { getJsonRpcProvider } from '../../../utils/blockchain.js' +import { validateComputeProviderFee } from './feesHandler.js' +import { Readable } from 'stream' +import { OceanNode } from '../../../OceanNode.js' +import { Service } from '../../../@types/DDO/Service.js' + +export function getServiceById(ddo: DDO, serviceId: string): Service { + try { + return ddo.services.filter((service) => service.id === serviceId)[0] + } catch (err) { + CORE_LOGGER.error(`Service was not found: ${err}`) + } +} + +export async function validateProviderFeesForDatasets( + node: OceanNode, + datasets: [ComputeAsset], + algorithm: ComputeAlgorithm, + chainId: number, + env: string, + validUntil: number, + consumerAddress: string +): Promise<P2PCommandResponse> { + const listOfAssets = [...datasets, ...[algorithm]] + const approvedParams: any = { + algorithm: {}, + datasets: [] + } + const provider = await getJsonRpcProvider(chainId) + + for (const asset of listOfAssets) { + try { + const ddo = (await node.getDatabase().ddo.retrieve(asset.documentId)) as DDO + if (ddo.id === algorithm.documentId) { + if (ddo.metadata.type !== 'algorithm') { + const
errorMsg = `DID is not a valid algorithm` + CORE_LOGGER.error(errorMsg) + return { + stream: null, + status: { + httpStatus: 400, + error: errorMsg + } + } + } + } + const service = getServiceById(ddo, asset.serviceId) + + const resultValidation = await validateComputeProviderFee( + provider, + asset.transferTxId, + env, + ddo, + service, + validUntil, + consumerAddress + ) + if (ddo.metadata.type === 'algorithm') { + approvedParams.algorithm = { + datatoken: service.datatokenAddress, + providerFee: resultValidation[1], + validOrder: resultValidation[0] + } + } else { + approvedParams.datasets.push({ + datatoken: service.datatokenAddress, + providerFee: resultValidation[1], + validOrder: resultValidation[0] + }) + } + } catch (error) { + CORE_LOGGER.error(`Unable to get compute provider fees: ${error}`) + } + } + + const result = JSON.stringify(approvedParams, (key, value) => { + if (typeof value === 'bigint') { + return value.toString() + } + return value + }) + return { + stream: Readable.from(result), + status: { + httpStatus: 200 + } + } +} diff --git a/src/components/core/validateTransaction.ts b/src/components/core/validateTransaction.ts index 3d0024a3d..1c8262e72 100644 --- a/src/components/core/validateTransaction.ts +++ b/src/components/core/validateTransaction.ts @@ -1,4 +1,10 @@ -import { JsonRpcProvider, Contract, Interface, TransactionReceipt } from 'ethers' +import { + JsonRpcProvider, + JsonRpcApiProvider, + Contract, + Interface, + TransactionReceipt +} from 'ethers' import { fetchEventFromTransaction } from '../../utils/util.js' import ERC20Template from '@oceanprotocol/contracts/artifacts/contracts/templates/ERC20TemplateEnterprise.sol/ERC20TemplateEnterprise.json' assert { type: 'json' } import ERC721Template from '@oceanprotocol/contracts/artifacts/contracts/templates/ERC721Template.sol/ERC721Template.json' assert { type: 'json' } @@ -11,10 +17,10 @@ interface ValidateTransactionResponse { const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)) -async function fetchTransactionReceipt( +export async function fetchTransactionReceipt( txId: string, - provider: JsonRpcProvider, - retries: number = 3 + provider: JsonRpcApiProvider, + retries: number = 2 ): Promise { while (retries > 0) { try { @@ -24,7 +30,7 @@ async function fetchTransactionReceipt( } if (retries > 1) { // If it's not the last retry, sleep before the next retry - await sleep(30000) + await sleep(1000) } retries-- } catch (error) { @@ -35,6 +41,65 @@ async function fetchTransactionReceipt( } } +export async function verifyComputeProviderFees( + txId: string, + userAddress: string, + provider: JsonRpcApiProvider, + timestampNow: number, + serviceTimeout: number +): Promise { + const contractInterface = new Interface(ERC20Template.abi) + const txReceiptMined = await fetchTransactionReceipt(txId, provider) + + if (!txReceiptMined) { + const errorMsg = `Tx receipt cannot be processed, because tx id ${txId} was not mined.` + CORE_LOGGER.logMessage(errorMsg) + return { + isValid: false, + message: errorMsg + } + } + + if (userAddress.toLowerCase() !== txReceiptMined.from.toLowerCase()) { + const errorMsg = 'User address does not match the sender of the transaction.' 
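// Note on the serialization step in initializeCompute.ts above: approvedParams is stringified
// with a replacer because provider fee amounts can be bigint values, which plain JSON.stringify
// cannot encode. A minimal standalone sketch of the same technique (the object literal here is
// illustrative only, not taken from this PR):
const exampleParams = { providerFeeAmount: 0n, validUntil: 1700000000 }
const serialized = JSON.stringify(exampleParams, (key, value) =>
  typeof value === 'bigint' ? value.toString() : value
)
// serialized === '{"providerFeeAmount":"0","validUntil":1700000000}'
// After JSON.parse on the consumer side, bigint fields therefore arrive back as strings.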
+ CORE_LOGGER.logMessage(errorMsg) + return { + isValid: false, + message: errorMsg + } + } + + const eventTimestamp = (await provider.getBlock(txReceiptMined.blockHash)).timestamp + + const currentTimestamp = Math.floor(Date.now() / 1000) + + const timeElapsed = currentTimestamp - eventTimestamp + + if (serviceTimeout !== 0 && timeElapsed > serviceTimeout) { + return { + isValid: false, + message: 'The order has expired.' + } + } + const ProviderFeesEvent = fetchEventFromTransaction( + txReceiptMined, + 'ProviderFees', + contractInterface + ) + + const validUntilContract = parseInt(ProviderFeesEvent[0].args[7].toString()) + if (timestampNow >= validUntilContract) { + return { + isValid: false, + message: 'Provider fees for compute have expired.' + } + } + return { + isValid: true, + message: ProviderFeesEvent[0].args + } +} + export async function validateOrderTransaction( txId: string, userAddress: string,
diff --git a/src/components/httpRoutes/compute.ts b/src/components/httpRoutes/compute.ts index a12f7f75a..3b5cfac7f 100644 --- a/src/components/httpRoutes/compute.ts +++ b/src/components/httpRoutes/compute.ts @@ -1,6 +1,6 @@ import express from 'express' -import { GetEnvironmentsHandler } from '../core/compute.js' -import { streamToObject } from '../../utils/util.js' +import { GetEnvironmentsHandler, InitializeComputeHandler } from '../core/compute.js' +import { streamToObject, streamToString } from '../../utils/util.js' import { PROTOCOL_COMMANDS } from '../../utils/constants.js' import { Readable } from 'stream' import { HTTP_LOGGER } from '../../utils/logging/common.js' @@ -46,3 +46,46 @@ computeRoutes.get(`${C2D_API_BASE_PATH}/computeEnvironments`, async (req, res) => { res.status(500).send('Internal Server Error') } }) + +computeRoutes.post('/api/services/initializeCompute', async (req, res) => { + try { + HTTP_LOGGER.logMessage( + `POST initializeCompute request received with body: ${JSON.stringify(req.body)}`, + true + ) + const { body } = req + if (!body) { + res.status(400).send('Missing required body') + return + } + if (!body.datasets || !body.algorithm) { + res.status(400).send('Missing datasets or algorithm') + return + } + for (const dataset of body.datasets) { + if (!dataset.documentId) { + res.status(400).send('Missing dataset did') + return + } + } + if (!body.algorithm.documentId) { + res.status(400).send('Missing algorithm did') + return + } + body.command = PROTOCOL_COMMANDS.INITIALIZE_COMPUTE + const result = await new InitializeComputeHandler(req.oceanNode).handle(body) + if (result.stream) { + const queryResult = JSON.parse(await streamToString(result.stream as Readable)) + res.json(queryResult) + } else { + HTTP_LOGGER.log( + LOG_LEVELS_STR.LEVEL_ERROR, + `Stream not found: ${result.status.error}` + ) + res.status(result.status.httpStatus).send(result.status.error) + } + } catch (error) { + HTTP_LOGGER.log(LOG_LEVELS_STR.LEVEL_ERROR, `Error: ${error}`) + res.status(500).send('Internal Server Error') + } +})
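// A hedged usage sketch for the POST route added above. The path and body shape follow the
// route handler and the InitializeComputeCommand type; the node URL, DIDs, env id and consumer
// address below are placeholders, not values from this PR.
import axios from 'axios'

async function callInitializeCompute(nodeUrl: string): Promise<any> {
  const body = {
    datasets: [{ documentId: 'did:op:<dataset-did>', serviceId: '0' }],
    algorithm: { documentId: 'did:op:<algorithm-did>', serviceId: '0' },
    // validUntil must be a future timestamp or the handler rejects the request
    compute: { env: '<hash>-ocean-compute', validUntil: Date.now() + 24 * 60 * 60 * 1000 },
    consumerAddress: '0x0000000000000000000000000000000000000000',
    chainId: 8996
  }
  // the route itself sets body.command = PROTOCOL_COMMANDS.INITIALIZE_COMPUTE
  const response = await axios.post(`${nodeUrl}/api/services/initializeCompute`, body)
  return response.data // { algorithm: { datatoken, providerFee, validOrder }, datasets: [...] }
}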
diff --git a/src/components/httpRoutes/validateCommands.ts b/src/components/httpRoutes/validateCommands.ts index 0622445c3..44a0f313e 100644 --- a/src/components/httpRoutes/validateCommands.ts +++ b/src/components/httpRoutes/validateCommands.ts @@ -134,6 +134,18 @@ export function validateCommandAPIParameters(requestBody: any): ValidateParams { if (!requestBody.chainId) { return buildInvalidRequestMessage('Missing required parameter: "chainId"') } + } else if (command === PROTOCOL_COMMANDS.INITIALIZE_COMPUTE) { + if ( + !requestBody.chainId || + !requestBody.datasets || + !requestBody.algorithm || + !requestBody.compute || + !requestBody.consumerAddress + ) { + return buildInvalidRequestMessage( + 'Missing required parameter(s): "chainId", "datasets", "algorithm", "compute", "consumerAddress"' + ) + } } // only once is enough :-) return {
diff --git a/src/test/data/ddo.ts b/src/test/data/ddo.ts index 2b10d85f9..717e5c82c 100644 --- a/src/test/data/ddo.ts +++ b/src/test/data/ddo.ts @@ -20,6 +20,58 @@ export const ddo = { } } } +export const genericAlgorithm = { + '@context': ['https://w3id.org/did/v1'], + id: '', + version: '4.1.0', + chainId: 8996, + nftAddress: '0x0', + metadata: { + created: '2021-12-20T14:35:20Z', + updated: '2021-12-20T14:35:20Z', + type: 'algorithm', + name: 'dataset-name', + description: 'Ocean protocol test dataset description', + author: 'oceanprotocol-team', + license: 'MIT', + tags: ['white-papers'], + additionalInformation: { 'test-key': 'test-value' }, + links: ['http://data.ceda.ac.uk/badc/ukcp09/'] + }, + services: [ + { + id: '0', + type: 'access', + description: 'Download service', + files: [ + { + url: 'https://raw.githubusercontent.com/oceanprotocol/test-algorithm/master/javascript/algo.js', + contentType: 'text/js', + encoding: 'UTF-8' + } + ], + datatokenAddress: '0x0', + serviceEndpoint: 'http://172.15.0.4:8030', + timeout: 0 + } + ], + nft: { state: 0 }, + event: {}, + credentials: { + allow: [ + { + type: 'address', + values: ['0xBE5449a6A97aD46c8558A3356267Ee5D2731ab5e'] + } + ], + deny: [ + { + type: 'address', + values: ['0x123'] + } + ] + } +} export const genericDDO = { '@context': ['https://w3id.org/did/v1'], @@ -74,6 +126,71 @@ export const genericDDO = { } } +export const genericComputeDDO = { + '@context': ['https://w3id.org/did/v1'], + id: '', + version: '4.1.0', + chainId: 8996, + nftAddress: '0x0', + metadata: { + created: '2021-12-20T14:35:20Z', + updated: '2021-12-20T14:35:20Z', + type: 'dataset', + name: 'dataset-name', + description: 'Ocean protocol test dataset description', + author: 'oceanprotocol-team', + license: 'MIT', + tags: ['white-papers'], + additionalInformation: { 'test-key': 'test-value' }, + links: ['http://data.ceda.ac.uk/badc/ukcp09/'] + }, + services: [ + { + id: '0', + type: 'compute', + description: 'Compute service', + files: [ + { + url: 'https://raw.githubusercontent.com/oceanprotocol/test-algorithm/master/javascript/algo.js', + contentType: 'text/js', + encoding: 'UTF-8' + } + ], + datatokenAddress: '0x0', + serviceEndpoint: 'http://172.15.0.4:8030', + timeout: 0, + compute: { + namespace: 'test', + publisherTrustedAlgorithms: [ + { + did: 'did:op:706d7452b1a25b183051fe02f2ad902d54fc45a43fdcee26b20f21684b5dee72', + filesChecksum: + 'b4908c868c78086097a10f986718a8f3fae1455f0d443c3dc59330207d47cc6d', + containerSectionChecksum: + '20d3f5667b2068e84db5465fb51aa405b06a0ff791635048d7976ec7f5abdc73' + } + ] + } + } + ], + nft: { state: 0 }, + event: {}, + credentials: { + allow: [ + { + type: 'address', + values: ['0xBE5449a6A97aD46c8558A3356267Ee5D2731ab5e'] + } + ], + deny: [ + { + type: 'address', + values: ['0x123'] + } + ] + } +} + export const DDOExample = { '@context': ['https://w3id.org/did/v1'], id: 'did:op:fa0e8fa9550e8eb13392d6eeb9ba9f8111801b332c8d2345b350b3bc66b379d5',
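// For illustration: once the fixtures above are published, a dataset/algorithm pair for an
// initialize request is simply the DID plus the service id of each DDO. A short sketch using
// the types from src/@types/C2D.ts (the import paths assume the test-file layout used below):
import type { ComputeAsset, ComputeAlgorithm } from '../../@types/C2D.js'
import { genericComputeDDO, genericAlgorithm } from '../data/ddo.js'

const datasetAsset: ComputeAsset = {
  documentId: genericComputeDDO.id, // set to the real DID after publishing
  serviceId: genericComputeDDO.services[0].id
}
const algorithmAsset: ComputeAlgorithm = {
  documentId: genericAlgorithm.id,
  serviceId: genericAlgorithm.services[0].id
}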
diff --git a/src/test/integration/computeFees.test.ts b/src/test/integration/computeFees.test.ts index 46dba2b48..9d2da8d93 100644 --- a/src/test/integration/computeFees.test.ts +++ b/src/test/integration/computeFees.test.ts @@ -9,19 +9,33 @@ import { hexlify, ZeroAddress } from 'ethers' +import { Readable } from 'stream' +import { streamToObject, getEventFromTx } from '../../utils/util.js' +import { + PROTOCOL_COMMANDS, + ENVIRONMENT_VARIABLES, + EVENTS +} from '../../utils/constants.js' +import { + InitializeComputeCommand, + ComputeAsset, + ComputeAlgorithm +} from '../../@types/C2D.js' +import { + InitializeComputeHandler, + GetEnvironmentsHandler +} from '../../components/core/compute.js' import ERC721Factory from '@oceanprotocol/contracts/artifacts/contracts/ERC721Factory.sol/ERC721Factory.json' assert { type: 'json' } import ERC721Template from '@oceanprotocol/contracts/artifacts/contracts/templates/ERC721Template.sol/ERC721Template.json' assert { type: 'json' } import { RPCS } from '../../@types/blockchain.js' -import { genericDDO } from '../data/ddo.js' +import { genericComputeDDO, genericAlgorithm } from '../data/ddo.js' import { getOceanArtifactsAdresses } from '../../utils/address.js' -import { getEventFromTx } from '../../utils/util.js' import { waitToIndex, expectedTimeoutFailure } from './testUtils.js' import { encrypt } from '../../utils/crypt.js' import { calculateComputeProviderFee, getC2DEnvs } from '../../components/core/utils/feesHandler.js' -import { ENVIRONMENT_VARIABLES, EVENTS } from '../../utils/constants.js' import { DEFAULT_TEST_TIMEOUT, OverrideEnvConfig, @@ -32,20 +46,32 @@ import { tearDownEnvironment } from '../utils/utils.js' import { DDO } from '../../@types/DDO/DDO.js' +import { ProviderFeeData } from '../../@types/Fees.js' +import { OceanNode } from '../../OceanNode.js' import { EncryptMethod } from '../../@types/fileObject.js' +import { Database } from '../../components/database/index.js' describe('Compute provider fees', () => { + let database: Database let provider: JsonRpcProvider let factoryContract: Contract let nftContract: Contract + let nftAlgoContract: Contract let publisherAccount: Signer let nftAddress: string + let nftAddressAlgo: string const chainId = 8996 let assetDID: string + let algoDID: string let resolvedDDO: Record<string, any> + let resolvedAlgo: Record<string, any> let genericAsset: any + let genericAlgo: any let datatokenAddress: string + let datatokenAddressAlgo: string let computeEnvs: Array + let computeProviderFess: ProviderFeeData + let oceanNode: OceanNode const mockSupportedNetworks: RPCS = getMockSupportedNetworks() const data = getOceanArtifactsAdresses() @@ -62,9 +88,19 @@ ) ) + const dbConfig = { + url: 'http://localhost:8108/?apiKey=xyz' + } + database = await new Database(dbConfig) + oceanNode = await OceanNode.getInstance(database) + + provider = new JsonRpcProvider('http://127.0.0.1:8545') + publisherAccount = (await provider.getSigner(0)) as Signer + provider = new JsonRpcProvider('http://127.0.0.1:8545') publisherAccount = (await provider.getSigner(0)) as Signer - genericAsset = genericDDO + genericAsset = genericComputeDDO + genericAlgo = genericAlgorithm factoryContract = new ethers.Contract( data.development.ERC721Factory, ERC721Factory.abi, publisherAccount ) @@ -108,6 +144,42 @@ assert(datatokenAddress, 'find datatoken created failed') })
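// The metadata tests below derive each asset's DID from its NFT address and the chain id.
// A standalone sketch of that derivation (the address argument is a placeholder):
import { createHash } from 'crypto'
import { getAddress } from 'ethers'

function deriveDid(nftAddress: string, chainId: number): string {
  return (
    'did:op:' +
    createHash('sha256')
      .update(getAddress(nftAddress) + chainId.toString(10))
      .digest('hex')
  )
}
// e.g. deriveDid(nftAddressAlgo, 8996) yields the value assigned to genericAlgo.id below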
+ it('should publish an algorithm', async () => { + const tx = await factoryContract.createNftWithErc20( + { + name: '72120Bundle', + symbol: '72Bundle', + templateIndex: 1, + tokenURI: 'https://oceanprotocol.com/nft/', + transferable: true, + owner: await publisherAccount.getAddress() + }, + { + strings: ['ERC20B1', 'ERC20DT1Symbol'], + templateIndex: 1, + addresses: [ + await publisherAccount.getAddress(), + ZeroAddress, + ZeroAddress, + '0x0000000000000000000000000000000000000000' + ], + uints: [1000, 0], + bytess: [] + } + ) + const txReceipt = await tx.wait() + assert(txReceipt, 'transaction failed') + const nftEvent = getEventFromTx(txReceipt, 'NFTCreated') + const erc20Event = getEventFromTx(txReceipt, 'TokenCreated') + + nftAddressAlgo = nftEvent.args[0] + datatokenAddressAlgo = erc20Event.args[0] + console.log('### datatokenAddress', datatokenAddress) + + assert(nftAddressAlgo, 'find nft created failed') + assert(datatokenAddressAlgo, 'find datatoken created failed') + }) + it('should set metadata and save ', async () => { // Encrypt the files const files = { @@ -124,7 +196,6 @@ const data = Uint8Array.from(Buffer.from(JSON.stringify(files))) const encryptedData = await encrypt(data, EncryptMethod.ECIES) - // const encryptedDataString = encryptedData.toString('base64') nftContract = new ethers.Contract(nftAddress, ERC721Template.abi, publisherAccount) genericAsset.id = @@ -153,6 +224,39 @@ ) const trxReceipt = await setMetaDataTx.wait() assert(trxReceipt, 'set metada failed') + + nftAlgoContract = new ethers.Contract( + nftAddressAlgo, + ERC721Template.abi, + publisherAccount + ) + genericAlgo.id = + 'did:op:' + + createHash('sha256') + .update(getAddress(nftAddressAlgo) + chainId.toString(10)) + .digest('hex') + genericAlgo.nftAddress = nftAddressAlgo + genericAlgo.services[0].datatokenAddress = datatokenAddressAlgo + genericAlgo.services[0].files = encryptedData + + algoDID = genericAlgo.id + + const stringAlgo = JSON.stringify(genericAlgo) + const bytesAlgo = Buffer.from(stringAlgo) + const metadataAlgo = hexlify(bytesAlgo) + const hashAlgo = createHash('sha256').update(metadataAlgo).digest('hex') + + const setMetaDataTxAlgo = await nftAlgoContract.setMetaData( + 0, + 'http://v4.provider.oceanprotocol.com', + '0x123', + '0x01', + metadataAlgo, + '0x' + hashAlgo, + [] + ) + const trxReceiptAlgo = await setMetaDataTxAlgo.wait() + assert(trxReceiptAlgo, 'set metadata failed') }) it('should store the ddo in the database and return it ', async function () { @@ -169,6 +273,20 @@ } }) + it('should store the algo in the database and return it ', async function () { + const { ddo, wasTimeout } = await waitToIndex( + algoDID, + EVENTS.METADATA_CREATED, + DEFAULT_TEST_TIMEOUT + ) + resolvedAlgo = ddo + if (resolvedAlgo) { + expect(resolvedAlgo.id).to.equal(genericAlgo.id) + } else { + expect(expectedTimeoutFailure(this.test.title)).to.be.equal(wasTimeout) + } + }) + it('should get provider fees for compute', async () => { computeEnvs = await getC2DEnvs(resolvedDDO as DDO) if (!isRunningContinousIntegrationEnv()) { @@ -184,16 +302,16 @@ // expect 2 envs expect(envs.length === 2, 'incorrect length') const filteredEnv = envs.filter((env: any) => env.priceMin !== 0)[0] - const providerFees = await calculateComputeProviderFee( + computeProviderFess = await calculateComputeProviderFee( resolvedDDO as DDO, 0, filteredEnv.id, resolvedDDO.services[0], provider ) - assert(providerFees, 'provider fees were not fetched') - assert(providerFees.providerFeeToken === oceanToken) - assert(providerFees.providerFeeAmount, 'provider fee amount is not fetched') + assert(computeProviderFess, 'provider fees were not fetched') + assert(computeProviderFess.providerFeeToken === oceanToken) + assert(computeProviderFess.providerFeeAmount, 'provider fee amount is
not fetched') }) it('should get free provider fees for compute', async () => { @@ -209,17 +327,124 @@ describe('Compute provider fees', () => { 'http://172.15.0.13:31000/api/v1/operator/environments?chain_id=8996' ] const filteredEnv = envs.filter((env: any) => env.priceMin === 0)[0] - const providerFees = await calculateComputeProviderFee( + computeProviderFess = await calculateComputeProviderFee( resolvedDDO as DDO, 0, filteredEnv.id, resolvedDDO.services[0], provider ) - assert(providerFees, 'provider fees were not fetched') - console.log('provider fees: ', providerFees) - assert(providerFees.providerFeeToken === oceanToken) - assert(providerFees.providerFeeAmount === 0n, 'provider fee amount is not fetched') + assert(computeProviderFess, 'provider fees were not fetched') + assert(computeProviderFess.providerFeeToken === oceanToken) + assert( + computeProviderFess.providerFeeAmount === 0n, + 'provider fee amount is not fetched' + ) + }) + + it('Initialize compute without transaction IDs', async () => { + const getEnvironmentsTask = { + command: PROTOCOL_COMMANDS.GET_COMPUTE_ENVIRONMENTS, + chainId: 8996 + } + const response = await new GetEnvironmentsHandler(oceanNode).handle( + getEnvironmentsTask + ) + + assert(response, 'Failed to get response') + assert(response.status.httpStatus === 200, 'Failed to get 200 response') + assert(response.stream, 'Failed to get stream') + expect(response.stream).to.be.instanceOf(Readable) + + const computeEnvironments = await streamToObject(response.stream as Readable) + const firstEnv = computeEnvironments[0].id + const { consumerAddress } = computeEnvironments[0] + const dataset: ComputeAsset = { + documentId: resolvedDDO.id, + serviceId: resolvedDDO.services[0].id + } + const algorithm: ComputeAlgorithm = { + documentId: resolvedAlgo.id, + serviceId: resolvedAlgo.services[0].id + } + const currentDate = new Date() + const initializeComputeTask: InitializeComputeCommand = { + datasets: [dataset], + algorithm, + compute: { + env: firstEnv, + validUntil: new Date( + currentDate.getFullYear() + 1, + currentDate.getMonth(), + currentDate.getDate() + ).getTime() + }, + consumerAddress, + command: PROTOCOL_COMMANDS.INITIALIZE_COMPUTE, + chainId: 8996 + } + const resp = await new InitializeComputeHandler(oceanNode).handle( + initializeComputeTask + ) + + assert(resp, 'Failed to get response') + assert(resp.status.httpStatus === 200, 'Failed to get 200 response') + assert(resp.stream, 'Failed to get stream') + expect(resp.stream).to.be.instanceOf(Readable) + + const result: any = await streamToObject(resp.stream as Readable) + + assert(result.algorithm, 'algorithm does not exist') + assert( + result.algorithm.datatoken === datatokenAddressAlgo, + 'incorrect datatoken address for algo' + ) + assert( + result.algorithm.providerFee.providerFeeAddress, + 'algorithm providerFeeAddress does not exist' + ) + assert( + result.algorithm.providerFee.providerFeeToken, + 'algorithm providerFeeToken does not exist' + ) + assert( + result.algorithm.providerFee.providerFeeAmount, + 'algorithm providerFeeAmount does not exist' + ) + assert( + result.algorithm.providerFee.providerData, + 'algorithm providerFeeData does not exist' + ) + + assert(result.algorithm.providerFee.validUntil, 'algorithm validUntil does not exist') + + assert(result.algorithm.validOrder === false, 'incorrect validOrder') // expect false because tx id was not provided and no start order was called before + + assert(result.datasets.length > 0, 'datasets key does not exist') + const resultParsed = 
JSON.parse(JSON.stringify(result.datasets[0])) + assert( + resultParsed.datatoken === datatokenAddress, + 'incorrect datatoken address for dataset' + ) + assert( + resultParsed.providerFee.providerFeeAddress, + 'dataset providerFeeAddress does not exist' + ) + assert( + resultParsed.providerFee.providerFeeToken, + 'dataset providerFeeToken does not exist' + ) + assert( + resultParsed.providerFee.providerFeeAmount, + 'dataset providerFeeAmount does not exist' + ) + assert( + resultParsed.providerFee.providerData, + 'dataset providerFeeData does not exist' + ) + + assert(resultParsed.providerFee.validUntil, 'algorithm validUntil does not exist') + assert(result.datasets[0].validOrder === false, 'incorrect validOrder') // expect false because tx id was not provided and no start order was called before }) after(async () => { diff --git a/src/utils/blockchain.ts b/src/utils/blockchain.ts index 6311ecba5..29a77b6cd 100644 --- a/src/utils/blockchain.ts +++ b/src/utils/blockchain.ts @@ -1,4 +1,5 @@ -import { ethers, Signer, JsonRpcApiProvider } from 'ethers' +import { ethers, Signer, JsonRpcApiProvider, JsonRpcProvider } from 'ethers' +import { getConfiguration } from './config.js' export class Blockchain { private signer: Signer @@ -47,3 +48,8 @@ export async function verifyMessage( return false } } + +export async function getJsonRpcProvider(chainId: number): Promise { + const networkUrl = (await getConfiguration()).supportedNetworks[chainId.toString()].rpc + return new JsonRpcProvider(networkUrl) +} diff --git a/src/utils/constants.ts b/src/utils/constants.ts index fb00aea0a..02de31d8d 100644 --- a/src/utils/constants.ts +++ b/src/utils/constants.ts @@ -17,7 +17,8 @@ export const PROTOCOL_COMMANDS = { GET_FEES: 'getFees', FILE_INFO: 'fileInfo', VALIDATE_DDO: 'validateDDO', - GET_COMPUTE_ENVIRONMENTS: 'getComputeEnvironments' + GET_COMPUTE_ENVIRONMENTS: 'getComputeEnvironments', + INITIALIZE_COMPUTE: 'initializeCompute' } // more visible, keep then close to make sure we always update both export const SUPPORTED_PROTOCOL_COMMANDS: string[] = [ @@ -35,7 +36,8 @@ export const SUPPORTED_PROTOCOL_COMMANDS: string[] = [ PROTOCOL_COMMANDS.GET_FEES, PROTOCOL_COMMANDS.FILE_INFO, PROTOCOL_COMMANDS.VALIDATE_DDO, - PROTOCOL_COMMANDS.GET_COMPUTE_ENVIRONMENTS + PROTOCOL_COMMANDS.GET_COMPUTE_ENVIRONMENTS, + PROTOCOL_COMMANDS.INITIALIZE_COMPUTE ] export const MetadataStates = {
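// For reference, the object returned by validateProviderFeesForDatasets (and therefore by the
// initializeCompute command and HTTP route) has roughly the shape sketched below. This is an
// illustrative summary based on the code and tests in this diff, not a type exported by the codebase.
interface InitializeComputeResultEntry {
  datatoken: string
  providerFee: Record<string, any> // decoded ProviderFees event args, or a freshly calculated fee
  validOrder: boolean // false when no transferTxId was supplied or the order/provider fees expired
}
interface InitializeComputeResult {
  algorithm: InitializeComputeResultEntry
  datasets: InitializeComputeResultEntry[]
}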