diff --git a/requirements.txt b/requirements.txt index ce78474f..73d7a945 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,3 +20,4 @@ eth-account==0.9.0 trezor==0.13.8 ledgerblue==0.1.48 snet.contracts==0.1.1 +lighthouseweb3==0.1.4 diff --git a/snet/cli/arguments.py b/snet/cli/arguments.py index e9ce4b93..a38e6af5 100644 --- a/snet/cli/arguments.py +++ b/snet/cli/arguments.py @@ -2,6 +2,8 @@ import os import re import sys +from email.policy import default +from random import choices from snet.contracts import get_all_abi_contract_files, get_contract_def @@ -258,6 +260,15 @@ def add_metadatafile_argument_for_org(p): help="Service metadata json file (default organization_metadata.json)") +def add_p_storage_param(_p): + _p.add_argument( + "--storage", + default="ipfs", + choices=['ipfs', 'filecoin'], + help="Choose storage for uploading metadata/protobuf file (defaults to 'ipfs')", + ) + + def add_organization_options(parser): parser.set_defaults(cmd=OrganizationCommand) @@ -390,8 +401,9 @@ def add_organization_options(parser): add_metadatafile_argument_for_org(p) p.add_argument("--members", help="List of members to be added to the organization", metavar="ORG_MEMBERS[]") add_organization_arguments(p) + add_p_storage_param(p) - p = subparsers.add_parser("update-metadata", help="Create an Organization") + p = subparsers.add_parser("update-metadata", help="Update metadata for an Organization") p.add_argument("org_id", help="Unique Organization Id", metavar="ORG_ID") p.set_defaults(fn="update_metadata") add_metadatafile_argument_for_org(p) @@ -400,6 +412,7 @@ def add_organization_options(parser): help="List of members to be added to the organization", metavar="ORG_MEMBERS[]") add_organization_arguments(p) + add_p_storage_param(p) p = subparsers.add_parser("change-owner", help="Change Organization's owner") p.set_defaults(fn="change_owner") @@ -936,7 +949,7 @@ def add_mpe_service_options(parser): add_p_metadata_file_opt(p) p = 
subparsers.add_parser("metadata-init", - help="Init metadata file with providing protobuf directory (which we publish in IPFS) and display_name (optionally encoding, service_type and payment_expiration_threshold)") + help="Init metadata file with providing protobuf directory (which we publish in IPFS or FileCoin) and display_name (optionally encoding, service_type and payment_expiration_threshold)") p.set_defaults(fn="publish_proto_metadata_init") p.add_argument("protodir", help="Directory which contains protobuf files", metavar="PROTO_DIR") @@ -965,14 +978,16 @@ def add_mpe_service_options(parser): default="grpc", choices=['grpc', 'http', 'jsonrpc', 'process'], help="Service type") + add_p_storage_param(p) - p = subparsers.add_parser("metadata-set-model", - help="Publish protobuf model in ipfs and update existed metadata file") + p = subparsers.add_parser("metadata-set-api", + help="Publish protobuf model in ipfs or filecoin and update existed metadata file") p.set_defaults(fn="publish_proto_metadata_update") p.add_argument("protodir", help="Directory which contains protobuf files", metavar="PROTO_DIR") add_p_metadata_file_opt(p) + add_p_storage_param(p) p = subparsers.add_parser("metadata-set-fixed-price", help="Set pricing model as fixed price for all methods") @@ -1199,6 +1214,7 @@ def add_p_publish_params(_p): _p.add_argument("--update-mpe-address", action='store_true', help="Update mpe_address in metadata before publishing them") + add_p_storage_param(_p) add_p_mpe_address_opt(_p) p = subparsers.add_parser("publish", @@ -1214,9 +1230,15 @@ add_p_publish_params(p) add_transaction_arguments(p) + p = subparsers.add_parser("publish-in-filecoin", + help="Publish metadata only in FileCoin, without publishing in Registry") + p.set_defaults(fn="publish_metadata_in_filecoin") + add_p_publish_params(p) + add_transaction_arguments(p) + p = subparsers.add_parser("update-metadata", - help="Publish metadata in IPFS and update existed service") - 
p.set_defaults(fn="publish_metadata_in_ipfs_and_update_registration") + help="Publish metadata in IPFS or FileCoin and update existed service") + p.set_defaults(fn="publish_metadata_in_storage_and_update_registration") add_p_publish_params(p) add_p_service_in_registry(p) add_transaction_arguments(p) @@ -1286,7 +1308,7 @@ def add_p_protodir_to_extract(_p): metavar="PROTO_DIR") p = subparsers.add_parser("get-api-metadata", - help="Extract service api (model) to the given protodir. Get model_ipfs_hash from metadata") + help="Extract service api (model) to the given protodir. Get existed metadata") p.set_defaults(fn="extract_service_api_from_metadata") add_p_protodir_to_extract(p) add_p_metadata_file_opt(p) diff --git a/snet/cli/commands/commands.py b/snet/cli/commands/commands.py index 057286be..c94baa25 100644 --- a/snet/cli/commands/commands.py +++ b/snet/cli/commands/commands.py @@ -7,6 +7,7 @@ from urllib.parse import urljoin import ipfshttpclient +from lighthouseweb3 import Lighthouse import yaml from rfc3986 import urlparse import web3 @@ -18,10 +19,10 @@ from snet.cli.metadata.organization import OrganizationMetadata, PaymentStorageClient, Payment, Group from snet.cli.utils.config import get_contract_address, get_field_from_args_or_session, \ read_default_contract_address -from snet.cli.utils.ipfs_utils import bytesuri_to_hash, get_from_ipfs_and_checkhash, \ - hash_to_bytesuri, publish_file_in_ipfs +from snet.cli.utils.ipfs_utils import get_from_ipfs_and_checkhash, \ + hash_to_bytesuri, publish_file_in_ipfs, publish_file_in_filecoin from snet.cli.utils.utils import DefaultAttributeObject, get_web3, is_valid_url, serializable, type_converter, \ - get_cli_version, bytes32_to_str + get_cli_version, bytes32_to_str, bytesuri_to_hash, get_file_from_filecoin class Command(object): @@ -79,6 +80,10 @@ def _get_ipfs_client(self): ipfs_endpoint = self.config.get_ipfs_endpoint() return ipfshttpclient.connect(ipfs_endpoint) + def _get_filecoin_client(self): + 
lighthouse_token = self.config.get_filecoin_key() + return Lighthouse(token=lighthouse_token) + class VersionCommand(Command): def show(self): @@ -489,9 +494,11 @@ def _get_organization_registration(self, org_id): def _get_organization_metadata_from_registry(self, org_id): rez = self._get_organization_registration(org_id) - metadata_hash = bytesuri_to_hash(rez["orgMetadataURI"]) - metadata = get_from_ipfs_and_checkhash( - self._get_ipfs_client(), metadata_hash) + storage_type, metadata_hash = bytesuri_to_hash(rez["orgMetadataURI"]) + if storage_type == "ipfs": + metadata = get_from_ipfs_and_checkhash(self._get_ipfs_client(), metadata_hash) + else: + metadata = get_file_from_filecoin(metadata_hash) metadata = metadata.decode("utf-8") return OrganizationMetadata.from_json(json.loads(metadata)) @@ -589,10 +596,17 @@ def create(self): members = self.get_members_from_args() - ipfs_metadata_uri = publish_file_in_ipfs( - self._get_ipfs_client(), metadata_file, False) - params = [type_converter("bytes32")( - org_id), hash_to_bytesuri(ipfs_metadata_uri), members] + storage = self.args.storage + if not storage or storage == "ipfs": + metadata_uri = publish_file_in_ipfs(self._get_ipfs_client(), metadata_file, False) + elif storage == "filecoin": + # upload to Filecoin via Lighthouse SDK + metadata_uri = publish_file_in_filecoin(self._get_filecoin_client(), metadata_file) + else: + raise ValueError(f"Unsupported storage option: {storage}. 
Use --storage ") + + params = [type_converter("bytes32")(org_id), + hash_to_bytesuri(metadata_uri, storage), members] self._printout("Creating transaction to create organization name={} id={}\n".format( org_metadata.org_name, org_id)) self.transact_contract_command( @@ -622,31 +636,34 @@ def update_metadata(self): with open(metadata_file, 'r') as f: org_metadata = OrganizationMetadata.from_json(json.load(f)) except Exception as e: - print( - "Organization metadata json file not found ,Please check --metadata-file path ") + print("Organization metadata JSON file not found. Please check --metadata-file path.") raise e - # validate the metadata before updating + # Validate the metadata before updating org_id = self.args.org_id - existing_registry_org_metadata = self._get_organization_metadata_from_registry( - org_id) + existing_registry_org_metadata = self._get_organization_metadata_from_registry(org_id) org_metadata.validate(existing_registry_org_metadata) # Check if Organization already exists found = self._get_organization_by_id(org_id)[0] if not found: - raise Exception( - "\nOrganization with id={} does not exists!\n".format(org_id)) + raise Exception("\nOrganization with id={} does not exist!\n".format(org_id)) + + storage = self.args.storage + if not storage or storage == "ipfs": + metadata_uri = publish_file_in_ipfs(self._get_ipfs_client(), metadata_file, False) + elif storage == "filecoin": + # upload to Filecoin via Lighthouse SDK + metadata_uri = publish_file_in_filecoin(self._get_filecoin_client(), metadata_file) + else: + raise ValueError(f"Unsupported storage option: {storage}. 
Use --storage ") - ipfs_metadata_uri = publish_file_in_ipfs( - self._get_ipfs_client(), metadata_file, False) - params = [type_converter("bytes32")( - org_id), hash_to_bytesuri(ipfs_metadata_uri)] + params = [type_converter("bytes32")(org_id), hash_to_bytesuri(metadata_uri, storage)] self._printout( - "Creating transaction to create organization name={} id={}\n".format(org_metadata.org_name, org_id)) - self.transact_contract_command( - "Registry", "changeOrganizationMetadataURI", params) - self._printout("id:\n%s" % org_id) + "Creating transaction to update organization metadata for org name={} id={}\n".format(org_metadata.org_name, + org_id)) + self.transact_contract_command("Registry", "changeOrganizationMetadataURI", params) + self._printout("Organization updated successfully with id:\n%s" % org_id) def list_services(self): org_id = self.args.org_id diff --git a/snet/cli/commands/mpe_channel.py b/snet/cli/commands/mpe_channel.py index 0c467c82..1c30a601 100644 --- a/snet/cli/commands/mpe_channel.py +++ b/snet/cli/commands/mpe_channel.py @@ -4,6 +4,7 @@ import shutil import tempfile from collections import defaultdict +from importlib.metadata import metadata from pathlib import Path from eth_abi.codec import ABICodec @@ -15,9 +16,9 @@ from snet.cli.metadata.service import mpe_service_metadata_from_json, load_mpe_service_metadata from snet.cli.metadata.organization import OrganizationMetadata from snet.cli.utils.agix2cogs import cogs2stragix -from snet.cli.utils.ipfs_utils import bytesuri_to_hash, get_from_ipfs_and_checkhash, safe_extract_proto_from_ipfs +from snet.cli.utils.ipfs_utils import get_from_ipfs_and_checkhash from snet.cli.utils.utils import abi_decode_struct_to_dict, abi_get_element_by_name, \ - compile_proto, type_converter + compile_proto, type_converter, bytesuri_to_hash, get_file_from_filecoin, download_and_safe_extract_proto # we inherit MPEServiceCommand because we need _get_service_metadata_from_registry @@ -577,12 +578,14 @@ def 
_get_service_registration(self): def _get_service_metadata_from_registry(self): response = self._get_service_registration() - metadata_hash = bytesuri_to_hash(response["metadataURI"]) - metadata = get_from_ipfs_and_checkhash( - self._get_ipfs_client(), metadata_hash) - metadata = metadata.decode("utf-8") - metadata = mpe_service_metadata_from_json(metadata) - return metadata + storage_type, metadata_hash = bytesuri_to_hash(response["metadataURI"]) + if storage_type == "ipfs": + service_metadata = get_from_ipfs_and_checkhash(self._get_ipfs_client(), metadata_hash) + else: + service_metadata = get_file_from_filecoin(metadata_hash) + service_metadata = service_metadata.decode("utf-8") + service_metadata = mpe_service_metadata_from_json(service_metadata) + return service_metadata def _init_or_update_service_if_needed(self, metadata, service_registration): # if service was already initialized and metadataURI hasn't changed we do nothing @@ -609,8 +612,8 @@ def _init_or_update_service_if_needed(self, metadata, service_registration): try: spec_dir = os.path.join(service_dir, "service_spec") os.makedirs(spec_dir, mode=0o700) - safe_extract_proto_from_ipfs( - self._get_ipfs_client(), metadata["model_ipfs_hash"], spec_dir) + service_api_source = metadata.get("service_api_source") or metadata.get("model_ipfs_hash") + download_and_safe_extract_proto(service_api_source, spec_dir, self._get_ipfs_client()) # compile .proto files if not compile_proto(Path(spec_dir), service_dir): diff --git a/snet/cli/commands/mpe_service.py b/snet/cli/commands/mpe_service.py index 50fdd8e3..9b30cfd1 100644 --- a/snet/cli/commands/mpe_service.py +++ b/snet/cli/commands/mpe_service.py @@ -3,6 +3,7 @@ from pathlib import Path from re import search from sys import exit +import tempfile from grpc_health.v1 import health_pb2 as heartb_pb2 from grpc_health.v1 import health_pb2_grpc as heartb_pb2_grpc @@ -12,7 +13,8 @@ from snet.cli.metadata.organization import OrganizationMetadata from 
snet.cli.metadata.service import MPEServiceMetadata, load_mpe_service_metadata, mpe_service_metadata_from_json from snet.cli.utils import ipfs_utils -from snet.cli.utils.utils import is_valid_url, open_grpc_channel, type_converter +from snet.cli.utils.utils import is_valid_url, open_grpc_channel, type_converter, bytesuri_to_hash, \ + get_file_from_filecoin, download_and_safe_extract_proto class MPEServiceCommand(BlockchainCommand): @@ -64,17 +66,33 @@ def service_metadata_init(self): while True: org_id = input(f"organization id `{display_name}` service would be linked to: ").strip() while org_id == "": - org_id = input(f"organization id required: ").strip() + org_id = input(f"organization id is required: ").strip() try: org_metadata = self._get_organization_metadata_from_registry(org_id) no_of_groups = len(org_metadata.groups) break except Exception: print(f"`{org_id}` is invalid.") + while True: + storage_type = input("storage type (ipfs or filecoin): ").strip() + if storage_type == "": + print("storage type is required.") + elif storage_type != "ipfs" and storage_type != "filecoin": + print("storage type must be 'ipfs' or 'filecoin'.") + else: + break while True: try: protodir_path = input("protodir path: ") - model_ipfs_hash_base58 = ipfs_utils.publish_proto_in_ipfs(self._get_ipfs_client(), protodir_path) + if storage_type == 'ipfs': + service_api_source = ipfs_utils.publish_proto_in_ipfs( + self._get_ipfs_client(), + protodir_path) + else: + service_api_source = ipfs_utils.publish_proto_in_filecoin( + self._get_filecoin_client(), + protodir_path) + service_api_source = ipfs_utils.hash_to_bytesuri(service_api_source, storage_type, False) break except Exception: print(f'Invalid path: "{protodir_path}"') @@ -89,13 +107,13 @@ def service_metadata_init(self): metadata.add_contributor(input('Enter contributor name '), input('Enter contributor email: ')) mpe_address = self.get_mpe_address() - metadata.set_simple_field('model_ipfs_hash', model_ipfs_hash_base58) + 
metadata.set_simple_field('service_api_source', service_api_source) metadata.set_simple_field('mpe_address', mpe_address) metadata.set_simple_field('display_name', display_name) print('', '', json.dumps(metadata.m, indent=2), sep='\n') print("Are you sure you want to create? [y/n] ", end='') if input() == 'y': - file_name = input(f"Choose file name: (service_metadata) ") or 'service_metadata' + file_name = input(f"Choose file name (service_metadata by default): ") or 'service_metadata' file_name += '.json' metadata.save_pretty(file_name) print(f"{file_name} created.") @@ -105,12 +123,21 @@ def service_metadata_init(self): exit("\n`snet service metadata-init-utility` CANCELLED.") def publish_proto_metadata_init(self): - model_ipfs_hash_base58 = ipfs_utils.publish_proto_in_ipfs( - self._get_ipfs_client(), self.args.protodir) + + storage = self.args.storage + if storage == 'ipfs': + service_api_source = ipfs_utils.publish_proto_in_ipfs( + self._get_ipfs_client(), + self.args.protodir) + else: + service_api_source = ipfs_utils.publish_proto_in_filecoin( + self._get_filecoin_client(), + self.args.protodir) + service_api_source = ipfs_utils.hash_to_bytesuri(service_api_source, storage, False) metadata = MPEServiceMetadata() mpe_address = self.get_mpe_address() - metadata.set_simple_field("model_ipfs_hash", model_ipfs_hash_base58) + metadata.set_simple_field("service_api_source", service_api_source) metadata.set_simple_field("mpe_address", mpe_address) metadata.set_simple_field("display_name", self.args.display_name) metadata.set_simple_field("encoding", self.args.encoding) @@ -133,11 +160,21 @@ def publish_proto_metadata_init(self): metadata.save_pretty(self.args.metadata_file) def publish_proto_metadata_update(self): - """ Publish protobuf model in ipfs and update existing metadata file """ + """ Publish protobuf model in storage and update existing metadata file """ metadata = load_mpe_service_metadata(self.args.metadata_file) - ipfs_hash_base58 = 
ipfs_utils.publish_proto_in_ipfs( - self._get_ipfs_client(), self.args.protodir) - metadata.set_simple_field("model_ipfs_hash", ipfs_hash_base58) + + storage = self.args.storage + if storage == 'ipfs': + service_api_source = ipfs_utils.publish_proto_in_ipfs( + self._get_ipfs_client(), + self.args.protodir) + else: + service_api_source = ipfs_utils.publish_proto_in_filecoin( + self._get_filecoin_client(), + self.args.protodir) + service_api_source = ipfs_utils.hash_to_bytesuri(service_api_source, storage, False) + + metadata.set_simple_field("service_api_source", service_api_source) metadata.save_pretty(self.args.metadata_file) def metadata_set_fixed_price(self): @@ -385,7 +422,7 @@ def metadata_validate(self): else: exit("OK. Ready to publish.") - def _publish_metadata_in_ipfs(self, metadata_file): + def _prepare_to_publish_metadata(self, metadata_file): metadata = load_mpe_service_metadata(metadata_file) mpe_address = self.get_mpe_address() if self.args.update_mpe_address: @@ -398,24 +435,40 @@ def _publish_metadata_in_ipfs(self, metadata_file): "You have two possibilities:\n" + "1. You can use --multipartyescrow-at to set current mpe address\n" + "2. 
You can use --update-mpe-address parameter to update mpe_address in metadata before publishing it\n") + return metadata + + def _publish_metadata_in_ipfs(self, metadata_file): + metadata = self._prepare_to_publish_metadata(metadata_file) return self._get_ipfs_client().add_bytes(metadata.get_json().encode("utf-8")) def publish_metadata_in_ipfs(self): """ Publish metadata in ipfs and print hash """ self._printout(self._publish_metadata_in_ipfs(self.args.metadata_file)) + def _publish_metadata_in_filecoin(self, metadata_file): + self._prepare_to_publish_metadata(metadata_file) + return self._get_filecoin_client().upload(metadata_file)['data']['Hash'] + + def publish_metadata_in_filecoin(self): + """ Publish metadata in filecoin and print hash """ + self._printout(self._publish_metadata_in_filecoin(self.args.metadata_file)) + #def _get_converted_tags(self): # return [type_converter("bytes32")(tag) for tag in self.args.tags] def _get_organization_metadata_from_registry(self, org_id): + # TODO: In fact, it's the same method as in commands.OrganizationCommand... rez = self._get_organization_registration(org_id) - metadata_hash = ipfs_utils.bytesuri_to_hash(rez["orgMetadataURI"]) - metadata = ipfs_utils.get_from_ipfs_and_checkhash( - self._get_ipfs_client(), metadata_hash) + storage_type, metadata_hash = bytesuri_to_hash(rez["orgMetadataURI"]) + if storage_type == "ipfs": + metadata = ipfs_utils.get_from_ipfs_and_checkhash(self._get_ipfs_client(), metadata_hash) + else: + metadata = get_file_from_filecoin(metadata_hash) metadata = metadata.decode("utf-8") return OrganizationMetadata.from_json(json.loads(metadata)) def _get_organization_registration(self, org_id): + # TODO: In fact, it's the same method as in commands.OrganizationCommand... 
params = [type_converter("bytes32")(org_id)] result = self.call_contract_command( "Registry", "getOrganizationById", params) @@ -440,22 +493,39 @@ def _validate_service_group_with_org_group_and_update_group_id(self, org_id, met "Group name %s does not exist in organization" % group["group_name"]) def publish_service_with_metadata(self): - self._validate_service_group_with_org_group_and_update_group_id( - self.args.org_id, self.args.metadata_file) - metadata_uri = ipfs_utils.hash_to_bytesuri( - self._publish_metadata_in_ipfs(self.args.metadata_file)) + self._validate_service_group_with_org_group_and_update_group_id(self.args.org_id, self.args.metadata_file) + + storage = self.args.storage + if not storage or storage == "ipfs": + metadata_uri = self._publish_metadata_in_ipfs(self.args.metadata_file) + elif storage == "filecoin": + # upload to Filecoin via Lighthouse SDK + metadata_uri = self._publish_metadata_in_filecoin(self.args.metadata_file) + else: + raise ValueError(f"Unsupported storage option: {storage}. 
Use --storage ") + + metadata_uri = ipfs_utils.hash_to_bytesuri(metadata_uri, storage) #tags = self._get_converted_tags() - params = [type_converter("bytes32")(self.args.org_id), type_converter( - "bytes32")(self.args.service_id), metadata_uri] + params = [type_converter("bytes32")(self.args.org_id), + type_converter("bytes32")(self.args.service_id), + metadata_uri] self.transact_contract_command( "Registry", "createServiceRegistration", params) - def publish_metadata_in_ipfs_and_update_registration(self): + def publish_metadata_in_storage_and_update_registration(self): # first we check that we do not change payment_address or group_id in existed payment groups - self._validate_service_group_with_org_group_and_update_group_id( - self.args.org_id, self.args.metadata_file) - metadata_uri = ipfs_utils.hash_to_bytesuri( - self._publish_metadata_in_ipfs(self.args.metadata_file)) + self._validate_service_group_with_org_group_and_update_group_id(self.args.org_id, self.args.metadata_file) + + storage = self.args.storage + if not storage or storage == "ipfs": + metadata_uri = self._publish_metadata_in_ipfs(self.args.metadata_file) + elif storage == "filecoin": + # upload to Filecoin via Lighthouse SDK + metadata_uri = self._publish_metadata_in_filecoin(self.args.metadata_file) + else: + raise ValueError(f"Unsupported storage option: {storage}. Use --storage ") + + metadata_uri = ipfs_utils.hash_to_bytesuri(metadata_uri, storage) params = [type_converter("bytes32")(self.args.org_id), type_converter( "bytes32")(self.args.service_id), metadata_uri] self.transact_contract_command( @@ -484,6 +554,7 @@ def update_registration_remove_tags(self): self._printout("This command has been deprecated. Please use `snet service metadata-remove-tags` instead") def _get_service_registration(self): + # TODO: In fact, it's the same method as in MPEChannelCommand... 
params = [type_converter("bytes32")(self.args.org_id), type_converter( "bytes32")(self.args.service_id)] rez = self.call_contract_command( @@ -494,13 +565,16 @@ def _get_service_registration(self): return {"metadataURI": rez[2]} def _get_service_metadata_from_registry(self): - rez = self._get_service_registration() - metadata_hash = ipfs_utils.bytesuri_to_hash(rez["metadataURI"]) - metadata = ipfs_utils.get_from_ipfs_and_checkhash( - self._get_ipfs_client(), metadata_hash) - metadata = metadata.decode("utf-8") - metadata = mpe_service_metadata_from_json(metadata) - return metadata + # TODO: In fact, it's the same method as in MPEChannelCommand... + response = self._get_service_registration() + storage_type, metadata_hash = bytesuri_to_hash(response["metadataURI"]) + if storage_type == "ipfs": + service_metadata = ipfs_utils.get_from_ipfs_and_checkhash(self._get_ipfs_client(), metadata_hash) + else: + service_metadata = get_file_from_filecoin(metadata_hash) + service_metadata = service_metadata.decode("utf-8") + service_metadata = mpe_service_metadata_from_json(service_metadata) + return service_metadata def print_service_metadata_from_registry(self): metadata = self._get_service_metadata_from_registry() @@ -544,13 +618,14 @@ def print_service_tags_from_registry(self): def extract_service_api_from_metadata(self): metadata = load_mpe_service_metadata(self.args.metadata_file) - ipfs_utils.safe_extract_proto_from_ipfs(self._get_ipfs_client( - ), metadata["model_ipfs_hash"], self.args.protodir) + service_api_source = metadata.get("service_api_source") or metadata.get("model_ipfs_hash") + download_and_safe_extract_proto(service_api_source, self.args.protodir, self._get_ipfs_client()) + def extract_service_api_from_registry(self): metadata = self._get_service_metadata_from_registry() - ipfs_utils.safe_extract_proto_from_ipfs(self._get_ipfs_client( - ), metadata["model_ipfs_hash"], self.args.protodir) + service_api_source = metadata.get("service_api_source") or 
metadata.get("model_ipfs_hash") + download_and_safe_extract_proto(service_api_source, self.args.protodir, self._get_ipfs_client()) def delete_service_registration(self): params = [type_converter("bytes32")(self.args.org_id), type_converter( diff --git a/snet/cli/commands/sdk_command.py b/snet/cli/commands/sdk_command.py index 4525801b..2a6c5cbd 100644 --- a/snet/cli/commands/sdk_command.py +++ b/snet/cli/commands/sdk_command.py @@ -1,8 +1,7 @@ import os from pathlib import Path, PurePath -from snet.cli.utils.ipfs_utils import safe_extract_proto_from_ipfs -from snet.cli.utils.utils import compile_proto +from snet.cli.utils.utils import compile_proto, download_and_safe_extract_proto from snet.cli.commands.mpe_service import MPEServiceCommand @@ -25,10 +24,10 @@ def generate_client_library(self): library_dir_path = client_libraries_base_dir_path.joinpath(library_org_id, library_service_id, library_language) metadata = self._get_service_metadata_from_registry() - model_ipfs_hash = metadata["model_ipfs_hash"] + service_api_source = metadata.get("service_api_source") or metadata.get("model_ipfs_hash") # Receive proto files - safe_extract_proto_from_ipfs(self._get_ipfs_client(), model_ipfs_hash, library_dir_path) + download_and_safe_extract_proto(service_api_source, library_dir_path, self._get_ipfs_client()) # Compile proto files compile_proto(Path(library_dir_path), library_dir_path, target_language=self.args.language) diff --git a/snet/cli/config.py b/snet/cli/config.py index 4636d748..95dd3b31 100644 --- a/snet/cli/config.py +++ b/snet/cli/config.py @@ -92,6 +92,9 @@ def set_session_field(self, key, value, out_f): if key == "default_ipfs_endpoint": self.set_ipfs_endpoint(value) print("set default_ipfs_endpoint=%s" % value, file=out_f) + elif key == "filecoin_api_key": + self.set_filecoin_key(value) + print("set filecoin_api_key=%s" % value, file=out_f) elif key in get_session_network_keys(): session_network = self.get_session_network_name() 
self.set_network_field(session_network, key, value) @@ -113,7 +116,7 @@ def unset_session_field(self, key, out_f): def session_to_dict(self): session_identity, session_network = self.safe_get_session_identity_network_names() - show = {"session", "network.%s" % session_network, "identity.%s" % session_identity, "ipfs"} + show = {"session", "network.%s" % session_network, "identity.%s" % session_identity, "ipfs", "filecoin"} response = {f: dict(self[f]) for f in show} return response @@ -164,6 +167,16 @@ def set_ipfs_endpoint(self, ipfs_endpoint): self["ipfs"]["default_ipfs_endpoint"] = ipfs_endpoint self._persist() + def get_filecoin_key(self): + if not self["filecoin"].get("filecoin_api_key"): + raise Exception("Use [snet set filecoin_api_key ] to set filecoin key") + return self["filecoin"]["filecoin_api_key"] + + def set_filecoin_key(self, filecoin_key: str): + self["filecoin"]["filecoin_api_key"] = filecoin_key + self._persist() + + def get_all_identities_names(self): return [x[len("identity."):] for x in self.sections() if x.startswith("identity.")] @@ -194,6 +207,7 @@ def create_default_config(self): "default_eth_rpc_endpoint": "https://sepolia.infura.io/v3/09027f4a13e841d48dbfefc67e7685d5", } self["ipfs"] = {"default_ipfs_endpoint": "/dns/ipfs.singularitynet.io/tcp/80/"} + self["filecoin"] = {"filecoin_api_key": ""} network = self.get_param_from_sdk_config("network") if network: if network not in self.get_all_networks_names(): @@ -280,4 +294,4 @@ def get_session_network_keys_removable(): def get_session_keys(): - return get_session_network_keys() + get_session_identity_keys() + ["default_ipfs_endpoint"] + return get_session_network_keys() + get_session_identity_keys() + ["default_ipfs_endpoint"] + ["filecoin_api_key"] diff --git a/snet/cli/metadata/service.py b/snet/cli/metadata/service.py index c8429ecc..895df1dc 100644 --- a/snet/cli/metadata/service.py +++ b/snet/cli/metadata/service.py @@ -12,7 +12,7 @@ than current_block + payment_expiration_threshold. 
This field should be used by the client with caution. Client should not accept arbitrary payment_expiration_threshold -model_ipfs_hash - IPFS HASH to the .tar archive of protobuf service specification +service_api_source - HASH with the storage type prefix to the .tar archive of protobuf service specification mpe_address - Address of MultiPartyEscrow contract. Client should use it exclusively for cross-checking of mpe_address, (because service can attack via mpe_address) @@ -63,13 +63,12 @@ def is_single_value(asset_type): class MPEServiceMetadata: def __init__(self): - """ init with modelIPFSHash """ self.m = {"version": 1, "display_name": "", "encoding": "grpc", # grpc by default "service_type": "grpc", # grpc by default # one week by default (15 sec block, 24*60*60*7/15) - "model_ipfs_hash": "", + "service_api_source": "", "mpe_address": "", "groups": [], "assets": {}, @@ -78,8 +77,8 @@ def __init__(self): } def set_simple_field(self, f, v): - if f != "display_name" and f != "encoding" and f != "model_ipfs_hash" and f != "mpe_address" and \ - f != "service_type" and f != "payment_expiration_threshold" and f != "service_description": + if f != "display_name" and f != "encoding" and f != "mpe_address" and f != "service_type" and \ + f != "payment_expiration_threshold" and f != "service_description" and f != "service_api_source": raise Exception("unknown field in MPEServiceMetadata") self.m[f] = v @@ -306,6 +305,9 @@ def save_pretty(self, file_name): def __getitem__(self, key): return self.m[key] + def get(self, key, default=None): + return self.m.get(key, default) + def __contains__(self, key): return key in self.m diff --git a/snet/cli/utils/ipfs_utils.py b/snet/cli/utils/ipfs_utils.py index 8f1e336a..f627075d 100644 --- a/snet/cli/utils/ipfs_utils.py +++ b/snet/cli/utils/ipfs_utils.py @@ -23,6 +23,17 @@ def publish_file_in_ipfs(ipfs_client, filepath, wrap_with_directory=True): print("File error ", err) +def publish_file_in_filecoin(filecoin_client, filepath): + 
""" + Push a file to Filecoin given its path. + """ + try: + response = filecoin_client.upload(filepath) + return response['data']['Hash'] + except Exception as err: + print("File upload error: ", err) + + def publish_proto_in_ipfs(ipfs_client, protodir): """ make tar from protodir/*proto, and publish this tar in ipfs @@ -50,73 +61,75 @@ def publish_proto_in_ipfs(ipfs_client, protodir): return ipfs_client.add_bytes(tarbytes.getvalue()) +def publish_proto_in_filecoin(filecoin_client, protodir): + """ + Create a tar archive from protodir/*.proto, and publish this tar archive to Lighthouse. + Return the hash (CID) of the uploaded archive. + """ + + if not os.path.isdir(protodir): + raise Exception("Directory %s doesn't exist" % protodir) + + files = glob.glob(os.path.join(protodir, "*.proto")) + + if len(files) == 0: + raise Exception("Cannot find any .proto files in %s" % protodir) + + files.sort() + + tarbytes = io.BytesIO() + + with tarfile.open(fileobj=tarbytes, mode="w") as tar: + for f in files: + tar.add(f, os.path.basename(f)) + tarbytes.seek(0) + + temp_tar_path = os.path.join(protodir, "proto_files.tar") + with open(temp_tar_path, 'wb') as temp_tar_file: + temp_tar_file.write(tarbytes.getvalue()) + response = filecoin_client.upload(source=temp_tar_path, tag="") + + os.remove(temp_tar_path) + + return response['data']['Hash'] + + def get_from_ipfs_and_checkhash(ipfs_client, ipfs_hash_base58, validate=True): """ - Get file from ipfs - We must check the hash becasue we cannot believe that ipfs_client wasn't been compromise + Get file from IPFS. If validate is True, verify the integrity of the file using its hash. """ - if validate: - from snet.cli.resources.proto.unixfs_pb2 import Data - from snet.cli.resources.proto.merckledag_pb2 import MerkleNode - # No nice Python library to parse ipfs blocks, so do it ourselves. 
+    data = ipfs_client.cat(ipfs_hash_base58) + + if validate: block_data = ipfs_client.block.get(ipfs_hash_base58) - mn = MerkleNode() - mn.ParseFromString(block_data) - unixfs_data = Data() - unixfs_data.ParseFromString(mn.Data) - assert unixfs_data.Type == unixfs_data.DataType.Value( - 'File'), "IPFS hash must be a file" - data = unixfs_data.Data - - # multihash has a badly registered base58 codec, overwrite it... - multihash.CodecReg.register( - 'base58', base58.b58encode, base58.b58decode) - # create a multihash object from our ipfs hash - mh = multihash.decode(ipfs_hash_base58.encode('ascii'), 'base58') - - # Convenience method lets us directly use a multihash to verify data - if not mh.verify(block_data): + + # print(f"IPFS hash (Base58): {ipfs_hash_base58}") + # print(f"Block data length: {len(block_data)}") + + # Decode Base58 hash to multihash + try: + mh = multihash.decode(ipfs_hash_base58.encode('ascii'), "base58") + except Exception as e: + raise ValueError(f"Invalid multihash for IPFS hash: {ipfs_hash_base58}. 
Error: {str(e)}") from e + + if not mh.verify(block_data): # Correctly using mh instance for verification raise Exception("IPFS hash mismatch with data") - else: - data = ipfs_client.cat(ipfs_hash_base58) + return data -def hash_to_bytesuri(s): +def hash_to_bytesuri(s, storage_type="ipfs", to_encode=True): """ Convert in and from bytes uri format used in Registry contract """ # TODO: we should pad string with zeros till closest 32 bytes word because of a bug in processReceipt (in snet_cli.contract.process_receipt) - s = "ipfs://" + s - return s.encode("ascii").ljust(32 * (len(s)//32 + 1), b"\0") - - -def bytesuri_to_hash(s): - s = s.rstrip(b"\0").decode('ascii') - if not s.startswith("ipfs://"): - raise Exception("We support only ipfs uri in Registry") - return s[7:] + if storage_type == "ipfs": + s = "ipfs://" + s + elif storage_type == "filecoin": + s = "filecoin://" + s - -def safe_extract_proto_from_ipfs(ipfs_client, ipfs_hash, protodir): - """ - Tar files might be dangerous (see https://bugs.python.org/issue21109, - and https://docs.python.org/3/library/tarfile.html, TarFile.extractall warning) - we extract only simple files - """ - spec_tar = get_from_ipfs_and_checkhash(ipfs_client, ipfs_hash) - with tarfile.open(fileobj=io.BytesIO(spec_tar)) as f: - for m in f.getmembers(): - if os.path.dirname(m.name) != "": - raise Exception( - "tarball has directories. We do not support it.") - if not m.isfile(): - raise Exception( - "tarball contains %s which is not a files" % m.name) - fullname = os.path.join(protodir, m.name) - if os.path.exists(fullname): - os.remove(fullname) - print("%s removed." 
% fullname) - # now it is safe to call extractall - f.extractall(protodir) + if to_encode: + return s.encode("ascii").ljust(32 * (len(s)//32 + 1), b"\0") + else: + return s # for 'service_api_source' metadata field diff --git a/snet/cli/utils/utils.py b/snet/cli/utils/utils.py index 568bcc7a..b69e60ba 100644 --- a/snet/cli/utils/utils.py +++ b/snet/cli/utils/utils.py @@ -8,13 +8,18 @@ from importlib.metadata import distribution from urllib.parse import urlparse from pathlib import Path, PurePath +from lighthouseweb3 import Lighthouse +import io +import tarfile import web3 import grpc from grpc_tools.protoc import main as protoc +from trezorlib.cli.firmware import download from snet import cli from snet.cli.resources.root_certificate import certificate +from snet.cli.utils.ipfs_utils import get_from_ipfs_and_checkhash RESOURCES_PATH = PurePath(os.path.dirname(cli.__file__)).joinpath("resources") @@ -308,3 +313,52 @@ def is_valid_url(url): r'(?::\d+)?' r'(?:/?|[/?]\S+)$', re.IGNORECASE) return re.match(regex, url) is not None + + +def bytesuri_to_hash(s, to_decode=True): + if to_decode: + s = s.rstrip(b"\0").decode('ascii') + if s.startswith("ipfs://"): + return "ipfs", s[7:] + elif s.startswith("filecoin://"): + return "filecoin", s[11:] + else: + raise Exception("We support only ipfs and filecoin uri in Registry") + + +def get_file_from_filecoin(cid): + lighthouse_client = Lighthouse(" ") + downloaded_file, _ = lighthouse_client.download(cid) + return downloaded_file + + +def download_and_safe_extract_proto(service_api_source, protodir, ipfs_client): + """ + Tar files might be dangerous (see https://bugs.python.org/issue21109, + and https://docs.python.org/3/library/tarfile.html, TarFile.extractall warning) + we extract only simple files + """ + try: + storage_type, service_api_source = bytesuri_to_hash(service_api_source, to_decode=False) + except Exception: + storage_type = "ipfs" + + if storage_type == "ipfs": + spec_tar = 
get_from_ipfs_and_checkhash(ipfs_client, service_api_source) + else: + spec_tar = get_file_from_filecoin(service_api_source) + + with tarfile.open(fileobj=io.BytesIO(spec_tar)) as f: + for m in f.getmembers(): + if os.path.dirname(m.name) != "": + raise Exception( + "tarball has directories. We do not support it.") + if not m.isfile(): + raise Exception( + "tarball contains %s which is not a files" % m.name) + fullname = os.path.join(protodir, m.name) + if os.path.exists(fullname): + os.remove(fullname) + print("%s removed." % fullname) + # now it is safe to call extractall + f.extractall(protodir) \ No newline at end of file diff --git a/version.py b/version.py index df4be5e0..8a124bf6 100644 --- a/version.py +++ b/version.py @@ -1 +1 @@ -__version__ = "2.1.4" +__version__ = "2.2.0"