# Source code for torch_geometric.datasets.modelnet

import glob
import os
import os.path as osp
from typing import Callable, List, Optional

import torch

from torch_geometric.data import (
    Data,
    InMemoryDataset,
    download_url,
    extract_zip,
)
from torch_geometric.io import fs, read_off

class ModelNet(InMemoryDataset):
    r"""The ModelNet10/40 datasets from the `"3D ShapeNets: A Deep
    Representation for Volumetric Shapes"
    <https://people.csail.mit.edu/khosla/papers/cvpr2015_wu.pdf>`_ paper,
    containing CAD models of 10 and 40 categories, respectively.

    .. note::

        Data objects hold mesh faces instead of edge indices.
        To convert the mesh to a graph, use the
        :obj:`torch_geometric.transforms.FaceToEdge` as :obj:`pre_transform`.
        To convert the mesh to a point cloud, use the
        :obj:`torch_geometric.transforms.SamplePoints` as :obj:`transform` to
        sample a fixed number of points on the mesh faces according to their
        face area.

    Args:
        root (str): Root directory where the dataset should be saved.
        name (str, optional): The name of the dataset (:obj:`"10"` for
            ModelNet10, :obj:`"40"` for ModelNet40). (default: :obj:`"10"`)
        train (bool, optional): If :obj:`True`, loads the training dataset,
            otherwise the test dataset. (default: :obj:`True`)
        transform (callable, optional): A function/transform that takes in a
            :obj:`torch_geometric.data.Data` object and returns a transformed
            version. The data object will be transformed before every access.
            (default: :obj:`None`)
        pre_transform (callable, optional): A function/transform that takes in
            a :obj:`torch_geometric.data.Data` object and returns a
            transformed version. The data object will be transformed before
            being saved to disk. (default: :obj:`None`)
        pre_filter (callable, optional): A function that takes in a
            :obj:`torch_geometric.data.Data` object and returns a boolean
            value, indicating whether the data object should be included in
            the final dataset. (default: :obj:`None`)
        force_reload (bool, optional): Whether to re-process the dataset.
            (default: :obj:`False`)

    **STATS:**

    .. list-table::
        :widths: 20 10 10 10 10 10
        :header-rows: 1

        * - Name
          - #graphs
          - #nodes
          - #edges
          - #features
          - #classes
        * - ModelNet10
          - 4,899
          - ~9,508.2
          - ~37,450.5
          - 3
          - 10
        * - ModelNet40
          - 12,311
          - ~17,744.4
          - ~66,060.9
          - 3
          - 40
    """

    # Download locations for the two dataset variants, keyed by `name`.
    urls = {
        '10': ('http://vision.princeton.edu/projects/2014/3DShapeNets/'
               'ModelNet10.zip'),
        '40': 'http://modelnet.cs.princeton.edu/ModelNet40.zip',
    }

    def __init__(
        self,
        root: str,
        name: str = '10',
        train: bool = True,
        transform: Optional[Callable] = None,
        pre_transform: Optional[Callable] = None,
        pre_filter: Optional[Callable] = None,
        force_reload: bool = False,
    ) -> None:
        assert name in ['10', '40']
        # Must be set before super().__init__, which may trigger
        # download()/process() and reads `self.name` there.
        self.name = name
        super().__init__(root, transform, pre_transform, pre_filter,
                         force_reload=force_reload)
        # processed_paths[0] holds the training split, [1] the test split.
        path = self.processed_paths[0] if train else self.processed_paths[1]
        self.load(path)

    @property
    def raw_file_names(self) -> List[str]:
        # The ten ModelNet10 category folders; their presence marks the raw
        # data as already downloaded (ModelNet40 is a superset of these).
        return [
            'bathtub', 'bed', 'chair', 'desk', 'dresser', 'monitor',
            'night_stand', 'sofa', 'table', 'toilet'
        ]

    @property
    def processed_file_names(self) -> List[str]:
        return ['training.pt', 'test.pt']

    def download(self) -> None:
        # Download and unpack the archive, then promote the extracted
        # `ModelNet{10,40}` folder to be the raw directory.
        path = download_url(self.urls[self.name], self.root)
        extract_zip(path, self.root)
        os.unlink(path)
        folder = osp.join(self.root, f'ModelNet{self.name}')
        fs.rm(self.raw_dir)
        os.rename(folder, self.raw_dir)

        # Delete osx metadata generated during compression of ModelNet10:
        metadata_folder = osp.join(self.root, '__MACOSX')
        if osp.exists(metadata_folder):
            fs.rm(metadata_folder)

    def process(self) -> None:
        self.save(self.process_set('train'), self.processed_paths[0])
        self.save(self.process_set('test'), self.processed_paths[1])

    def process_set(self, dataset: str) -> List[Data]:
        """Read all meshes of the given split (:obj:`"train"` or
        :obj:`"test"`) into a list of :class:`Data` objects with integer
        class labels assigned in sorted-category order."""
        # Trailing '' makes the glob match directories only:
        categories = glob.glob(osp.join(self.raw_dir, '*', ''))
        categories = sorted([x.split(os.sep)[-2] for x in categories])

        data_list = []
        for target, category in enumerate(categories):
            folder = osp.join(self.raw_dir, category, dataset)
            paths = glob.glob(f'{folder}/{category}_*.off')
            for path in paths:
                data = read_off(path)
                data.y = torch.tensor([target])
                data_list.append(data)

        if self.pre_filter is not None:
            data_list = [d for d in data_list if self.pre_filter(d)]

        if self.pre_transform is not None:
            data_list = [self.pre_transform(d) for d in data_list]

        return data_list

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}{self.name}({len(self)})'