Support building vector extraction (detection task) via PolyBuilding, and multispectral pretrained models (classification task)
v4if committed Jul 27, 2023
1 parent 7f0648a commit aad9332
Showing 260 changed files with 10,332 additions and 25,060 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -133,3 +133,4 @@ docker
 note.md
 update_docker.sh
 .DS_Store
+tests
11 changes: 10 additions & 1 deletion aiearth/deeplearning/datasets/__init__.py
@@ -3,4 +3,13 @@
 from .datasets import ChangeDetNonGeoCustomDataset
 from .datasets import LandcoverNonGeoCustomDataset
 from .datasets import TargetExtractionNonGeoCustomDataset
-from .mmseg import ChangeDetDataset, RemoteSensingBinary, LandcoverLoader
+
+from .mmseg import (
+    ChangeDetDataset,
+    RemoteSensingBinary,
+    LandcoverLoader,
+    SemiDataset,
+    SemiLargeScaleDataset,
+)
+from .mmdet import AICrowdDataset
+from .multispcetral import Bigearthnet, LMDBDataset, random_subset
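For orientation, a minimal usage sketch (not part of this commit) of the expanded top-level exports; every name below comes straight from the diff above, including the multispcetral module spelling as committed:

# Minimal sketch: consuming the newly exported dataset classes.
from aiearth.deeplearning.datasets import (
    AICrowdDataset,  # COCO-style building dataset backing the PolyBuilding task
    Bigearthnet,     # multispectral classification dataset
    SemiDataset,     # new mmseg dataset export
)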
3 changes: 3 additions & 0 deletions aiearth/deeplearning/datasets/mmdet/__init__.py
@@ -0,0 +1,3 @@
from .polybuilding import AICrowdDataset

__all__ = ["AICrowdDataset"]
4 changes: 4 additions & 0 deletions aiearth/deeplearning/datasets/mmdet/polybuilding/__init__.py
@@ -0,0 +1,4 @@
from .aicrowd import AICrowdDataset
from .pipelines.loading import LoadBuildingAnnotations

__all__ = ['AICrowdDataset']
68 changes: 68 additions & 0 deletions aiearth/deeplearning/datasets/mmdet/polybuilding/aicrowd.py
@@ -0,0 +1,68 @@
from mmdet.datasets.builder import DATASETS
from mmdet.datasets.coco import CocoDataset


@DATASETS.register_module()
class AICrowdDataset(CocoDataset):
    CLASSES = ("building",)
    PALETTE = [(119, 11, 32)]

    def _segm2json(self, results):
        """Convert instance segmentation results to COCO json style."""
        bbox_json_results = []
        segm_json_results = []
        # poly_json_results = []
        for idx in range(len(self)):
            img_id = self.img_ids[idx]
            # det, seg, poly = results[idx]
            det, seg = results[idx]
            for label in range(len(det)):
                # bbox results
                bboxes = det[label]
                for i in range(bboxes.shape[0]):
                    data = dict()
                    data["image_id"] = img_id
                    data["bbox"] = self.xyxy2xywh(bboxes[i])
                    data["score"] = float(bboxes[i][4])
                    data["category_id"] = self.cat_ids[label]
                    bbox_json_results.append(data)

                # segm results
                # some detectors use different scores for bbox and mask
                if isinstance(seg, tuple):
                    segms = seg[0][label]
                    mask_score = seg[1][label]
                else:
                    segms = seg[label]
                    mask_score = [bbox[4] for bbox in bboxes]

                for i in range(bboxes.shape[0]):
                    if segms[i] is None or segms[i] == []:
                        continue
                    data = dict()
                    data["image_id"] = img_id
                    data["bbox"] = self.xyxy2xywh(bboxes[i])
                    data["score"] = float(mask_score[i])
                    data["category_id"] = self.cat_ids[label]
                    # RLE "counts" may be bytes; decode so the dict is JSON-serializable
                    if isinstance(segms[i]["counts"], bytes):
                        segms[i]["counts"] = segms[i]["counts"].decode()
                    data["segmentation"] = segms[i]
                    segm_json_results.append(data)

                # poly results
                # some detectors use different scores for bbox and mask
                # polys = poly['poly'][label]
                # mask_score = [bbox[4] for bbox in bboxes]
                # for i in range(bboxes.shape[0]):
                #     if polys[i] == []:
                #         continue
                #     data = dict()
                #     data['image_id'] = img_id
                #     data['bbox'] = self.xyxy2xywh(bboxes[i])
                #     data['score'] = float(mask_score[i])
                #     data['category_id'] = self.cat_ids[label]
                #     if isinstance(polys[i], torch.Tensor):
                #         polys[i] = [polys[i].view(-1).cpu().numpy().tolist()]
                #     data['segmentation'] = polys[i]
                #     poly_json_results.append(data)
        return bbox_json_results, segm_json_results  # , poly_json_results
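Because the class is decorated with @DATASETS.register_module(), it can be referenced by name from an mmdet config. A minimal sketch, assuming the standard mmdet 2.x CocoDataset constructor arguments; the paths below are invented placeholders, not from this commit:

# Hypothetical config fragment; ann_file / img_prefix follow CocoDataset's
# usual signature, and the paths are placeholders.
train_dataset = dict(
    type="AICrowdDataset",  # resolved through the DATASETS registry
    ann_file="data/aicrowd/train/annotation.json",
    img_prefix="data/aicrowd/train/images/",
    pipeline=[],  # fill with loading / augmentation transforms
)

Note that _segm2json returns only the bbox and segm result lists; the polygon branch is kept commented out, so polygon predictions are not serialized yet.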
4 changes: 4 additions & 0 deletions aiearth/deeplearning/datasets/mmdet/polybuilding/pipelines/__init__.py
@@ -0,0 +1,4 @@
from .loading import LoadBuildingAnnotations
from .structures import PolygonMasks

__all__ = ['LoadBuildingAnnotations', 'PolygonMasks']
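A hypothetical sketch of where LoadBuildingAnnotations could sit in an mmdet data pipeline. Its keyword arguments are not shown in this diff, so the ones below are assumptions modeled on mmdet's stock LoadAnnotations:

# Hypothetical pipeline fragment; with_bbox / with_mask are assumed kwargs
# mirroring mmdet's LoadAnnotations, not confirmed by this commit.
train_pipeline = [
    dict(type="LoadImageFromFile"),
    dict(type="LoadBuildingAnnotations", with_bbox=True, with_mask=True),
]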