-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #5 from shamspias/feat/sahi
Feat/sahi: implement annotation writing and separate save logic
- Loading branch information
Showing
7 changed files
with
175 additions
and
130 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,97 +1,94 @@ | ||
import os | ||
import cv2 | ||
import zipfile | ||
from typing import List | ||
from formats.base_format import BaseFormat | ||
|
||
|
||
class CVATFormat(BaseFormat):
    """
    Handles the CVAT format for image annotations.

    Manages the CVAT directory layout (``data/obj_train_data``), the writing of
    per-frame annotation text files, the generation of metadata files
    (``obj.names``, ``obj.data``, ``train.txt``), and packaging the result into
    a zip archive.
    """

    def __init__(self, output_dir: str, sahi_enabled: bool = False):
        """
        Args:
            output_dir: Base directory for all output.
            sahi_enabled: Whether SAHI sliced inference is enabled; forwarded to
                the base class (semantics defined there — see BaseFormat).
        """
        super().__init__(output_dir, sahi_enabled)
        self.data_dir = os.path.join(output_dir, 'data')
        self.image_dir = os.path.join(self.data_dir, 'obj_train_data')
        os.makedirs(self.image_dir, exist_ok=True)

    def save_annotations(self, frame, frame_path: str, frame_filename: str, results, supported_classes: List[str]):
        """
        Saves annotations and frames in a format compatible with CVAT.

        Writes the frame as a PNG into ``obj_train_data``, writes its matching
        annotation text file, and refreshes the metadata files.
        """
        img_dimensions = frame.shape[:2]
        annotations = self.process_results(frame, results, img_dimensions)
        # CVAT's obj_train_data expects .png images. Swap the extension with
        # splitext rather than str.replace so any source extension works and a
        # ".jpg" substring elsewhere in the name is never touched.
        frame_filename_png = os.path.splitext(frame_filename)[0] + '.png'
        image_path = os.path.join(self.image_dir, frame_filename_png)
        cv2.imwrite(image_path, frame)
        self.write_annotations(frame_filename_png, annotations)
        self.create_metadata_files(supported_classes)

    def write_annotations(self, frame_filename: str, annotations: List[str]):
        """
        Writes annotations to a text file associated with each frame image.

        One .txt per image, sharing the image's stem:
        ``frame_0001.png`` -> ``frame_0001.txt``.
        """
        annotation_filename = os.path.splitext(frame_filename)[0] + '.txt'
        annotation_path = os.path.join(self.image_dir, annotation_filename)
        try:
            with open(annotation_path, 'w') as file:
                for annotation in annotations:
                    file.write(annotation + "\n")
        except IOError as e:
            print(f"Error writing annotation file {annotation_path}: {str(e)}")

    def create_metadata_files(self, supported_classes: List[str]):
        """
        Creates necessary metadata files for a CVAT training setup, including
        class names and training configuration.
        """
        obj_names_path = os.path.join(self.data_dir, 'obj.names')
        obj_data_path = os.path.join(self.data_dir, 'obj.data')
        train_txt_path = os.path.join(self.data_dir, 'train.txt')
        try:
            # obj.names: one class label per line; line index == class id.
            with open(obj_names_path, 'w') as f:
                for cls in supported_classes:
                    f.write(f"{cls}\n")

            # obj.data: darknet-style training configuration.
            with open(obj_data_path, 'w') as f:
                f.write("classes = {}\n".format(len(supported_classes)))
                f.write("train = data/train.txt\n")
                f.write("names = data/obj.names\n")
                f.write("backup = backup/\n")

            # train.txt: relative paths of all saved training images.
            # Sorted so the file content is deterministic across runs
            # (os.listdir order is platform-dependent).
            with open(train_txt_path, 'w') as f:
                for image_file in sorted(os.listdir(self.image_dir)):
                    if image_file.endswith('.png'):
                        f.write(f"data/obj_train_data/{image_file}\n")
        except IOError as e:
            print(f"Error writing metadata files: {str(e)}")

    def zip_and_cleanup(self):
        """
        Zips the processed data for transfer or storage and cleans up the
        directory structure afterwards.
        """
        zip_path = os.path.join(self.output_dir, 'cvat_data.zip')
        try:
            with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
                for root, dirs, files in os.walk(self.data_dir):
                    for file_name in files:
                        file_path = os.path.join(root, file_name)
                        zipf.write(file_path, os.path.relpath(file_path, self.data_dir))
                    for dir_name in dirs:
                        # Record directory entries too, so empty directories
                        # survive the round trip through the archive.
                        dir_path = os.path.join(root, dir_name)
                        zipf.write(dir_path, os.path.relpath(dir_path, self.data_dir))

            # Clean up bottom-up: remove files first, then the now-empty
            # directories, and finally the base data directory itself.
            for root, dirs, files in os.walk(self.data_dir, topdown=False):
                for file_name in files:
                    os.remove(os.path.join(root, file_name))
                for dir_name in dirs:
                    os.rmdir(os.path.join(root, dir_name))
            os.rmdir(self.data_dir)
        except Exception as e:
            # Broad, deliberate best-effort: a failed zip/cleanup should not
            # crash the caller, only report.
            print(f"Error during zip or cleanup: {str(e)}")
Oops, something went wrong.