Skip to content

Commit

Permalink
Add export format and fix issues
Browse files Browse the repository at this point in the history
  • Loading branch information
tannyle289 committed Aug 27, 2024
1 parent f0e1f92 commit 54df435
Show file tree
Hide file tree
Showing 18 changed files with 427 additions and 228 deletions.
51 changes: 51 additions & 0 deletions exports/base_export.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
from exports.ibase_export import IBaseExport
from utils.VariableClass import VariableClass
from os.path import (
join as pjoin,
dirname as pdirname,
abspath as pabspath,
)
import os
import time


class BaseExport(IBaseExport):
    """Persist frames and their label files under a project's dataset directory."""

    def __init__(self, proj_dir_name):
        """
        Resolve the absolute project directory under ../data/<proj_dir_name>.

        Args:
            proj_dir_name: Name of the project folder inside the data directory.
        """
        self._var = VariableClass()
        current_dir = pdirname(pabspath(__file__))
        # normalise the link so downstream paths are absolute
        self.proj_dir = pabspath(pjoin(current_dir, f'../data/{proj_dir_name}'))
        self.result_dir_path = None

    def initialize_save_dir(self):
        """
        Create the <DATASET_FORMAT>-v<DATASET_VERSION> result directory.

        Returns:
            bool: True when the directory exists afterwards, False otherwise.
        """
        version_dir = f'{self._var.DATASET_FORMAT}-v{self._var.DATASET_VERSION}'
        self.result_dir_path = pjoin(self.proj_dir, version_dir)
        os.makedirs(self.result_dir_path, exist_ok=True)

        if not os.path.exists(self.result_dir_path):
            print('Something wrong happened!')
            return False
        print('Successfully initialize save directory!')
        return True

    def save_frame(self, frame, predicted_frames, cv2, labels_and_boxes):
        """
        Write the frame as <unix_time>.png and its labels as <unix_time>.txt.

        Args:
            frame: Image to save (passed to cv2.imwrite).
            predicted_frames: Running count of frames saved so far.
            cv2: OpenCV module handle used to write the image.
            labels_and_boxes: Text content written to the label file.

        Returns:
            int: predicted_frames incremented by one.
        """
        print(f'5.1. Condition met, processing valid frame: {predicted_frames}')
        # Save original frame
        unix_time = int(time.time())
        print("5.2. Saving frame, labels and boxes")
        cv2.imwrite(f'{self.result_dir_path}/{unix_time}.png', frame)
        # Save labels and boxes
        with open(f'{self.result_dir_path}/{unix_time}.txt', 'w') as label_file:
            label_file.write(labels_and_boxes)

        # Increase the frame_number and predicted_frames by one.
        return predicted_frames + 1
21 changes: 21 additions & 0 deletions exports/export_factory.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
from exports.base_export import BaseExport
from exports.yolov8_export import Yolov8Export
from utils.VariableClass import VariableClass


class ExportFactory:
    """
    Export Factory initializes specific export types.
    """

    def __init__(self):
        self._var = VariableClass()
        self.save_format = self._var.DATASET_FORMAT

    def init(self, proj_name):
        """
        Instantiate the exporter matching the configured save format.

        Args:
            proj_name: Project directory name handed to the exporter.

        Returns:
            The exporter instance for the configured DATASET_FORMAT.

        Raises:
            ModuleNotFoundError: When the configured format is unsupported.
        """
        if self.save_format == 'yolov8':
            return Yolov8Export(proj_name)
        if self.save_format == 'base':
            return BaseExport(proj_name)
        raise ModuleNotFoundError('Export type not found!')

12 changes: 12 additions & 0 deletions exports/ibase_export.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
from abc import ABC, abstractmethod


class IBaseExport(ABC):
    """Abstract contract every dataset exporter must fulfil."""

    @abstractmethod
    def initialize_save_dir(self):
        """Prepare the directory tree that results are written to."""
        ...

    @abstractmethod
    def save_frame(self, frame, predicted_frames, cv2, labels_and_boxes):
        """Persist a single frame together with its labels and boxes."""
        ...
16 changes: 16 additions & 0 deletions exports/iyolov8_export.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
from abc import ABC, abstractmethod


class IYolov8Export(ABC):
    """Abstract contract for YOLOv8-format dataset exporters."""

    @abstractmethod
    def initialize_save_dir(self):
        """Prepare the images/labels directory tree for the dataset."""
        ...

    @abstractmethod
    def save_frame(self, frame, predicted_frames, cv2, labels_and_boxes):
        """Persist a single frame together with its labels and boxes."""
        ...

    @abstractmethod
    def create_yaml(self, model2):
        """Write the dataset's data.yaml using class names from model2."""
        ...
83 changes: 83 additions & 0 deletions exports/yolov8_export.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
from exports.iyolov8_export import IYolov8Export
from utils.VariableClass import VariableClass
from os.path import (
join as pjoin,
dirname as pdirname,
abspath as pabspath,
)
import os
import time


class Yolov8Export(IYolov8Export):
    """Exporter producing a YOLOv8-style dataset (images/, labels/, data.yaml)."""

    def __init__(self, proj_dir_name):
        """
        Constructor.

        Args:
            proj_dir_name: Name of the project folder inside the data directory.
        """
        self._var = VariableClass()
        current_dir = pdirname(pabspath(__file__))
        # normalise the link so downstream paths are absolute
        self.proj_dir = pabspath(pjoin(current_dir, f'../data/{proj_dir_name}'))
        self.image_dir_path = None
        self.label_dir_path = None
        self.yaml_path = None
        self.result_dir_path = None

    def initialize_save_dir(self):
        """
        Create the result directory with images/ and labels/ subdirectories
        and compute the data.yaml path.

        Returns:
            bool: True when all directories exist afterwards, False otherwise.
        """
        version_dir = f'{self._var.DATASET_FORMAT}-v{self._var.DATASET_VERSION}'
        self.result_dir_path = pjoin(self.proj_dir, version_dir)
        self.image_dir_path = pjoin(self.result_dir_path, 'images')
        self.label_dir_path = pjoin(self.result_dir_path, 'labels')
        self.yaml_path = pjoin(self.result_dir_path, 'data.yaml')

        required_dirs = (self.result_dir_path, self.image_dir_path, self.label_dir_path)
        for path in required_dirs:
            os.makedirs(path, exist_ok=True)

        if all(os.path.exists(path) for path in required_dirs):
            print('Successfully initialize save directory!')
            return True
        print('Something wrong happened!')
        return False

    def save_frame(self, frame, predicted_frames, cv2, labels_and_boxes):
        """
        Write the frame under images/ and its labels under labels/, both
        named by the current unix timestamp.

        Returns:
            int: predicted_frames incremented by one.
        """
        print(f'5.1. Condition met, processing valid frame: {predicted_frames}')
        # Save original frame
        unix_time = int(time.time())
        print("5.2. Saving frame, labels and boxes")
        cv2.imwrite(f'{self.image_dir_path}/{unix_time}.png', frame)
        # Save labels and boxes
        with open(f'{self.label_dir_path}/{unix_time}.txt', 'w') as label_file:
            label_file.write(labels_and_boxes)

        # Increase the frame_number and predicted_frames by one.
        return predicted_frames + 1

    def create_yaml(self, model2):
        """
        Create YAML configuration file with DATASET_FORMAT format.
        As convention, class names of the YAML file are taken from model2.

        Returns:
            None
        """
        label_names = list(model2.names.values())
        lines = ['names:']
        # class mapping for helmet detection project
        lines.extend(f'- {name}' for name in label_names)
        lines.append(f'nc: {len(label_names)}')
        with open(self.yaml_path, 'w') as yaml_file:
            yaml_file.write('\n'.join(lines))
File renamed without changes.
19 changes: 19 additions & 0 deletions integrations/integration_factory.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
from integrations.roboflow_integration import RoboflowIntegration
from integrations.s3_integration import S3Integration
from utils.VariableClass import VariableClass


class IntegrationFactory:
    """Builds the upload integration selected by INTEGRATION_NAME."""

    def __init__(self):
        self._var = VariableClass()
        self.name = self._var.INTEGRATION_NAME

    def init(self):
        """
        Instantiate the configured integration.

        Returns:
            The integration instance for the configured INTEGRATION_NAME.

        Raises:
            ModuleNotFoundError: When INTEGRATION_NAME is unsupported.
        """
        if self.name == 'roboflow':
            print('Initializing Roboflow agent ...')
            return RoboflowIntegration()
        if self.name == 's3':
            print('Initializing S3 compatible agent ...')
            return S3Integration()
        raise ModuleNotFoundError('Integration type not found!')
12 changes: 12 additions & 0 deletions integrations/iroboflow_integration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
from abc import ABC, abstractmethod


class IRoboflowIntegration(ABC):
    """Abstract contract for the Roboflow upload integration."""

    @abstractmethod
    def upload_dataset(self, src_project_path):
        """Upload a local dataset folder to the Roboflow project."""
        ...

    @abstractmethod
    def __connect__(self):
        """Authenticate and return the Roboflow agent/workspace/project."""
        ...
20 changes: 20 additions & 0 deletions integrations/is3_integration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
from abc import ABC, abstractmethod


class IS3Integration(ABC):
    """Abstract contract for S3-compatible upload integrations."""

    @abstractmethod
    def upload_file(self, source_path, output_path):
        """Upload one local file to the bucket at output_path."""
        ...

    @abstractmethod
    def upload_dataset(self, src_project_path):
        """Upload an entire local dataset folder to the bucket."""
        ...

    @abstractmethod
    def __connect__(self):
        """Open a session/client against the S3-compatible endpoint."""
        ...

    @abstractmethod
    def __check_bucket_exists__(self, bucket_name):
        """Verify the target bucket exists before any upload."""
        ...
Original file line number Diff line number Diff line change
Expand Up @@ -6,24 +6,21 @@
from utils.VariableClass import VariableClass


var = VariableClass()


class RoboflowHelper:
class RoboflowIntegration:
def __init__(self):
self.agent, self.ws, self.project = self.__login__
self._var = VariableClass()
self.agent, self.ws, self.project = self.__connect__()

@property
def __login__(self):
def __connect__(self):
try:
# Attempt to initialize Roboflow with the API key
agent = roboflow.Roboflow(api_key=var.ROBOFLOW_API_KEY)
agent = roboflow.Roboflow(api_key=self._var.ROBOFLOW_API_KEY)

# Access the workspace
workspace = agent.workspace(var.ROBOFLOW_WORKSPACE)
workspace = agent.workspace(self._var.ROBOFLOW_WORKSPACE)

# Access the project
project = workspace.project(var.ROBOFLOW_PROJECT)
project = workspace.project(self._var.ROBOFLOW_PROJECT)

return agent, workspace, project

Expand All @@ -32,17 +29,17 @@ def __login__(self):
raise ConnectionRefusedError(f'Error during Roboflow login: {e}')

def upload_dataset(self, src_project_path):
# Upload data set to an existing project
self.ws.upload_dataset(
# Upload data set to an existing project
self.ws.upload_dataset(
src_project_path,
pbasename(self.project.id),
num_workers=10,
project_license="MIT",
project_type="object-detection",
batch_name=None,
num_retries=0
)
print('Uploaded')
)
print('Uploaded')

# Remove local folder when uploaded
shutil.rmtree(src_project_path)
# Remove local folder when uploaded
shutil.rmtree(src_project_path)
70 changes: 70 additions & 0 deletions integrations/s3_integration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
import boto3
import os

from utils.VariableClass import VariableClass


class S3Integration:
    """
    Uploads datasets to an S3-compatible bucket (e.g. Wasabi) using boto3.
    """

    def __init__(self):
        self._var = VariableClass()
        self.session, self.agent = self.__connect__()
        self.bucket = self._var.S3_BUCKET
        # Fail fast if the configured bucket is missing.
        self.__check_bucket_exists__(self.bucket)

    def __connect__(self):
        """
        Open a boto3 session and client against the configured endpoint.

        Returns:
            tuple: (session, client)
        """
        session = boto3.session.Session()
        # Connect to Wasabi S3
        agent = session.client(
            self._var.INTEGRATION_NAME,
            endpoint_url=self._var.S3_ENDPOINT,  # Wasabi endpoint URL
            aws_access_key_id=self._var.S3_ACCESS_KEY,
            aws_secret_access_key=self._var.S3_SECRET_KEY,
        )
        print('Connected!')

        return session, agent

    def upload_file(self, source_path, output_path):
        """
        Upload a single file to the bucket.

        Best-effort: failures are reported to stdout but not raised, so a
        dataset upload continues past individual bad files.

        Args:
            source_path: Local file path to upload.
            output_path: Destination key inside the bucket.
        """
        try:
            self.agent.upload_file(source_path, self.bucket, output_path)
            print(f"Successfully uploaded '{source_path}' to 's3://{self.bucket}/{output_path}'")
        except Exception as e:
            print(f"Failed to upload '{source_path}' to 's3://{self.bucket}/{output_path}': {e}")

    def upload_dataset(self, src_project_path):
        """
        Walk the dataset folder (sub folders included) and mirror its
        structure under <DATASET_FORMAT>-v<DATASET_VERSION>/ in the bucket.

        Args:
            src_project_path: Root of the local dataset directory.
        """
        # Iterate over all the files in the folder, including sub folders
        for root, dirs, files in os.walk(src_project_path):
            for filename in files:
                # Construct the full file path
                source_path = os.path.join(root, filename)

                # Preserve the folder structure in the S3 path:
                # relative path from the source folder to the current file.
                relative_path = os.path.relpath(source_path, src_project_path)

                # Destination key includes the dataset format/version prefix.
                output_path = f"{self._var.DATASET_FORMAT}-v{self._var.DATASET_VERSION}/{relative_path.replace(os.sep, '/')}"

                # Upload the file
                self.upload_file(source_path, output_path)
                print(f'Uploaded: {source_path} to s3://{self.bucket}/{output_path}')

    def __check_bucket_exists__(self, bucket_name):
        """
        Verify the bucket exists via head_bucket.

        Raises:
            ModuleNotFoundError: When the bucket is absent or unreachable.
        """
        try:
            self.agent.head_bucket(Bucket=bucket_name)
            print(f"Bucket '{bucket_name}' found.")
        except Exception as e:
            # Narrowed from a bare `except:` (which also swallowed
            # KeyboardInterrupt/SystemExit); chain the original cause.
            raise ModuleNotFoundError(f"Bucket '{bucket_name}' does not exist.") from e
Loading

0 comments on commit 54df435

Please sign in to comment.