Skip to content

Commit

Permalink
reconfigured Resource class and added get_resource() to the ResourceP…
Browse files Browse the repository at this point in the history
…ool class
  • Loading branch information
Biraj Shrestha authored and Biraj Shrestha committed Jul 19, 2024
1 parent 5c1eece commit 250de03
Show file tree
Hide file tree
Showing 4 changed files with 54 additions and 85 deletions.
6 changes: 3 additions & 3 deletions CPAC/_entrypoints/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -782,12 +782,12 @@ def run_main():
only_one_anat=False,
)
# Initializing the bidstable on the bids_directory
bids_table = bids2table(bids_dir, workers=10)
bids_table = bids2table(bids_dir, workers=10).flat

try:
# fillna
bids_table['ent__ses'] = bids_table['ent__ses'].fillna('None')
grouped_tab = bids_table.groupby(["ent__sub", "ent__ses"])
bids_table['ses'] = bids_table['ses'].fillna('None')
grouped_tab = bids_table.groupby(["sub", "ses"])
except Exception as e:
WFLOGGER.warning("Could not create bids table: %s", e)
print("Could not create bids table: %s", e)
Expand Down
16 changes: 15 additions & 1 deletion CPAC/pipeline/engine/engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -1347,10 +1347,24 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None):
# rpool = ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path)

# output files with 4 different scans
print(rpool.get_resource("T1w"))
resource_description = {
"suffix": "T1w",
"desc": "preproc",
"space": "MNI152NLin6ASym"
}
resource_content = rpool.get_resource(resource_description)
#print(dir(rpool.get_resource("T1w")[0]))
#rpool.write_to_disk(cfg.pipeline_setup["working_directory"]["path"])
#print(rpool.get_resource("T1w"))

# Ensure the directory exists
os.makedirs('/code/output', exist_ok=True)

# Now, safely open the file. It will be created if it does not exist.
with open('/code/output/output.txt', 'w') as file:

# Write the content to the file
file.write(str(resource_content))
import sys
sys.exit()

Expand Down
89 changes: 12 additions & 77 deletions CPAC/pipeline/engine/resource.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from dataclasses import dataclass
import pandas as pd

class Resource(BIDSFile):
class Resource():
row: dict
CpacProvenance: tuple
ds: dict
Expand All @@ -17,90 +17,25 @@ class Resource(BIDSFile):
def __init__(self, row, CpacProvenance):

self.cpac_provenance = CpacProvenance
self.metadata = row['meta__json'] if isinstance(row['meta__json'], dict) else {}
self.metadata = {}#row['json'] if isinstance(row['json'], dict) else {}
self.row = row
#extra stuff
self.ds = {k: v for k, v in row.items() if k.startswith('ds')}
self.entity = {k: v for k, v in row.items() if k.startswith('ent')}
self.finfo = {k: v for k, v in row.items() if k.startswith('finfo')}
for key, value in self.row.items():
setattr(self, key, value)

self.filename = self.finfo['finfo__file_path'].split("/")[-1]
self.file_path = self.finfo['finfo__file_path']
self.rel_path = f"sub-{self.entity['ent__sub']}"
if self.entity['ent__ses'] != "None":
self.rel_path += f"/ses-{self.entity['ent__ses']}"
self.rel_path += f"/{self.entity['ent__datatype']}"

self.category = self.entity['ent__suffix']

super().__init__(
self.ds['ds__dataset'],
self.ds['ds__dataset_path'],
self.finfo['finfo__file_path'],
self.entity,
self.metadata,
)
self.filename = self.file_path.split("/")[-1]
self.rel_path = f"sub-{self.sub}"
if self.ses != "None":
self.rel_path += f"/ses-{self.ses}"
self.rel_path += f"/{self.datatype}"

self.entity_to_bids_key = {

# These are the keys that are used in generating the Resource names.
# The order of the keys does matter and affects the Resource names.
# Some of the keys can be commented or uncommented to include or exclude them from the Resource names.
# If the keys are duplicated then the values are appended as a list.

#'ent__sub': 'sub',
#'ent__ses': 'ses',
'ent__sample': 'sample',
#'ent__task': 'task',
#'ent__acq': 'acq',
'ent__ce': 'ce',
'ent__trc': 'trc',
'ent__stain': 'stain',
'ent__rec': 'rec',
'ent__dir': 'dir',
'ent__mod': 'mod',
'ent__echo': 'echo',
'ent__flip': 'flip',
'ent__inv': 'inv',
'ent__mt': 'mt',
'ent__part': 'part',
'ent__proc': 'proc',
'ent__hemi': 'hemi',
'ent__space': 'space',
'ent__split': 'split',
'ent__recording': 'recording',
'ent__chunk': 'chunk',
'ent__res': 'res',
'ent__den': 'den',
'ent__label': 'label',
'ent__extra_entities': 'extra_entities',
'ent__desc': 'desc',
'ent__suffix': 'suffix',
#'ent__ext': 'ext'
}
self.suffix = self.suffix

self.name = self.generate_resource_name()
self.name = self.filename.split(".")[0]
self.strats = {
str(self.cpac_provenance) : self.file_path
}
for key, value in self.metadata.items():
setattr(self, key, value)

def generate_resource_name(self):
    """Build a BIDS-style resource name from this resource's entity row.

    Iterates ``self.entity_to_bids_key`` in declaration order (the order
    of the keys determines the order of the name segments) and joins the
    present entities with ``_``:

    - ``suffix`` entities are appended bare (no ``suffix-`` prefix);
    - ``extra_entities`` is expected to hold a dict of additional
      entity/value pairs, each emitted as ``key-value``;
    - every other entity is emitted as ``bidskey-value``.

    Entities whose row value is missing (NaN/None) or the empty string
    are skipped.

    Returns
    -------
    str
        The joined BIDS-style name, e.g. ``desc-preproc_T1w``.
    """
    parts = []
    for col, bids_key in self.entity_to_bids_key.items():
        raw = self.row[col]
        # Guard clause: skip absent or empty entity values.
        if not pd.notnull(raw) or raw == '':
            continue
        if bids_key == 'suffix':
            # The suffix segment carries no "key-" prefix.
            parts.append(raw)
        elif bids_key == 'extra_entities':
            # raw is a dict of extra entity/value pairs.
            parts.extend(f"{k}-{v}" for k, v in raw.items())
        else:
            parts.append(f"{bids_key}-{raw}")
    return '_'.join(parts)

def __repr__(self):
exclude_list = ['CpacConfig', 'CpacConfigHash', 'CpacProvenance', 'metadata', 'cpac_provenance', 'ds', 'entity', 'finfo', 'row', 'filename', 'file_path', 'rel_path', 'entities', 'path', 'entity_to_bids_key', ] # Add attribute names to exclude
Expand All @@ -114,7 +49,7 @@ def write_to_disk(self, path):
path_to_write = os.path.join(path, self.rel_path)
os.makedirs(path_to_write, exist_ok=True)
# Copy the NIFTI file
shutil.copy(self.finfo['finfo__file_path'], path_to_write)
shutil.copy(self.finfo['file_path'], path_to_write)
# Write the JSON file only if the ext is .nii.gz
if self.filename.endswith('.nii.gz'):
json_path = os.path.join(path_to_write, f"{self.filename.replace('.nii.gz', '.json')}")
Expand Down
28 changes: 24 additions & 4 deletions CPAC/pipeline/engine/resource_pool.py
Original file line number Diff line number Diff line change
Expand Up @@ -1429,8 +1429,8 @@ def build_rpool(
CpacProvenance = default_CpacProvenance
resource = Resource(row, CpacProvenance)
# making the rpool a list so that the duplicates are appended rather than overwritten
self.rpool.setdefault(resource.category, [])
self.rpool[resource.category].append(resource)
self.rpool.setdefault(resource.suffix, [])
self.rpool[resource.suffix].append(resource)
# count += 1
# if count >10:
# break
Expand All @@ -1441,8 +1441,28 @@ def write_to_disk(self, path):
for item in resources:
print(item['resource'].write_to_disk(path))

def get_resource(self, name):
return self.rpool.get(name, None)
def get_resource(self, description):
    """Return the names of pooled resources matching *description*.

    Parameters
    ----------
    description : dict
        Must contain a ``'suffix'`` key, which selects the bucket of
        ``self.rpool`` to search. Every other key/value pair is matched
        against the candidate resource's attributes of the same name.

    Returns
    -------
    list of str
        The ``name`` of every resource in the selected bucket whose
        attributes equal all non-suffix criteria; an empty list when the
        suffix has no bucket or nothing matches.
    """
    # 'suffix' only selects the bucket; all remaining keys are match criteria.
    criteria = {key: val for key, val in description.items() if key != 'suffix'}
    return [
        resource.name
        for resource in self.rpool.get(description['suffix'], [])
        # getattr(..., None): a resource lacking an attribute fails a
        # non-None criterion instead of raising AttributeError.
        if all(getattr(resource, key, None) == val for key, val in criteria.items())
    ]


def set_resource(self, name, value):
    """Bind *value* under *name* in the pool, replacing any existing entry."""
    self.rpool[name] = value
Expand Down

0 comments on commit 250de03

Please sign in to comment.