Commit ab65d8f2 by arnaud.marcoux

Merge Master -> dev

parents ad2d4a17 a8bac17d
Pipeline #1041 passed with stages
in 1 minute 58 seconds
......@@ -109,6 +109,7 @@ unit:mac:
- cd test
- ln -s /Volumes/builds/Clinica/data_ci ./data
- pytest -v
- module purge
- source deactivate
......
......@@ -396,6 +396,7 @@ class CAPSVertexBasedInput(CAPSInput):
def save_weights_as_nifti(self, weights, output_dir):
    """Intentionally a no-op for this input type.

    NOTE(review): presumably vertex-based inputs have no NIfTI weight
    export — confirm against the other CAPSInput subclasses, which do
    write 'weights.nii.gz' via rbio.weights_to_nifti.
    """
    pass
class CAPSTSVBasedInput(CAPSInput):
def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_id, image_type, atlas, dataset,
......@@ -418,6 +419,7 @@ class CAPSTSVBasedInput(CAPSInput):
self._atlas = atlas
self._pvc = pvc
self._dataset = dataset
self._orig_shape = None
self._data_mask = None
......@@ -427,23 +429,27 @@ class CAPSTSVBasedInput(CAPSInput):
def get_images(self):
"""
Returns: string
Returns: string
"""
import pandas as pd
if self._images is not None:
return self._images
print self._group_id
print self._atlas
if self._image_type == 'T1':
#import pandas as pd
pass
#if self._images is not None:
# return self._images
#print self._group_id
#print self._atlas
#print self._image_type
self._images = str('group-' + self._group_id + '_T1w_space-' + self._atlas + '_map-graymatter')
#if self._image_type == 'T1':
## to implement for PET
# self._images = str('group-' + self._group_id + '_T1w_space-' + self._atlas + '_map-graymatter')
return self._images
### to implement for PET
#return self._images
def get_x(self):
"""
......@@ -451,12 +457,17 @@ class CAPSTSVBasedInput(CAPSInput):
Returns: a numpy 2d-array.
"""
if self._x is not None:
return self._x
#if self._x is not None:
# return self._x
print 'Loading TSV subjects'
self._x = tbio.load_data(self._images, self._caps_directory, self._subjects, self._sessions, self._dataset)
###to finish
string = str('group-' + self._group_id + '_T1w_space-' + self._atlas + '_map-graymatter')
self._x = tbio.load_data(string, self._caps_directory, self._subjects, self._sessions, self._dataset)
print 'Subjects loaded'
return self._x
......@@ -473,4 +484,6 @@ class CAPSTSVBasedInput(CAPSInput):
"""
output_filename = path.join(output_dir, 'weights.nii.gz')
rbio.weights_to_nifti(weights, self._atlas, output_filename)
\ No newline at end of file
rbio.weights_to_nifti(weights, self._atlas, output_filename)
......@@ -288,7 +288,7 @@ class RB_RepHoldOut_DualSVM(base.MLWorkflow):
class RB_RepHoldOut_LogisticRegression(base.MLWorkflow):
def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_id, image_type, atlas,
output_dir, pvc=None, n_threads=15, n_iterations=100, test_size=0.3,
grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None):
......@@ -305,20 +305,20 @@ class RB_RepHoldOut_LogisticRegression(base.MLWorkflow):
image_type, atlas, pvc)
self._validation = None
self._algorithm = None
def run(self):
x = self._input.get_x()
y = self._input.get_y()
self._algorithm = algorithm.LogisticReg(x, y, balanced=self._balanced,
grid_search_folds=self._grid_search_folds,
c_range=self._c_range,
n_threads=self._n_threads)
self._validation = validation.RepeatedHoldOut(self._algorithm, n_iterations=self._n_iterations, test_size=self._test_size)
classifier, best_params, results = self._validation.validate(y, n_threads=self._n_threads, splits_indices=self._splits_indices)
classifier_dir = os.path.join(self._output_dir, 'classifier')
if not path.exists(classifier_dir):
os.makedirs(classifier_dir)
......@@ -656,5 +656,3 @@ class TB_RepHoldOut_RandomForest(base.MLWorkflow):
self._input.save_weights_as_nifti(weights, classifier_dir)
self._validation.save_results(self._output_dir)
......@@ -34,22 +34,21 @@ def load_data(images, caps_directory, subjects, sessions, dataset):
all_vector = np.array([])
if dataset == 'OASIS':
df = df[df.age_bl > 61]
#subjects_visits = pd.io.parsers.read_csv(os.path.join(subjects_visits_tsv), sep='\t')
participant_id = subjects
print len(participant_id)
session_id = sessions
for i in xrange(len(participant_id)):
df_sub = df[df.participant_id == participant_id[i]]
df_analysis = df_sub[[col for col in df_sub.columns if images in col]]
all_vector = np.append(all_vector, df_analysis.values)
data = np.zeros((participant_id.shape[0], df_analysis.shape[1]))
data_temp = np.split(all_vector, participant_id.shape[0])
data = np.zeros((len(participant_id), df_analysis.shape[1]))
data_temp = np.split(all_vector, len(participant_id))
for i in xrange(len(participant_id)):
for j in xrange(df_analysis.shape[1]):
......
......@@ -136,8 +136,8 @@ class StatisticsSurfaceCLI(ce.CmdParser):
raise Exception('You must specify a --feature_label when using the --custom_files flag')
# Check if the group label has been existed, if yes, give an error to the users
if os.path.exists(os.path.join(os.path.abspath(self.absolute_path(args.caps_directory)), 'groups', 'group-' + args.group_label)):
error_message = 'group_id : ' + args.group_label + ' already exists, please choose another one or delete the existing folder and also the working directory and rerun the pipelines'
if os.path.exists(os.path.join(os.path.abspath(self.absolute_path(args.caps_directory)), 'groups', 'group-' + args.group_id)):
error_message = 'group_id : ' + args.group_id + ' already exists, please choose another one or delete the existing folder and also the working directory and rerun the pipelines'
raise Exception(error_message)
pipeline = StatisticsSurface(
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment