# Source code for brainvisa.data.databaseCheck

# -*- coding: utf-8 -*-
#  This software and supporting documentation are distributed by
#      Institut Federatif de Recherche 49
#      CEA/NeuroSpin, Batiment 145,
#      91191 Gif-sur-Yvette cedex
#      France
#
# This software is governed by the CeCILL license version 2 under
# French law and abiding by the rules of distribution of free software.
# You can  use, modify and/or redistribute the software under the
# terms of the CeCILL license version 2 as circulated by CEA, CNRS
# and INRIA at the following URL "http://www.cecill.info".
#
# As a counterpart to the access to the source code and  rights to copy,
# modify and redistribute granted by the license, users are provided only
# with a limited warranty  and the software's author,  the holder of the
# economic rights,  and the successive licensors  have only  limited
# liability.
#
# In this respect, the user's attention is drawn to the risks associated
# with loading,  using,  modifying and/or developing or reproducing the
# software by the user in light of its specific status of free software,
# that may mean  that it is complicated to manipulate,  and  that  also
# therefore means  that it is reserved for developers  and  experienced
# professionals having in-depth computer knowledge. Users are therefore
# encouraged to load and test the software's suitability as regards their
# requirements in conditions enabling the security of their systems and/or
# data to be ensured and,  more generally, to use and operate it in the
# same conditions as regards security.
#
# The fact that you are presently reading this means that you have had
# knowledge of the CeCILL license version 2 and that you accept its terms.

from __future__ import print_function
from __future__ import absolute_import
import os
import stat
import re
import shutil
import time
import operator
from brainvisa.data import neuroHierarchy
from brainvisa import registration
from soma.sorted_dictionary import SortedDictionary
from brainvisa.data.actions import FileProcess, Move, Remove, CallProcess, SetTransformationInfo
from soma.minf.api import readMinf, writeMinf
from brainvisa.data.sqlFSODatabase import SQLDatabase
from brainvisa.data.readdiskitem import ReadDiskItem
import six
import collections
import sys

#
# DBProcessor
#


class DBProcessor(object):

    """
    Base class for objects that scan a BrainVISA file-system database,
    collect file actions to perform, execute them, and generate/run undo
    scripts.

    @type dbDir: string
    @ivar dbDir: database directory
    @type components: dict of DBProcessor
    @ivar components: it is possible to have several processors, each
    specialized for one database part.
    @type fileProcesses: list of FileProcess
    @ivar fileProcesses: files that must be processed to process the database.
    @type doneProcesses: list of FileProcess
    @ivar doneProcesses: actions really done by process method in the inverse
    order of execution. It is stored to create the undo scripts. To undo, it
    is necessary to do inverse of each done action in inverse order.
    @type undoScriptName: string
    @ivar undoScriptName: name of the script that will be generated by
    generateUndoScripts method to undo done processing on the database.
    """

    def __init__(self, dbDir, context=None):
        # Accept either the database directory itself or a file inside it.
        if os.path.isfile(dbDir):
            dbDir = os.path.dirname(dbDir)
        self.dbDir = os.path.abspath(dbDir)
        self.fileProcesses = []
        self.doneProcesses = []
        self.components = {}
        self.undoScriptName = "undoProcessing.py"
        self.context = context

    def _unknownComponentError(self, component):
        """
        Build the NameError raised for an unknown component key.

        BUG FIX: the previous code concatenated the class object and a list
        directly with strings, which raised TypeError instead of the intended
        NameError.
        """
        return NameError(
            self.__class__.__name__ + " has no component named "
            + str(component) + ". Available components : "
            + str(list(self.components.keys())) + ".")

    def findActions(self, component=None):
        """
        Search for actions to do to process the database.

        @type component: string
        @param component: key of a component to process only a part of the
        database
        @rtype: list of FileProcess
        @return: actions to do to process the database
        """
        actions = []
        if component is not None:
            c = self.components.get(component)
            if c is not None:
                actions.extend(c.findActions())
            else:
                raise self._unknownComponentError(component)
        else:
            for c in self.components.values():
                actions.extend(c.findActions())
        return actions

    def process(self, component=None, debug=False):
        """
        For each stored action, calls doit method which does the action to
        process the database. Each done action is stored in doneProcesses in
        order to create undo scripts. Calls process for each sub component.

        @type component: string
        @param component: key of a component to process only a part of the
        database
        """
        # do component converters actions first
        if component is not None:
            c = self.components.get(component)
            if c is not None:
                c.process(debug=debug)
            else:
                raise self._unknownComponentError(component)
        else:
            for c in self.components.values():
                c.process(debug=debug)
        self.doneProcesses = []
        currentDir = os.getcwd()
        os.chdir(self.dbDir)
        # restore the working directory even if an action fails
        try:
            for action in self.fileProcesses:
                if action.selected:
                    action.doit(debug, context=self.context)
                    # prepend so that doneProcesses is in inverse execution
                    # order, as required to build the undo script
                    self.doneProcesses.insert(0, action)
        finally:
            os.chdir(currentDir)

    def generateUndoScripts(self, component=None):
        """
        Creates a script to undo conversion actions. The script file is
        dbDir/scripts/undoScriptName. For each done action, the undoCmd is
        written in the undo script. Calls generateUndoScripts for each sub
        component.

        @type component: string
        @param component: key of a component to process only a part of the
        database
        """
        scriptsDir = os.path.join(self.dbDir, 'scripts')
        if not os.path.exists(scriptsDir):
            os.mkdir(scriptsDir)
        undoScriptName = os.path.join(scriptsDir, self.undoScriptName)
        if os.path.exists(undoScriptName):
            # keep a timestamped backup of any previous undo script
            oldScriptName = undoScriptName[
                :-3] + time.strftime("_%Y-%m-%d_%H-%M-%S") + ".py"
            if os.path.exists(oldScriptName):
                os.remove(oldScriptName)
            os.rename(undoScriptName, oldScriptName)
        if self.doneProcesses:
            with open(undoScriptName, "w") as undoScript:
                print("#!/usr/bin/env python", file=undoScript)
                print("# This has been created by Brainvisa. ",
                      file=undoScript)
                print("# Run it to undo changes made by database processes.",
                      file=undoScript)
                print("import os, shutil, sys", file=undoScript)
                # the undo script is in <db>/scripts, so changing to the
                # script directory's parent is changing to the database
                # directory
                print("os.chdir(os.path.dirname(os.path.dirname(sys.argv[0])))",
                      file=undoScript)
                for action in self.doneProcesses:
                    print(action.undoCmd(), file=undoScript)
            # make the script executable
            os.chmod(undoScriptName, os.stat(undoScriptName)[
                     stat.ST_MODE] | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        if component is not None:
            c = self.components.get(component)
            if c is not None:
                c.generateUndoScripts()
            else:
                raise self._unknownComponentError(component)
        else:
            for c in self.components.values():
                c.generateUndoScripts()

    def undo(self, component=None):
        """
        Executes undo scripts to undo database conversion. The script file
        must have been generated by generateUndoScripts method. Executes undo
        scripts for each sub component.

        @type component: string
        @param component: key of a component to process only a part of the
        database
        """
        undoScript = os.path.join(self.dbDir, "scripts", self.undoScriptName)
        if os.path.exists(undoScript):
            # BUG FIX: os.spawnl requires at least argv[0]; calling it with
            # an empty argument list raises ValueError.
            os.spawnl(os.P_WAIT, undoScript, undoScript)
        if component is not None:
            c = self.components.get(component)
            if c is not None:
                c.undo()
            else:
                raise self._unknownComponentError(component)
        else:
            # undo components in reverse of the processing order
            for c in reversed(list(self.components.values())):
                c.undo()
# # DBConverter #
class DBConverter(DBProcessor):

    """
    Converts a file system database by moving, renaming, removing files and
    directories. Generic converter, it can contain other converters,
    specialized for a part of the database.
    """
    # default names for acquisition and analysis levels, could be asked to
    # user
    default_acquisition = "default_acquisition"
    default_analysis = "default_analysis"
    default_session = "default_session"

    def __init__(self, dbDir, context=None):
        super(DBConverter, self).__init__(dbDir, context)
        self.undoScriptName = "undoDBConversion.py"

    def convertFiles(self, conversionPatterns, oldDir, newDir, baseDir,
                     currentAcquisition="", currentAnalysis="",
                     currentSession="", starReplace="acquisition"):
        """
        Files will be moved to
        destDir/currentAcquisition/currentAnalysis/currentSession and renamed
        according to patterns. If there is * patterns, the corresponding files
        will be moved to this path replacing starReplace by match string.

        @type conversionPatterns: list of tuple (2-4)
        @param conversionPatterns: each tuple contain : source pattern, dest
        pattern and eventually a star pattern. the star pattern is the same as
        the source pattern with a variable in the name which represents an
        attribute (acquisition, analysis...)
        @type oldDir : string
        @param oldDir : directory where diffusion files are currently
        (diffusion)
        @type newDir: string
        @param newDir: directory where diffusion files will be after
        processing conversion. (diffusion/acquisition)
        @type baseDir : string
        @param baseDir: base directory where the files must be put (generally
        diffusion)
        @type starReplace: string
        @param starReplace: indicates what attribute the match string in star
        pattern must replace in dest path.
        """
        # analysis : newDir -> default_analysis
        content = []
        newContent = []
        if os.path.isdir(oldDir):
            content = os.listdir(oldDir)
        if os.path.isdir(newDir):
            newContent = os.listdir(newDir)
        destDir = os.path.join(
            baseDir, currentAcquisition, currentAnalysis, currentSession)
        for patterns in conversionPatterns:
            patternSrc = patterns[0]
            patternDest = patterns[1]
            # schedule a move when the source pattern matches either the old
            # or the new (already partly converted) directory content
            if (contentMatch(content, patternSrc)) or (contentMatch(newContent, patternSrc)):
                self.fileProcesses.append(
                    FileProcess(newDir, Move(destDir, patternSrc, patternDest),
                                patternSrc))
            if len(patterns) > 2:
                patternStar = patterns[2]
                if patternStar:
                    # collect every distinct value captured by the star
                    # pattern's first group (e.g. each acquisition name)
                    regexp = re.compile(patternStar)
                    otherStars = []
                    for f in content:
                        match = regexp.match(f)
                        if match:
                            star = match.group(1)
                            if star not in otherStars:
                                otherStars.append(star)
                    for star in otherStars:
                        pattern = patternStar.replace("(.+)", star)
                        # the captured value replaces the attribute named by
                        # starReplace in the destination path
                        if starReplace == "acquisition":
                            starDir = os.path.join(
                                baseDir, star, currentAnalysis, currentSession)
                        elif starReplace == "analysis":
                            starDir = os.path.join(
                                baseDir, currentAcquisition, star,
                                currentSession)
                        else:
                            starDir = os.path.join(
                                baseDir, currentAcquisition, currentAnalysis,
                                star)
                        if len(patterns) > 3:
                            patternDest = patterns[3]
                        # re-capture the star value so patternDest group
                        # references still work
                        pattern = pattern.replace(star, "(" + star + ")")
                        self.fileProcesses.append(
                            FileProcess(newDir,
                                        Move(starDir, pattern, patternDest),
                                        pattern))
#
class T1MriConverter(DBConverter):

    """
    In protocol/subject :
      * protocol -> center
      * anatomy -> t1mri/acquisition/analysis (raw t1 mri and acpc
        coordinates files are put in acquisition, the others in analysis).
        If anatomy already contains acquisition directories, they are moved
        in t1mri, and analysis level is added.
      * segment -> t1mri/acquisition/analysis/segmentation
      * tri, mesh -> t1mri/acquisition/analysis/segmentation/mesh
      * deepnuclei -> t1mri/acquisition/analysis/nuclei
      * Referential and transformations :
        * <subject>_TO_talairach.trm ->
          <center>/<subject>/registration/RawT1-<subject>_<acquisition>_TO_Talairach-ACPC.trm
        * *<subject>.referential -> *<subject>-default_acquisition.referential,
          *<subject>_TO_*.trm -> *<subject>_default_acquisition_TO_*.trm
    """

    def __init__(self, dbDir, context=None):
        super(T1MriConverter, self).__init__(dbDir, context)
        self.undoScriptName = "undoT1mriConversion.py"
    def findActions(self):
        """
        Scan the whole database directory and build the list of FileProcess
        actions needed to convert the old t1mri layout to the new one.

        @rtype: list of FileProcess
        @return: the collected actions (also stored in self.fileProcesses)
        """
        self.fileProcesses = []
        centers = os.listdir(self.dbDir)  # first level : center/protocol
        currentDir = six.moves.getcwd()
        os.chdir(self.dbDir)
        for p in centers:
            if os.path.isdir(p):
                subjects = os.listdir(p)
                # second level : subject
                for s in subjects:
                    # in subjects directories, if there is an old dir, the
                    # database must be converted
                    subjectDir = os.path.join(p, s)
                    if os.path.isdir(subjectDir) and s != "registration" and s != "group_analysis":
                        dirs = os.listdir(subjectDir)
                        if "registration" in dirs:
                            # subject/registration: add default_acquisition in
                            # the name of referentials and transformations
                            # when it is not already specified
                            registrationPatterns = [(
                                "RawT1-" + s + "\.referential",
                                "RawT1-" + s + "_" +
                                self.default_acquisition + ".referential",
                                "RawT1-" + s + "-(.+)\.referential",
                                "RawT1-" + s + "_\\1.referential"),
                                ("RawT1-" + s + "(_TO_Talairach-ACPC\.trm.*)",
                                 "RawT1-" + s + "_" +
                                 self.default_acquisition + "\\1",
                                 "RawT1-" + s +
                                 "_(.+)(_TO_Talairach-ACPC\.trm.*)",
                                 "RawT1-" + s + "_\\1\\2")]
                            self.convertFiles(
                                registrationPatterns,
                                os.path.join(subjectDir, "registration"),
                                os.path.join(subjectDir, "registration"),
                                os.path.join(subjectDir, 't1mri'),
                                self.default_acquisition, "registration")
                        t1mriDir = os.path.join(subjectDir, "t1mri")
                        if "anatomy" in dirs:
                            # in anatomy raw t1 mri and acpc coordinates must
                            # be in acquisition directory not in analysis
                            # directory
                            src = os.path.join(subjectDir, "anatomy")
                            dest = t1mriDir
                            subdirs = []
                            files = []
                            for f in os.listdir(src):
                                if os.path.isdir(os.path.join(src, f)) and not f.endswith(".data"):
                                    subdirs.append(f)
                                else:
                                    files.append(f)
                            if subdirs:  # level acquisition exists
                                for acquisition in subdirs:
                                    # anatomy/acquisition ->
                                    # t1mri/acquisition/default_analysis/
                                    # move and rename transformation raw t1
                                    # mri to talairach
                                    talairachTrm = os.path.join(
                                        src, acquisition,
                                        s + "_TO_talairach.trm")
                                    if os.path.exists(talairachTrm):
                                        transfoPattern = s + \
                                            "_TO_talairach.trm(.*)"
                                        action = Move(
                                            os.path.join(dest, acquisition,
                                                         "registration"),
                                            patternSrc=transfoPattern,
                                            patternDest="RawT1-" + s + "_" + acquisition + "_TO_Talairach-ACPC.trm\\1")
                                        self.fileProcesses.append(
                                            FileProcess(os.path.join(src, acquisition), action, pattern=transfoPattern))
                                    # move acquisition files in
                                    # t1mri/acquisition
                                    # move anatomy/acquisition/subject.* ->
                                    # t1mri/acquisition
                                    action = FileProcess(
                                        os.path.join(src, acquisition),
                                        Move(os.path.join(dest, acquisition)),
                                        s + "(_sn)?\..*")
                                    self.fileProcesses.append(action)
                                    # if there is a normalized raw t1
                                    # (wsubject), rename to
                                    # normalized_spm_subject and put in
                                    # acquisition directory
                                    normalizedPattern = "w(" + s + "\..*)"
                                    if contentMatch(os.path.join(src, acquisition), normalizedPattern):
                                        self.fileProcesses.append(FileProcess(os.path.join(src, acquisition), Move(
                                            os.path.join(dest, acquisition), normalizedPattern, "normalized_spm_\\1"), normalizedPattern))
                                    # move anatomy/acquisition ->
                                    # t1mri/acquisition/default_analysis
                                    action = FileProcess(
                                        os.path.join(src, acquisition),
                                        Move(os.path.join(dest, acquisition),
                                             patternDest=self.default_analysis))
                                    self.fileProcesses.append(action)
                                if files:
                                    action = FileProcess(
                                        src,
                                        Move(os.path.join(dest, self.default_acquisition)), ".*")
                                    self.fileProcesses.append(action)
                                # put src in trash in a directory subjectDir
                                action = FileProcess(src, Remove(subjectDir))
                                self.fileProcesses.append(action)
                            else:  # no level acquisition
                                # move and rename transformation raw t1 mri to
                                # talairach
                                talairachTrm = os.path.join(
                                    src, s + "_TO_talairach.trm")
                                if os.path.exists(talairachTrm):
                                    transfoPattern = s + \
                                        "_TO_talairach.trm(.*)"
                                    action = Move(
                                        os.path.join(dest, self.default_acquisition, "registration"),
                                        patternSrc=transfoPattern,
                                        patternDest="RawT1-" + s + "_" + self.default_acquisition + "_TO_Talairach-ACPC.trm\\1")
                                    self.fileProcesses.append(
                                        FileProcess(src, action, pattern=transfoPattern))
                                    # this file is treated, remove it from the
                                    # list of remaining file to process
                                    regexp = re.compile(transfoPattern)
                                    remainFiles = []
                                    for f in files:
                                        if not regexp.match(f):
                                            remainFiles.append(f)
                                    files = remainFiles
                                # move files subject_{acquisition}.* ->
                                # acquisition/subject.*
                                # and files prefix_subject_{acquisition}.* ->
                                # acquisition/default_analysis/prefix_subject.*
                                # idem for files in segment
                                # search for files matching
                                # subject_acquisition.*. the '?' after '*'
                                # means that * is not greedy : it will match
                                # as few characters as possible. So in
                                # subject.ima.minf, .ima cannot be considered
                                # as an acquisition.
                                regexp = re.compile(s + "_(.*?)\..*")
                                # to avoid processing the same acquisition
                                # several times, <subject>_sn.mat is not an
                                # acquisition
                                processedSuffixes = ["sn"]
                                for f in files:
                                    match = regexp.match(f)
                                    if match:
                                        acquisition = match.group(1)
                                        if acquisition not in processedSuffixes:
                                            processedSuffixes.append(
                                                acquisition)
                                            # move all files
                                            # subject_acquitition.extenstion ->
                                            # acquisition/subject.extension
                                            pattern = s + "_" + \
                                                acquisition + "\.(.*)"
                                            self.fileProcesses.append(
                                                FileProcess(src, Move(os.path.join(dest, acquisition), pattern, s + ".\\1"), pattern))
                                            # then move all linked files
                                            # prefix_subject_acquisition.extension
                                            # ->
                                            # acquisition/default_analysis/prefix_subject.extension
                                            linkedFilePattern = "(.*)_" + \
                                                s + "_" + \
                                                acquisition + \
                                                "\.(.*)"
                                            if contentMatch(files, linkedFilePattern):
                                                # move analysis files that
                                                # have the same acquisition
                                                # suffix in
                                                # acquisition/default_analysis
                                                self.fileProcesses.append(
                                                    FileProcess(src, Move(os.path.join(dest, acquisition, self.default_analysis), linkedFilePattern, patternDest="\\1_" + s + ".\\2"), linkedFilePattern))
                                            if contentMatch(os.path.join(subjectDir, "segment"), linkedFilePattern):
                                                # move segmentation files with
                                                # the same suffix and rename
                                                # segmentation files to remove
                                                # the suffix
                                                self.fileProcesses.append(FileProcess(os.path.join(subjectDir, "segment"), Move(
                                                    os.path.join(dest, acquisition, self.default_analysis, "segmentation"), linkedFilePattern, patternDest="\\1_" + s + ".\\2"), linkedFilePattern))
                                # if it remains some files with a suffix which
                                # is not an acquisition, it is considered as an
                                # analysis, so these files are moved in
                                # default_acquisition/analysis
                                regexp = re.compile(".*_" + s + "_(.*?)\..*")
                                for f in files:
                                    match = regexp.match(f)
                                    if match:
                                        analysis = match.group(1)
                                        if analysis not in processedSuffixes:
                                            processedSuffixes.append(analysis)
                                            pattern = "(.*)_" + s + \
                                                "_" + analysis + \
                                                "\.(.*)"
                                            self.fileProcesses.append(
                                                FileProcess(src, Move(os.path.join(dest, self.default_acquisition, analysis), pattern, patternDest="\\1_" + s + ".\\2"), pattern))
                                            if contentMatch(os.path.join(subjectDir, "segment"), pattern):
                                                # move segmentation files with
                                                # the same suffix and rename
                                                # segmentation files to remove
                                                # the suffix
                                                self.fileProcesses.append(FileProcess(os.path.join(subjectDir, "segment"), Move(
                                                    os.path.join(dest, self.default_acquisition, analysis, "segmentation"), pattern, patternDest="\\1_" + s + ".\\2"), pattern))
                                # move remaining files to default_acquisition
                                # regular expression for <subject>.* -> raw t1
                                # mri and acpc coordinates
                                action = FileProcess(
                                    src,
                                    Move(os.path.join(dest, self.default_acquisition)),
                                    s + "(_sn)?\..*")
                                self.fileProcesses.append(action)
                                normalizedPattern = "w(" + s + "\..*)"
                                if contentMatch(src, normalizedPattern):
                                    self.fileProcesses.append(
                                        FileProcess(src, Move(os.path.join(dest, self.default_acquisition), normalizedPattern, "normalized_spm_\\1"), normalizedPattern))
                                action = FileProcess(
                                    src,
                                    Move(os.path.join(dest, self.default_acquisition),
                                         patternDest=self.default_analysis))
                                self.fileProcesses.append(action)
                        if "deepnuclei" in dirs:
                            self.moveDir(
                                os.path.join(subjectDir, "deepnuclei"),
                                os.path.join(subjectDir, "t1mri"), "nuclei")
                        # folds graphs patterns
                        graphPatterns = self.getGraphPatterns(s)
                        # recognition session graphs patterns
                        manualPatterns = self.getManualGraphPatterns(s)
                        autoPatterns = self.getAutoGraphPatterns(s)
                        bestPatterns = self.getBestGraphPatterns(s)
                        if "graphe" in dirs:
                            # directory graphe contains graphs in 3.0 format
                            # graphe/[acquisition] ->
                            # t1mri/acquisition/analysis/folds/3.0
                            # graphe may contain a level acquisition or not.
                            # Any way it contains subdirs : *.data,
                            # sulci_recognition_session.
                            # if there is a level acquisition, graphe does not
                            # contain *.arg files
                            subdirs = []
                            files = []
                            grapheDir = os.path.join(subjectDir, "graphe")
                            for f in os.listdir(grapheDir):
                                if os.path.isdir(os.path.join(grapheDir, f)) and not f.endswith(".data"):
                                    subdirs.append(f)
                                else:
                                    files.append(f)
                            acquisitionLevel = not contentMatch(
                                files, ".*\.arg")
                            if acquisitionLevel:
                                for acquisition in subdirs:
                                    graphAcquisitionDir = os.path.join(
                                        grapheDir, acquisition)
                                    foldsDir = os.path.join(
                                        subjectDir, "t1mri", acquisition,
                                        self.default_analysis, "folds", "3.0")
                                    self.convertFiles(
                                        graphPatterns, graphAcquisitionDir,
                                        graphAcquisitionDir, foldsDir)
                                    self.convertFiles(manualPatterns, graphAcquisitionDir, graphAcquisitionDir, os.path.join(
                                        foldsDir, self.default_session + "_manual"))
                                    self.convertFiles(autoPatterns, graphAcquisitionDir, graphAcquisitionDir, os.path.join(
                                        foldsDir, self.default_session + "_auto"))
                                    self.convertFiles(bestPatterns, graphAcquisitionDir, graphAcquisitionDir, os.path.join(
                                        foldsDir, self.default_session + "_best"))
                                    # if it already exists session
                                    # directories, move and rename inside
                                    # graphs
                                    self.moveSessionGraphs(
                                        graphAcquisitionDir,
                                        graphAcquisitionDir, foldsDir, s)
                                    # moving graphs creates new .data, so we
                                    # have to remove old .data
                                    self.removeDataGraphs(
                                        graphAcquisitionDir,
                                        graphAcquisitionDir, s)
                            else:  # no acquisition level
                                foldsDir = os.path.join(
                                    subjectDir, "t1mri",
                                    self.default_acquisition,
                                    self.default_analysis, "folds", "3.0")
                                self.convertFiles(
                                    graphPatterns, grapheDir, grapheDir,
                                    foldsDir)
                                self.convertFiles(manualPatterns, grapheDir, grapheDir, os.path.join(
                                    foldsDir, self.default_session + "_manual"))
                                self.convertFiles(autoPatterns, grapheDir, grapheDir, os.path.join(
                                    foldsDir, self.default_session + "_auto"))
                                self.convertFiles(bestPatterns, grapheDir, grapheDir, os.path.join(
                                    foldsDir, self.default_session + "_best"))
                                self.moveSessionGraphs(
                                    grapheDir, grapheDir, foldsDir, s)
                                self.removeDataGraphs(grapheDir, grapheDir, s)
                        if "sulci" in dirs:
                            # sulci directory contains graphs in 3.1 format
                            subdirs = []
                            files = []
                            sulciDir = os.path.join(subjectDir, "sulci")
                            for f in os.listdir(sulciDir):
                                if os.path.isdir(os.path.join(sulciDir, f)) and not f.endswith(".data"):
                                    subdirs.append(f)
                                else:
                                    files.append(f)
                            acquisitionLevel = "default" not in subdirs
                            if acquisitionLevel:
                                for acquisition in subdirs:
                                    sulciAcquisitionDir = os.path.join(
                                        sulciDir, acquisition)
                                    # sulci graph level
                                    sulciGraphDirs = []
                                    acquisitionFiles = []
                                    for f in os.listdir(sulciAcquisitionDir):
                                        if os.path.isdir(os.path.join(sulciAcquisitionDir, f)) and not f.endswith(".data"):
                                            sulciGraphDirs.append(f)
                                        else:
                                            acquisitionFiles.append(f)
                                    for sulciGraph in sulciGraphDirs:
                                        self.convertSulciGraphDir(
                                            sulciAcquisitionDir, sulciGraph,
                                            acquisition, s, subjectDir)
                                    if acquisitionFiles:
                                        # move remaining files to folds/3.1
                                        foldsDir = os.path.join(
                                            subjectDir, "t1mri", acquisition,
                                            self.default_analysis, "folds",
                                            "3.1")
                                        self.fileProcesses.append(
                                            FileProcess(sulciAcquisitionDir, Move(foldsDir), ".*"))
                            else:  # no acquisition level
                                # sulci graph level
                                for sulciGraph in subdirs:
                                    self.convertSulciGraphDir(
                                        sulciDir, sulciGraph,
                                        self.default_acquisition, s,
                                        subjectDir)
                                # move remaining files to folds/3.1 before
                                # removing the sulci directory
                                if files:
                                    foldsDir = os.path.join(
                                        subjectDir, "t1mri",
                                        self.default_acquisition,
                                        self.default_analysis, "folds", "3.1")
                                    self.fileProcesses.append(
                                        FileProcess(sulciDir, Move(foldsDir), ".*"))
                            self.fileProcesses.append(
                                FileProcess(sulciDir, Remove(subjectDir)))
                        if "segment" in dirs:
                            # move from segment to
                            # t1mri/acquisition/analysis/segmentation all
                            # files that match t1mri segmentation patterns
                            # move LSulci_<subject>, RSulci_<subject>,
                            # LBottom_<subject>, RBottom_<subject>,
                            # LHullJunction_<subject>,
                            # LSimpleSurface_<subject>,
                            # RHullJunction_<subject>,
                            # RSimpleSurface_<subject>
                            # in folds 3.0 + add recognition session in name
                            subdirs = []
                            files = []
                            segmentDir = os.path.join(subjectDir, "segment")
                            for f in os.listdir(segmentDir):
                                if os.path.isdir(os.path.join(segmentDir, f)) and not f.endswith(".data"):
                                    subdirs.append(f)
                                else:
                                    files.append(f)
                            # files that are in segment and must be moved in
                            # t1mri/acquisition/analysis/segmentation
                            segmentPatterns = self.getSegmentPatterns(s)
                            # files that are in segment and must be moved in
                            # t1mri/acquisition/analysis/folds/3.0/session/segmentation
                            foldsSegmentPatterns = self.getFoldsSegmentPatterns(
                                s)
                            if subdirs:  # level acquisition exists
                                for acquisition in subdirs:
                                    # segment/acquisition/patterns ->
                                    # t1mri/acquisition/default_analysis/segmentation/patterns
                                    segmentAcquisitionDir = os.path.join(
                                        segmentDir, acquisition)
                                    foldsDir = os.path.join(
                                        subjectDir, "t1mri", acquisition,
                                        self.default_analysis, "folds", "3.0",
                                        self.default_session + "_auto",
                                        "segmentation")
                                    self.convertFiles(
                                        segmentPatterns, segmentAcquisitionDir,
                                        segmentAcquisitionDir, t1mriDir,
                                        acquisition, self.default_analysis,
                                        "segmentation")
                                    self.convertFiles(
                                        foldsSegmentPatterns,
                                        segmentAcquisitionDir,
                                        segmentAcquisitionDir, foldsDir)
                            else:
                                # no level acquisition -> default_acquisition
                                self.convertFiles(
                                    segmentPatterns, segmentDir, segmentDir,
                                    t1mriDir, self.default_acquisition,
                                    self.default_analysis, "segmentation")
                                foldsDir = os.path.join(
                                    subjectDir, "t1mri",
                                    self.default_acquisition,
                                    self.default_analysis, "folds", "3.0",
                                    self.default_session + "_auto",
                                    "segmentation")
                                self.convertFiles(
                                    foldsSegmentPatterns, segmentDir,
                                    segmentDir, foldsDir)
                        if "tri" in dirs:
                            action = FileProcess(os.path.join(subjectDir, "tri"), Move(
                                os.path.join(subjectDir, "t1mri", self.default_acquisition, self.default_analysis, "segmentation"), patternDest="mesh"))
                            self.fileProcesses.append(action)
                        if "mesh" in dirs:
                            self.moveDir(os.path.join(subjectDir, "mesh"), os.path.join(
                                subjectDir, "t1mri"),
                                os.path.join("segmentation", "mesh"))
                        # move <subject>_Lmedian and <subject>_Rmedian into
                        # surface analysis
        os.chdir(currentDir)
        return self.fileProcesses
[docs] def moveDir(self, src, dest, newName=None): """ Moves a directory in another directory adding levels acquisition and analysis. If src contains acquisition directories, they are moved to dest/acquisition/default_analysis/newName. if there is no acquisition level in src, directory containt is moved to dest/default_acquisition/default_analysis/newName. """ subdirs = [] files = [] for f in os.listdir(src): if os.path.isdir(os.path.join(src, f)) and not f.endswith(".data"): subdirs.append(f) else: files.append(f) if subdirs: # level acquisition exists for acquisition in subdirs: # segment/acquisition -> t1mri/acquisition/default_analysis/segmentation action = FileProcess(os.path.join(src, acquisition), Move( os.path.join(dest, acquisition, self.default_analysis), patternDest=newName)) self.fileProcesses.append(action) if files: self.fileProcesses.append( FileProcess(src, Move(os.path.join(dest, self.default_acquisition, self.default_analysis, newName)), ".*")) action = FileProcess(src, Remove(os.path.dirname(src))) self.fileProcesses.append(action) else: action = FileProcess( src, Move(os.path.join(dest, self.default_acquisition, self.default_analysis), patternDest=newName)) self.fileProcesses.append(action) return subdirs
def getSegmentPatterns(self, subject): # (source pattern, destination pattern, [source pattern with *, [destination pattern for star pattern] ]) return [('(brain_' + subject + '\..*)', "\\1"), ("([LR]grey_white_" + subject + "\..*)", "\\1"), ("([LR]?cortex_" + subject + "\..*)", "\\1"), ("([LR]?csf_" + subject + "\..*)", "\\1"), ("([LR]skeleton_" + subject + "\..*)", "\\1"), ("([LR]roots_" + subject + "\..*)", "\\1"), ("(voronoi_" + subject + "\..*)", "\\1"), ("(head_" + subject + "\..*)", "\\1"), ("(" + subject + "_[LR]white_curv\..*)", "\\1")] def getFoldsSegmentPatterns(self, subject): return [( "([LR]Sulci_" + subject + ")(\..*)", "\\1_" + self.default_session + "_auto\\2"), ("([LR]Bottom_" + subject + ")(\..*)", "\\1_" + self.default_session + "_auto\\2"), ("([LR]HullJunction_" + subject + ")(\..*)", "\\1_" + self.default_session + "_auto\\2"), ("([LR]SimpleSurface_" + subject + ")(\..*)", "\\1_" + self.default_session + "_auto\\2"), (subject + "(_[LR]white_)((?:sulci)|(?:gyri))(\..*)", subject + "\\1\\2_" + self.default_session + "_auto\\3")] def getGraphPatterns(self, subject): return [("([LR]" + subject + "(?!\.data)\..*)", "\\1")] def getManualGraphPatterns(self, subject): return [("([LR]" + subject + ")Base(?!\.data)(\..*)", "\\1_" + self.default_session + "_manual\\2")] def getAutoGraphPatterns(self, subject): return [( "([LR]" + subject + ")Auto(?!\.data)(\..*)", "\\1_" + self.default_session + "_auto" + "\\2"), ("(" + subject + "_[LR]gyri)(?!\.data)(\..*)", "\\1_" + self.default_session + "_auto" + "\\2"), (subject + "_((?:left)|(?:right))_sulci_to_texture(\..*)", subject + "_\\1_sulci_to_texture_" + self.default_session + "_auto" + "\\2"), (subject + "_((?:left)|(?:right))_gyri_to_texture(\..*)", subject + "_\\1_gyri_to_texture_" + self.default_session + "_auto" + "\\2")] def getBestGraphPatterns(self, subject): return [("([LR]" + subject + ")AutoBest(?!\.data)(\..*)", "\\1_" + self.default_session + "_best" + "\\2")] def 
getSulciManualGraphPatterns(self, subject): return [("([LR]" + subject + ")(?!\.data)(\..*)", "\\1_" + self.default_session + "_manual\\2")]
    def moveSessionGraphs(self, oldDir, newDir, destDir, subject):
        """
        search for session directories in graph dir : a directory which
        doesn't end with .data. the session directory is renamed in
        session_auto and the graph files also.

        @param oldDir : directory where the recognition sessions are during
        parsing
        @param newDir : directory where the recognition sessions are during
        conversion
        @param destDir : directory where the recognition sessions must be
        moved
        """
        # only the first level of oldDir is examined
        root, subdirs, files = next(os.walk(oldDir))
        data = re.compile(".*\.data")
        for d in subdirs:
            if not data.match(d):
                autoGraph = "([LR]" + subject + ")Auto_" + \
                    d + "(?!\.data)(\..*)"
                # "default" session directories are renamed with the default
                # session name
                if d == "default":
                    session = self.default_session
                else:
                    session = d
                if contentMatch(os.path.join(oldDir, d), autoGraph):
                    self.fileProcesses.append(FileProcess(os.path.join(newDir, d), Move(
                        os.path.join(destDir, session + "_auto"), autoGraph, "\\1_" + session + "_auto\\2"), autoGraph))
                # schedule removal of the emptied session directory
                self.fileProcesses.append(
                    FileProcess(os.path.join(newDir, d), Remove(newDir)))
[docs] def removeDataGraphs(self, oldDataDir, newDataDir, subject): """ Remove graphs .data, they have been copied during .arg move with AimsGraphConvert command @param oldDataDir : directory where .data are during parsing @param newDataDir : directory where .data are during conversion """ dataGraph = "[LR]" + subject + \ "(?:(?:Base)|(?:Auto)|(?:AutoBest))?\.data" if contentMatch(oldDataDir, dataGraph): self.fileProcesses.append( FileProcess(newDataDir, Remove(newDataDir), pattern=dataGraph)) dataGraph = subject + "_[LR]gyri\.data" if contentMatch(oldDataDir, dataGraph): self.fileProcesses.append( FileProcess(newDataDir, Remove(newDataDir), pattern=dataGraph))
def convertSulciGraphDir(self, sourceDir, sulciGraph, acquisition, subject, subjectDir):
    """Dispatch conversion of one sulci-graph sub-directory according to its
    name: "default" and ordinary analysis directories go through
    moveSulciGraphs; "man" directories (old manual labelling convention)
    are moved straight into a <default_session>_manual folds directory."""
    sourceDir = os.path.join(sourceDir, sulciGraph)
    if sulciGraph == "default":
        # moveSulciGraphs(sourceDir, acquisition, analysis, subject,
        # subjectDir)
        self.moveSulciGraphs(
            sourceDir, acquisition, self.default_analysis, subject, subjectDir)
    elif sulciGraph == "man":
        # hack for databases that have manually labelled graphs in a man directory
        patterns = self.getSulciManualGraphPatterns(subject)
        destDir = os.path.join(
            subjectDir, "t1mri", acquisition, self.default_analysis, "folds", "3.1")
        self.fileProcesses.append(
            FileProcess(sourceDir, Move(destDir, patternDest=self.default_session + "_manual")))
        destDir = os.path.join(destDir, self.default_session + "_manual")
        self.convertFiles(patterns, sourceDir, destDir, destDir)
        # old .data directories are obsolete once the graphs are converted
        dataGraph = "[LR]" + subject + "\.data"
        self.fileProcesses.append(
            FileProcess(destDir, Remove(destDir), pattern=dataGraph))
    else:
        # any other directory name is taken as an analysis name
        self.moveSulciGraphs(
            sourceDir, acquisition, sulciGraph, subject, subjectDir)


def moveSulciGraphs(self, sourceDir, acquisition, analysis, subject, subjectDir):
    """Move one sulci-graph directory into the 3.1 layout
    (t1mri/<acquisition>/<analysis>/folds/3.1) and rename the manual, auto
    and best recognition graphs to their session-based names."""
    foldsDir = os.path.join(
        subjectDir, "t1mri", acquisition, analysis, "folds")
    # move the directory into folds/3.1
    self.fileProcesses.append(
        FileProcess(sourceDir, Move(foldsDir, patternDest="3.1")))
    foldsDir = os.path.join(foldsDir, "3.1")
    # rename manual and auto recognition graphs
    self.convertFiles(self.getManualGraphPatterns(subject), sourceDir,
                      foldsDir, os.path.join(foldsDir, self.default_session + "_manual"))
    self.convertFiles(self.getAutoGraphPatterns(subject), sourceDir,
                      foldsDir, os.path.join(foldsDir, self.default_session + "_auto"))
    self.convertFiles(self.getBestGraphPatterns(subject), sourceDir,
                      foldsDir, os.path.join(foldsDir, self.default_session + "_best"))
    # if it already exists session directories, move and rename inside
    # graphs
    self.moveSessionGraphs(sourceDir, foldsDir, foldsDir, subject)
    # moving graphs creates new .data, so we have to remove old .data
    dataGraph = "[LR]" + subject + \
        "(?:(?:Base)|(?:Auto)|(?:AutoBest))\.data"
    if contentMatch(sourceDir, dataGraph):
        self.fileProcesses.append(
            FileProcess(foldsDir, Remove(foldsDir), pattern=dataGraph))
#
class DiffusionConverter(DBConverter):

    """
    Add acquisition level in center/subject/diffusion if needed.
    """
    # session name used when tracking data had no explicit session directory
    default_tracking_session = 'default_tracking_session'

    def __init__(self, dbDir, context=None):
        """
        @param dbDir: path of the database directory to convert
        @param context: execution context used for user feedback (may be None)
        """
        super(DiffusionConverter, self).__init__(dbDir, context)
        # name of the script generated to revert this conversion
        self.undoScriptName = "undoDiffusionConversion.py"
def findActions(self):
    """Walk the database and build the list of FileProcess actions needed to
    bring each subject's diffusion directory to the acquisition-level layout.

    Returns the accumulated ``self.fileProcesses`` list.
    """
    self.fileProcesses = []
    centers = os.listdir(self.dbDir)
    # first level : center/protocol
    # NOTE(review): six.moves does not provide getcwd() (and six is not in
    # this file's visible import header) — this is probably meant to be
    # os.getcwd(); confirm and fix.
    currentDir = six.moves.getcwd()
    os.chdir(self.dbDir)
    for p in centers:
        if os.path.isdir(p):
            subjects = os.listdir(p)
            # second level : subject
            for s in subjects:
                subjectDir = os.path.join(p, s)
                if os.path.isdir(subjectDir) and s != "registration" and s != "group_analysis":
                    # directory subject/diffusion
                    # if there is a diffusion directory
                    diffusionDir = os.path.join(subjectDir, "diffusion")
                    if os.path.isdir(diffusionDir):
                        # classify direct children: sub-directories (except
                        # .data graph directories) vs plain files
                        subdirs = []
                        files = []
                        for f in os.listdir(diffusionDir):
                            if os.path.isdir(os.path.join(diffusionDir, f)) and not f.endswith(".data"):
                                subdirs.append(f)
                            else:
                                files.append(f)
                        acquisitionLevel = True
                        if files:
                            # if level acquisition exists, diffusion directory
                            # doesn't contain t2 diffusion image file.
                            regexp = re.compile(s + ".*\.ima")
                            for f in files:
                                if regexp.match(f):
                                    acquisitionLevel = False
                                    break
                        acquisitionConversionPatterns = self.getAcquisitionPatterns(
                            s)
                        analysisConversionPatterns = self.getAnalysisPatterns(
                            s)
                        sessionConversionPatterns = self.getSessionPatterns(
                            s, self.default_tracking_session)
                        t1mriPatterns = (
                            s + "_eacpc_t1_mri(\..*)", "t1_acpc_" + s + "\\1")
                        if not acquisitionLevel:
                            # must add a level aquisition in diffusion directory
                            # '<subject>_eacpc_t1_mri' renamed 't1_acpc_<subject>'
                            # and moved to t1mri
                            if contentMatch(diffusionDir, t1mriPatterns[0]):
                                self.fileProcesses.append(
                                    FileProcess(diffusionDir, Move(os.path.join(subjectDir, "t1mri", self.default_acquisition), t1mriPatterns[0], t1mriPatterns[1]), t1mriPatterns[0]))
                            acquisitionDir = os.path.join(
                                diffusionDir, self.default_acquisition)
                            # move some transformations that are directly
                            # in diffusion dir to
                            # center/subject/registration
                            oldRegistrationPatterns = self.getOldRegistrationPatterns(
                                s)
                            self.convertFiles(
                                oldRegistrationPatterns, diffusionDir, diffusionDir, os.path.join(subjectDir, "registration"), "")
                            # move all in default_acquisition
                            action = FileProcess(
                                diffusionDir, Move(acquisitionDir), ".*")
                            self.fileProcesses.append(action)
                            # move intra subject and modality
                            # transformations that are directly in
                            # diffusion dir to
                            # diffusion/acquisition/registration
                            oldIntraRegistrationPatterns = self.getOldIntraRegistrationPatterns(
                                s)
                            self.convertFiles(
                                oldIntraRegistrationPatterns, diffusionDir, acquisitionDir, diffusionDir, self.default_acquisition, "registration")
                            # rename acquisition files
                            self.convertFiles(
                                acquisitionConversionPatterns, diffusionDir, acquisitionDir, diffusionDir, self.default_acquisition)
                            # move and rename analysis files
                            self.convertFiles(
                                analysisConversionPatterns, diffusionDir, acquisitionDir, diffusionDir, self.default_acquisition, self.default_analysis)
                            # move and rename tracking files
                            self.convertFiles(
                                sessionConversionPatterns, diffusionDir, acquisitionDir, diffusionDir, self.default_acquisition, self.default_analysis, self.default_tracking_session)
                            # move all tracking directories in analysis
                            for tracking in subdirs:
                                self.fileProcesses.append(
                                    FileProcess(os.path.join(acquisitionDir, tracking), Move(os.path.join(acquisitionDir, self.default_analysis))))
                                trackingPatterns = self.getExistingSessionPatterns(
                                    s, tracking)
                                self.convertFiles(trackingPatterns, os.path.join(diffusionDir, tracking), os.path.join(
                                    acquisitionDir, self.default_analysis, tracking), diffusionDir, self.default_acquisition, self.default_analysis, tracking, starReplace="analysis")
                        else:
                            # acquisition level exists
                            for acquisition in subdirs:
                                acquisitionDir = os.path.join(
                                    diffusionDir, acquisition)
                                sessions = []
                                files = []
                                for f in os.listdir(acquisitionDir):
                                    if os.path.isdir(os.path.join(acquisitionDir, f)) and not f.endswith(".data"):
                                        sessions.append(f)
                                    else:
                                        files.append(f)
                                if contentMatch(acquisitionDir, t1mriPatterns[0]):
                                    self.fileProcesses.append(
                                        FileProcess(acquisitionDir, Move(os.path.join(subjectDir, "t1mri", acquisition), t1mriPatterns[0], t1mriPatterns[1]), t1mriPatterns[0]))
                                self.convertFiles(
                                    acquisitionConversionPatterns, acquisitionDir, acquisitionDir, diffusionDir, acquisition)
                                self.convertFiles(
                                    analysisConversionPatterns, acquisitionDir, acquisitionDir, diffusionDir, acquisition, self.default_analysis, starReplace="analysis")
                                self.convertFiles(
                                    sessionConversionPatterns, acquisitionDir, acquisitionDir, diffusionDir, acquisition, self.default_analysis, self.default_tracking_session)
                                for tracking in sessions:
                                    if contentMatch(os.path.join(acquisitionDir, tracking), s + ".*_((bundles)|(density)|(regions)|(statistics)).*"):
                                        # it is really a tracking directory,
                                        # not an analysis directory
                                        self.fileProcesses.append(
                                            FileProcess(os.path.join(acquisitionDir, tracking), Move(os.path.join(acquisitionDir, self.default_analysis))))
                                        trackingPatterns = self.getExistingSessionPatterns(
                                            s, tracking)
                                        self.convertFiles(trackingPatterns, os.path.join(acquisitionDir, tracking), os.path.join(
                                            acquisitionDir, self.default_analysis, tracking), diffusionDir, acquisition, self.default_analysis, tracking, starReplace="analysis")
                    # directory subject/registration
                    registrationDir = os.path.join(
                        subjectDir, "registration")
                    if os.path.isdir(registrationDir):
                        # intra subject and modality referentials and
                        # transformations moved from
                        # protocol/subject/registration to
                        # diffusion/acquisition/registration
                        intraRegistrationPatterns = self.getIntraRegistrationPatterns(
                            s)
                        self.convertFiles(
                            intraRegistrationPatterns, registrationDir, registrationDir, diffusionDir, self.default_acquisition, "registration")
                        # inter modality referentials and transformations
                        # stay in center/subject/regsitration but are
                        # renamed
                        registrationPatterns = self.getRegistrationPatterns(
                            s)
                        self.convertFiles(
                            registrationPatterns, registrationDir, registrationDir, registrationDir, "")
    os.chdir(currentDir)
    return self.fileProcesses
def getAcquisitionPatterns(self, subject):
    """Renaming patterns for acquisition-level diffusion images.

    Each entry is ``(source pattern, destination pattern, source pattern
    with an acquisition wildcard[, destination for the wildcard form])``.
    Raw strings replace the original plain strings whose ``\\.`` sequences
    are invalid escapes (pattern text unchanged).
    """
    return [
        (subject + r'_raw_dw_diffusion(\..*)',
         'raw_diffusion_' + subject + r"\1",
         subject + r"_(.+)_raw_dw_diffusion(\..*)"),
        (subject + r'_dw_diffusion(\..*)',
         'diffusion_' + subject + r"\1",
         subject + r"_(.+)(?<!raw)(?<!acpc)_dw_diffusion(\..*)"),
        (subject + r'_t2_diffusion(\..*)',
         't2diff_' + subject + r"\1",
         subject + r"_(.+)(?<!acpc)(?<!eacpc)_t2_diffusion(\..*)"),
        (subject + r'_acpc_dw_diffusion(\..*)',
         'acpc_diffusion_' + subject + r"\1",
         subject + r"_(.+)_acpc_dw_diffusion(\..*)"),
        (subject + r'_acpc_t2_diffusion(\..*)',
         'acpc_t2diff_' + subject + r"\1",
         subject + r"_(.+)_acpc_t2_diffusion(\..*)"),
        (subject + r'_eacpc_dw_diffusion(\..*)',
         'eacpc_diffusion_' + subject + r"\1",
         subject + r"_(.+)_eacpc_dw_diffusion(\..*)"),
        (subject + r'_eacpc_t2_diffusion(\..*)',
         'eacpc_t2diff_' + subject + r"\1",
         subject + r"_(.+)_eacpc_t2_diffusion(\..*)"),
    ]


def getAnalysisPatterns(self, subject):
    """Renaming patterns for analysis-level diffusion data (masks, DTI and
    derived maps); same tuple layout as getAcquisitionPatterns."""
    return [
        (subject + r'_mask(\..*)',
         'diffusion_mask_' + subject + r"\1",
         subject + r"_(.+)(?<!error)_mask(\..*)"),
        (subject + r'_fixedDT(\..*)',
         'dti_' + subject + r"\1",
         subject + r"_(.+)_fixedDT(\..*)"),
        (subject + r'_error_mask(\..*)',
         'dti_error_mask_' + subject + r"\1",
         subject + r"_(.+)_error_mask(\..*)"),
        (subject + r'_adc(\..*)',
         'adc_' + subject + r"\1",
         subject + r"_(.+)_adc(\..*)"),
        (subject + r'_fa(\..*)',
         'fa_' + subject + r"\1",
         subject + r"_(.+)_fa(\..*)"),
        (subject + r'_vr(\..*)',
         'vr_' + subject + r"\1",
         subject + r"_(.+)_vr(\..*)"),
        (subject + r'_stddev(\..*)',
         'stddev_' + subject + r"\1",
         subject + r"_(.+)_stddev(\..*)"),
        (subject + r'_dwt2(\..*)',
         'dwt2_' + subject + r"\1",
         subject + r"_(.+)_dwt2(\..*)"),
        (subject + r'_maxev(\..*)',
         'maxev_' + subject + r"\1",
         subject + r"_(.+)_maxev(\..*)"),
        (subject + r'_rgbev(\..*)',
         'rgbev_' + subject + r"\1",
         subject + r"_(.+)_rgbev(\..*)"),
        (subject + r'_qball(\..*)',
         'qball_' + subject + r"\1",
         subject + r"_(.+)_qball(\..*)"),
        (subject + r'_alphaMap(\..*)',
         'alphaMap_' + subject + r"\1",
         subject + r"_(.+)_alphaMap(\..*)"),
        (subject + r'_orientation(\..*)',
         'orientation_' + subject + r"\1",
         subject + r"_(.+)_orientation(\..*)"),
    ]


def getSessionPatterns(self, subject, session):
    """Renaming patterns for tracking files with no session in their name;
    the given session name is appended."""
    return [
        (subject + r'_regions(\..*)',
         'regions_' + subject + "_" + session + r"\1"),
        (subject + r'_bundles(\..*)',
         'bundles_' + subject + "_" + session + r"\1"),
        (subject + r'_density(\..*)',
         'density_' + subject + "_" + session + r"\1"),
        (subject + r'_statistics(\..*)',
         'statistics_' + subject + "_" + session + r"\1"),
    ]


def getExistingSessionPatterns(self, subject, session):
    """Renaming patterns for tracking files that already carry a session
    name; third element matches files that also embed an acquisition."""
    return [
        (subject + '_' + session + r'_regions(\..*)',
         'regions_' + subject + "_" + session + r"\1",
         subject + "_" + session + r"_(.+)_regions(\..*)"),
        (subject + '_' + session + r'_bundles(\..*)',
         'bundles_' + subject + "_" + session + r"\1",
         subject + "_" + session + r"_(.+)_bundles(\..*)"),
        (subject + '_' + session + r'_density(\..*)',
         'density_' + subject + "_" + session + r"\1",
         subject + "_" + session + r"_(.+)_density(\..*)"),
        (subject + '_' + session + r'_statistics(\..*)',
         'statistics_' + subject + "_" + session + r"\1",
         subject + "_" + session + r"_(.+)_statistics(\..*)"),
    ]


def getIntraRegistrationPatterns(self, subject):
    # intra subject and modality registration
    """Renaming patterns for intra-subject/modality referentials and
    transformations; 4-tuples additionally give the wildcard destination."""
    return [
        ('DiffT2-' + subject + r'(\..*)',
         't2diff_' + subject + r"\1",
         'DiffT2-' + subject + r'-(.+)(\..*)'),
        ('ACPC-' + subject + r'(\..*)',
         'acpc_t2diff_' + subject + r"\1",
         'ACPC-' + subject + r'-(.+)(\..*)'),
        ('ExtACPC-DiffT2-' + subject + r'(\..*)',
         'eacpc_t2diff_' + subject + r"\1",
         'ExtACPC-DiffT2-' + subject + r'-(.+)(\..*)'),
        ('DiffT2-' + subject + r'_TO_ACPC(\..*)',
         't2diff_TO_acpc_t2diff_' + subject + "_" + self.default_acquisition + r"\1",
         'DiffT2-' + subject + r'-(.+)_TO_ACPC(\..*)',
         't2diff_TO_acpc_t2diff_' + subject + r"_\1\2"),
        ('DiffT2-' + subject + r'_TO_ExtACPC-DiffT2(\..*)',
         't2diff_TO_eacpc_t2diff_' + subject + "_" + self.default_acquisition + r"\1",
         'DiffT2-' + subject + r'-(.+)_TO_ExtACPC-DiffT2(\..*)',
         't2diff_TO_eacpc_t2diff_' + subject + r"_\1\2"),
        ('ACPC-' + subject + r'_TO_ExtACPC-DiffT2(\..*)',
         'acpc_t2diff_TO_eacpc_t2diff_' + subject + "_" + self.default_acquisition + r"\1",
         'ACPC-' + subject + r'-(.+)_TO_ExtACPC-DiffT2(\..*)',
         'acpc_t2diff_TO_eacpc_t2diff_' + subject + r"_\1\2"),
    ]


def getRegistrationPatterns(self, subject):
    # inter modality registration
    """Renaming patterns for inter-modality transformations that stay in
    center/subject/registration."""
    return [
        ("RawT1-" + subject + r"-(.+)_TO_DiffT2(\..*)",
         "t1mri_TO_t2diff_" + subject + r"_\1_\1\2"),
        ("RawT1-" + subject + r"_TO_DiffT2(\..*)",
         "t1mri_TO_t2diff_" + subject + "_" + self.default_acquisition + "_" + self.default_acquisition + r"\1"),
        ("DiffT2-" + subject + r"-(.+)_TO_RawT1(\..*)",
         "t2diff_TO_t1mri_" + subject + r"_\1_\1\2"),
        ("DiffT2-" + subject + r"_TO_RawT1(\..*)",
         "t2diff_TO_t1mri_" + subject + "_" + self.default_acquisition + "_" + self.default_acquisition + r"\1"),
        ("RawT1-" + subject + r"-(.+)_TO_ExtACPC-DiffT2(\..*)",
         "t1mri_TO_eacpc_t2diff_" + subject + r"_\1_\1\2"),
        ("RawT1-" + subject + r"_TO_ExtACPC-DiffT2(\..*)",
         "t1mri_TO_eacpc_t2diff_" + subject + "_" + self.default_acquisition + "_" + self.default_acquisition + r"\1"),
        ("RawT1-" + subject + r"-(.+)_TO_ACPC(\..*)",
         "t1mri_TO_acpc_t2diff_" + subject + r"_\1_\1\2"),
        ("RawT1-" + subject + r"_TO_ACPC(\..*)",
         "t1mri_TO_acpc_t2diff_" + subject + "_" + self.default_acquisition + "_" + self.default_acquisition + r"\1"),
    ]


def getOldRegistrationPatterns(self, subject):
    # transformation in version 3.0
    """Renaming patterns for inter-modality transformations written by the
    3.0 hierarchy."""
    return [
        (subject + r"_t2_diffusion_TO_t1_anatomy_transform(\..*)",
         "t2diff_TO_t1mri_" + subject + "_" + self.default_acquisition + "_" + self.default_acquisition + r"\1"),
        (subject + r"_t1_anatomy_TO_t2_diffusion_transform(\..*)",
         "t1mri_TO_t2diff_" + subject + "_" + self.default_acquisition + "_" + self.default_acquisition + r"\1"),
        (subject + r"_t1_anatomy_TO_acpc_transform(\..*)",
         "t1mri_TO_acpc_t2diff_" + subject + "_" + self.default_acquisition + "_" + self.default_acquisition + r"\1"),
        (subject + r"_t1_anatomy_TO_eacpc_transform(\..*)",
         "t1mri_TO_eacpc_t2diff_" + subject + "_" + self.default_acquisition + "_" + self.default_acquisition + r"\1"),
    ]


def getOldIntraRegistrationPatterns(self, subject):
    # transformation in version 3.0
    """Renaming patterns for intra-subject transformations written by the
    3.0 hierarchy."""
    return [
        (subject + r"_t2_diffusion_TO_acpc_transform(\..*)",
         "t2diff_TO_acpc_t2diff_" + subject + "_" + self.default_acquisition + r"\1"),
        (subject + r"_acpc_TO_eacpc_transform(\..*)",
         "acpc_t2diff_TO_eacpc_t2diff_" + subject + "_" + self.default_acquisition + r"\1"),
        (subject + r"_t2_diffusion_TO_eacpc_transform(\..*)",
         "t2diff_TO_eacpc_t2diff_" + subject + "_" + self.default_acquisition + r"\1"),
    ]
def contentMatch(d, pattern):
    """Return True if *d* contains an entry whose name matches *pattern*.

    @param d: either a list of file names, or a directory path whose entries
        are listed with os.listdir; anything else is treated as empty.
    @param pattern: regular expression, matched with re.match (anchored at
        the beginning of each entry name).
    """
    regexp = re.compile(pattern)
    # isinstance (rather than type(d) is list) also accepts list subclasses
    if isinstance(d, list):
        entries = d
    elif os.path.isdir(d):
        entries = os.listdir(d)
    else:
        entries = []
    return any(regexp.match(entry) for entry in entries)

#
class PETConverter(DBConverter):

    """
    Add acquisition level in center/subject/pet if needed.
    and analysis/segmentation and analysis/ROI
    """

    def __init__(self, dbDir, context=None):
        """
        @param dbDir: path of the database directory to convert
        @param context: execution context used for user feedback (may be None)
        """
        super(PETConverter, self).__init__(dbDir, context)
        # name of the script generated to revert this conversion
        self.undoScriptName = "undoPETConversion.py"
def findActions(self):
    """Walk the database and build the FileProcess actions needed to add an
    acquisition level under each subject's pet directory, and to move old
    segment/graphe files into analysis/segmentation and analysis/ROI.

    Returns the accumulated ``self.fileProcesses`` list.
    """
    self.fileProcesses = []
    centers = os.listdir(self.dbDir)
    # first level : center/protocol
    # fix: six.moves has no getcwd() — use os.getcwd()
    currentDir = os.getcwd()
    os.chdir(self.dbDir)
    for p in centers:
        if os.path.isdir(p):
            subjects = os.listdir(p)
            # second level : subject
            for s in subjects:
                subjectDir = os.path.join(p, s)
                if os.path.isdir(subjectDir) and s != "registration" and s != "group_analysis":
                    # directory subject/pet
                    petDir = os.path.join(subjectDir, "pet")
                    if os.path.isdir(petDir):
                        # plain files (including .data graph directories)
                        # directly under pet: their presence means there is
                        # no acquisition level yet
                        files = []
                        for f in os.listdir(petDir):
                            if not os.path.isdir(os.path.join(petDir, f)) or f.endswith(".data"):
                                files.append(f)
                        if files:
                            acquisitionDir = os.path.join(
                                petDir, self.default_acquisition)
                            # move all in default_acquisition
                            action = FileProcess(
                                petDir, Move(acquisitionDir), r".*\..*")
                            self.fileProcesses.append(action)
                            # files in segment and graphe have no acquisition
                            # level, they are moved in default_acquisition.
                            # move segmentation files : they were first in
                            # subject/segment directory but after t1mri
                            # conversion, they are in
                            # subject/t1mri/default_acquisition/default_analysis/segmentation
                            # they have to be moved in
                            # pet/default_acquisition/default_analysis/segmentation
                            segmentDir = os.path.join(subjectDir, "segment")
                            segmentPatterns = self.getSegmentationPatterns(s)
                            for segmentPattern in segmentPatterns:
                                if contentMatch(segmentDir, segmentPattern):
                                    self.fileProcesses.append(
                                        FileProcess(segmentDir, Move(os.path.join(acquisitionDir, self.default_analysis, "segmentation")), pattern=segmentPattern))
                            # move graph files
                            graphDir = os.path.join(subjectDir, "graphe")
                            graphPatterns = self.getGraphPatterns(s)
                            for graphPattern in graphPatterns:
                                if contentMatch(graphDir, graphPattern):
                                    self.fileProcesses.append(
                                        FileProcess(graphDir, Move(os.path.join(acquisitionDir, self.default_analysis, "ROI")), pattern=graphPattern))
    os.chdir(currentDir)
    return self.fileProcesses
def getSegmentationPatterns(self, subject):
    """File-name patterns (regular expressions) of PET segmentation data for
    *subject*.  Raw strings replace the original plain strings whose ``\\.``
    sequences are invalid escapes (pattern text unchanged)."""
    return [r"segm_by_level_" + subject + r"\..*",
            r"fov_mask_" + subject + r"\..*",
            r"organism_" + subject + r"\..*",
            r"multistruct_criterion_" + subject + r"\..*",
            r"samples_" + subject + r"\..*",
            r"unfusioned_labels_" + subject + r"\..*",
            r"organs_in_voxel_" + subject + r"\..*",
            r"organs_proportion_" + subject + r"\..*",
            r"hys_" + subject + r"\..*",
            r"der_" + subject + r"\..*",
            r"contours_" + subject + r"_.*\..*"]


def getGraphPatterns(self, subject):
    """File-name patterns of PET ROI graph and statistics data for
    *subject*."""
    return [subject + r"_ROI\..*",
            r"voi_stat_" + subject + r"\..*",
            r"striata_ml_stat_" + subject + r"\..*"]
# # TODO : converters for other toolboxes if needed # nuclear imaging (pet) # biology # surface analysis # morphometry / sulci # class SulciConverter(DBConverter): # pass #
class BVConverter_3_1(DBConverter):

    """
    Convert database to Brainvisa 3.1 format. It contains a converter for each
    toolbox. To add a converter for a part of the database, create a sub class
    of DBConverter and redefine findActions method which goes through the
    database and lists modification to do. Then, an instance of that converter
    must be added to this class's components list
    """

    def __init__(self, db, context=None, segmentDefaultDestination=None, grapheDefaultDestination=None):
        """
        @param db: diskitem representing the database directory
        @param context: execution context used for user feedback (may be None)
        @param segmentDefaultDestination: toolbox directory where files left
            in subject/segment are moved (None disables that step)
        @param grapheDefaultDestination: destination keyword for files left in
            subject/graphe ("t1mri_folds", "t1mri_roi" or pet by default)
        """
        super(BVConverter_3_1, self).__init__(db.name, context)
        # per-toolbox sub-converters, run by the base class findActions
        self.components = {'t1mri': T1MriConverter(self.dbDir, context),
                           #'sulci' : SulciConverter(self.dbDir),
                           'diffusion': DiffusionConverter(self.dbDir, context),
                           'pet': PETConverter(self.dbDir, context)
                           }
        self.undoScriptName = "undoBv3_1Conversion.py"
        # set by process(): backup path of the previous settings file
        self.oldSettings = None
        # set by process(): whether a new settings file was written
        self.newSettings = False
        self.segmentDefaultDestination = segmentDefaultDestination
        self.grapheDefaultDestination = grapheDefaultDestination
        # get the real object database because it can have changed after
        # execution of another process
        self.db = neuroHierarchy.databases.database(db.name)
def findActions(self, component=None):
    """
    Process the files remaining in the segment and graphe directories: move
    them into the chosen default toolbox destination (option added in the
    conversion process), then append the actions found by the per-toolbox
    component converters.

    @param component: optional key restricting base-class processing to one
        component converter
    @return: list of FileProcess actions
    """
    self.fileProcesses = []
    if self.segmentDefaultDestination or self.grapheDefaultDestination:
        self.context.write("\nRemaining files in segment and graphe directories will be moved to " +
                           self.segmentDefaultDestination + " and " + self.grapheDefaultDestination)
    centers = os.listdir(self.dbDir)
    # first level : center
    # NOTE(review): six.moves does not provide getcwd() (and six is not in
    # this file's visible import header) — probably os.getcwd(); confirm.
    currentDir = six.moves.getcwd()
    os.chdir(self.dbDir)
    for p in centers:
        if os.path.isdir(p):
            subjects = os.listdir(p)
            # second level : subject
            for s in subjects:
                subjectDir = os.path.join(p, s)
                if os.path.isdir(subjectDir) and s != "registration" and s != "group_analysis":
                    segmentDir = os.path.join(subjectDir, "segment")
                    grapheDir = os.path.join(subjectDir, "graphe")
                    if os.path.exists(segmentDir) and self.segmentDefaultDestination:
                        # split children into acquisition sub-directories
                        # (non-.data dirs) and plain files
                        subdirs = []
                        files = []
                        for f in os.listdir(segmentDir):
                            if os.path.isdir(os.path.join(segmentDir, f)) and not f.endswith(".data"):
                                subdirs.append(f)
                            else:
                                files.append(f)
                        if subdirs:
                            # level acquisition exists
                            for acquisition in subdirs:
                                # segment/acquisition/* ->
                                # t1mri/acquisition/default_analysis/segmentation/*
                                segmentAcquisitionDir = os.path.join(
                                    segmentDir, acquisition)
                                self.fileProcesses.append(
                                    FileProcess(segmentAcquisitionDir, Move(os.path.join(subjectDir, self.segmentDefaultDestination, acquisition, self.default_analysis, "segmentation")), pattern=".*"))
                        if files:
                            self.fileProcesses.append(
                                FileProcess(segmentDir, Move(os.path.join(subjectDir, self.segmentDefaultDestination, self.default_acquisition, self.default_analysis, "segmentation")), pattern=".*"))
                        # remove the emptied segment directory
                        self.fileProcesses.append(
                            FileProcess(segmentDir, Remove(subjectDir)))
                    if os.path.exists(grapheDir) and self.grapheDefaultDestination:
                        subdirs = []
                        files = []
                        for f in os.listdir(grapheDir):
                            if os.path.isdir(os.path.join(grapheDir, f)) and not f.endswith(".data"):
                                subdirs.append(f)
                            else:
                                files.append(f)
                        # .arg files directly under graphe mean there is no
                        # acquisition level
                        acquisitionLevel = not contentMatch(
                            files, ".*\.arg")
                        if self.grapheDefaultDestination == "t1mri_folds":
                            if acquisitionLevel:
                                for acquisition in subdirs:
                                    self.fileProcesses.append(FileProcess(os.path.join(grapheDir, acquisition), Move(
                                        os.path.join(subjectDir, "t1mri", acquisition, self.default_analysis, "folds", "3.0")), pattern=".*"))
                            if not acquisitionLevel or files:
                                self.fileProcesses.append(
                                    FileProcess(grapheDir, Move(os.path.join(subjectDir, "t1mri", self.default_acquisition, self.default_analysis, "folds", "3.0")), pattern=".*"))
                        elif self.grapheDefaultDestination == "t1mri_roi":
                            if acquisitionLevel:
                                for acquisition in subdirs:
                                    self.fileProcesses.append(FileProcess(os.path.join(grapheDir, acquisition), Move(
                                        os.path.join(subjectDir, "t1mri", acquisition, self.default_analysis, "ROI")), pattern=".*"))
                            if not acquisitionLevel or files:
                                self.fileProcesses.append(
                                    FileProcess(grapheDir, Move(os.path.join(subjectDir, "t1mri", self.default_acquisition, self.default_analysis, "ROI")), pattern=".*"))
                        else:
                            # pet
                            if acquisitionLevel:
                                for acquisition in subdirs:
                                    self.fileProcesses.append(FileProcess(os.path.join(grapheDir, acquisition), Move(
                                        os.path.join(subjectDir, "pet", acquisition, self.default_analysis, "ROI")), pattern=".*"))
                            if not acquisitionLevel or files:
                                self.fileProcesses.append(
                                    FileProcess(grapheDir, Move(os.path.join(subjectDir, "pet", self.default_acquisition, self.default_analysis, "ROI")), pattern=".*"))
                        # remove the emptied graphe directory
                        self.fileProcesses.append(
                            FileProcess(grapheDir, Remove(subjectDir)))
    os.chdir(currentDir)
    actions = []
    actions.extend(self.fileProcesses)
    actions.extend(super(BVConverter_3_1, self).findActions(component))
    return actions
def process(self, component=None, debug=False):
    """Run the conversion, then switch the database ontology to 3.2.0 and
    reread the database — even when the conversion itself fails.

    @param component: optional key restricting processing to one component
    @param debug: passed through to the base class (dry-run/trace mode)
    """
    try:
        super(BVConverter_3_1, self).process(component, debug)
    finally:
        # after conversion, the database ontology must be changed and the
        # database reread.
        settingsFile = os.path.join(self.dbDir, 'database_settings.minf')
        if os.path.exists(settingsFile):
            # keep a .sav backup so the undo script can restore it
            self.oldSettings = settingsFile + ".sav"
            shutil.copyfile(settingsFile, settingsFile + ".sav")
            # NOTE(review): readMinf/writeMinf/SQLDatabase are not in this
            # file's visible import header — presumably imported elsewhere
            # in the module; confirm.
            settings = readMinf(settingsFile)[0]
        else:
            settings = {}
        settings['ontology'] = 'brainvisa-3.2.0'
        writeMinf(settingsFile, [settings])
        self.newSettings = True
        # reopen and fully refresh the database with the new ontology
        self.db = SQLDatabase(self.db.sqlDatabaseFile, self.db.directory)
        neuroHierarchy.databases.add(self.db)
        self.context.write("")
        self.context.write('<b>Clear database:', self.db.name, '</b>')
        self.db.clear(context=self.context)
        self.context.write('<b>Update database:', self.db.name, '</b>')
        self.db.update(context=self.context)
def generateUndoScripts(self, component=None):
    """Generate (or extend) the undo script that reverts the settings-file
    change made by process().

    If the base class already created an undo script (``self.doneProcesses``
    is non-empty) the settings-restoration commands are appended to it;
    otherwise a fresh script with the usual header is written.  The two
    previously duplicated branches are merged, and the file is managed with
    a ``with`` statement so it is closed even on error.

    @param component: forwarded to the base class implementation
    """
    super(BVConverter_3_1, self).generateUndoScripts(component)
    scriptsDir = os.path.join(self.dbDir, 'scripts')
    if not os.path.exists(scriptsDir):
        os.mkdir(scriptsDir)
    undoScriptName = os.path.join(scriptsDir, self.undoScriptName)
    settingsFile = 'database_settings.minf'
    # a undo script has already been created by super class method, so open
    # the file in append mode; otherwise create it with a header
    mode = "a" if self.doneProcesses else "w"
    with open(undoScriptName, mode) as undoScript:
        if not self.doneProcesses:
            print("#!/usr/bin/env python", file=undoScript)
            print("# This has been created by Brainvisa. ", file=undoScript)
            print("# Run it to undo changes made by database processes.",
                  file=undoScript)
            print("import os, shutil, sys", file=undoScript)
        # the undo script is in database directory, so change dir to script
        # directory is change dir to database directory
        print("os.chdir(os.path.dirname(os.path.dirname(sys.argv[0])))",
              file=undoScript)
        if self.oldSettings:
            print(
                "os.rename('" + settingsFile + ".sav', '" + settingsFile + "')\n", file=undoScript)
        elif self.newSettings:
            print("os.remove('" + settingsFile + "')\n", file=undoScript)
    # make the script executable
    os.chmod(undoScriptName, os.stat(undoScriptName)[
             stat.ST_MODE] | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
def undo(self, component=None):
    """Undo the conversion, then reread the database.

    The database is cleared and updated in a ``finally`` clause, so it is
    refreshed even when the undo itself raises.  The original inner
    ``try/except Exception: raise`` was a no-op (it re-raised unchanged)
    and has been removed.

    @param component: optional key restricting undo to one component
    """
    try:
        super(BVConverter_3_1, self).undo(component)
    finally:
        self.db = SQLDatabase(self.db.sqlDatabaseFile, self.db.directory)
        neuroHierarchy.databases.add(self.db)
        self.context.write("")
        self.context.write('<b>Clear database:', self.db.name, '</b>')
        self.db.clear(context=self.context)
        self.context.write('<b>Update database:', self.db.name, '</b>')
        self.db.update(context=self.context)
# # DBCleaner #
class DBCleaner(DBProcessor):

    """
    This processor search unknown files in the database for brainvisa
    filesystem ontology. Default action proposed for these files is Remove.
    An unknown DiskItem in the database has _identified field false.
    """

    def __init__(self, db, context=None):
        """
        @type db : Directory
        @param db : diskitem that represents the database directory
        @param context: execution context used for user feedback (may be None)
        """
        super(DBCleaner, self).__init__(db.name, context)
        self.db = neuroHierarchy.databases.database(db.name)
        # name of the script generated to revert the cleaning
        self.undoScriptName = "undoCleaning.py"

    def findActions(self):
        """
        Gets files and directories that are not recognized in the hierarchy,
        and creates suggested actions to clean the database.
        """
        # parse database to get all diskitem, unknown items will have the flag
        # _identified=False
        for diskItem in self.db.scanDatabaseDirectories(includeUnknowns=True):
            if not diskItem._identified:
                filename = diskItem.fullPath()
                # make the path relative to the database directory
                filename = filename.replace(os.path.join(self.dbDir, ""), "")
                # items already in the trash are left alone
                if not filename.startswith("trash"):
                    self.fileProcesses.append(
                        FileProcess(filename, Remove(os.path.dirname(filename)), diskItem=diskItem))
                    # Remove(f)))
        return self.fileProcesses
# def searchUnknowns(self, directory): #""" # Gets files and directories that are not recognized in the hierarchy. #""" # unknowns=[] # for item in os.listdir(directory): # file=os.path.join(directory, item) # diskItem=self.db.getDiskItemFromFileName(file, None) # if diskItem is None: # file=item.fullPath() # file=file.replace(os.path.join(self.dbDir, ""), "") # unknowns.append(file) # unknowns.append(file) # elif os.path.isdir(file): # unknowns.extend(self.searchUnknowns(file)) # return unknowns # # DBChecker #
class DBChecker(DBProcessor):

    """
    Base class for Database checker.
    This processor search some type of data in the database, checks them and
    eventually suggests an action to correct the data.

    @type db: Directory
    @ivar db: database directory
    @type filters: list
    @ivar filters: attributes whose values are used to sort and group data
    found in the database.
    @type searchTypes: list
    @ivar searchTypes: type of data searched for in the database
    @type fileProcesses: SortedDictionary (to keep insertion order)
    @ivar fileProcesses: map of data found in the database, grouped by filter
    attributes values.
    { filter1_value1 : {filter2_value1 : {... {filtern_value1: [ files ] },
    filter1_value2 : { ... } } }
    """

    def __init__(self, db, context=None):
        """
        @param db: diskitem representing the database directory
        @param context: execution context used for user feedback (may be None)
        """
        super(DBChecker, self).__init__(db.name, context)
        self.db = neuroHierarchy.databases.database(db.name)
        # subclasses fill these in
        self.filters = []
        self.searchTypes = []
        self.fileProcesses = SortedDictionary()
def centerAttribute(self):
    '''determine if the current database uses 'center' or 'protocol'
    attribute '''
    # probe the ontology for a top-level "Center" directory type first;
    # only when it is absent, check for the older "Protocol" type
    typeToPatterns = self.db.fso.typeToPatterns
    if typeToPatterns.get(ReadDiskItem('Center', 'Directory').type):
        return 'center'
    if typeToPatterns.get(ReadDiskItem('Protocol', 'Directory').type):
        return 'protocol'
    # neither type declared: fall back to the modern attribute name
    return 'center'
def findActions(self, filters={}, component=None):
    """Search the database for all items of the configured searchTypes,
    check each one and file the resulting FileProcess into the nested
    fileProcesses map keyed by the filter attribute values.

    NOTE(review): the ``filters`` parameter is never used in this body (and
    its mutable default is risky) — verify whether callers rely on it.

    @param component: optional key; when components exist, delegates to the
        base class for that component
    @return: the nested fileProcesses structure (or the base class result)
    """
    if self.components:
        return super(DBChecker, self).findActions(component)
    else:
        images = []
        # search data by type
        tm = registration.getTransformationManager()
        for t in self.searchTypes:
            images = self.db.findDiskItems(_type=t)
            # put each image in a map ordered by filters values
            for image in images:
                # path relative to the database directory
                file = image.fullPath()
                file = file.replace(os.path.join(self.dbDir, ""), "")
                p = FileProcess(
                    file, self.checkItem(image, t, tm), diskItem=image)
                # walk/build the nested grouping structure level by level
                m = self.fileProcesses
                mv = self.fileProcesses
                f = None
                cpt = len(self.filters)
                for f in self.filters:
                    value = image.get(f, None)
                    if cpt != 1:
                        # the last filter doesn't initialize value with a
                        # map but with a list
                        mv = m.get(value, SortedDictionary())
                    else:
                        mv = m.get(value, [])
                    if not mv:
                        # freshly created container: attach it to its parent
                        m[value] = mv
                    m = mv
                    cpt -= 1
                # mv is the innermost list after the loop
                mv.append(p)
        return self.fileProcesses
def process(self, component=None, debug=False):
    """
    For each data in fileProcesses, calls associated action doit method
    which does the action to process the database.
    There is no possible undo for this processor.
    Calls process method for each components.

    @type component: string
    @param component: key of a component to process only a part of the
        database
    @param debug: passed through to the actions (dry-run/trace mode)
    @raise NameError: if *component* is given but unknown
    """
    # fix: six.moves has no getcwd() — use os.getcwd()
    currentDir = os.getcwd()
    os.chdir(self.dbDir)
    self.processRec(self.fileProcesses, debug=debug)
    # do component converters actions
    if component is not None:
        c = self.components.get(component)
        if c is not None:
            c.process(debug=debug)
        else:
            # fix: the original concatenated a class object and a list with
            # strings, which raised TypeError while building the message
            raise NameError(
                "%s has no component named %s. Available components : %s."
                % (self.__class__.__name__, component,
                   list(self.components.keys())))
    else:
        for c in self.components.values():
            c.process(debug=debug)
    os.chdir(currentDir)
def processRec(self, item, debug=False):
    """
    Process an element of fileProcess : if it is a map or a list, call this
    method recursively on content.

    @type item: FileProcess, list or SortedDictionary
    @param item: an element of fileProcesses
    @param debug: passed through to FileProcess.doit (dry-run/trace mode)
    """
    if isinstance(item, FileProcess):
        # leaf: run the action only when it is selected and defined
        if item.selected and item.action:
            item.doit(debug, context=self.context)
    else:
        # NOTE(review): `collections` is not in this file's visible import
        # header — presumably imported elsewhere in the module; confirm.
        if isinstance(item, collections.abc.Mapping):
            item = list(item.values())
        for i in item:
            self.processRec(i, debug)
def undo(self, component=None):
    """Intentionally a no-op: checking cannot be undone.

    @param component: accepted for interface compatibility, ignored.
    """
    return None
#
class BVChecker_3_1(DBChecker):

    """
    Checker for Brainvisa 3.1 hierarchy.
    """

    def __init__(self, db, context=None):
        """
        @param db: diskitem representing the database directory
        @param context: execution context used for user feedback (may be None)
        """
        super(BVChecker_3_1, self).__init__(db, context)
        # one checker per supported toolbox
        self.components = {'t1mri': T1MriChecker(self.db, context),
                           'diffusion': DiffusionChecker(self.db, context)}

    def process(self, component=None, debug=False):
        """Run the component checkers, then clear and update the database so
        the fixes are taken into account.

        @param component: optional key restricting processing to one checker
        @param debug: passed through to the actions (dry-run/trace mode)
        """
        super(BVChecker_3_1, self).process(component, debug)
        self.context.write("")
        self.context.write('<b>Clear database:', self.db.name, '</b>')
        self.db.clear(context=self.context)
        self.context.write('<b>Update database:', self.db.name, '</b>')
        self.db.update(context=self.context)
#
class T1MriChecker(DBChecker):

    """
    Checker for morphologist toolbox. Checks data generated by the
    segmentation pipeline. All data must have a referential : each Raw T1 MRI
    have its own referential, each generated data have the same referential
    as corresponding raw t1 mri.
    """

    def __init__(self, db, context=None):
        """
        @param db: diskitem representing the database directory
        @param context: execution context used for user feedback (may be None)
        """
        super(T1MriChecker, self).__init__(db, context)
        # grouping attributes for the fileProcesses map
        self.filters = [
            self.centerAttribute(), "subject", "acquisition", "analysis"]
        # searched types are the parameters of the segmentation pipeline
        # TODO : we could possibly use a signature with ReadDiskItem, which
        # would allow specifying extra attributes on the data to search for,
        # making some of them optional, and spotting missing files in an
        # analysis...
        self.searchTypes = ['Raw T1 MRI',
                            'Transform Raw T1 MRI to Talairach-AC/PC-Anatomist',  # talairach transformation
                            'T1 MRI Bias Corrected', 'T1 MRI Filtered For Histo',
                            'T1 MRI White Matter Ridges', 'T1 MRI Bias Field',
                            'T1 MRI Mean Curvature', 'T1 MRI Variance',
                            'T1 MRI Edges',  # t1 bias correction
                            'T1 Brain Mask',  # brain mask segmentation
                            'Split Brain Mask',  # split brain mask
                            'Grey White Mask',
                            'Hemisphere White Mesh',  # grey white interface
                            'CSF+GREY Mask',
                            'Hemisphere Mesh',  # Ana get opened hemi surface
                            'Head Mask', 'Head Mesh',  # head mesh
                            'Cortical folds graph', 'Cortex Skeleton',
                            'Cortex Catchment Bassins'  # cortical fold graph and automatic recognition
                            ]

    def checkItem(self, item, itemType, tm):
        """
        Check if data has a referential. If not, associate an action to the
        data to create its referential.

        @param item: the DiskItem to check
        @param itemType: the searched type the item was found under
        @param tm: the transformation manager used to query referentials
        @return: a corrective action, or None when the item is consistent
        """
        action = None
        if itemType == "Transform Raw T1 MRI to Talairach-AC/PC-Anatomist":
            sourceRef = tm.referential(item.get("source_referential", None))
            destRef = tm.referential(item.get("destination_referential", None))
            # simulate the referential creation to compare with current info
            newSourceRef = tm.findOrCreateReferential(
                "Referential of Raw T1 MRI", item, simulation=True)
            if newSourceRef is not None and (not sourceRef or not destRef or (sourceRef != newSourceRef)):
                acpcReferential = tm.referential(
                    registration.talairachACPCReferentialId)
                action = SetTransformationInfo(
                    item, sourceRef=newSourceRef, destRef=acpcReferential)
        else:
            ref = tm.referential(item)
            if not ref:
                # new referential will copy the raw t1 referential if it
                # exists
                action = CallProcess('newreferential', item)
        return action
#
class DiffusionChecker(DBChecker):

    """
    Checker for the diffusion toolbox. Checks data generated by the
    diffusion pipelines. All data must have a referential: each raw
    diffusion acquisition has its own referential, and each generated item
    must carry the referential matching its type (raw T2 diffusion, AC/PC,
    or extended AC/PC referential).
    """

    def __init__(self, db, context=None):
        super(DiffusionChecker, self).__init__(db, context)
        self.filters = [
            self.centerAttribute(), "subject", "acquisition", "analysis"]
        # searchTypes correspond to data written by brainvisa processes and
        # which have a referential
        self.searchTypes = ['Raw T2 Diffusion MR',
                            'Raw DW Diffusion MR',  # import
                            'Diffusion Model',
                            'Error Mask',
                            # DiffusionDTIModel, DiffusionQBallModel
                            'Apparent Diffusion Coefficient',
                            'Fractional Anisotropy',
                            'Volume Ratio',
                            'Diffusion Standard Deviation',
                            'Parallel Diffusion Coefficient',
                            'Transverse Diffusion Coefficient',
                            'Diffusion Weighted T2',
                            'Maximum Eigenvector',
                            'RGB Eigenvector',  # DiffusionDTIMaps
                            'T2 Brain mask',  # DiffusionT2BrainMask
                            'Corrected DW Diffusion MR',
                            # diffusionEPICorrection
                            'Fascicles bundles',
                            # if transformed=yes not the same referential
                            'Diffusion Density Map',
                            'Diffusion Anisotropy',
                            # DiffusionInterpolatedTracking
                            # transformations and images having a different
                            # referential : AC/PC referential and Extended
                            # AC/PC T2 Diffusion MR referential :
                            'Transform Raw T1 MRI to AC/PC',
                            'Transform T2 Diffusion MR to Raw T1 MRI',
                            'Transform Raw T1 MRI to T2 Diffusion MR',
                            'Transform T2 Diffusion MR to AC/PC',
                            'AC/PC T2 Diffusion MR',
                            'AC/PC DW Diffusion MR',  # DiffusionToACPC
                            'Transform Raw T1 MRI to Extended AC/PC T2 Diffusion MR',
                            'Transform AC/PC to Extended AC/PC T2 Diffusion MR',
                            'Transform T2 Diffusion MR to Extended AC/PC T2 Diffusion MR',
                            'T1 MRI AC/PC oriented',
                            'Extended AC/PC T2 Diffusion MR',
                            'Extended AC/PC DW Diffusion MR'
                            # DiffusionToACPCExtended
                            ]

    def checkItem(self, item, itemType, tm):
        """
        Check if data has a referential. If not, associate an action to the
        data to create its referential.

        :param item: the disk item to check
        :param itemType: name of the item's data type (one of searchTypes)
        :param tm: the transformation manager used to query / create
            referentials
        :returns: an action (SetTransformationInfo or CallProcess) fixing
            the item's referential information, or None if nothing to do
        """
        action = None
        # transformations
        if item.type.isA("Transformation Matrix"):
            sourceRef = tm.referential(item.get("source_referential", None))
            destRef = tm.referential(
                item.get("destination_referential", None))
            # FIX: initialize to None so that a transformation type matching
            # none of the branches below cannot raise UnboundLocalError at
            # the "is not None" tests further down
            sourceRefType = None
            destRefType = None
            newSourceRef = None
            newDestRef = None
            if (itemType == 'Transform Raw T1 MRI to AC/PC'):
                # center subject source.acquisition destination.acquisition
                # Referential of Raw T1 MRI -> AC/PC referential
                sourceRefType = "Referential of Raw T1 MRI"
                destRefType = "AC/PC referential"
            elif (itemType == 'Transform T2 Diffusion MR to Raw T1 MRI'):
                sourceRefType = 'Referential of Raw T2 Diffusion MR'
                destRefType = "Referential of Raw T1 MRI"
            elif (itemType == 'Transform Raw T1 MRI to T2 Diffusion MR'):
                sourceRefType = "Referential of Raw T1 MRI"
                destRefType = 'Referential of Raw T2 Diffusion MR'
            elif (itemType == 'Transform T2 Diffusion MR to AC/PC'):
                sourceRefType = 'Referential of Raw T2 Diffusion MR'
                destRefType = "AC/PC referential"
            elif (itemType == 'Transform Raw T1 MRI to Extended AC/PC T2 Diffusion MR'):
                sourceRefType = "Referential of Raw T1 MRI"
                destRefType = 'Extended AC/PC T2 Diffusion MR referential'
            elif (itemType == 'Transform AC/PC to Extended AC/PC T2 Diffusion MR'):
                sourceRefType = "AC/PC referential"
                destRefType = 'Extended AC/PC T2 Diffusion MR referential'
            elif (itemType == 'Transform T2 Diffusion MR to Extended AC/PC T2 Diffusion MR'):
                sourceRefType = 'Referential of Raw T2 Diffusion MR'
                destRefType = 'Extended AC/PC T2 Diffusion MR referential'
            # evaluate the correct referentials to compare with existing ones
            if sourceRefType is not None:
                attributes = item.hierarchyAttributes()
                srcAcq = attributes.get("source.acquisition", None)
                if srcAcq is not None:
                    attributes["acquisition"] = srcAcq
                newSourceRef = tm.findOrCreateReferential(
                    referentialType=sourceRefType, diskItem=attributes,
                    simulation=True)
                if destRefType is not None:
                    # reuse the same attributes dict, swapping in the
                    # destination acquisition
                    destAcq = attributes.get("destination.acquisition", None)
                    if destAcq is not None:
                        attributes["acquisition"] = destAcq
                    # NOTE(review): simulation=False here, unlike the source
                    # lookup above which uses simulation=True -- this may
                    # actually create the referential during a check; confirm
                    # whether this asymmetry is intended
                    newDestRef = tm.findOrCreateReferential(
                        referentialType=destRefType, diskItem=attributes,
                        simulation=False)
            # if it lacks source or dest referentials or they are not the
            # correct referentials
            if newSourceRef is not None and newDestRef is not None:
                if not sourceRef or not destRef or (sourceRef != newSourceRef) or (destRef != newDestRef):
                    action = SetTransformationInfo(
                        item, sourceRef=newSourceRef, destRef=newDestRef)
        # other files
        else:
            if not (item.get('transformed', None) == "yes"):
                # for the transformed bundles, we cannot know what is the
                # correct referential
                ref = tm.referential(item)
                if ((itemType == 'AC/PC T2 Diffusion MR')
                        or (itemType == 'AC/PC DW Diffusion MR')):
                    referentialType = 'AC/PC referential'
                elif ((itemType == 'T1 MRI AC/PC oriented')
                      or (itemType == 'Extended AC/PC T2 Diffusion MR')
                      or (itemType == 'Extended AC/PC DW Diffusion MR')):
                    referentialType = 'Extended AC/PC T2 Diffusion MR referential'
                else:
                    referentialType = 'Referential of Raw T2 Diffusion MR'
                if ref is not None:
                    # if there is a referential, check if it is the correct
                    # referential
                    newRef = tm.findOrCreateReferential(
                        referentialType, item, simulation=True)
                    if newRef is not None and (newRef != ref):
                        ref = None
                if ref is None:
                    # new referential will copy the referential if it exists
                    action = CallProcess(
                        'newreferential', item, referentialType)
        return action