Commit
ver228 committed Dec 17, 2018
2 parents ab317e7 + 861385a commit 281761a
Showing 10 changed files with 69 additions and 25 deletions.
1 change: 1 addition & 0 deletions recipe/meta.yaml
@@ -34,6 +34,7 @@ requirements:
- numba
- tqdm
- requests
- ffmpeg

app:
entry: tierpsy_gui
5 changes: 3 additions & 2 deletions tierpsy/analysis/int_ske_orient/checkFinalOrientation.py
@@ -11,6 +11,7 @@
import numpy as np
import pandas as pd
import tables
import warnings

from scipy.signal import savgol_filter
from tierpsy.analysis.ske_orient.checkHeadOrientation import isWormHTSwitched
@@ -95,8 +96,8 @@ def getHeadProbMov(
p_mov = p_mov.values[ind_valid]

if p_mov.size == 0:
import pdb
pdb.set_trace()
w_ind = trajectories_worm['worm_index_joined'].iloc[0]
warnings.warn('There is something weird with trajectory {} in file {}. No valid head tail movements found.'.format(w_ind, skeletons_file))
#average using only the indexes of valid skeletons
p_mov_avg = np.nanmean(p_mov)
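
For reference, the hunk above replaces an interactive pdb.set_trace() stop with a warnings.warn call, so a worm with no valid head/tail movement frames no longer halts a batch run; the subsequent np.nanmean of the empty array simply yields NaN for that trajectory. A minimal sketch of the same warn-and-continue pattern (the function name and arguments here are illustrative, not Tierpsy's API):

import warnings
import numpy as np

def mean_head_prob(p_mov, worm_index, skeletons_file):
    # warn and return NaN instead of stopping at a debugger when there are
    # no valid head/tail movement frames for this worm
    if p_mov.size == 0:
        warnings.warn('No valid head/tail movements for trajectory '
                      '{} in {}.'.format(worm_index, skeletons_file))
        return np.nan
    return np.nanmean(p_mov)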

3 changes: 2 additions & 1 deletion tierpsy/analysis/stage_aligment/alignStageMotion.py
@@ -45,7 +45,7 @@ def _h_get_stage_inv(skeletons_file, timestamp):
# adjust the stage_vec to match the timestamps in the skeletons
good = (timestamp_ind >= first_frame) & (timestamp_ind <= last_frame)

ind_ff = timestamp_ind[good] - first_frame
ind_ff = (timestamp_ind[good] - first_frame).astype(np.int) #make sure it is int to be used as index
if timestamp_ind.shape[0] > stage_vec_ori.shape[0]:
#there are extra elements in the timestamp_ind, let's pad it with the same value in the stage vector
extra_n = timestamp_ind.shape[0] - stage_vec_ori.shape[0]
@@ -54,6 +54,7 @@ def _h_get_stage_inv(skeletons_file, timestamp):
stage_vec_ori = stage_vec_ori[good]

stage_vec = np.full((timestamp.size, 2), np.nan)

stage_vec[ind_ff, :] = stage_vec_ori
# the negative sign is to add the stage vector directly, instead of
# subtracting it.
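
The .astype(np.int) cast above matters because ind_ff is used a few lines later for fancy indexing (stage_vec[ind_ff, :] = stage_vec_ori), and NumPy rejects float arrays as indices. A small stand-alone illustration with made-up values (np.int was the usual spelling in 2018; plain int behaves identically and also works on newer NumPy, where np.int has been removed):

import numpy as np

timestamp_ind = np.array([3., 4., 5.])   # float frame timestamps, made-up values
first_frame = 3
stage_vec = np.full((10, 2), np.nan)

ind_ff = timestamp_ind - first_frame     # still float64
# stage_vec[ind_ff, :] = 0.0             # IndexError: arrays used as indices
#                                        # must be of integer (or boolean) type

ind_ff = (timestamp_ind - first_frame).astype(int)   # the cast the commit adds
stage_vec[ind_ff, :] = 0.0               # now valid fancy indexing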
File renamed without changes.
18 changes: 18 additions & 0 deletions tierpsy/extras/param_files/WT2_anticlockwise_TIERPSY.json
@@ -0,0 +1,18 @@
{
"thresh_block_size": 61,
"thresh_C": 15,
"dilation_size": 10,
"min_area": 100,
"resampling_N": 49,
"expected_fps": 30.0,
"max_area": 100000,
"is_extract_metadata": true,
"keep_border_data": true,
"compression_buff": 1,
"worm_bw_thresh_factor":1.0,
"strel_size":10,
"int_save_maps": true,
"filt_min_displacement":0,
"analysis_type" : "TIERPSY_WT2",
"ventral_side" : "anticlockwise"
}
18 changes: 18 additions & 0 deletions tierpsy/extras/param_files/WT2_clockwise_TIERPSY.json
@@ -0,0 +1,18 @@
{
"thresh_block_size": 61,
"thresh_C": 15,
"dilation_size": 10,
"min_area": 100,
"resampling_N": 49,
"expected_fps": 30.0,
"max_area": 100000,
"is_extract_metadata": true,
"keep_border_data": true,
"compression_buff": 1,
"worm_bw_thresh_factor":1.0,
"strel_size":10,
"int_save_maps": true,
"filt_min_displacement":0,
"analysis_type" : "TIERPSY_WT2",
"ventral_side" : "clockwise"
}
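
The two new parameter presets are identical except for the "ventral_side" entry ("clockwise" vs "anticlockwise"). A quick, hypothetical check with plain json (paths relative to tierpsy/extras/param_files/, not using Tierpsy's own TrackerParams loader):

import json

with open('WT2_clockwise_TIERPSY.json') as f_cw, \
        open('WT2_anticlockwise_TIERPSY.json') as f_acw:
    cw = json.load(f_cw)
    acw = json.load(f_acw)

# the two presets should differ only in the ventral_side entry
print({k for k in cw if cw[k] != acw[k]})   # {'ventral_side'}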
2 changes: 1 addition & 1 deletion tierpsy/gui/TrackerViewerAux.py
@@ -309,7 +309,7 @@ def drawSkel(self, worm_img, worm_qimg, row_data, roi_corner=(0, 0)):
dat = ske_file_id.get_node(field)[skel_id]
dat /= self.microns_per_pixel

if self.stage_position_pix is not None:
if self.stage_position_pix is not None and self.stage_position_pix.size > 0:
#subtract stage motion if necessary
dat -= self.stage_position_pix[self.frame_number]
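
The added .size > 0 test above guards against a stage-position array that exists but is empty: indexing an empty array with the current frame number raises IndexError, whereas skipping the subtraction just leaves the skeleton in image coordinates. A toy illustration outside the GUI class, with made-up shapes:

import numpy as np

stage_position_pix = np.zeros((0, 2))   # e.g. stage alignment produced no data
frame_number = 10
dat = np.ones((49, 2))                  # a resampled skeleton, made-up values

if stage_position_pix is not None and stage_position_pix.size > 0:
    dat -= stage_position_pix[frame_number]
# with only the `is not None` check, the empty array would be indexed and
# raise IndexError: index 10 is out of bounds for axis 0 with size 0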

11 changes: 6 additions & 5 deletions tierpsy/processing/AnalysisPoints.py
@@ -64,12 +64,13 @@ def __next__(self):
class AnalysisPoints(object):
def __init__(self, video_file, masks_dir,
results_dir, json_file = ''):

self.video_file = os.path.realpath(os.path.abspath(video_file))
self.results_dir = os.path.realpath(os.path.abspath(results_dir))
self.masks_dir = os.path.realpath(os.path.abspath(masks_dir))


self.getFileNames(video_file, masks_dir, results_dir)

self.video_file = video_file
self.masks_dir = masks_dir
self.results_dir = results_dir
self.getFileNames(self.video_file, self.masks_dir, self.results_dir)

self.param = TrackerParams(json_file)
self.checkpoints = CheckPoints(self.file_names, self.param)
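
Storing the paths as os.path.realpath(os.path.abspath(...)) means later string comparisons and set operations on file names (as in ProcessLocal below) see a single canonical spelling per file, regardless of relative components or symlinks. A quick sketch of the idea with a throwaway path:

import os

def canonical(path):
    # the commit's normalization: make the path absolute, then resolve
    # '..' components and symlinks to a single canonical form
    return os.path.realpath(os.path.abspath(path))

# the same file spelled two different ways compares equal once canonicalized
assert canonical('./results/../results/video.hdf5') == canonical('results/video.hdf5')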
36 changes: 20 additions & 16 deletions tierpsy/processing/ProcessLocal.py
@@ -23,11 +23,11 @@
class ProcessLocal(object):
def __init__(self, main_file, masks_dir, results_dir, tmp_mask_dir='',
tmp_results_dir='', json_file='', analysis_checkpoints = [],
is_copy_video = False, copy_unfinished=False):
is_copy_video = False, copy_unfinished = False):

self.main_file = os.path.realpath(main_file)
self.results_dir = os.path.realpath(results_dir)
self.masks_dir = os.path.realpath(masks_dir)
self.main_file = os.path.realpath(os.path.abspath(main_file))
self.results_dir = os.path.realpath(os.path.abspath(results_dir))
self.masks_dir = os.path.realpath(os.path.abspath(masks_dir))

#check that the files do exist
if not os.path.exists(self.main_file):
@@ -72,16 +72,20 @@ def __init__(self, main_file, masks_dir, results_dir, tmp_mask_dir='',
self.unfinished_points_src = self.ap_src.getUnfinishedPoints(self.analysis_checkpoints)
self.unfinished_points_tmp = self.ap_tmp.getUnfinishedPoints(self.analysis_checkpoints)

#TODO: be stricter here. If there are more unfinished points in the temporary directory, use only the files in src...


#get the points to be processed compared with the existing files
self.checkpoints2process = self._getPoints2Process()
self.checkpoints2process = self._getPoints2Process(self.unfinished_points_src, self.unfinished_points_tmp)

# we need to group steps into start and clean steps for the multiprocess
# part
def start(self):
#It is key that ProcessLocal runs as a separate process, since that is how the processing is parallelized

self.start_time = time.time()
#copy tmp files

self._copyFinaltoTmp()
args = [self.tmp_main_file]
argkws = {'masks_dir':self.tmp_mask_dir, 'results_dir':self.tmp_results_dir,
Expand All @@ -104,21 +108,22 @@ def clean(self):

print_flush(progress_str)

def _getPoints2Process(self):
def _getPoints2Process(self, _unfinished_points_src, _unfinished_points_tmp):
def assignAndCheckSubset(small_list, larger_list):
assert set(small_list).issubset(set(larger_list))
return small_list

if len(self.unfinished_points_src) < len(self.unfinished_points_tmp):
checkpoints2process = assignAndCheckSubset(self.unfinished_points_src, self.unfinished_points_tmp)
if len(_unfinished_points_src) < len(_unfinished_points_tmp):
checkpoints2process = assignAndCheckSubset(_unfinished_points_src, _unfinished_points_tmp)
else:
checkpoints2process = assignAndCheckSubset(self.unfinished_points_tmp, self.unfinished_points_src)
checkpoints2process = assignAndCheckSubset(_unfinished_points_tmp, _unfinished_points_src)
return checkpoints2process

def _copyFinaltoTmp(self):
#files that are required as input
inputs_required = self._points2Files(self.checkpoints2process, self.ap_tmp, "input_files")


new_created_files = self._getNewFilesCreated(self.checkpoints2process, self.ap_tmp)
#files that are required as input but are not produced later on
needed_files = inputs_required - new_created_files
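
The _getPoints2Process refactor in the hunk above now receives both unfinished-point lists explicitly; it returns the shorter list after asserting it is a subset of the longer one, so only the steps still missing in both the source and the temporary copies are scheduled. A toy rendering of that logic with illustrative checkpoint names:

def get_points_to_process(unfinished_src, unfinished_tmp):
    # the shorter list must be contained in the longer one, and it is what gets run
    def assign_and_check_subset(small, large):
        assert set(small).issubset(set(large))
        return small
    if len(unfinished_src) < len(unfinished_tmp):
        return assign_and_check_subset(unfinished_src, unfinished_tmp)
    return assign_and_check_subset(unfinished_tmp, unfinished_src)

# the temporary copy is missing one extra step, so only the steps missing in
# both locations end up scheduled
print(get_points_to_process(['SKE_CREATE', 'FEAT_CREATE'],
                            ['TRAJ_CREATE', 'SKE_CREATE', 'FEAT_CREATE']))
# -> ['SKE_CREATE', 'FEAT_CREATE']
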
@@ -145,7 +150,6 @@ def _copyFinaltoTmp(self):

files2copy += self._getAddFilesForTmpSW()


self._copyFilesLocal(files2copy)

def _getAddFilesForTmpSW(self):
@@ -192,8 +196,6 @@ def _copyTmpToFinalAndClean(self):
self._deleteTmpFiles()




def _deleteTmpFiles(self):
def _points2FullFiles(points2check, ap_obj, field_name):
data = ap_obj.getField(field_name, points2check)
@@ -208,9 +210,11 @@ def _points2FullFiles(points2check, ap_obj, field_name):
#CLEAN
all_tmp_files = _points2FullFiles(self.analysis_checkpoints, self.ap_tmp, "output_files") | \
_points2FullFiles(self.analysis_checkpoints, self.ap_tmp, "input_files")
all_tmp_files = set(map(os.path.realpath, map(os.path.abspath, all_tmp_files)))

all_src_files = _points2FullFiles(self.analysis_checkpoints, self.ap_src, "output_files") | \
_points2FullFiles(self.analysis_checkpoints, self.ap_src, "input_files")
all_src_files = set(map(os.path.realpath, map(os.path.abspath, all_src_files)))

#remove all tmp files that are not in the source
files2remove = all_tmp_files - all_src_files
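
Canonicalizing both path sets before taking the set difference above keeps the cleanup from treating two spellings of the same file as different entries, which could otherwise delete or leave behind the wrong temporary files. A sketch of the effect with hypothetical paths:

import os

def canonical_set(paths):
    # same normalization the commit applies to both sides of the set difference
    return set(os.path.realpath(os.path.abspath(p)) for p in paths)

tmp_files = ['./tmp/results/video_skeletons.hdf5']
src_files = ['tmp/results/video_skeletons.hdf5']   # same file, different spelling

print(set(tmp_files) - set(src_files))                       # looks like one leftover file
print(canonical_set(tmp_files) - canonical_set(src_files))   # set(): nothing to delete
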
@@ -263,10 +267,10 @@ def _getFilesSrcDstPairs(self, fnames, f2d_src, f2dir_dst):
def _copyFilesLocal(self, files2copy):
''' copy files to the source directory'''
for files in files2copy:
file_name, destination = files
file_name, destination = map(os.path.realpath, map(os.path.abspath, files))
assert(os.path.exists(destination))

if os.path.abspath(os.path.dirname(file_name)) != os.path.abspath(destination):
if os.path.dirname(file_name) != destination:
print_flush('Copying %s to %s' % (file_name, destination))
shutil.copy(file_name, destination)

