% Replicating the PyNWB version of converting the Steinmetz et al. dataset to
% NWB format in MatNWB. Source: https://github.com/SteinmetzLab/dataToNWB
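% Assumed prerequisites (not stated in the original gist): MatNWB must be
% installed and on the MATLAB path (older releases may also require running
% generateCore() once), and readNPY from the npy-matlab package is needed to
% load the .npy files referenced below.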
% add subject information
subject = types.core.Subject('age', '77', ...
'genotype', 'tetO-G6s x CaMK-tTA', ...
'sex', 'F', ...
'species', 'Mus musculus', ...
'description', 'strain: C57Bl6/J');
% create nwb file
nwb_file = NwbFile(...
'session_description', ['Neuropixels recording during visual ', ...
'discrimination in awake mice.'], ...
'identifier', 'Cori_2016-12-14', ...
'session_start_time', datetime(2016, 12, 14, 12, 0, 0), ...
'general_institution', 'University College London', ...
'general_lab', 'The Carandini & Harris Lab', ...
'general_subject', subject, ...
'general_experimenter', 'Nick Steinmetz', ...
'general_experiment_description', ['Large-scale Neuropixels recordings ', ...
'across brain regions of mice during a head-fixed visual ', ...
'discrimination task.'], ...
'general_related_publications', 'DOI 10.1038/s41586-019-1787-x', ...
'general_keywords', {'Neural coding', 'Neuropixels', 'mouse', ...
'brain-wide', 'vision', 'visual discrimination', ...
'electrophysiology'});
% create processing module
behavior_mod = types.core.ProcessingModule('description', 'behavior module');
nwb_file.processing.set('behavior', behavior_mod);
behavior_mod = Eye(behavior_mod);
behavior_mod = Face(behavior_mod);
lp_ts = LickPiezo();
nwb_file.acquisition.set('LickPiezo', lp_ts);
behavior_mod = LickTimes(behavior_mod);
spont_ti = Spontaneous();
nwb_file.intervals.set('spontaneous', spont_ti);
wheel_ts = Wheel();
nwb_file.acquisition.set('WheelTimes', wheel_ts);
behavior_mod = WheelMoves(behavior_mod);
%trials = TrialTable();
%nwb_file.intervals_trials = trials;
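% The trial-table assembly is left disabled here; if enabled, note that
% nwb_file.intervals_trials expects the types.core.TimeIntervals object
% that TrialTable() below returns.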
sp_noise = SparseNoise();
nwb_file.stimulus_presentation.set('receptive_field_mapping_sparse_noise', sp_noise);
[beep_ts, click_ts, pass_l, pass_r, pass_white] = PassiveStim();
nwb_file.stimulus_presentation.set('passive_beeps', beep_ts);
nwb_file.stimulus_presentation.set('passive_click_times', click_ts);
nwb_file.stimulus_presentation.set('passive_left_contrast', pass_l);
nwb_file.stimulus_presentation.set('passive_right_contrast', pass_r);
nwb_file.stimulus_presentation.set('passive_white_noise', pass_white);
nwbExport(nwb_file, 'test.nwb');
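% Optional sanity check (a minimal sketch, assuming MatNWB's nwbRead is
% available on the path): read the exported file back and display its
% top-level contents.
nwb_in = nwbRead('test.nwb');
disp(nwb_in);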
function rate = Rate(timestamps)
% Compute the value passed to 'starting_time_rate' from a two-row ALF
% timestamps array ([sample index, time in seconds] per row): elapsed time
% between the two reference points divided by the final sample index.
rate = (timestamps(2, 2) - timestamps(1, 2)) / (timestamps(2, 1));
end
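% Hypothetical usage example (values made up): for a two-row timestamps
% array ts = [0, 10.0; 50000, 35.0], Rate(ts) returns
% (35.0 - 10.0) / 50000 = 5e-4.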
function behavior_mod = Eye(behavior_mod)
% function to add eye position and pupil tracking to processing module
eye_timestamps = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_eye.timestamps.npy');
eye_area = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_eye.area.npy');
eye_xy_pos = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_eye.xyPos.npy');
pupil = types.core.TimeSeries('timestamps', eye_timestamps(:, 2), ...
'data', reshape(eye_area.',1,[]), ...
'data_unit', 'arb. unit', ...
'description', ['Features extracted from the ', ...
'video of the right eye.'], ...
'comments', ['The area of the pupil extracted ', ...
'with DeepLabCut. Note that ', ...
'it is relatively very small during the discrimination task ', ...
'and during the passive replay because the three screens are ', ...
'medium-grey at this time and black elsewhere - so the much ', ...
'brighter overall luminance levels lead to relatively ', ...
'constricted pupils.']);
eye_xy = types.core.TimeSeries('timestamps', eye_timestamps(:, 2), ...
'data', eye_xy_pos, ...
'data_unit', 'arb. unit', ...
'description', ['Features extracted from the video ', ...
'of the right eye.'], ...
'comments', ['The 2D position of the center of the pupil ', ...
'in the video frame. This is not registered ', ...
'to degrees visual angle, but ', ...
'could be used to detect saccades or ', ...
'other changes in eye position.']);
pupil_track = types.core.PupilTracking('timeseries', [pupil, eye_xy]);
behavior_mod.nwbdatainterface.set(...
'PupilTracking', pupil_track);
end
function behavior_mod = Face(behavior_mod)
% Add face energy behavior data to behavior processing module
face_motion_energy = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_face.motionEnergy.npy');
face_timestamps = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_face.timestamps.npy');
face_rate = Rate(face_timestamps);
face_energy = types.core.TimeSeries(...
'data', reshape(face_motion_energy.',1,[]), ...
'data_unit', 'arb. unit', ...
'starting_time', face_timestamps(1, 2), ...
'starting_time_rate', face_rate, ...
'description', ['Features extracted from the video of the ', ...
'frontal aspect of the subject, including the ', ...
'subject face and forearms.'], ...
'comments', ['The integrated motion energy across the whole frame', ...
', i.e. sum( (thisFrame-lastFrame)^2 ). ', ...
'Some smoothing is applied before this operation.']);
face_interface = types.core.BehavioralTimeSeries('timeseries', face_energy);
behavior_mod.nwbdatainterface.set(...
'BehavioralTimeSeries', face_interface);
end
function lp_timeseries = LickPiezo()
% add lick piezo to nwb acquisition
lp_raw = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_lickPiezo.raw.npy');
lp_timestamps = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_lickPiezo.timestamps.npy');
lp_rate = Rate(lp_timestamps);
lp_timeseries = types.core.TimeSeries(...
'starting_time', lp_timestamps(1, 2), ...
'starting_time_rate', lp_rate, ...
'data', reshape(lp_raw.',1,[]), ...
'data_unit', 'V', ...
'description', ['Voltage values from a thin-film piezo connected to the ', ...
'lick spout, so that values are proportional to deflection ', ...
'of the spout and licks can be detected as peaks of the signal.']);
end
function behavior_mod = LickTimes(behavior_mod)
% add lick behavioral events to behavior processing module
lick_timestamps = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_licks.times.npy');
lick_data = ones(size(lick_timestamps, 1), 1);
lick_ts = types.core.TimeSeries(...
'timestamps', reshape(lick_timestamps.',1,[]), ...
'data', lick_data, ...
'data_unit', '', ...
'description', ['Extracted times of licks, ', ...
'from the lickPiezo signal.']);
lick_behavior = types.core.BehavioralEvents('timeseries', lick_ts);
behavior_mod.nwbdatainterface.set(...
'BehavioralEvents', lick_behavior);
end
function spont_ti = Spontaneous()
% add spontaneous intervals to nwb_file.intervals
spont = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_spontaneous.intervals.npy');
start_time = spont(:, 1);
stop_time = spont(:, 2);
spont_ti = types.core.TimeIntervals(...
'colnames', {'start_time', 'stop_time'}, ...
'id', types.hdmf_common.ElementIdentifiers('data', 0:size(spont, 1)-1), ...
'description', ['Intervals of sufficient duration when nothing ', ...
'else is going on (no task or stimulus presentation).'], ...
'start_time', types.hdmf_common.VectorData('data', ...
start_time, 'description', 'this is start time'), ...
'stop_time', types.hdmf_common.VectorData('data', ...
stop_time, 'description', 'this is stop time'));
end
function wheel_ts = Wheel()
% add wheel position to nwb_file.acquisition
wheel_pos = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_wheel.position.npy');
wheel_timestamps = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_wheel.timestamps.npy');
wheel_rate = Rate(wheel_timestamps);
wheel_ts = types.core.TimeSeries(...
'starting_time', wheel_timestamps(1, 2), ...
'starting_time_rate', wheel_rate, ...
'data', reshape(wheel_pos.',1,[]), ...
'data_unit', 'mm', ...
'data_conversion', 0.135, ...
'description', ['The position reading of the rotary encoder attached to ', ...
'the rubber wheel that the mouse pushes left and right ', ...
'with his forelimbs.'], ...
'comments', ['The wheel has radius 31 mm and 1440 ticks per revolution, ', ...
'so multiply by 2*pi*r/tpr=0.135 to convert to millimeters. ', ...
'Positive velocity (increasing numbers) corresponds to clockwise ', ...
'turns (if looking at the wheel from behind the mouse), i.e. ', ...
'turns that are in the correct direction for stimuli presented ', ...
'to the left. Likewise negative velocity corresponds to right choices.']);
end
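% Quick arithmetic check of the conversion factor quoted in the comments
% above: 2*pi*31/1440 is approximately 0.1353, consistent with the 0.135
% passed as 'data_conversion'.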
function behavior_mod = WheelMoves(behavior_mod)
% add wheel moves as BehavioralEpochs to the behavior module
wheel_moves_type = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_wheelMoves.type.npy');
wheel_moves_int = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_wheelMoves.intervals.npy');
wheel_moves_int = ceil(wheel_moves_int);
wheel_moves_is = types.core.IntervalSeries(...
'timestamps', reshape(wheel_moves_int.',1,[]), ...
'data', reshape(wheel_moves_type.',1,[]), ...
'description', 'Detected wheel movements.', ...
'comments', ['0 for flinches or otherwise unclassified movements, ', ...
'1 for left/clockwise turns, 2 for right/counter-clockwise ', ...
'turns (where again "left" means "would be the correct ', ...
'direction for a stimulus presented on the left"). A detected ', ...
'movement is counted as left or right only if it was of ', ...
'sufficient amplitude that it would have registered a correct ', ...
'response (and possibly did), within a minimum amount of time ', ...
'from the start of the movement. Movements failing those ', ...
'criteria are flinch/unclassified type.']);
wheel_moves_beh = types.core.BehavioralEpochs('intervalseries', wheel_moves_is);
behavior_mod.nwbdatainterface.set(...
'BehavioralEpochs', wheel_moves_beh);
end
function trials = TrialTable()
% create trial table to add to nwb
included = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_trials.included.npy');
fb_type = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_trials.feedbackType.npy');
fb_type = ceil(fb_type);
fb_time = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_trials.feedback_times.npy');
go_cue = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_trials.goCue_times.npy');
trial_intervals = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_trials.intervals.npy');
rep_num = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_trials.repNum.npy');
response_choice = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_trials.response_choice.npy');
response_times = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_trials.response_times.npy');
visual_left = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_trials.visualStim_contrastLeft.npy');
visual_right = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_trials.visualStim_contrastRight.npy');
visual_times = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_trials.visualStim_times.npy');
start_time = trial_intervals(:, 1);
stop_time = trial_intervals(:, 2);
% add trials
trials = types.core.TimeIntervals(...
'colnames', {'type', 'start_time', 'stop_time', 'description', ...
'data'}, ...
'type', {'included', 'go_cue', 'visual_stimulus_time', ...
'visual_stimulus_left_contrast', ...
'visual_stimulus_right_contrast', ...
'response_time', 'response_choice', 'feedback_time', ...
'feedback_type', 'rep_num'}, ...
'start_time', types.hdmf_common.VectorData('data', ...
start_time, 'description', 'this is start time'), ...
'stop_time', types.hdmf_common.VectorData('data', ...
stop_time, 'description','this is stop time'), ...
'description', { ...
['Importantly, while this variable gives inclusion criteria according ', ...
'to the definition of disengagement (see manuscript Methods), it does ', ...
'not give inclusion criteria based on the time of response, as used ', ...
'for most analyses in the paper.'], ...
'The goCue is referred to as the auditory tone cue in the manuscript.', ...
['Times are relative to the same time base as every other time in the dataset, ', ...
'not to the start of the trial.'], ...
['Proportion contrast. A value of 0.5 means 50% contrast. 0 is a blank ', ...
'screen: no change to any pixel values on that side (completely undetectable).'], ...
['Times are relative to the same time base as every other time in the dataset, ', ...
'not to the start of the trial.'], ...
['Enumerated type. The response registered at the end of the trial, ', ...
'which determines the feedback according to the contrast condition. ', ...
'Note that in a small percentage of cases (~4%, see manuscript Methods) ', ...
'the initial wheel turn was in the opposite direction. -1 for Right ', ...
'choice (i.e. correct when stimuli are on the right); +1 for left ', ...
'choice; 0 for Nogo choice.'], ...
['Times are relative to the same time base as every other time in the dataset, ', ...
'not to the start of the trial.'], ...
['Enumerated type. -1 for negative feedback (white noise burst); +1 for ', ...
'positive feedback (water reward delivery).'], ...
['Trials are repeated if they are "easy" trials (high contrast stimuli ', ...
'with large difference between the two sides, or the blank screen ', ...
'condition) and this keeps track of how many times the current ', ...
'trial''s condition has been repeated.']}, ...
'data', [reshape(included.',1,[]), ...
reshape(go_cue.',1,[]), ...
reshape(visual_times.',1,[]), ...
reshape(visual_left.',1,[]), ...
reshape(visual_right.',1,[]), ...
reshape(response_times.',1,[]), ...
reshape(response_choice.',1,[]), ...
reshape(fb_time.',1,[]), ...
reshape(fb_type.',1,[]), ...
reshape(rep_num.',1,[])]);
end
function sp_noise = SparseNoise()
% adds the receptive field mapping task to nwb_file.stimulus_presentation
sp_noise_pos = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_sparseNoise.positions.npy');
sp_noise_t = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_sparseNoise.times.npy');
sp_noise = types.core.TimeSeries(...
'timestamps', reshape(sp_noise_t.',1,[]), ...
'data', sp_noise_pos, ...
'data_unit', 'degrees visual angle', ...
'description', ['White squares shown on the screen with randomized ', ...
'positions and timing - see manuscript Methods.'], ...
'comments', ['The altitude (first column) and azimuth (second column) ', ...
'of the square.']);
end
function [beep_ts, click_ts, pass_l, pass_r, pass_white] = PassiveStim()
% add passive beeps
pass_beeps = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_passiveBeeps.times.npy');
pb_data = ones(size(pass_beeps, 1), 1);
beep_ts = types.core.TimeSeries(...
'timestamps', reshape(pass_beeps.',1,[]), ...
'data', pb_data, ...
'data_unit', 'uk', ...
'description', ['Auditory tones of the same frequency as the auditory ', ...
'tone cue in the task.']);
% passive valve clicks
pass_clicks = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_passiveValveClick.times.npy');
pc_data = ones(size(pass_clicks, 1), 1);
click_ts = types.core.TimeSeries(...
'timestamps', reshape(pass_clicks.',1,[]), ...
'data', pc_data, ...
'data_unit', 'uk', ...
'description', ['Opening of the reward valve, but with a clamp in place ', ...
'such that no water flows. Therefore the auditory sound of ', ...
'the valve is heard, but no water reward is obtained.']);
% passive visual times
pass_vis = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_passiveVisual.times.npy');
pass_vis_left = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_passiveVisual.contrastLeft.npy');
pass_vis_right = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_passiveVisual.contrastRight.npy');
pass_l = types.core.TimeSeries(...
'timestamps', reshape(pass_vis.',1,[]), ...
'data', reshape(pass_vis_left.',1,[]), ...
'data_unit', 'proportion contrast', ...
'description', ['Gratings of the same size, spatial freq, position, etc ', ...
'as during the discrimination task.']);
pass_r = types.core.TimeSeries(...
'timestamps', reshape(pass_vis.',1,[]), ...
'data', reshape(pass_vis_right.',1,[]), ...
'data_unit', 'proportion contrast', ...
'description', ['Gratings of the same size, spatial freq, position, etc ', ...
'as during the discrimination task.']);
% passive white noise
pass_noise = readNPY('nicklab_Subjects_Hench_2017-06-15_001_alf_passiveWhiteNoise.times.npy');
pn_data = ones(size(pass_noise, 1), 1);
pass_white = types.core.TimeSeries(...
'timestamps', reshape(pass_noise.',1,[]), ...
'data', pn_data, ...
'data_unit', 'uk', ...
'description', ['The sound that accompanies an incorrect response during the ', ...
'discrimination task.']);
end