
Commit a0ad5d0

Merge pull request #61 from TASBE/coverney.issue410
Created the batch_template_csv file
2 parents 020fcb1 + 74dc99e commit a0ad5d0

19 files changed (+1866717 −1866612 lines)

.travis.yml

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@ before_install:
   - make -C MOdox install
   # retrieve TASBE for running tests
   - rm -rf TASBEFlowAnalytics
-  - git clone --branch=master https://github.com/TASBE/TASBEFlowAnalytics
+  - git clone --branch=develop https://github.com/TASBE/TASBEFlowAnalytics
   - make -C TASBEFlowAnalytics install
   # go back to original directory
   - cd TASBEFlowAnalytics-Tutorial

batch_template_csv.m

Lines changed: 68 additions & 0 deletions

@@ -0,0 +1,68 @@
% This template shows how to perform a simple batch analysis of a set of conditions
% Each color is analyzed independently
TASBEConfig.checkpoint(TASBEConfig.checkpoints());

% load the color model
load('../template_colormodel/CM120312.mat');
% can also add filters, such as gating out all "low transfection" red less than 10^6 MEFL:
%CM = add_postfilter(CM,RangeFilter('PE-Tx-Red-YG-A',[1e6 inf]));


% set up metadata
experimentName = 'LacI Transfer Curve';

% Configure the analysis
% Analyze on a histogram of 10^[first] to 10^[third] ERF, with bins every 10^[second]
bins = BinSequence(4,0.1,10,'log_bins');

% Designate which channels have which roles
AP = AnalysisParameters(bins,{});
% Ignore any bins with fewer than this many valid counts as noise
AP = setMinValidCount(AP,100);
% Ignore any raw fluorescence values below this threshold as too contaminated by instrument noise
AP = setPemDropThreshold(AP,5);
% Add autofluorescence back in after removing it for compensation?
AP = setUseAutoFluorescence(AP,false);
% By default, analysis tries to fit constitutive expression to transformed and non-transformed components
% If your distribution is more complex or less complex, you can change the number of components
% AP = setNumGaussianComponents(AP,3);

% Make a map of condition names to file sets
stem1011 = 'csv/LacI-CAGop_Dox';
root1011 = '_PointCloud.csv';
file_pairs = {...
    'Dox 0.1',    {[stem1011 '01' root1011]}; % Replicates go here, e.g., {[rep1], [rep2], [rep3]}
    'Dox 0.2',    {[stem1011 '02' root1011]};
    'Dox 0.5',    {[stem1011 '05' root1011]};
    'Dox 1.0',    {[stem1011 '1' root1011]};
    'Dox 2.0',    {[stem1011 '2' root1011]};
    'Dox 5.0',    {[stem1011 '5' root1011]};
    'Dox 10.0',   {[stem1011 '10' root1011]};
    'Dox 20.0',   {[stem1011 '20' root1011]};
    'Dox 50.0',   {[stem1011 '50' root1011]};
    'Dox 100.0',  {[stem1011 '100' root1011]};
    'Dox 200.0',  {[stem1011 '200' root1011]};
    'Dox 500.0',  {[stem1011 '500' root1011]};
    'Dox 1000.0', {[stem1011 '1000' root1011]};
    'Dox 2000.0', {[stem1011 '2000' root1011]};
    };

n_conditions = size(file_pairs,1);

% Execute the actual analysis
TASBEConfig.set('OutputSettings.StemName','LacI-CAGop');
TASBEConfig.set('OutputSettings.FixedInputAxis',[1e4 1e10]);
% Set the default CSV read header (temporary feature)
TASBEConfig.set('flow.defaultCSVReadHeader','csv/LacI-CAGop.json');
% Generate point cloud csv files
%TASBEConfig.set('flow.outputPointCloud', true);
[results, sampleresults] = per_color_constitutive_analysis(CM,file_pairs,{'EYFP','mKate','EBFP2'},AP);

% Make output plots
plot_batch_histograms(results,sampleresults,CM); % linespecs obtained from CM
% you can also supply your own linespecs to plot_batch_histograms:
% plot_batch_histograms(results,sampleresults,CM,{'b','g','r'});

[statisticsFile, histogramFile] = serializeBatchOutput(file_pairs, CM, AP, sampleresults);

save('LacI-CAGop-batch.mat','AP','bins','file_pairs','results','sampleresults');
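
For reference, bins = BinSequence(4,0.1,10,'log_bins') above requests log-spaced bins from 10^4 to 10^10 ERF with one bin per 0.1 decade, and each row of file_pairs builds its point-cloud CSV path by concatenating stem1011, the dose label, and root1011. A minimal sketch of that pattern (the replicate file names are hypothetical, shown only to illustrate how multiple replicates would be listed):

% the stem/root concatenation used by each row of file_pairs:
example_file = [stem1011 '05' root1011];   % 'csv/LacI-CAGop_Dox05_PointCloud.csv'

% a condition with multiple replicates lists every file in one cell array,
% per the "Replicates go here" comment (the _rep file names below are hypothetical):
% 'Dox 0.5', {[stem1011 '05_rep1' root1011], [stem1011 '05_rep2' root1011]};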

template_analysis/csv/LacI Transfer Curve.json

Lines changed: 0 additions & 1 deletion
This file was deleted.

template_analysis/csv/LacI-CAGop.json

Lines changed: 27 additions & 0 deletions

@@ -0,0 +1,27 @@
[
    3,
    "FITC-A",
    "PE-Tx-Red-YG-A",
    "Pacific Blue-A",
    "EYFP",
    "mKate",
    "EBFP2",
    "ERF",
    "ERF",
    "ERF",
    14,
    "csv\/LacI-CAGop_Dox01_PointCloud.csv",
    "csv\/LacI-CAGop_Dox02_PointCloud.csv",
    "csv\/LacI-CAGop_Dox05_PointCloud.csv",
    "csv\/LacI-CAGop_Dox1_PointCloud.csv",
    "csv\/LacI-CAGop_Dox2_PointCloud.csv",
    "csv\/LacI-CAGop_Dox5_PointCloud.csv",
    "csv\/LacI-CAGop_Dox10_PointCloud.csv",
    "csv\/LacI-CAGop_Dox20_PointCloud.csv",
    "csv\/LacI-CAGop_Dox50_PointCloud.csv",
    "csv\/LacI-CAGop_Dox100_PointCloud.csv",
    "csv\/LacI-CAGop_Dox200_PointCloud.csv",
    "csv\/LacI-CAGop_Dox500_PointCloud.csv",
    "csv\/LacI-CAGop_Dox1000_PointCloud.csv",
    "csv\/LacI-CAGop_Dox2000_PointCloud.csv"
]
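
This replacement header appears to be a flat JSON array: the channel count (3), the FCS channel names, their print names, and their units, followed by the file count (14) and the point-cloud CSV paths that the batch template reads. A minimal sketch of inspecting such a header with MATLAB's jsondecode (R2016b or later); this is not TASBE's own CSV reader, and the layout is only inferred from the contents above:

% minimal sketch, assuming the flat-array layout shown above (not TASBE's own loader)
raw = fileread('csv/LacI-CAGop.json');
hdr = jsondecode(raw);                                % mixed types decode to a cell array
n_channels    = hdr{1};                               % 3
channel_names = hdr(2:1+n_channels);                  % {'FITC-A','PE-Tx-Red-YG-A','Pacific Blue-A'}
print_names   = hdr(2+n_channels:1+2*n_channels);     % {'EYFP','mKate','EBFP2'}
units         = hdr(2+2*n_channels:1+3*n_channels);   % {'ERF','ERF','ERF'}
n_files       = hdr{2+3*n_channels};                  % 14
csv_files     = hdr(3+3*n_channels:end);              % the point-cloud CSV paths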
