Skip to content

Commit

Permalink
changed repository name to make it valid
Browse files Browse the repository at this point in the history
  • Loading branch information
foellmelanie committed Jun 29, 2024
1 parent 415419b commit 28af6cb
Show file tree
Hide file tree
Showing 7 changed files with 2,130 additions and 0 deletions.
9 changes: 9 additions & 0 deletions tools/zeiss_lmd_converter/.shed.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
categories:
- Proteomics
description: Converts coordinates from a tabular file into a formatted text file readable by Zeiss laser-capture microdissection systems
long_description: |
  This tool is part of a workflow that allows to use tissue annotations in geojson format from machine learning based imaging software like QuPath and HaloAI and transform these annotation coordinates into a text format readable by Zeiss laser-capture microdissection systems
owner: galaxyp
remote_repository_url: https://github.com/galaxyproteomics/tools-galaxyp/tree/master/tools/zeiss_lmd_converter
homepage_url: https://github.com/galaxyproteomics/tools-galaxyp/tree/master/tools/zeiss_lmd_converter
type: unrestricted
53 changes: 53 additions & 0 deletions tools/zeiss_lmd_converter/tabular_to_ZeissLMDtext.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import argparse
from shapely.geometry import Polygon

def tabular_to_text(input_file, text_file):
    """
    Convert tabular coordinate data into a text file readable by Zeiss
    laser-capture microdissection (LMD) systems.

    Reads x/y coordinate pairs from a tab-separated input file (one header
    line, then one "x<TAB>y" pair per line), closes the polygon by repeating
    the first point, computes the enclosed area via the shoelace formula,
    and writes a PALMRobo-style element listing to the output file.

    Parameters:
        input_file (str): Path to the input file containing tabular
            coordinate data. The first line is a header and is skipped;
            every following line must hold two float fields (x and y)
            separated by a tab.
        text_file (str): Path to the output text file. It will contain a
            fixed PALMRobo header (version, date/time), a column-header
            section, one "Freehand" element row including the polygon area,
            and the coordinates grouped five pairs per line.

    Raises:
        ValueError: If the input file contains no coordinate rows, or a row
            cannot be parsed as two tab-separated floats.
    """
    coordinates = []
    with open(input_file, 'r') as f:
        next(f)  # Skip the header line
        for line in f:
            x, y = map(float, line.strip().split('\t'))
            coordinates.append([x, y])

    if not coordinates:
        # Fail loudly with a clear message instead of an opaque IndexError.
        raise ValueError("Input file contains no coordinate rows")

    coordinates.append(coordinates[0])  # Close the polygon by repeating the first point as the last point

    # Shoelace formula for the area of a closed polygon. This is equivalent
    # to shapely's Polygon(coordinates).area for these inputs, but avoids
    # the third-party dependency entirely.
    signed_twice_area = sum(
        x1 * y2 - x2 * y1
        for (x1, y1), (x2, y2) in zip(coordinates, coordinates[1:])
    )
    area = abs(signed_twice_area) / 2.0

    with open(text_file, 'w') as f:
        # Fixed header block expected by the Zeiss LMD software; the
        # version/date values are part of the required file format.
        f.write("PALMRobo Elements\n")
        f.write("Version:\tV 4.6.0.4\n")
        f.write("Date, Time:\t13.02.2024\t16:06:32\n")
        f.write("\nMICROMETER\nElements :\n\nType\tColor\tThickness\tNo\tCutShot\tArea\tComment\tCoordinates\n\n")
        f.write(f"Freehand\tgreen\t0\t7\t0,0\t{area}\tROI imported from tabular data\n")

        # Coordinates are emitted five pairs per line, each pair prefixed
        # with a tab; every line after the first starts with "." and the
        # file ends with a lone "." (matches the LMD text layout).
        for i in range(0, len(coordinates), 5):
            for j in range(5):
                if i + j < len(coordinates):
                    x, y = coordinates[i + j]
                    f.write(f"\t{x},{y}")
            f.write("\n.")

if __name__ == '__main__':
    # CLI entry point: parse the two required file paths, then delegate the
    # whole conversion to tabular_to_text.
    arg_parser = argparse.ArgumentParser(
        description="Convert tabular coordinate data into a formatted text file"
    )
    arg_parser.add_argument(
        '--input', type=str, required=True,
        help='Path to the input tabular file'
    )
    arg_parser.add_argument(
        '--output', type=str, required=True,
        help='Path to the output text file'
    )
    cli_args = arg_parser.parse_args()
    tabular_to_text(cli_args.input, cli_args.output)

345 changes: 345 additions & 0 deletions tools/zeiss_lmd_converter/test-data/Zeiss_converted_file.txt

Large diffs are not rendered by default.

13 changes: 13 additions & 0 deletions tools/zeiss_lmd_converter/test-data/Zeiss_converted_file_small.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
PALMRobo Elements
Version: V 4.6.0.4
Date, Time: 13.02.2024 16:06:32

MICROMETER
Elements :

Type Color Thickness No CutShot Area Comment Coordinates

Freehand green 0 7 0,0 23955.0 ROI imported from tabular data
1.0,1.0 200.0,200.0 300.0,100.0 200.0,50.0 100.0,10.0
. 1.0,1.0
.
Loading

0 comments on commit 28af6cb

Please sign in to comment.