
Commit

updated fix for worker
AlexPatrie committed Sep 23, 2024
1 parent c086668 commit 4be6820
Showing 4 changed files with 37 additions and 25 deletions.
8 changes: 6 additions & 2 deletions compose_worker/Dockerfile-compose_worker
@@ -33,8 +33,12 @@ COPY . /app/worker
RUN /app/assets/install_deps.sh /app/worker/requirements.worker.txt

# add deps with extras
RUN poetry add pysces --extras=sbml \
    && poetry add biosimulators-utils --extras=logging
RUN yes | poetry cache clear PyPI --all \
    && poetry add biosimulators-utils@^0.1.188 --extras=logging \
    && poetry add pysces --extras=sbml

# RUN poetry add pysces --extras=sbml \
#     && poetry add biosimulators-utils --extras=logging

WORKDIR /app/worker

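Note on the hunk above: clearing poetry's PyPI cache before re-adding the pinned biosimulators-utils@^0.1.188 forces a fresh resolution instead of reusing a stale cached wheel, and the caret constraint accepts any 0.1.x release at or above 0.1.188. A minimal, hypothetical sanity check of that constraint at container start-up (not part of this commit) could look like:

    from importlib.metadata import version

    # caret semantics of ^0.1.188: >=0.1.188 and <0.2.0
    bsu_version = version('biosimulators-utils')
    major, minor, patch = (int(p) for p in bsu_version.split('.')[:3])
    assert (major, minor) == (0, 1) and patch >= 188, f'unexpected biosimulators-utils version: {bsu_version}'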
14 changes: 11 additions & 3 deletions compose_worker/output_data.py
@@ -138,6 +138,9 @@ def get_sbml_species_mapping(sbml_fp: str):
    # parse and handle names/ids
    sbml_species_ids = []
    for spec in sbml_model_object.getListOfSpecies():
        spec_name = spec.name
        if not spec_name:
            spec.name = spec.getId()
        if not spec.name == "":
            sbml_species_ids.append(spec)
    names = list(map(lambda s: s.name, sbml_species_ids))
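The three added lines above make species with an empty optional name attribute fall back to their SBML id, so the name list built afterwards has no blanks. A self-contained sketch of the same fallback, assuming python-libsbml; the helper name name_to_id_mapping is illustrative, the real function is get_sbml_species_mapping:

    import libsbml

    def name_to_id_mapping(sbml_fp: str) -> dict:
        # map each species display name to its SBML id; the name attribute
        # is optional in SBML, so fall back to the id when it is empty
        model = libsbml.readSBMLFromFile(sbml_fp).getModel()
        return {spec.getName() or spec.getId(): spec.getId()
                for spec in model.getListOfSpecies()}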
@@ -204,8 +207,10 @@ def run_sbml_pysces(sbml_fp: str, start, dur, steps):

def run_sbml_tellurium(sbml_fp: str, start, dur, steps):
    simulator = te.loadSBMLModel(sbml_fp)
    floating_species_list = simulator.getFloatingSpeciesIds()
    sbml_species_names = list(get_sbml_species_mapping(sbml_fp).keys())
    floating_species_list = simulator.getFloatingSpeciesIds()  # SBML IDS
    mapping = get_sbml_species_mapping(sbml_fp)
    sbml_species_names = list(mapping.keys())  # SBML NAMES
    sbml_species_ids = list(mapping.values())

try:
# in the case that the start time is set to a point after the simulation begins
@@ -218,7 +223,10 @@ def run_sbml_tellurium(sbml_fp: str, start, dur, steps):
        for index, row in enumerate(result.transpose()):
            if not index == 0:
                for i, name in enumerate(floating_species_list):
                    outputs[sbml_species_names[i]] = row
                    spec_index = sbml_species_ids.index(name)
                    spec_name = sbml_species_names[spec_index]
                    if spec_index == index:
                        outputs[spec_name] = row

        return outputs
    except:
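The replacement lines above stop assuming that getFloatingSpeciesIds() and the name mapping iterate in the same order: each column's species id is located in the id list first and only then translated to a display name. A hedged sketch of the same pairing, assuming tellurium's result matrix keeps time in column 0 and one column per floating species; names mirror the diff, with mapping being the {name: id} dict from get_sbml_species_mapping:

    # invert {name: id} from get_sbml_species_mapping into {id: name}
    id_to_name = {spec_id: name for name, spec_id in mapping.items()}

    outputs = {}
    transposed = result.transpose()
    for col, species_id in enumerate(floating_species_list, start=1):
        # row 0 of the transposed result is time; species rows start at 1
        outputs[id_to_name[species_id]] = transposed[col]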
3 changes: 2 additions & 1 deletion compose_worker/requirements.worker.txt
@@ -1,5 +1,6 @@
meson-python
meson ninja
meson
ninja
biosimulators-amici
biosimulators-copasi
biosimulators-pysces
37 changes: 18 additions & 19 deletions compose_worker/supervisor.py
@@ -157,34 +157,33 @@ async def _check(self):
        # run job again
        try:
            # check: run simulations

            if job_id.startswith('simulation-execution'):
                worker = SimulationRunWorker(job=pending_job)
            # check: verifications
            elif job_id.startswith('verification'):
                # otherwise: create new worker with job
                worker = VerificationWorker(job=pending_job)
            # check: files
            elif job_id.startswith('files'):
                worker = FilesWorker(job=pending_job)

            # check: composition
            if job_id.startswith('composition-run'):
                worker = CompositionWorker(job=pending_job)
                await worker.run(conn=self.db_connector)
                # result_data = worker.job_result
                # await self.db_connector.insert_job_async(
                #     collection_name=DatabaseCollections.COMPLETED_JOBS.value,
                #     job_id=job_id,
                #     timestamp=self.db_connector.timestamp(),
                #     status=JobStatus.COMPLETED.value,
                #     source=source_name,
                #     simulator=simulator,
                #     results=result_data['data']
                # )
            else:
                # when worker completes, dismiss worker (if in parallel)
                await worker.run()

            # elif job_id.startswith('composition-run'):
            #     worker = CompositionWorker(job=pending_job)
            #     await worker.run(conn=self.db_connector)
            #     result_data = worker.job_result
            #     simulator = pending_job.get('simulator', 'copasi')
            #     await self.db_connector.insert_job_async(
            #         collection_name=DatabaseCollections.COMPLETED_JOBS.value,
            #         job_id=job_id,
            #         timestamp=self.db_connector.timestamp(),
            #         status=JobStatus.COMPLETED.value,
            #         source=source_name,
            #         simulator=simulator,
            #         results=result_data['data']
            #     )

            # when worker completes, dismiss worker (if in parallel)
            await worker.run()
            # create new completed job using the worker's job_result
            result_data = worker.job_result
            await self.db_connector.insert_job_async(
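The new dispatch above picks a worker class from the job-id prefix, with composition jobs additionally receiving the database connection in run(). A compact sketch of the same routing, reusing the worker classes named in this diff; the helper route_and_run is hypothetical, not part of the commit:

    PREFIX_TO_WORKER = {
        'simulation-execution': SimulationRunWorker,
        'verification': VerificationWorker,
        'files': FilesWorker,
        'composition-run': CompositionWorker,
    }

    async def route_and_run(job_id: str, pending_job: dict, db_connector):
        # first matching prefix wins
        for prefix, worker_cls in PREFIX_TO_WORKER.items():
            if job_id.startswith(prefix):
                worker = worker_cls(job=pending_job)
                if prefix == 'composition-run':
                    # composition workers take the db handle (see diff above)
                    await worker.run(conn=db_connector)
                else:
                    await worker.run()
                return worker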
