I ran the following:

import xobservatory
from daxa.mission import XMMPointed
from daxa.archive import Archive
from daxa.process.simple import full_process_xmm
from daxa.process.xmm.setup import cif_build, odf_ingest
from daxa.process.xmm.assemble import (emchain, epchain, cleaned_evt_lists, merge_subexposures,
                                        rgs_events, rgs_angles, cleaned_rgs_event_lists)
from daxa.process.xmm.check import emanom
from daxa.process.xmm.clean import espfilt
from daxa.process.xmm.generate import generate_images_expmaps
from astroquery.vizier import Vizier
import pandas as pd


def get_mnras_dataframe(table_name='J/MNRAS/449/199/table1'):
    """Download Table 1 from J/MNRAS/449/199 as a pandas DataFrame."""
    v = Vizier(column_filters={}, row_limit=-1)
    result = v.get_catalogs(table_name)
    return result[0].to_pandas()


df = get_mnras_dataframe()

xm = XMMPointed()
xm.filter_on_positions(df[['RAJ2000', 'DEJ2000']].values.tolist())
xm.download(num_cores=8)

arch = Archive('fgas', xm)
full_process_xmm(arch)
and got the following error. Upon checking, I can see the pipeline is still running, but the summary-file progress bar no longer updates.
XMM-Newton Pointed - Generating ODF summary files: 77%|███████▋ | 490/639 [1:44:36<31:10, 12.55s/it] Exception in thread Thread-67 (_handle_results):
Traceback (most recent call last):
File "/opt/homebrew/Cellar/python@3.13/3.13.0_1/Frameworks/Python.framework/Versions/3.13/lib/python3.13/threading.py", line 1041, in _bootstrap_inner
self.run()
~~~~~~~~^^
File "/Users/philiprooney/Documents/projects/daxa/.venv/lib/python3.13/site-packages/ipykernel/ipkernel.py", line 766, in run_closure
_threading_Thread_run(self)
~~~~~~~~~~~~~~~~~~~~~^^^^^^
File "/opt/homebrew/Cellar/python@3.13/3.13.0_1/Frameworks/Python.framework/Versions/3.13/lib/python3.13/threading.py", line 992, in run
self._target(*self._args, **self._kwargs)
~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/python@3.13/3.13.0_1/Frameworks/Python.framework/Versions/3.13/lib/python3.13/multiprocessing/pool.py", line 595, in _handle_results
cache[job]._set(i, obj)
~~~~~~~~~~~~~~~^^^^^^^^
File "/opt/homebrew/Cellar/python@3.13/3.13.0_1/Frameworks/Python.framework/Versions/3.13/lib/python3.13/multiprocessing/pool.py", line 779, in _set
self._callback(self._value)
~~~~~~~~~~~~~~^^^^^^^^^^^^^
File "/Users/philiprooney/Documents/projects/daxa/.venv/lib/python3.13/site-packages/daxa/process/xmm/_common.py", line 302, in callback
parsed_obs_info[mission_name][relevant_id] = parse_odf_sum(proc_extra_info['sum_path'],
~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
relevant_id)
...
~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^
File "/Users/philiprooney/Documents/projects/daxa/.venv/lib/python3.13/site-packages/pandas/core/indexing.py", line 1685, in _validate_integer
raise IndexError("single positional indexer is out-of-bounds")
IndexError: single positional indexer is out-of-bounds
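For context, the final error is the generic one pandas raises when .iloc indexes past the end of a DataFrame - for example an empty one - which would be consistent with parse_odf_sum trying to read a row from an empty or truncated ODF summary table. A minimal reproduction, purely for illustration:

import pandas as pd

# Positional indexing into an empty DataFrame raises the same error that
# appears at the bottom of the traceback above.
empty = pd.DataFrame(columns=['A'])
try:
    empty.iloc[0]
except IndexError as err:
    print(err)  # single positional indexer is out-of-bounds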
The issue was that a couple of the tar files didn't decompress correctly, and odfingest then failed on those observations. The run carried on for all of the others, but the process never finished - no logs were created and the process_info JSON was never updated. There was no easy way for me to fix it, so I am having to rerun, with the ODFs now hopefully fixed!
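In case it helps anyone hitting the same thing, here is a minimal sketch (not part of DAXA) for checking the downloaded archives before rerunning; the raw-data path below is hypothetical and just needs pointing at wherever the XMM downloads were stored:

import glob
import tarfile

# Hypothetical path - point this at the directory holding the raw XMM downloads.
raw_dir = 'daxa_raw/xmm_pointed'

suspect = []
for tar_path in glob.glob(f'{raw_dir}/**/*.tar*', recursive=True):
    try:
        # Listing every member walks the whole (compressed) stream, so a
        # truncated or corrupt download will usually raise an error here.
        with tarfile.open(tar_path) as tf:
            tf.getmembers()
    except (tarfile.TarError, EOFError, OSError) as err:
        suspect.append((tar_path, str(err)))

print(f'{len(suspect)} suspect archive(s)')
for path, reason in suspect:
    print(f'  {path}: {reason}')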
I think there needs to be a way to fail gracefully on individual observations while letting the rest of the process continue, then tell the user which ones failed and give them the option to rerun just those. I imagine this, or something like it, exists already, but I couldn't find a good way after some digging.
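For what it's worth, the pattern I have in mind is roughly the sketch below - not DAXA's actual API, just a generic illustration of collecting per-observation failures from a pool and reporting them at the end so the failed ObsIDs can be rerun on their own (the ObsIDs and the failing step are made up):

from multiprocessing import Pool

def process_obs(obs_id):
    # Stand-in for the real per-observation step (e.g. odfingest).
    if obs_id == '0123456789':
        raise RuntimeError('ODF summary file is empty')
    return obs_id

def run_all(obs_ids, num_cores=4):
    successes, failures = [], {}
    with Pool(num_cores) as pool:
        jobs = {obs: pool.apply_async(process_obs, (obs,)) for obs in obs_ids}
        for obs, job in jobs.items():
            try:
                successes.append(job.get())
            except Exception as err:
                # Record the failure instead of letting it stop the whole run.
                failures[obs] = str(err)

    if failures:
        print(f'{len(failures)} observation(s) failed and could be rerun individually:')
        for obs, reason in failures.items():
            print(f'  {obs}: {reason}')
    return successes, failures

if __name__ == '__main__':
    run_all(['0123456789', '0111111111', '0222222222'])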