Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
G
gallery-notebooks
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Wiki
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Snippets
Build
Pipelines
Jobs
Pipeline schedules
Artifacts
Deploy
Releases
Package Registry
Model registry
Operate
Environments
Terraform modules
Monitor
Incidents
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
Carlo Ferrigno
gallery-notebooks
Commits
7f2d58e6
Commit
7f2d58e6
authored
1 year ago
by
Carlo Ferrigno
Browse files
Options
Downloads
Patches
Plain Diff
Process one source notebook
parent
48a895f7
No related branches found
No related tags found
No related merge requests found
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
ProcessConsRevolutions.ipynb
+5
-5
5 additions, 5 deletions
ProcessConsRevolutions.ipynb
Process_one_source.ipynb
+202
-0
202 additions, 0 deletions
Process_one_source.ipynb
with
207 additions
and
5 deletions
ProcessConsRevolutions.ipynb
+
5
−
5
View file @
7f2d58e6
...
...
@@ -16,9 +16,9 @@
"url = 'https://www.isdc.unige.ch/integral/restricted/Operations/Shift/Status/consolidated.html'\n",
"token=''\n",
"# This defines the range of revlutions to process, default is the last 10\n",
"n_start = 2
555
\n",
"n_stop = 2
560
\n",
"force_reprocessing =
Fals
e\n",
"n_start = 2
617
\n",
"n_stop = 2
618
\n",
"force_reprocessing =
Tru
e\n",
"host_type = 'staging'\n",
"data_processing = 'CONS'\n",
"E1_keV = \"28.0\"\n",
...
...
@@ -300,7 +300,7 @@
"\n",
"for rr in revolutions_range:\n",
" failed = False\n",
" rev_num = \"%04d\" % rr\n",
" rev_num = \"%04d\" %
int(
rr
)
\n",
" now = datetime.now().strftime(\"%Y_%m_%d_%H_%M_%S\")\n",
" par_dict = {\n",
" 'rev_num' : int(rr),\n",
...
...
@@ -454,7 +454,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.
9
.1
2
"
"version": "3.
10
.1
1
"
},
"vscode": {
"interpreter": {
...
...
%% Cell type:code id:cd05caa6 tags:parameters
```
python
import os, sys

# URL of the operator-maintained HTML table listing consolidated (CONS) revolutions.
url = 'https://www.isdc.unige.ch/integral/restricted/Operations/Shift/Status/consolidated.html'
# Empty token -> discovered later via oda_api.token.discover_token().
token = ''
# This defines the range of revolutions to process, default is the last 10
# NOTE(review): the original cell assigned n_start/n_stop/force_reprocessing twice
# (stale values 2555 / 2560 / False were immediately shadowed — residue of the
# commit diff). Only the effective assignments are kept here.
n_start = 2617
n_stop = 2618
force_reprocessing = True
host_type = 'staging'
data_processing = 'CONS'
# Energy band limits passed to the product notebooks (kept as strings, as the
# downstream notebooks apparently expect — TODO confirm).
E1_keV = "28.0"
E2_keV = "40.0"
# Folder containing the "Generic Revolution *" notebooks executed by papermill.
notebooks_folder = os.getcwd()
```
%% Cell type:code id:0924c9db tags:
```
python
# Third-party and stdlib imports for the batch-processing notebook.
import pandas as pd                    # HTML table -> DataFrame (read_html)
import requests                        # download of the CONS status page
from bs4 import BeautifulSoup          # locate the table element in the HTML
import numpy as np
import oda_integral_wrapper.wrapper    # project wrapper around the ODA dispatcher
import yaml                            # observation metadata is YAML-encoded
import json
import papermill as pm                 # parameterized notebook execution
from datetime import datetime
```
%% Cell type:code id:374f2a15 tags:
```
python
import logging

# Keep papermill/requests chatter down; switch to INFO for debugging.
logging.getLogger().setLevel(logging.WARNING)
# logging.getLogger().setLevel(logging.INFO)  # for more verbose logging

# BUG FIX: Handler.addHandler() returns None, so the original
# `logger = logging.getLogger('').addHandler(...)` left `logger` bound to None.
# Bind the root logger first, then attach the stream handler to it.
logger = logging.getLogger('')
logger.addHandler(logging.StreamHandler())
```
%% Cell type:code id:d1b6bb71 tags:
```
python
import oda_api.token

# Discover a token from the standard locations if none was supplied as a parameter.
if token == '':
    token = oda_api.token.discover_token()
# NOTE(review): printing the raw token leaks a credential into the executed
# notebook output — consider removing or masking.
print(token)
# extend token lifetime
from oda_api.api import DispatcherAPI

disp = DispatcherAPI(url='https://www.astro.unige.ch/mmoda/dispatch-data')
# write_token=True presumably persists the refreshed token to disk — TODO confirm
# against oda_api documentation.
token = disp.refresh_token(token, write_token=True)
# Decode (and implicitly display/validate) the refreshed token.
oda_api.token.decode_oda_token(token)
```
%% Cell type:markdown id:84407faa tags:
## Get the CONS HTML page updated by the operator
%% Cell type:code id:6a87fe73 tags:
```
python
import yaml

# Credentials for the restricted ISDC page. Start from harmless placeholders and
# override them when a local secret file is present.
secret = {'user': 'dummy', 'passwd': 'dummy'}
if os.path.isfile('.secret.yaml'):
    with open('.secret.yaml') as secret_file:
        secret = yaml.safe_load(secret_file)
```
%% Cell type:code id:f05a8bd3 tags:
```
python
#from requests.auth import HTTPBasicAuth
#html_request = requests.get(url, auth = HTTPBasicAuth(secret['user'], secret['passwd']))
# Fetch the CONS status page, explicitly bypassing any configured HTTP(S) proxy.
html_request = requests.get(url, proxies={"http": None, "https": None})
if html_request.status_code == 200:
    # (typo "downlaoded" preserved — runtime output string)
    print("Table downlaoded successfully")
    # NOTE(review): no explicit parser argument — BeautifulSoup will pick a
    # default and emit a warning; the chosen parser may vary between machines.
    soup = BeautifulSoup(html_request.content)
    # The consolidated-revolutions table is identified by its exact border /
    # padding / spacing attributes on the page.
    con_table = soup.find('table', attrs={'border': 3, 'cellpadding': 3, 'cellspacing': 2})
else:
    print("Error while requesting the table of Cons Data")
```
%% Cell type:code id:9ecda19e tags:
```
python
# Parse the extracted HTML table and drop the header-like placeholder rows
# (labelled Rev_1 / Rev_2 / Rev_3 in the revolution-report column).
if html_request.status_code == 200:
    cons = pd.read_html(str(con_table))
    rev_report = cons[0][('Revol. report', 'Revol. report')]
    ind = rev_report.isin(['Rev_2', 'Rev_3', 'Rev_1'])
    cc = cons[0][np.logical_not(ind)]
```
%% Cell type:code id:560c995e tags:
```
python
# Time of data archiving
if html_request.status_code == 200:
    # Pull the first dd/mm/yyyy date out of the "Archived" column text and
    # convert the whole column to pandas Timestamps. Rows without a matching
    # date presumably become NaT — TODO confirm.
    archived = pd.to_datetime(
        cc[('ScW Processing', 'Archived')].str.extract(r'(\d{2}/\d{2}/\d{4})')[0].values,
        format="%d/%m/%Y")
```
%% Cell type:code id:867839cc tags:
```
python
#Revolution numbers
if html_request.status_code == 200:
    # Revolution-report labels cast to int — assumes every remaining row holds a
    # plain numeric string after the Rev_1/2/3 placeholders were dropped.
    revolutions = cc[('Revol. report', 'Revol. report')].astype(int).values
```
%% Cell type:markdown id:76a822ff tags:
## Get useful range (it excludes data that have not been archived, yet)
%% Cell type:code id:c626a24b tags:
```
python
if html_request.status_code == 200:
    # Keep only revolutions that have actually been archived (sane date) and
    # are past the early-mission range (> 46).
    ind = (archived > '2003-01-01') & (revolutions > 46)
    # NOTE(review): bare expression, result discarded — leftover notebook probe.
    np.sum(ind)
    # Map the requested revolution numbers onto positions in the filtered array.
    # Raises IndexError if the requested revolution is not in the table.
    if n_start > -1:
        ind_n_start = np.where(revolutions[ind] == n_start)[0][0]
    if n_stop > -1:
        ind_n_stop = np.where(revolutions[ind] == n_stop)[0][0]
    if n_start == -1:
        # process last N revolutions
        revolutions_range = revolutions[ind][0:5]
    else:
        if n_stop == -1:
            # Single revolution requested.
            revolutions_range = [revolutions[ind][ind_n_start]]
        else:
            if n_stop > n_start:
                # Step -1: the table appears to list revolutions newest-first,
                # so an ascending request walks the array backwards —
                # TODO confirm ordering against the live page.
                revolutions_range = revolutions[ind][ind_n_start:ind_n_stop:-1]
            else:
                revolutions_range = revolutions[ind][ind_n_start:ind_n_stop]
else:
    # The table could not be downloaded: fall back to a plain numeric range,
    # which is only safe above revolution 46.
    if n_start <= 46:
        raise ReferenceError('We could not read the Cons Table URL, use n_start > 46')
    # NOTE(review): here the entries are strings, while the table-derived branch
    # yields numpy ints — downstream code calls int(rr) to cope with both.
    revolutions_range = [str(r) for r in range(int(n_start), int(n_stop))]
print(revolutions_range)
```
%% Cell type:markdown id:70f2d840 tags:
## Function to test if data have been processed in the Gallery
%% Cell type:code id:6603f562 tags:
```
python
def get_observations(rev_num):
    """Fetch the gallery observation records for one revolution.

    Returns the YAML-decoded observation list, or None when the lookup fails
    (e.g. the revolution was never processed). Uses the module-level `token`
    and `host_type` globals.
    """
    wrap = oda_integral_wrapper.wrapper.INTEGRALwrapper(
        token=token, integral_data_rights='all-private', host_type=host_type)
    observations = None
    # Gallery observations are titled "rev. <number>".
    observation_title = "rev. " + str(rev_num)
    try:
        output_get = wrap.disp.get_yaml_files_observation_with_title(
            observation_title=observation_title, token=token)
        observations = yaml.safe_load(output_get['file_content'])
    except Exception as e:
        # Broad catch is deliberate best-effort: a missing observation is an
        # expected condition, reported and signalled via the None return.
        print(e)
        print("Revolution %s was not processed" % rev_num)
    return observations
def test_if_NRT(observations):
    """Return True if the (first) observation looks NRT-processed.

    Heuristic: an integer first entry in 'scw_list' marks near-real-time
    processing, a fractional one marks consolidated data — presumably a
    pointing-number convention; TODO confirm.

    NOTE(review): both branches return on the FIRST element, so only
    observations[0] is ever inspected, and the prints reference the global
    `rev_num` set by the calling loop (NameError if called standalone).
    Returns None for an empty observations list.
    """
    for source in observations:
        tt = source['scw_list']
        if tt[0] == np.floor(tt[0]):
            print("Revolution %s was processed in NRT" % rev_num)
            return True
        else:
            print("Revolution %s was processed in CONS" % rev_num)
            return False
def
test_if_processed_mosaics
(
observations
):
for
source
in
observations
:
if
not
'
processed_mosaics
'
in
source
or
not
source
[
'
processed_mosaics
'
]:
return
False
return
True
def
test_if_processed_spectra
(
observations
):
for
source
in
observations
:
if
not
'
processed_spectra
'
in
source
or
not
source
[
'
processed_spectra
'
]:
return
False
return
True
def
test_if_processed_lc
(
observations
):
for
source
in
observations
:
if
not
'
processed_lc
'
in
source
or
not
source
[
'
processed_lc
'
]:
return
False
return
True
# rev_num = 2578
# observations = get_observations(rev_num)
# if observations is not None:
# print(f"processed for rev num: {rev_num}")
# # print(json.dumps(observations, indent=4))
# if test_if_processed_mosaics(observations):
# print("mosaics have been processed")
# if test_if_processed_spectra(observations):
# print("spectra have been processed")
# if test_if_processed_lc(observations):
# print("light curves have been processed")
# else:
# print(f"not processed for rev num: {rev_num}")
```
%% Cell type:markdown id:6ba77738 tags:
## Actual processing using papermill
%% Cell type:code id:b021fee7 tags:
```
python
# Log of revolutions whose notebook execution failed; append mode preserves
# entries from earlier runs.
file_failed = open('failed_revolutions.txt', 'a')

for rr in revolutions_range:
    failed = False
    # NOTE(review): the first assignment is the pre-commit line retained by the
    # diff rendering; it is immediately shadowed by the int() version (which also
    # handles the string entries produced by the fallback range branch).
    rev_num = "%04d" % rr
    rev_num = "%04d" % int(rr)
    # Timestamp making each output notebook filename unique per run.
    now = datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    # Parameters injected into each "Generic Revolution *" notebook by papermill.
    par_dict = {
        'rev_num': int(rr),
        'host_type': host_type,
        'token': token,
        'batch_run': True,
        'E1_keV': E1_keV,
        'E2_keV': E2_keV,
        'notebooks_folder': notebooks_folder,
        'data_version': data_processing
    }

    # ---------------- Mosaics ----------------
    print(f"Checking revolution number for Mosaics: {rev_num}")
    rev_observations = get_observations(rev_num)
    mosaic_processed = False
    mosaic_nrt_processed = True
    if rev_observations is not None:
        mosaic_processed = test_if_processed_mosaics(rev_observations)
        mosaic_nrt_processed = test_if_NRT(rev_observations)
    # (Re)process when nothing is in the gallery, mosaics are missing, the
    # existing products are NRT-based, or reprocessing is forced.
    if rev_observations is None or not mosaic_processed or mosaic_nrt_processed or force_reprocessing:
        msg_mosaic = f"Processing Mosaics for revolution number: {rev_num}"
        if mosaic_processed:
            msg_mosaic = f"Re-processing Mosaics for revolution number: {rev_num}"
        print(msg_mosaic)
        try:
            mosaics_notebook_path = os.path.join(notebooks_folder, "Generic Revolution Mosaics.ipynb")
            print(f"Mosaics notebook path: {mosaics_notebook_path}")
            output_mosaics_notebook_path = os.path.join(
                notebooks_folder,
                f"out/Generic Revolution Mosaics_output_{rev_num}_{now}.ipynb")
            print(f"Output Mosaics notebook path: {output_mosaics_notebook_path}")
            nb_mosaics_execution = pm.execute_notebook(
                mosaics_notebook_path,
                output_mosaics_notebook_path,
                parameters=par_dict,
                log_output=True
            )
            print("Succesfully processed mosaics for revolution number: %s" % (rev_num))
        except Exception as e:
            # Record the failure and skip the remaining products for this revolution.
            print(e)
            failed = True
            print(f'Mosaics failed for revolution number: {rev_num}')
            print('Check ' + f"out/Generic Revolution Mosaics_output_{rev_num}_{now}.ipynb")
            file_failed.write(rev_num + ' Mosaics\n')
    else:
        msg_mosaic = f"Processing Mosaics for revolution number: {rev_num} skipped"
        if mosaic_processed:
            msg_mosaic += ", because it was already processed"
        print(msg_mosaic)

    # Spectra and light curves are only attempted when the mosaics step did not
    # fail. NOTE(review): nesting of the LC section under this guard is inferred —
    # the extraction lost indentation; confirm against the original notebook.
    if not failed:
        # ---------------- Spectra ----------------
        print(f"Checking revolution number for Spectra: {rev_num}")
        rev_observations = get_observations(rev_num)
        # NOTE(review): unlike the mosaics step there is no None guard here —
        # a None return would raise TypeError inside the test helpers.
        spectra_processed = test_if_processed_spectra(rev_observations)
        spectra_nrt_processed = test_if_NRT(rev_observations)
        if rev_observations is None or not spectra_processed or spectra_nrt_processed or force_reprocessing:
            msg_spectra = f"Processing Spectra for revolution number: {rev_num}"
            if spectra_processed:
                msg_spectra = f"Re-processing Spectra for revolution number: {rev_num}"
            print(msg_spectra)
            try:
                spectra_notebook_path = os.path.join(notebooks_folder, "Generic Revolution Spectra.ipynb")
                print(f"Spectra notebook path: {spectra_notebook_path}")
                output_spectra_notebook_path = os.path.join(
                    notebooks_folder,
                    f"out/Generic Revolution Spectra_output_{rev_num}_{now}.ipynb")
                print(f"Output Spectra notebook path: {output_spectra_notebook_path}")
                nb_spectra_execution = pm.execute_notebook(
                    spectra_notebook_path,
                    output_spectra_notebook_path,
                    parameters=par_dict,
                    log_output=True
                )
                print(f"Succesfully processed spectra for revolution number: {rev_num}")
            except Exception as e:
                print(e)
                failed = True
                print(f'Spectra failed for revolution number: {rev_num}')
                print('Check ' + f"out/Generic Revolution Spectra_output_{rev_num}_{now}.ipynb")
                file_failed.write(rev_num + ' Spectra\n')
        else:
            msg_spectra = f"Processing Spectra for revolution number: {rev_num} skipped"
            if spectra_processed:
                msg_spectra += ", because it was already processed"
            print(msg_spectra)

        # ---------------- Light curves ----------------
        print(f"Checking revolution number for LC: {rev_num}")
        rev_observations = get_observations(rev_num)
        # Same missing None guard as the spectra step — see note above.
        lc_processed = test_if_processed_lc(rev_observations)
        lc_nrt_processed = test_if_NRT(rev_observations)
        if rev_observations is None or not lc_processed or lc_nrt_processed or force_reprocessing:
            msg_lc = f"Processing LC for revolution number: {rev_num}"
            if lc_processed:
                msg_lc = f"Re-processing LC for revolution number: {rev_num}"
            print(msg_lc)
            try:
                lc_notebook_path = os.path.join(notebooks_folder, "Generic Revolution LC.ipynb")
                print(f"LC notebook path: {lc_notebook_path}")
                output_lc_notebook_path = os.path.join(
                    notebooks_folder,
                    f"out/Generic Revolution LC_output_{rev_num}_{now}.ipynb")
                print(f"Output LC notebook path: {output_lc_notebook_path}")
                nb_lc_execution = pm.execute_notebook(
                    lc_notebook_path,
                    output_lc_notebook_path,
                    parameters=par_dict,
                    log_output=True
                )
                print(f"Succesfully processed LC for revolution number: {rev_num}")
            except Exception as e:
                print(e)
                failed = True
                print(f'Light Curve failed for revolution number: {rev_num}')
                print('Check ' + f"out/Generic Revolution LC_output_{rev_num}_{now}.ipynb")
                file_failed.write(rev_num + ' LC\n')
        else:
            msg_lc = f"Processing LC for revolution number: {rev_num} skipped"
            if lc_processed:
                msg_lc += ", because it was already processed"
            print(msg_lc)

file_failed.close()
```
%% Cell type:code id:a7435cc1 tags:
```
python
```
...
...
This diff is collapsed.
Click to expand it.
Process_one_source.ipynb
0 → 100644
+
202
−
0
View file @
7f2d58e6
{
"cells": [
{
"cell_type": "markdown",
"id": "c276f355-b810-415d-b240-b757c426d646",
"metadata": {},
"source": [
"## Process one source and post to gallery the products\n",
"(Note that the user needs to have the rights for private data and gallery)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4c6bbf5c-accd-43c7-808d-8a1b770bb95f",
"metadata": {
"tags": [
"parameters"
]
},
"outputs": [],
"source": [
"tstart='2023-04-13T02:51:17' # http://odahub.io/ontology#StartTimeISOT\n",
"tstop='2023-04-15T09:53:00' # http://odahub.io/ontology#StopTimeISOT\n",
"source_name=\"GX 5-1\" # http://odahub.io/ontology#AstrophysicalObject\n",
"make_isgri_spectrum = False \n",
"make_isgri_lc = False\n",
"make_jemx_lc = True\n",
"make_jemx_spectra = True\n",
"jemx_units = [1, 2]\n",
"E1_keV_isgri = 28 # oda:keV\n",
"E2_keV_isgri = 40 # oda:keV\n",
"E1_keV_jemx = 3 # oda:keV\n",
"E2_keV_jemx = 20 # oda:keV\n",
"make_image = False\n",
"lc_time_bin = 1000 # http://odahub.io/ontology#TimeBinSeconds\n",
"use_default_catalog = False\n",
"in_evidence = True"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "eb157394-3bbd-4441-89f4-bb877bb072f4",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"import papermill as pm\n",
"from datetime import datetime\n",
"now = datetime.now().strftime(\"%Y_%m_%d_%H_%M_%S\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "cfbea58c-5222-4462-99e0-052f421674f9",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"par_dict = {\n",
" 'tstart' : tstart,\n",
" 'tstop' :tstop,\n",
" 'source_name' :source_name,\n",
" 'make_image' : make_image,\n",
" 'lc_time_bin': lc_time_bin,\n",
" 'in_evidence': in_evidence\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "80740324-41f9-47ce-8120-e7968e429f3a",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"source_name_nb = source_name.lower().replace(' ','_').replace('+','p')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2e97a6f1-7f74-4c0b-9bb6-1f3a00a81361",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"if make_jemx_spectra:\n",
" mosaics_notebook_path = 'jemx-spectrum.ipynb'\n",
" for jemx_unit in jemx_units:\n",
" par_dict.update({'jemx_unit': jemx_unit,\n",
" 'E1_keV' : E1_keV_jemx,\n",
" 'E2_keV' : E2_keV_jemx})\n",
" output_mosaics_notebook_path = 'jemx%d-spectrum_%s_%s.ipynb' % (jemx_unit, source_name_nb, now)\n",
" nb_mosaics_execution = pm.execute_notebook(\n",
" mosaics_notebook_path,\n",
" output_mosaics_notebook_path,\n",
" parameters=par_dict,\n",
" log_output=True\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2186e9df-8085-4c74-bdb8-26d71f258162",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"if make_jemx_lc:\n",
" mosaics_notebook_path = 'jemx-lightcurve.ipynb'\n",
" for jemx_unit in jemx_units:\n",
" par_dict.update({'jemx_unit': jemx_unit,\n",
" 'E1_keV' : E1_keV_jemx,\n",
" 'E2_keV' : E2_keV_jemx})\n",
" output_mosaics_notebook_path = 'jemx%d-lightcurve_%s_%s.ipynb' % (jemx_unit, source_name_nb,now)\n",
" nb_mosaics_execution = pm.execute_notebook(\n",
" mosaics_notebook_path,\n",
" output_mosaics_notebook_path,\n",
" parameters=par_dict,\n",
" log_output=True\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "0eeaef37-d055-4ed8-9e8e-2024e0361285",
"metadata": {},
"outputs": [],
"source": [
"if make_isgri_spectrum:\n",
" mosaics_notebook_path = 'isgri-spectrum.ipynb'\n",
" \n",
" par_dict.update({\n",
" 'E1_keV' : E1_keV_isgri,\n",
" 'E2_keV' : E2_keV_isgri})\n",
" output_mosaics_notebook_path = 'isgri-spectrum_%s_%s.ipynb' % (source_name_nb, now)\n",
" nb_mosaics_execution = pm.execute_notebook(\n",
" mosaics_notebook_path,\n",
" output_mosaics_notebook_path,\n",
" parameters=par_dict,\n",
" log_output=True\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "618a1aac-4205-4645-9a90-baa6b619db33",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"if make_isgri_lc:\n",
" mosaics_notebook_path = 'isgri-lightcurve.ipynb'\n",
" \n",
" par_dict.update({\n",
" 'E1_keV' : E1_keV_isgri,\n",
" 'E2_keV' : E2_keV_isgri})\n",
" output_mosaics_notebook_path = 'isgri-lightcurve_%s_%s.ipynb' % (source_name_nb, now)\n",
" nb_mosaics_execution = pm.execute_notebook(\n",
" mosaics_notebook_path,\n",
" output_mosaics_notebook_path,\n",
" parameters=par_dict,\n",
" log_output=True\n",
" )"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.11"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
%% Cell type:markdown id:c276f355-b810-415d-b240-b757c426d646 tags:
## Process one source and post to gallery the products
(Note that the user needs to have the rights for private data and gallery)
%% Cell type:code id:4c6bbf5c-accd-43c7-808d-8a1b770bb95f tags:parameters
```
python
# Papermill parameter cell for processing a single source.
# The trailing comments are ontology annotations read by the MMODA platform.
tstart='2023-04-13T02:51:17' # http://odahub.io/ontology#StartTimeISOT
tstop='2023-04-15T09:53:00' # http://odahub.io/ontology#StopTimeISOT
source_name="GX 5-1" # http://odahub.io/ontology#AstrophysicalObject
# Per-instrument product switches.
make_isgri_spectrum = False
make_isgri_lc = False
make_jemx_lc = True
make_jemx_spectra = True
jemx_units = [1, 2]
# Energy bands per instrument.
E1_keV_isgri = 28 # oda:keV
E2_keV_isgri = 40 # oda:keV
E1_keV_jemx = 3 # oda:keV
E2_keV_jemx = 20 # oda:keV
make_image = False
lc_time_bin = 1000 # http://odahub.io/ontology#TimeBinSeconds
# NOTE(review): use_default_catalog is not referenced by any later cell shown here.
use_default_catalog = False
in_evidence = True
```
%% Cell type:code id:eb157394-3bbd-4441-89f4-bb877bb072f4 tags:
```
python
import papermill as pm
from datetime import datetime

# Run timestamp appended to every output notebook filename to keep runs distinct.
now = datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
```
%% Cell type:code id:cfbea58c-5222-4462-99e0-052f421674f9 tags:
```
python
# Base parameter set shared by all product notebooks; instrument-specific keys
# (jemx_unit, E1_keV, E2_keV) are merged in by the cells below via .update().
par_dict = {
    'tstart' : tstart,
    'tstop' :tstop,
    'source_name' :source_name,
    'make_image' : make_image,
    'lc_time_bin': lc_time_bin,
    'in_evidence': in_evidence
}
```
%% Cell type:code id:80740324-41f9-47ce-8120-e7968e429f3a tags:
```
python
# Filesystem-friendly token derived from the source name:
# lowercase, spaces -> underscores, '+' -> 'p' (e.g. "GX 5-1" -> "gx_5-1").
sanitized = source_name.lower()
sanitized = sanitized.replace(' ', '_')
source_name_nb = sanitized.replace('+', 'p')
```
%% Cell type:code id:2e97a6f1-7f74-4c0b-9bb6-1f3a00a81361 tags:
```
python
# Run the JEM-X spectrum notebook once per requested unit.
# NOTE(review): the local names are copy-paste residue from a mosaics cell —
# they actually hold spectrum notebook paths/results here.
if make_jemx_spectra:
    mosaics_notebook_path = 'jemx-spectrum.ipynb'
    for jemx_unit in jemx_units:
        # Merge unit and JEM-X energy band into the shared parameter dict
        # (mutates par_dict, so later cells inherit the last values set here).
        par_dict.update({'jemx_unit': jemx_unit,
                         'E1_keV' : E1_keV_jemx,
                         'E2_keV' : E2_keV_jemx})
        output_mosaics_notebook_path = 'jemx%d-spectrum_%s_%s.ipynb' % (jemx_unit, source_name_nb, now)
        nb_mosaics_execution = pm.execute_notebook(
            mosaics_notebook_path,
            output_mosaics_notebook_path,
            parameters=par_dict,
            log_output=True
        )
```
%% Cell type:code id:2186e9df-8085-4c74-bdb8-26d71f258162 tags:
```
python
# Run the JEM-X light-curve notebook once per requested unit.
# NOTE(review): local names are copy-paste residue from a mosaics cell.
if make_jemx_lc:
    mosaics_notebook_path = 'jemx-lightcurve.ipynb'
    for jemx_unit in jemx_units:
        # Merge unit and JEM-X energy band into the shared parameter dict.
        par_dict.update({'jemx_unit': jemx_unit,
                         'E1_keV' : E1_keV_jemx,
                         'E2_keV' : E2_keV_jemx})
        output_mosaics_notebook_path = 'jemx%d-lightcurve_%s_%s.ipynb' % (jemx_unit, source_name_nb,now)
        nb_mosaics_execution = pm.execute_notebook(
            mosaics_notebook_path,
            output_mosaics_notebook_path,
            parameters=par_dict,
            log_output=True
        )
```
%% Cell type:code id:0eeaef37-d055-4ed8-9e8e-2024e0361285 tags:
```
python
# Run the ISGRI spectrum notebook for the source.
# NOTE(review): local names are copy-paste residue from a mosaics cell. Also note
# that a stale 'jemx_unit' key from the JEM-X cells may remain in par_dict.
if make_isgri_spectrum:
    mosaics_notebook_path = 'isgri-spectrum.ipynb'

    # Switch the energy band to the ISGRI range.
    par_dict.update({
        'E1_keV' : E1_keV_isgri,
        'E2_keV' : E2_keV_isgri})
    output_mosaics_notebook_path = 'isgri-spectrum_%s_%s.ipynb' % (source_name_nb, now)
    nb_mosaics_execution = pm.execute_notebook(
        mosaics_notebook_path,
        output_mosaics_notebook_path,
        parameters=par_dict,
        log_output=True
    )
```
%% Cell type:code id:618a1aac-4205-4645-9a90-baa6b619db33 tags:
```
python
# Run the ISGRI light-curve notebook for the source.
# NOTE(review): local names are copy-paste residue from a mosaics cell.
if make_isgri_lc:
    mosaics_notebook_path = 'isgri-lightcurve.ipynb'

    # Switch the energy band to the ISGRI range.
    par_dict.update({
        'E1_keV' : E1_keV_isgri,
        'E2_keV' : E2_keV_isgri})
    output_mosaics_notebook_path = 'isgri-lightcurve_%s_%s.ipynb' % (source_name_nb, now)
    nb_mosaics_execution = pm.execute_notebook(
        mosaics_notebook_path,
        output_mosaics_notebook_path,
        parameters=par_dict,
        log_output=True
    )
```
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment