Skip to content

Commit

Permalink
Merge pull request #36 from LCOGT/fix/update_for_tns_and_persistent_share
Browse files Browse the repository at this point in the history

Update tomtoolkit version, fix tns harvester, and integrate persistent share
  • Loading branch information
cmccully authored Jan 30, 2025
2 parents 38341d9 + b9592a3 commit f3b9518
Show file tree
Hide file tree
Showing 8 changed files with 37 additions and 16 deletions.
11 changes: 9 additions & 2 deletions custom_code/harvesters/tns_harvester.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,15 @@ def get(term):
get_data = {'api_key': api_key, 'data': json.dumps(json_file)}

response = requests.post(get_url, headers={'User-Agent': 'tns_marker{"tns_id":'+str(tns_id)+', "type":"bot", "name":"SNEx_Bot1"}'}, data=get_data)
response = json.loads(response.text)['data']['reply']
return response
response_data = json.loads(response.text)['data']
# If TNS succeeds in finding an object, it returns a reply containing the `objname`.
# If TNS fails to find the object, it returns a reply in the form:
# {'name': {'110': {'message': 'No results found.', 'message_id': 110}},
# 'objid': {'110': {'message': 'No results found.', 'message_id': 110}}}
# In this case, we return None
if not response_data.get('objname'):
return None
return response_data
except Exception as e:
return [None,'Error message : \n'+str(e)]

Expand Down
1 change: 1 addition & 0 deletions custom_code/hermes_data_converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ class SNEx2HermesDataConverter(HermesDataConverter):
def get_hermes_spectroscopy(self, datum):
spectroscopy_row = super().get_hermes_spectroscopy(datum)
# Add in SNEx specific ReducedDatumExtras here
snex1_id = None
snex1_id_row = ReducedDatumExtra.objects.filter(
data_type='spectroscopy',
target_id=datum.target.id,
Expand Down
8 changes: 4 additions & 4 deletions custom_code/hooks.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,15 +121,15 @@ def _get_tns_params(target):
result = json.dumps(parsed, indent=4)

result = json.loads(result)
discoverydate = result['data']['reply']['discoverydate']
discoverymag = result['data']['reply']['discoverymag']
discoveryfilt = result['data']['reply']['discmagfilter']['name']
discoverydate = result['data']['discoverydate']
discoverymag = result['data']['discoverymag']
discoveryfilt = result['data']['discmagfilter']['name']


nondets = {}
dets = {}

photometry = result['data']['reply']['photometry']
photometry = result['data']['photometry']
for phot in photometry:
remarks = phot['remarks']
if 'Last non detection' in remarks:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,13 +94,13 @@ def ingest_targets(q, stream_name):

json_list = {'ra': str(ra), 'dec': str(dec), 'radius': '5', 'units': 'arcsec', 'internal_name': name}
obj_list = requests.post(search_url, headers={'User-Agent': 'tns_marker{"tns_id":'+str(tns_id)+', "type":"bot", "name":"SNEx_Bot1"}'}, data={'api_key': api_key, 'data': json.dumps(json_list)})
obj_list = json.loads(obj_list.text)['data']['reply']
obj_list = json.loads(obj_list.text)['data']
if obj_list:
tns_name = obj_list[0]['objname']

class_json_list = {'objname': tns_name, 'photometry': 0, 'spectra': 0, 'classification': 1}
obj_data = requests.post(obj_url, headers={'User-Agent': 'tns_marker{"tns_id":'+str(tns_id)+', "type":"bot", "name":"SNEx_Bot1"}'}, data={'api_key': api_key, 'data': json.dumps(class_json_list)})
obj_data = json.loads(obj_data.text)['data']['reply']
obj_data = json.loads(obj_data.text)['data']
if obj_data:
sn_class = obj_data['object_type']['name']
else:
Expand Down Expand Up @@ -183,13 +183,13 @@ def handle(self, *args, **options):

json_list = {'ra': str(obj.ra), 'dec': str(obj.dec), 'radius': '5', 'units': 'arcsec', 'internal_name': obj.name}
obj_list = requests.post(search_url, headers={'User-Agent': 'tns_marker{"tns_id":'+str(tns_id)+', "type":"bot", "name":"SNEx_Bot1"}'}, data={'api_key': api_key, 'data': json.dumps(json_list)})
obj_list = json.loads(obj_list.text)['data']['reply']
obj_list = json.loads(obj_list.text)['data']
if obj_list:
tns_name = obj_list[0]['objname']

class_json_list = {'objname': tns_name, 'photometry': 1, 'spectra': 0, 'classification': 0}
obj_data = requests.post(obj_url, headers={'User-Agent': 'tns_marker{"tns_id":'+str(tns_id)+', "type":"bot", "name":"SNEx_Bot1"}'}, data={'api_key': api_key, 'data': json.dumps(class_json_list)})
obj_data = json.loads(obj_data.text)['data']['reply']
obj_data = json.loads(obj_data.text)['data']

det = json.loads(obj.detections)
nondet = json.loads(obj.nondetections)
Expand Down
4 changes: 2 additions & 2 deletions custom_code/management/commands/ingest_tnstargets.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def handle(self, *args, **kwargs):
json_list = {'public_timestamp': date}

obj_list = requests.post(search_url, headers={'User-Agent': 'tns_marker{"tns_id":'+str(tns_id)+', "type":"bot", "name":"SNEx_Bot1"}'}, data={'api_key': api_key, 'data': json.dumps(json_list)})
obj_list = json.loads(obj_list.text)['data']['reply']
obj_list = json.loads(obj_list.text)['data']

if not obj_list:
logger.info('No TNS targets found, have a good day!')
Expand All @@ -45,7 +45,7 @@ def handle(self, *args, **kwargs):

json_list = {'objname': obj['objname'], 'photometry': 1, 'spectra': 0}
obj_data = requests.post(obj_url, headers={'User-Agent': 'tns_marker{"tns_id":'+str(tns_id)+', "type":"bot", "name":"SNEx_Bot1"}'}, data={'api_key': api_key, 'data': json.dumps(json_list)})
obj_data = json.loads(obj_data.text)['data']['reply']
obj_data = json.loads(obj_data.text)['data']

name = obj_data['objname']
logger.info('Ingesting {name} . . .'.format(name=name))
Expand Down
6 changes: 3 additions & 3 deletions custom_code/processors/data_processor.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from importlib import import_module
from django.conf import settings
from tom_dataproducts.models import ReducedDatum
from tom_targets.sharing import continuous_share_data

DEFAULT_DATA_PROCESSOR_CLASS = 'tom_dataproducts.data_processor.DataProcessor'

Expand All @@ -21,8 +22,7 @@ def run_custom_data_processor(dp, extras, rd_extras):

reduced_datums = [ReducedDatum(target=dp.target, data_product=dp, data_type=dp.data_product_type,
timestamp=datum[0], value=datum[1]) for datum in data]
ReducedDatum.objects.bulk_create(reduced_datums)
reduced_datums = ReducedDatum.objects.bulk_create(reduced_datums)
continuous_share_data(dp.target, reduced_datums)

return ReducedDatum.objects.filter(data_product=dp), rd_extras


2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
dataclasses
tomtoolkit>=2.20.0
tomtoolkit>=2.22.5
tom-scimma
tom-nonlocalizedevents==0.8.1
tom-alertstreams
Expand Down
13 changes: 13 additions & 0 deletions templates/tom_targets/target_detail.html
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,9 @@
<li class="nav-item">
<a class="nav-link" id="manage-data-tab" href="#manage-data" role="tab" data-toggle="tab" style="color: #174460;">Manage Data</a>
</li>
<li class="nav-item">
<a class="nav-link" id="manage-sharing-tab" href="#manage-sharing" role="tab" data-toggle="tab" style="color: #174460;">Manage Sharing</a>
</li>
<li class="nav-item">
<a class="nav-link" id="manage-groups-tab" href="#manage-groups" role="tab" data-toggle="tab" style="color: #174460;">Observing Runs</a>
</li>
Expand Down Expand Up @@ -211,6 +214,16 @@ <h4>Schedule Observations</h4>
{% endif %}
{% dataproduct_list_for_target object %}
</div>
<div class="tab-pane" id="manage-sharing">
<h3>Create Persistent Share</h3>
<div id='target-persistent-share-create'>
{% create_persistent_share target %}
</div>
<h3>Manage Persistent Shares</h3>
<div id='target-persistent-share-table'>
{% persistent_share_table object %}
</div>
</div>
<div class="tab-pane" id="manage-groups">
{% target_groups target %}
</div>
Expand Down

0 comments on commit f3b9518

Please sign in to comment.