if action == 'data':
raise NotImplementedError(
"The action='data' option is a placeholder for future " +
"functionality.")
args = {
'INTERSECT': intersect
}
# Note: in IBE, if 'mcen' argument is present, it is true.
# If absent, it is false.
if most_centered:
args['mcen'] = '1'
if coordinate is not None:
c = commons.parse_coordinates(coordinate).transform_to(coord.ICRS)
args['POS'] = '{0},{1}'.format(c.ra.deg, c.dec.deg)
if width and height:
args['SIZE'] = '{0},{1}'.format(
coord.Angle(width).value,
coord.Angle(height).value)
elif width or height:
args['SIZE'] = str(coord.Angle(width or height).value)
if where:
args['where'] = where
if columns:
if isinstance(columns, six.string_types):
columns = columns.split()
args['columns'] = ','.join(columns)
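# A minimal standalone check of the POS/SIZE formatting used above, with plain
# astropy objects instead of astroquery's internal helpers; the coordinate and
# angle values are illustrative assumptions, not taken from the source.
from astropy import coordinates as coord

c = coord.SkyCoord('10h21m00s +41d16m00s').transform_to('icrs')
pos = '{0},{1}'.format(c.ra.deg, c.dec.deg)   # e.g. '155.25,41.266...'
size = str(coord.Angle('0.25 deg').value)     # '0.25'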
This function queries a Vizier catalog for the sources inside a rectangular box on the sky.
:param box: the box as (center RA, center Dec, height, width), in degrees
:param catalog: the Vizier catalog (or list of catalogs) to query
:param keywords: keywords passed on to the Vizier object
:param radius:
:param limit: the maximum number of rows to return (None means no limit)
:param column_filters: optional column filters passed on to the Vizier object
:return:
"""
# Define the center coordinate for the box
coordinate = SkyCoordinate(ra=box[0], dec=box[1], unit="deg", frame="fk5") # frame: icrs, fk5... ?
# Make a Vizier object
if column_filters is None:
viz = Vizier(columns=['_RAJ2000', '_DEJ2000','B-V', 'Vmag', 'Plx'], keywords=keywords)
else:
viz = Vizier(columns=['_RAJ2000', '_DEJ2000','B-V', 'Vmag', 'Plx'], column_filters=column_filters, keywords=keywords)
# Set the row limit if one is given; -1 means no limit
viz.ROW_LIMIT = limit if limit is not None else -1
# Query the box of our image frame
result = viz.query_region(coordinate.to_astropy(), width=box[3] * Unit("deg"), height=box[2] * Unit("deg"), catalog=catalog)
region_string = "# Region file format: DS9 version 3.0\n"
region_string += "global color=green\n"
# Result may contain multiple tables (for different catalogs)
for table in result:
# For every entry in the table
for entry in table:
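# Standalone usage sketch of the same box-query pattern with hard-coded,
# illustrative values; the center coordinate, catalog ID (Hipparcos) and the
# 2" region radius are assumptions, not taken from the original source.
from astropy.coordinates import SkyCoord
from astropy.units import Unit
from astroquery.vizier import Vizier

center = SkyCoord(ra=10.68, dec=41.27, unit="deg", frame="fk5")
viz = Vizier(columns=['_RAJ2000', '_DEJ2000', 'Vmag', 'Plx'])
viz.ROW_LIMIT = -1  # no row limit
tables = viz.query_region(center, width=0.3 * Unit("deg"),
                          height=0.2 * Unit("deg"), catalog="I/239/hip_main")
regions = "# Region file format: DS9 version 3.0\nglobal color=green\n"
for table in tables:
    for entry in table:
        # one small circle region per catalog source
        regions += 'fk5;circle({0},{1},2") # color=green\n'.format(
            entry['_RAJ2000'], entry['_DEJ2000'])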
import os
import sys
try:
from astroquery.vizier import Vizier
except ImportError as e:
raise ImportError("astroquery module required to use the download_gleam script") from e
catalog_dir = "first_generation/catalog_files/"
name = "gleam.vot"
opath = os.path.join(catalog_dir, name)
if os.path.exists(opath):
print("GLEAM already downloaded to {}.".format(opath))
sys.exit()
Vizier.ROW_LIMIT = -1
Vizier.columns = ['GLEAM', 'RAJ2000', 'DEJ2000', 'Fintwide']
catname = 'VIII/100/gleamegc'
tab = Vizier.get_catalogs(catname)[0]
tab.write(opath, format='votable')
print("GLEAM catalog downloaded and saved to " + opath)
table : str or list of str
    The table to strip, either as one newline-separated string or as a list of lines.
Returns
-------
str
    The table with blank (non-alphanumeric) lines removed, joined by newlines.
"""
numbersletters = re.compile("[0-9A-Za-z]")
if isinstance(table, str):
table = table.split('\n')
table = [line for line in table if numbersletters.search(line)]
return "\n".join(table)
@async_to_sync
class NistClass(BaseQuery):
URL = conf.server
TIMEOUT = conf.timeout
unit_code = {'Angstrom': 0,
'nm': 1,
'um': 2}
energy_level_code = {'cm-1': 0, 'invcm': 0, 'cm': 0,
'ev': 1, 'eV': 1, 'EV': 1, 'electronvolt': 1,
'R': 2, 'Rydberg': 2, 'rydberg': 2}
order_out_code = {'wavelength': 0,
'multiplet': 1}
wavelength_unit_code = {'vacuum': 3,
'vac+air': 4}
def _args_to_payload(self, *args, **kwargs):
"""
Serves the same purpose as `~NistClass.query` but returns
output_file : str, optional, default None
file name where the results are saved if dumpToFile is True.
If this parameter is not provided, the jobid is used instead
output_format : str, optional, default 'votable'
results format
verbose : bool, optional, default 'False'
flag to display information about the process
dump_to_file : bool, optional, default 'False'
if True, the results are saved in a file instead of using memory
Returns
-------
A Job object
"""
coord = self.__getCoordInput(coordinate, "coordinate")
raHours, dec = commons.coord_to_radec(coord)
ra = raHours * 15.0 # Converts to degrees
if radius is not None:
radiusQuantity = self.__getQuantityInput(radius, "radius")
radiusDeg = commons.radius_to_unit(radiusQuantity, unit='deg')
query = "SELECT DISTANCE(POINT('ICRS',"+str(MAIN_GAIA_TABLE_RA)+","\
+str(MAIN_GAIA_TABLE_DEC)+"), \
POINT('ICRS',"+str(ra)+","+str(dec)+")) AS dist, * \
FROM "+str(MAIN_GAIA_TABLE)+" WHERE CONTAINS(\
POINT('ICRS',"+str(MAIN_GAIA_TABLE_RA)+","+str(MAIN_GAIA_TABLE_DEC)+"),\
CIRCLE('ICRS',"+str(ra)+","+str(dec)+", "+str(radiusDeg)+"))=1 \
ORDER BY dist ASC"
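# (Sketch) For illustrative inputs ra=150.0, dec=2.0 and radiusDeg=0.1, with the
# default 'ra'/'dec' column names, the rendered ADQL is roughly:
#   SELECT DISTANCE(POINT('ICRS', ra, dec), POINT('ICRS', 150.0, 2.0)) AS dist, *
#   FROM <MAIN_GAIA_TABLE> WHERE CONTAINS(POINT('ICRS', ra, dec),
#   CIRCLE('ICRS', 150.0, 2.0, 0.1))=1 ORDER BY dist ASC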
if async_job:  # 'async' became a reserved word in Python 3.7; newer astroquery calls this parameter 'async_job'
return self.launch_job_async(query=query,
output_file=output_file,
output_format=output_format,
verbose=verbose,
If this parameter is not provided, the jobid is used instead
output_format : str, optional, default 'votable'
results format
verbose : bool, optional, default 'False'
flag to display information about the process
dump_to_file : bool, optional, default 'False'
if True, the results are saved in a file instead of using memory
columns: list, optional, default []
if empty, all columns will be selected
Returns
-------
A Job object
"""
coord = self.__getCoordInput(coordinate, "coordinate")
raHours, dec = commons.coord_to_radec(coord)
ra = raHours * 15.0 # Converts to degrees
if radius is not None:
radiusQuantity = self.__getQuantityInput(radius, "radius")
radiusDeg = commons.radius_to_unit(radiusQuantity, unit='deg')
if columns:
columns = ','.join(map(str, columns))
else:
columns = "*"
query = """
SELECT
{columns},
DISTANCE(
POINT('ICRS', {ra_column}, {dec_column}),
POINT('ICRS', {ra}, {dec})
"the file URLs directly with download_files.")
response.raise_for_status()
if 'j_spring_cas_security_check' in response.url:
time.sleep(1)
# CANNOT cache this stage: it is not a real data page, and caching it
# results in infinite loops
response = self._request('POST', url, data=payload,
timeout=self.TIMEOUT, cache=False)
self._staging_log['initial_response'] = response
if 'j_spring_cas_security_check' in response.url:
log.warning("Staging request was not successful. Try again?")
response.raise_for_status()
if 'j_spring_cas_security_check' in response.url:
raise RemoteServiceError("Could not access data. This error "
"can arise if the data are private and "
"you do not have access rights or are "
"not logged in.")
request_id = response.url.split("/")[-2]
self._staging_log['request_id'] = request_id
log.debug("Request ID: {0}".format(request_id))
# Submit a request for the specific request ID identified above
submission_url = urljoin(self.dataarchive_url,
url_helpers.join('rh/submission', request_id))
log.debug("Submission URL: {0}".format(submission_url))
self._staging_log['submission_url'] = submission_url
staging_submission = self._request('GET', submission_url, cache=True)
self._staging_log['staging_submission'] = staging_submission
staging_submission.raise_for_status()
size, unit = re.search(r'([0-9\.]*)([A-Za-z]*)',
tds[3].text).groups()
columns['uid'].append(uid)
columns['URL'].append(href.attrs['href'])
unit = (u.Unit(unit) if unit in ('GB', 'MB')
else u.Unit('kB') if 'kb' in unit.lower()
else 1)
columns['size'].append(float(size) * u.Unit(unit))
log.log(level=5, msg="Found an old-style entry. "
"size={0} uid={1} url={2}".format(size, uid,
columns['URL'][-1]))
columns['size'] = u.Quantity(columns['size'], u.Gbyte)
if len(columns['uid']) == 0:
raise RemoteServiceError(
"No valid UIDs were found in the staged data table. "
"Please include {0} in a bug report."
.format(self._staging_log['data_list_url']))
tbl = Table([Column(name=k, data=v) for k, v in iteritems(columns)])
return tbl
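# Standalone sketch of the size/unit parsing used above, with a made-up cell
# value to show how a string such as '123.4MB' ends up as an astropy Quantity.
import re
from astropy import units as u

size, unit = re.search(r'([0-9\.]*)([A-Za-z]*)', '123.4MB').groups()
quantity = float(size) * (u.Unit(unit) if unit in ('GB', 'MB')
                          else u.Unit('kB') if 'kb' in unit.lower()
                          else u.Unit(''))
print(quantity)  # a Quantity in megabytes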
cache=cache and not get_html_version)
self._last_response = response
response.raise_for_status()
if get_html_version:
if 'run' not in response.text:
if max_retries > 0:
log.info("Failed query. Retrying up to {0} more times"
.format(max_retries))
return self.query_async(payload=payload, cache=False,
public=public, science=science,
max_retries=max_retries-1,
get_html_version=get_html_version,
get_query_payload=get_query_payload,
**kwargs)
raise RemoteServiceError("Incorrect return from HTML table query.")
response2 = self._request('GET',
"{0}/{1}/{2}".format(
self._get_dataarchive_url(), 'aq',
response.text),
params={'query_url':
response.url.split("?")[-1]},
timeout=self.TIMEOUT,
cache=False,
)
self._last_response = response2
response2.raise_for_status()
if len(response2.text) == 0:
if max_retries > 0:
log.info("Failed (empty) query. Retrying up to {0} more times"
.format(max_retries))
return self.query_async(payload=payload, cache=cache,