text stringlengths 81 112k |
|---|
Get existing validation records.
def _get_existing(driver, zone_name, server_name, validation):
"""
Get existing validation records.
"""
if zone_name is None:
zones = sorted(
(z for z
in driver.list_zones()
if server_name.rstrip(u'.')
.endsw... |
Get the validation value for a challenge response.
def _validation(response):
"""
Get the validation value for a challenge response.
"""
h = hashlib.sha256(response.key_authorization.encode("utf-8"))
return b64encode(h.digest()).decode() |
Load the client key from a directory, creating it if it does not exist.
.. note:: The client key that will be created will be a 2048-bit RSA key.
:type pem_path: ``twisted.python.filepath.FilePath``
:param pem_path: The certificate directory
to use, as with the endpoint.
def load_or_create_client... |
Parse a txacme endpoint description.
:param reactor: The Twisted reactor.
:param directory: ``twisted.python.url.URL`` for the ACME directory to use
for issuing certs.
:param str pemdir: The path to the certificate directory to use.
def _parse(reactor, directory, pemdir, *args, **kwargs):
"""
... |
Generator which continually reads ``f`` to the next instance
of ``delimiter``.
This allows you to do batch processing on the contents of ``f`` without
loading the entire file into memory.
:param f: Any file-like object which has a ``.read()`` method.
:param delimiter: Delimiter on which to split u... |
Generate a random private key using sensible parameters.
:param str key_type: The type of key to generate. One of: ``rsa``.
def generate_private_key(key_type):
"""
Generate a random private key using sensible parameters.
:param str key_type: The type of key to generate. One of: ``rsa``.
"""
i... |
Generate a certificate/key pair for responding to a tls-sni-01 challenge.
:param str server_name: The SAN the certificate should have.
:param str key_type: The type of key to generate; usually not necessary.
:rtype: ``Tuple[`~cryptography.x509.Certificate`, PrivateKey]``
:return: A tuple of the certif... |
"Tap" a Deferred callback chain with a function whose return value is
ignored.
def tap(f):
"""
"Tap" a Deferred callback chain with a function whose return value is
ignored.
"""
@wraps(f)
def _cb(res, *a, **kw):
d = maybeDeferred(f, res, *a, **kw)
d.addCallback(lambda ignore... |
Decode JOSE Base-64 DER-encoded CSR.
:param str b64der: The encoded CSR.
:rtype: `cryptography.x509.CertificateSigningRequest`
:return: The decoded CSR.
def decode_csr(b64der):
"""
Decode JOSE Base-64 DER-encoded CSR.
:param str b64der: The encoded CSR.
:rtype: `cryptography.x509.Certif... |
Generate a certificate signing request for the given names and private key.
.. seealso:: `acme.client.Client.request_issuance`
.. seealso:: `generate_private_key`
:param ``List[str]``: One or more names (subjectAltName) for which to
request a certificate.
:param key: A Cryptography private ... |
async wrapper is required to avoid await calls raising a SyntaxError
def _wrap_parse(code, filename):
"""
async wrapper is required to avoid await calls raising a SyntaxError
"""
code = 'async def wrapper():\n' + indent(code, ' ')
return ast.parse(code, filename=filename).body[0... |
Determine the URL corresponding to Python object
def linkcode_resolve(domain, info):
"""
Determine the URL corresponding to Python object
"""
if domain != 'py':
return None
modname = info['module']
fullname = info['fullname']
submod = sys.modules.get(modname)
if submod is None:
... |
Sync n layers in Solr.
def layers_to_solr(self, layers):
"""
Sync n layers in Solr.
"""
layers_dict_list = []
layers_success_ids = []
layers_errors_ids = []
for layer in layers:
layer_dict, message = layer2dict(layer)
if not layer_dict:
... |
Sync a layer in Solr.
def layer_to_solr(self, layer):
"""
Sync a layer in Solr.
"""
success = True
message = 'Synced layer id %s to Solr' % layer.id
layer_dict, message = layer2dict(layer)
if not layer_dict:
success = False
else:
... |
Clear all indexes in the solr core
def clear_solr(self, catalog="hypermap"):
    """Clear all indexes in the solr core"""
    # Build the core URL and issue a delete-all query against it.
    core_url = "{0}/solr/{1}".format(SEARCH_URL, catalog)
    core = pysolr.Solr(core_url, timeout=60)
    core.delete(q='*:*')
    LOGGER.debug('Solr core cleared')
set the mapping in solr.
:param catalog: core
:return:
def update_schema(self, catalog="hypermap"):
"""
set the mapping in solr.
:param catalog: core
:return:
"""
schema_url = "{0}/solr/{1}/schema".format(SEARCH_URL, catalog)
print schema_url
... |
Create a layer / keyword list from a metadata record if it does not already exist.
def create_layer_from_metadata_xml(resourcetype, xml, monitor=False, service=None, catalog=None):
"""
Create a layer / keyword list from a metadata record if it does not already exist.
"""
from models import gen_anytext,... |
Create a service from an endpoint if it does not already exist.
def create_service_from_endpoint(endpoint, service_type, title=None, abstract=None, catalog=None):
"""
Create a service from an endpoint if it does not already exists.
"""
from models import Service
if Service.objects.filter(url=endpo... |
Generate service/services from an endpoint.
WMS, WMTS, TMS endpoints correspond to a single service.
ESRI, CSW endpoints correspond to many services.
:return: imported, message
def create_services_from_endpoint(url, catalog, greedy_opt=True):
"""
Generate service/services from an endpoint.
WMS,... |
Function that parses from url the service and folder of services.
def service_url_parse(url):
"""
Function that parses from url the service and folder of services.
"""
endpoint = get_sanitized_endpoint(url)
url_split_list = url.split(endpoint + '/')
if len(url_split_list) != 0:
url_spli... |
Given coordinates in spherical mercator, return a lon,lat tuple.
def inverse_mercator(xy):
"""
Given coordinates in spherical mercator, return a lon,lat tuple.
"""
lon = (xy[0] / 20037508.34) * 180
lat = (xy[1] / 20037508.34) * 180
lat = 180 / math.pi * \
(2 * math.atan(math.exp(lat * m... |
OWSLib wrapper function to perform version negotiation against owslib.wms.WebMapService
def get_wms_version_negotiate(url, timeout=10):
"""
OWSLib wrapper function to perform version negotiation against owslib.wms.WebMapService
"""
try:
LOGGER.debug('Trying a WMS 1.3.0 GetCapabilities request'... |
Sanitize an endpoint by removing unneeded parameters
def get_sanitized_endpoint(url):
"""
Sanitize an endpoint, as removing unneeded parameters
"""
# sanitize esri
sanitized_url = url.rstrip()
esri_string = '/rest/services'
if esri_string in url:
match = re.search(esri_string, sani... |
A method to get a service name from an esri endpoint.
For example: http://example.com/arcgis/rest/services/myservice/mylayer/MapServer/?f=json
Will return: myservice/mylayer
def get_esri_service_name(url):
"""
A method to get a service name from an esri endpoint.
For example: http://example.com/arc... |
Get the extent of an ESRI resource
def get_esri_extent(esriobj):
"""
Get the extent of an ESRI resource
"""
extent = None
srs = None
if 'fullExtent' in esriobj._json_struct:
extent = esriobj._json_struct['fullExtent']
if 'extent' in esriobj._json_struct:
extent = esriobj._... |
Return OGC WKT Polygon of a simple bbox list of strings
def bbox2wktpolygon(bbox):
"""
Return OGC WKT Polygon of a simple bbox list of strings
"""
minx = float(bbox[0])
miny = float(bbox[1])
maxx = float(bbox[2])
maxy = float(bbox[3])
return 'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %... |
Returns a date in a valid Solr format from a string.
def get_solr_date(pydate, is_negative):
"""
Returns a date in a valid Solr format from a string.
"""
# check if date is valid and then set it to solr format YYYY-MM-DDThh:mm:ssZ
try:
if isinstance(pydate, datetime.datetime):
s... |
Returns a custom date representation. A date can be detected or from metadata.
It can be a range or a simple date in isoformat.
def get_date(layer):
"""
Returns a custom date representation. A date can be detected or from metadata.
It can be a range or a simple date in isoformat.
"""
date = Non... |
Return a json representation for a layer.
def layer2dict(layer):
"""
Return a json representation for a layer.
"""
category = None
username = None
# bbox must be valid before proceeding
if not layer.has_valid_bbox():
message = 'Layer id: %s has a not valid bbox' % layer.id
... |
detect whether a url is a Service type that HHypermap supports
def detect_metadata_url_scheme(url):
"""detect whether a url is a Service type that HHypermap supports"""
scheme = None
url_lower = url.lower()
if any(x in url_lower for x in ['wms', 'service=wms']):
scheme = 'OGC:WMS'
if any(... |
Serialize a check_set for raphael
def serialize_checks(check_set):
"""
Serialize a check_set for raphael
"""
check_set_list = []
for check in check_set.all()[:25]:
check_set_list.append(
{
'datetime': check.checked_datetime.isoformat(),
'value': c... |
A page with number of services and layers faceted on domains.
def domains(request):
"""
A page with number of services and layers faceted on domains.
"""
url = ''
query = '*:*&facet=true&facet.limit=-1&facet.pivot=domain_name,service_id&wt=json&indent=true&rows=0'
if settings.SEARCH_TYPE == 'el... |
A page that lets the admin run global tasks.
def tasks_runner(request):
"""
A page that let the admin to run global tasks.
"""
# server info
cached_layers_number = 0
cached_layers = cache.get('layers')
if cached_layers:
cached_layers_number = len(cached_layers)
cached_delete... |
Get Layer with matching catalog and uuid
def layer_mapproxy(request, catalog_slug, layer_uuid, path_info):
"""
Get Layer with matching catalog and uuid
"""
layer = get_object_or_404(Layer,
uuid=layer_uuid,
catalog__slug=catalog_slug)
# fo... |
Parses a date string to date object.
for BCE dates, only supports the year part.
def parse_datetime(date_str):
"""
Parses a date string to date object.
for BCE dates, only supports the year part.
"""
is_common_era = True
date_str_parts = date_str.split("-")
if date_str_parts and date_st... |
:param time_filter: [2013-03-01 TO 2013-05-01T00:00:00]
:return: (2013-03-01, 2013-05-01T00:00:00)
def parse_solr_time_range_as_pair(time_filter):
"""
:param time_filter: [2013-03-01 TO 2013-05-01T00:00:00]
:return: (2013-03-01, 2013-05-01T00:00:00)
"""
pattern = "\\[(.*) TO (.*)\\]"
matche... |
Parse the url param to python objects.
From what time range to divide by a.time.gap into intervals.
Defaults to q.time and otherwise 90 days.
Validate in API: re.search("\\[(.*) TO (.*)\\]", value)
:param time_filter: [2013-03-01 TO 2013-05-01T00:00:00]
:return: datetime.datetime(2013, 3, 1, 0, 0), ... |
P1D to (1, ("DAYS", isodate.Duration(days=1)).
P1Y to (1, ("YEARS", isodate.Duration(years=1)).
:param time_gap: ISO8601 string.
:return: tuple with quantity and unit of time.
def parse_ISO8601(time_gap):
"""
P1D to (1, ("DAYS", isodate.Duration(days=1)).
P1Y to (1, ("YEARS", isodate.Duration(y... |
Compute a gap that seems reasonable, considering natural time units and limit.
# TODO: make it to be reasonable.
# TODO: make it to be small unit of time sensitive.
:param start: datetime
:param end: datetime
:param time_limit: gaps count
:return: solr's format duration.
def compute_gap(start, ... |
P1D to +1DAY
:param time_gap:
:return: solr's format duration.
def gap_to_sorl(time_gap):
"""
P1D to +1DAY
:param time_gap:
:return: solr's format duration.
"""
quantity, unit = parse_ISO8601(time_gap)
if unit[0] == "WEEKS":
return "+{0}DAYS".format(quantity * 7)
else:
... |
time facet query builder
:param field: map the query to this field.
:param time_limit: Non-0 triggers time/date range faceting. This value is the maximum number of time ranges to
return when a.time.gap is unspecified. This is a soft maximum; less will usually be returned.
A suggested value is 100.
N... |
:param geo_box_str: [-90,-180 TO 90,180]
:return: ("-90,-180", "90,180")
def parse_solr_geo_range_as_pair(geo_box_str):
"""
:param geo_box_str: [-90,-180 TO 90,180]
:return: ("-90,-180", "90,180")
"""
pattern = "\\[(.*) TO (.*)\\]"
matcher = re.search(pattern, geo_box_str)
if matcher:
... |
parses [-90,-180 TO 90,180] to a shapely.geometry.box
:param geo_box_str:
:return:
def parse_geo_box(geo_box_str):
"""
parses [-90,-180 TO 90,180] to a shapely.geometry.box
:param geo_box_str:
:return:
"""
from_point_str, to_point_str = parse_solr_geo_range_as_pair(geo_box_str)
fro... |
heatmap facet query builder
:param field: map the query to this field.
:param hm_filter: From what region to plot the heatmap. Defaults to q.geo or otherwise the world.
:param hm_grid_level: To explicitly specify the grid level, e.g. to let a user ask for greater or courser
resolution than the most rece... |
translate [* TO *] to something like [MIN-INDEXED-DATE TO MAX-INDEXED-DATE]
:param field: map the stats to this field.
:param time_filter: this is the value to be translated. think in "[* TO 2000]"
:param search_engine_endpoint: solr core
:param actual_params: (not implemented) to merge with other params.... |
Set a service object based on the XML metadata
<dct:references scheme="OGC:WMS">http://ngamaps.geointapps.org/arcgis
/services/RIO/Rio_Foundation_Transportation/MapServer/WMSServer
</dct:references>
:param instance:
:return: Layer
def get_service(raw_xml):
"""
Set a service object ... |
Query by list of identifiers
def query_ids(self, ids):
"""
Query by list of identifiers
"""
results = self._get_repo_filter(Layer.objects).filter(uuid__in=ids).all()
if len(results) == 0: # try services
results = self._get_repo_filter(Service.objects).filter(uuid_... |
Query by property domain values
def query_domain(self, domain, typenames, domainquerytype='list', count=False):
"""
Query by property domain values
"""
objects = self._get_repo_filter(Layer.objects)
if domainquerytype == 'range':
return [tuple(objects.aggregate(Min... |
Query to get latest (default) or earliest update to repository
def query_insert(self, direction='max'):
"""
Query to get latest (default) or earliest update to repository
"""
if direction == 'min':
return Layer.objects.aggregate(
Min('last_updated'))['last_up... |
Query by source
def query_source(self, source):
"""
Query by source
"""
return self._get_repo_filter(Layer.objects).filter(url=source) |
Query records from underlying repository
def query(self, constraint, sortby=None, typenames=None, maxrecords=10, startposition=0):
"""
Query records from underlying repository
"""
# run the raw query and get total
# we want to exclude layers which are not valid, as it is done i... |
Insert a record into the repository
def insert(self, resourcetype, source, insert_date=None):
"""
Insert a record into the repository
"""
caller = inspect.stack()[1][3]
if caller == 'transaction': # insert of Layer
hhclass = 'Layer'
source = resourcety... |
Insert or update a record in the repository
def _insert_or_update(self, resourcetype, source, mode='insert', hhclass='Service'):
"""
Insert or update a record in the repository
"""
keywords = []
if self.filter is not None:
catalog = Catalog.objects.get(id=int(self.... |
Delete a record from the repository
def delete(self, constraint):
"""
Delete a record from the repository
"""
results = self._get_repo_filter(Service.objects).extra(where=[constraint['where']],
params=constraint['values']).... |
Apply repository wide side filter / mask query
def _get_repo_filter(self, query):
"""
Apply repository wide side filter / mask query
"""
if self.filter is not None:
return query.extra(where=[self.filter])
return query |
緯度経度から指定次の地域メッシュコードを算出する。
Args:
lat: 世界測地系の緯度(度単位)
lon: 世界測地系の経度(度単位)
level: 地域メッシュコードの次数
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
... |
メッシュコードから次数を算出する。
Args:
meshcode: メッシュコード
Return:
地域メッシュコードの次数
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
... |
地域メッシュコードから緯度経度を算出する。
下記のメッシュに対応している。
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
4倍(4km四方):4000
2.5倍(... |
Check the permissions, http method and login state.
def check(func):
"""
Check the permissions, http method and login state.
"""
def iCheck(request, *args, **kwargs):
if not request.method == "POST":
return HttpResponseBadRequest("Must be POST request.")
follow = func(reque... |
This registers any model class to be follow-able.
def register(model, field_name=None, related_name=None, lookup_method_name='get_follows'):
"""
This registers any model class to be follow-able.
"""
if model in registry:
return
registry.append(model)
if not field_name:
... |
Make a user follow an object
def follow(user, obj):
    """ Make a user follow an object """
    # get_or_create returns (instance, created); only the instance
    # is of interest to callers.
    link, _created = Follow.objects.get_or_create(user, obj)
    return link
Make a user unfollow an object
def unfollow(user, obj):
    """ Make a user unfollow an object """
    # Delete the existing follow link if there is one; silently
    # returns None when the user was not following the object.
    try:
        existing = Follow.objects.get_follows(obj).get(user=user)
        existing.delete()
        return existing
    except Follow.DoesNotExist:
        pass
Toggles a follow status. Useful function if you don't want to perform follow
checks but just toggle it on / off.
def toggle(user, obj):
""" Toggles a follow status. Useful function if you don't want to perform follow
checks but just toggle it on / off. """
if Follow.objects.is_following(user, obj):
... |
Would be for example: [2013-03-01 TO 2013-04-01T00:00:00] and/or [* TO *]
Returns a valid sorl value. [2013-03-01T00:00:00Z TO 2013-04-01T00:00:00Z] and/or [* TO *]
def validate_q_time(self, value):
"""
Would be for example: [2013-03-01 TO 2013-04-01T00:00:00] and/or [* TO *]
Returns a ... |
Would be for example: [-90,-180 TO 90,180]
def validate_q_geo(self, value):
"""
Would be for example: [-90,-180 TO 90,180]
"""
if value:
try:
rectangle = utils.parse_geo_box(value)
return "[{0},{1} TO {2},{3}]".format(
rect... |
Would be for example: [2013-03-01 TO 2013-04-01:00:00:00] and/or [* TO *]
def validate_a_time_filter(self, value):
"""
Would be for example: [2013-03-01 TO 2013-04-01:00:00:00] and/or [* TO *]
"""
if value:
try:
utils.parse_datetime_range(value)
e... |
Return the field name on the :class:`Follow` model for ``model_or_obj_or_qs``.
def fname(self, model_or_obj_or_qs):
"""
Return the field name on the :class:`Follow` model for ``model_or_obj_or_qs``.
"""
if isinstance(model_or_obj_or_qs, QuerySet):
_, fname = model_map[model... |
Create a new follow link between a user and an object
of a registered model type.
def create(self, user, obj, **kwargs):
"""
Create a new follow link between a user and an object
of a registered model type.
"""
follow = Follow(user=user)
follow.target = ... |
Almost the same as `FollowManager.objects.create` - behaves the same
as the normal `get_or_create` methods in django though.
Returns a tuple with the `Follow` and either `True` or `False`
def get_or_create(self, user, obj, **kwargs):
"""
Almost the same as `FollowManager.objects.cre... |
Returns `True` or `False`
def is_following(self, user, obj):
""" Returns `True` or `False` """
if isinstance(user, AnonymousUser):
return False
return 0 < self.get_follows(obj).filter(user=user).count() |
Returns all the followers of a model, an object or a queryset.
def get_follows(self, model_or_obj_or_qs):
"""
Returns all the followers of a model, an object or a queryset.
"""
fname = self.fname(model_or_obj_or_qs)
if isinstance(model_or_obj_or_qs, QuerySet):
... |
create_event_regressors creates the part of the design matrix corresponding to one event type.
:param event_times_indices: indices in the resampled data, on which the events occurred.
:type event_times_indices: numpy array, (nr_events)
:param covariates: covariates belonging to thi... |
create_design_matrix calls create_event_regressors for each of the covariates in the self.covariates dict. self.designmatrix is created and is shaped (nr_regressors, self.resampled_signal.shape[-1])
def create_design_matrix(self, demean = False, intercept = True):
"""create_design_matrix calls create_event_reg... |
add_continuous_regressors_to_design_matrix appends continuously sampled regressors to the existing design matrix. One uses this addition to the design matrix when one expects the data to contain nuisance factors that aren't tied to the moments of specific events. For instance, in fMRI analysis this allows us to add car... |
regress performs linear least squares regression of the designmatrix on the data.
:param method: method, or backend to be used for the regression analysis.
:type method: string, one of ['lstsq', 'sm_ols']
:returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' ... |
perform k-folds cross-validated ridge regression on the design_matrix. To be used when the design matrix contains very collinear regressors. For cross-validation and ridge fitting, we use sklearn's RidgeCV functionality. Note: intercept is not fit, and data are not prenormalized.
:param cv: cross-validate... |
betas_for_cov returns the beta values (i.e. IRF) associated with a specific covariate.
:param covariate: name of covariate.
:type covariate: string
def betas_for_cov(self, covariate = '0'):
"""betas_for_cov returns the beta values (i.e. IRF) associated with a specific covariate.
... |
betas_for_events creates an internal self.betas_per_event_type array, of (nr_covariates x self.devonvolution_interval_size),
which holds the outcome betas per event type,in the order generated by self.covariates.keys()
def betas_for_events(self):
"""betas_for_events creates an internal self.betas_per_... |
predict_from_design_matrix predicts signals given a design matrix.
:param design_matrix: design matrix from which to predict a signal.
:type design_matrix: numpy array, (nr_samples x betas.shape)
:returns: predicted signal(s)
:rtype: numpy array (nr_signals x nr_samples... |
calculate_rsq calculates coefficient of determination, or r-squared, defined here as 1.0 - SS_res / SS_tot. rsq is only calculated for those timepoints in the data for which the design matrix is non-zero.
def calculate_rsq(self):
"""calculate_rsq calculates coefficient of determination, or r-squared, defined h... |
bootstrap_on_residuals bootstraps, by shuffling the residuals. bootstrap_on_residuals should only be used on single-channel data, as otherwise the memory load might increase too much. This uses the lstsq backend regression for a single-pass fit across repetitions. Please note that shuffling the residuals may change the... |
Global values to pass to templates
def resource_urls(request):
"""Global values to pass to templates"""
url_parsed = urlparse(settings.SEARCH_URL)
defaults = dict(
APP_NAME=__description__,
APP_VERSION=__version__,
SITE_URL=settings.SITE_URL.rstrip('/'),
SEARCH_TYPE=setting... |
Index and unindex all layers in the Django cache (Index all layers who have been checked).
def index_cached_layers(self):
"""
Index and unindex all layers in the Django cache (Index all layers who have been checked).
"""
from hypermap.aggregator.models import Layer
if SEARCH_TYPE == 'solr':
... |
Remove all checks from a service.
def remove_service_checks(self, service_id):
"""
Remove all checks from a service.
"""
from hypermap.aggregator.models import Service
service = Service.objects.get(id=service_id)
service.check_set.all().delete()
layer_to_process = service.layer_set.all()
... |
Index a service in search engine.
def index_service(self, service_id):
"""
Index a service in search engine.
"""
from hypermap.aggregator.models import Service
service = Service.objects.get(id=service_id)
if not service.is_valid:
LOGGER.debug('Not indexing service with id %s in search... |
Index a layer in the search backend.
If cache is set, append it to the list, if it isn't send the transaction right away.
cache needs memcached to be available.
def index_layer(self, layer_id, use_cache=False):
"""
Index a layer in the search backend.
If cache is set, append it to the list, if it i... |
Remove the index for layers in search backend, which are linked to an issue.
def unindex_layers_with_issues(self, use_cache=False):
"""
Remove the index for layers in search backend, which are linked to an issue.
"""
from hypermap.aggregator.models import Issue, Layer, Service
from django.contrib.c... |
Remove the index for a layer in the search backend.
If cache is set, append it to the list of removed layers, if it isn't send the transaction right away.
def unindex_layer(self, layer_id, use_cache=False):
"""
Remove the index for a layer in the search backend.
If cache is set, append it to the list o... |
Index all layers in search engine.
def index_all_layers(self):
"""
Index all layers in search engine.
"""
from hypermap.aggregator.models import Layer
if not settings.REGISTRY_SKIP_CELERY:
layers_cache = set(Layer.objects.filter(is_valid=True).values_list('id', flat=True))
deleted_... |
Update and index the last added and deleted layers (num_layers) in WorldMap service.
def update_last_wm_layers(self, service_id, num_layers=10):
"""
Update and index the last added and deleted layers (num_layers) in WorldMap service.
"""
from hypermap.aggregator.models import Service
LOGGER.debug(... |
Return OGC WKT Polygon of a simple bbox list
def bbox2wktpolygon(bbox):
"""
Return OGC WKT Polygon of a simple bbox list
"""
try:
minx = float(bbox[0])
miny = float(bbox[1])
maxx = float(bbox[2])
maxy = float(bbox[3])
except:
LOGGER.debug("Invalid bbox, set... |
Create a csw:Record XML document from harvested metadata
def create_metadata_record(**kwargs):
"""
Create a csw:Record XML document from harvested metadata
"""
if 'srs' in kwargs:
srs = kwargs['srs']
else:
srs = '4326'
modified = '%sZ' % datetime.datetime.utcnow().isoformat().... |
Convenience function to create bag of words for anytext property
def gen_anytext(*args):
"""
Convenience function to create bag of words for anytext property
"""
bag = []
for term in args:
if term is not None:
if isinstance(term, list):
for term2 in term:
... |
Update layers for an OGC:WMTS service.
Sample endpoint: http://map1.vis.earthdata.nasa.gov/wmts-geo/1.0.0/WMTSCapabilities.xml
def update_layers_wmts(service):
"""
Update layers for an OGC:WMTS service.
Sample endpoint: http://map1.vis.earthdata.nasa.gov/wmts-geo/1.0.0/WMTSCapabilities.xml
"""
... |
Update layers for a WorldMap instance.
Sample endpoint: http://localhost:8000/
def update_layers_geonode_wm(service, num_layers=None):
"""
Update layers for a WorldMap instance.
Sample endpoint: http://localhost:8000/
"""
wm_api_url = urlparse.urljoin(service.url, 'worldmap/api/2.8/layer/?forma... |
Update layers for a Warper service.
Sample endpoint: http://warp.worldmap.harvard.edu/maps
def update_layers_warper(service):
"""
Update layers for a Warper service.
Sample endpoint: http://warp.worldmap.harvard.edu/maps
"""
params = {'field': 'title', 'query': '', 'show_warped': '1', 'format':... |
Update layers for an ESRI REST MapServer.
Sample endpoint: https://gis.ngdc.noaa.gov/arcgis/rest/services/SampleWorldCities/MapServer/?f=json
def update_layers_esri_mapserver(service, greedy_opt=False):
"""
Update layers for an ESRI REST MapServer.
Sample endpoint: https://gis.ngdc.noaa.gov/arcgis/rest... |
Update layers for an ESRI REST ImageServer.
Sample endpoint: https://gis.ngdc.noaa.gov/arcgis/rest/services/bag_bathymetry/ImageServer/?f=json
def update_layers_esri_imageserver(service):
"""
Update layers for an ESRI REST ImageServer.
Sample endpoint: https://gis.ngdc.noaa.gov/arcgis/rest/services/bag... |
Used to process the lines of the endpoint list.
def endpointlist_post_save(instance, *args, **kwargs):
"""
Used to process the lines of the endpoint list.
"""
with open(instance.upload.file.name, mode='rb') as f:
lines = f.readlines()
for url in lines:
if len(url) > 255:
... |
Used to do a service full check when saving it.
def service_pre_save(instance, *args, **kwargs):
"""
Used to do a service full check when saving it.
"""
# check if service is unique
# we cannot use unique_together as it relies on a combination of fields
# from different models (service, resour... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.