
Source Code for Module s3.s3gis

   1  # -*- coding: utf-8 -*- 
   2   
   3  """ GIS Module 
   4   
   5      @requires: U{B{I{gluon}} <http://web2py.com>} 
   6      @requires: U{B{I{shapely}} <http://trac.gispython.org/lab/wiki/Shapely>} 
   7   
   8      @copyright: (c) 2010-2019 Sahana Software Foundation 
   9      @license: MIT 
  10   
  11      Permission is hereby granted, free of charge, to any person 
  12      obtaining a copy of this software and associated documentation 
  13      files (the "Software"), to deal in the Software without 
  14      restriction, including without limitation the rights to use, 
  15      copy, modify, merge, publish, distribute, sublicense, and/or sell 
  16      copies of the Software, and to permit persons to whom the 
   17      Software is furnished to do so, subject to the following conditions: 
  18   
  19      The above copyright notice and this permission notice shall be 
  20      included in all copies or substantial portions of the Software. 
  21   
  22      THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 
  23      EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 
  24      OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 
  25      NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 
  26      HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 
  27      WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 
  28      FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 
  29      OTHER DEALINGS IN THE SOFTWARE. 
  30  """ 
  31   
  32  __all__ = ("GIS", 
  33             "S3Map", 
  34             "S3ExportPOI", 
  35             "S3ImportPOI", 
  36             ) 
  37   
  38  import datetime         # Needed for Feed Refresh checks & web2py version check 
  39  import json 
  40  import os 
  41  import re 
  42  import sys 
  43  #import logging 
  44  import urllib           # Needed for urlencoding 
  45  import urllib2          # Needed for quoting & error handling on fetch 
  46   
  47  from collections import OrderedDict 
  48  try: 
  49      from cStringIO import StringIO    # Faster, where available 
   50  except ImportError: 
  51      from StringIO import StringIO 
  52   
  53  try: 
  54      from lxml import etree # Needed to follow NetworkLinks 
  55  except ImportError: 
  56      sys.stderr.write("ERROR: lxml module needed for XML handling\n") 
  57      raise 
  58   
  59  KML_NAMESPACE = "http://earth.google.com/kml/2.2" 
  60   
  61  from gluon import * 
  62  # Here are dependencies listed for reference: 
  63  #from gluon import current, redirect 
  64  #from gluon.html import * 
  65  #from gluon.http import HTTP 
  66  from gluon.fileutils import parse_version 
  67  from gluon.languages import lazyT, regex_translate 
  68  from gluon.settings import global_settings 
  69  from gluon.storage import Storage 
  70   
  71  from s3dal import Rows 
  72  from s3datetime import s3_format_datetime, s3_parse_datetime 
  73  from s3fields import s3_all_meta_field_names 
  74  from s3rest import S3Method 
  75  from s3track import S3Trackable 
  76  from s3utils import s3_include_ext, s3_include_underscore, s3_str 
  77   
  78  # Map WKT types to db types 
  79  GEOM_TYPES = {"point": 1, 
  80                "linestring": 2, 
  81                "polygon": 3, 
  82                "multipoint": 4, 
  83                "multilinestring": 5, 
  84                "multipolygon": 6, 
  85                "geometrycollection": 7, 
  86                } 
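
For illustration only (a sketch, not part of the module), the geometry-type code for a WKT string can be looked up from its type prefix:

    wkt = "POINT(30.5 50.4)"
    geom_type = GEOM_TYPES[wkt.split("(", 1)[0].strip().lower()]    # -> 1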
  87   
  88  # km 
  89  RADIUS_EARTH = 6371.01 
  90   
  91  # Compact JSON encoding 
  92  SEPARATORS = (",", ":") 
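
As a usage sketch, passing SEPARATORS to json.dumps yields the compact encoding (no whitespace after the delimiters):

    json.dumps({"lat": 6.9}, separators=SEPARATORS)    # -> '{"lat":6.9}'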
  93   
  94  # Map Defaults 
  95  # Also in static/S3/s3.gis.js 
  96  # http://dev.openlayers.org/docs/files/OpenLayers/Strategy/Cluster-js.html 
  97  CLUSTER_ATTRIBUTE = "colour" 
  98  CLUSTER_DISTANCE = 20   # pixels 
  99  CLUSTER_THRESHOLD = 2   # minimum # of features to form a cluster 
 100   
 101  # Garmin GPS Symbols 
 102  GPS_SYMBOLS = ("Airport", 
  103                 "Amusement Park", 
 104                 "Ball Park", 
 105                 "Bank", 
 106                 "Bar", 
 107                 "Beach", 
 108                 "Bell", 
 109                 "Boat Ramp", 
 110                 "Bowling", 
 111                 "Bridge", 
 112                 "Building", 
 113                 "Campground", 
 114                 "Car", 
 115                 "Car Rental", 
 116                 "Car Repair", 
 117                 "Cemetery", 
 118                 "Church", 
 119                 "Circle with X", 
 120                 "City (Capitol)", 
 121                 "City (Large)", 
 122                 "City (Medium)", 
 123                 "City (Small)", 
 124                 "Civil", 
 125                 "Contact, Dreadlocks", 
 126                 "Controlled Area", 
 127                 "Convenience Store", 
 128                 "Crossing", 
 129                 "Dam", 
 130                 "Danger Area", 
 131                 "Department Store", 
 132                 "Diver Down Flag 1", 
 133                 "Diver Down Flag 2", 
 134                 "Drinking Water", 
 135                 "Exit", 
 136                 "Fast Food", 
 137                 "Fishing Area", 
 138                 "Fitness Center", 
 139                 "Flag", 
 140                 "Forest", 
 141                 "Gas Station", 
 142                 "Geocache", 
 143                 "Geocache Found", 
 144                 "Ghost Town", 
 145                 "Glider Area", 
 146                 "Golf Course", 
 147                 "Green Diamond", 
 148                 "Green Square", 
 149                 "Heliport", 
 150                 "Horn", 
 151                 "Hunting Area", 
 152                 "Information", 
 153                 "Levee", 
 154                 "Light", 
 155                 "Live Theater", 
 156                 "Lodging", 
 157                 "Man Overboard", 
 158                 "Marina", 
 159                 "Medical Facility", 
 160                 "Mile Marker", 
 161                 "Military", 
 162                 "Mine", 
 163                 "Movie Theater", 
 164                 "Museum", 
 165                 "Navaid, Amber", 
 166                 "Navaid, Black", 
 167                 "Navaid, Blue", 
 168                 "Navaid, Green", 
 169                 "Navaid, Green/Red", 
 170                 "Navaid, Green/White", 
 171                 "Navaid, Orange", 
 172                 "Navaid, Red", 
 173                 "Navaid, Red/Green", 
 174                 "Navaid, Red/White", 
 175                 "Navaid, Violet", 
 176                 "Navaid, White", 
 177                 "Navaid, White/Green", 
 178                 "Navaid, White/Red", 
 179                 "Oil Field", 
 180                 "Parachute Area", 
 181                 "Park", 
 182                 "Parking Area", 
 183                 "Pharmacy", 
 184                 "Picnic Area", 
 185                 "Pizza", 
 186                 "Police Station", 
 187                 "Post Office", 
 188                 "Private Field", 
 189                 "Radio Beacon", 
 190                 "Red Diamond", 
 191                 "Red Square", 
 192                 "Residence", 
 193                 "Restaurant", 
 194                 "Restricted Area", 
 195                 "Restroom", 
 196                 "RV Park", 
 197                 "Scales", 
 198                 "Scenic Area", 
 199                 "School", 
 200                 "Seaplane Base", 
 201                 "Shipwreck", 
 202                 "Shopping Center", 
 203                 "Short Tower", 
 204                 "Shower", 
 205                 "Skiing Area", 
 206                 "Skull and Crossbones", 
 207                 "Soft Field", 
 208                 "Stadium", 
 209                 "Summit", 
 210                 "Swimming Area", 
 211                 "Tall Tower", 
 212                 "Telephone", 
 213                 "Toll Booth", 
 214                 "TracBack Point", 
 215                 "Trail Head", 
 216                 "Truck Stop", 
 217                 "Tunnel", 
 218                 "Ultralight Area", 
 219                 "Water Hydrant", 
 220                 "Waypoint", 
 221                 "White Buoy", 
 222                 "White Dot", 
 223                 "Zoo" 
 224                 ) 
  225   
  226  # ----------------------------------------------------------------------------- 
  227  class GIS(object): 
  228      """ 
  229          GeoSpatial functions 
  230      """ 
  231   
  232      # Used to disable location tree updates during prepopulate. 
  233      # It is not appropriate to use auth.override for this, as there are times 
  234      # (e.g. during tests) when auth.override is turned on, but location tree 
  235      # updates should still be enabled. 
  236      disable_update_location_tree = False 
  237   
  238      def __init__(self): 
  239          messages = current.messages 
  240          #messages.centroid_error = str(A("Shapely", _href="http://pypi.python.org/pypi/Shapely/", _target="_blank")) + " library not found, so can't find centroid!" 
  241          messages.centroid_error = "Shapely library not functional, so can't find centroid! Install Geos & Shapely for Line/Polygon support" 
  242          messages.unknown_type = "Unknown Type!" 
  243          messages.invalid_wkt_point = "Invalid WKT: must be like POINT(3 4)" 
  244          messages.invalid_wkt = "Invalid WKT: see http://en.wikipedia.org/wiki/Well-known_text" 
  245          messages.lon_empty = "Invalid: Longitude can't be empty if Latitude specified!" 
  246          messages.lat_empty = "Invalid: Latitude can't be empty if Longitude specified!" 
  247          messages.unknown_parent = "Invalid: %(parent_id)s is not a known Location" 
  248          self.DEFAULT_SYMBOL = "White Dot" 
  249          self.hierarchy_level_keys = ("L0", "L1", "L2", "L3", "L4", "L5") 
  250          self.hierarchy_levels = {} 
  251          self.max_allowed_level_num = 4 
  252   
  253          self.relevant_hierarchy_levels = None 
  254   
  255          #self.google_geocode_retry = True 
  256   
  257      # ------------------------------------------------------------------------- 
  258      @staticmethod 
  259      def gps_symbols(): 
  260          return GPS_SYMBOLS 
  261   
  262      # ------------------------------------------------------------------------- 
  263      def download_kml(self, record_id, filename, session_id_name, session_id): 
264 """ 265 Download a KML file: 266 - unzip it if-required 267 - follow NetworkLinks recursively if-required 268 269 Save the file to the /uploads folder 270 271 Designed to be called asynchronously using: 272 current.s3task.async("download_kml", [record_id, filename]) 273 274 @param record_id: id of the record in db.gis_layer_kml 275 @param filename: name to save the file as 276 @param session_id_name: name of the session 277 @param session_id: id of the session 278 279 @ToDo: Pass error messages to Result & have JavaScript listen for these 280 """ 281 282 table = current.s3db.gis_layer_kml 283 record = current.db(table.id == record_id).select(table.url, 284 limitby=(0, 1) 285 ).first() 286 url = record.url 287 288 filepath = os.path.join(global_settings.applications_parent, 289 current.request.folder, 290 "uploads", 291 "gis_cache", 292 filename) 293 294 warning = self.fetch_kml(url, filepath, session_id_name, session_id) 295 296 # @ToDo: Handle errors 297 #query = (cachetable.name == name) 298 if "URLError" in warning or "HTTPError" in warning: 299 # URL inaccessible 300 if os.access(filepath, os.R_OK): 301 statinfo = os.stat(filepath) 302 if statinfo.st_size: 303 # Use cached version 304 #date = db(query).select(cachetable.modified_on, 305 # limitby=(0, 1)).first().modified_on 306 #response.warning += "%s %s %s\n" % (url, 307 # T("not accessible - using cached version from"), 308 # str(date)) 309 #url = URL(c="default", f="download", 310 # args=[filename]) 311 pass 312 else: 313 # 0k file is all that is available 314 #response.warning += "%s %s\n" % (url, 315 # T("not accessible - no cached version available!")) 316 # skip layer 317 return 318 else: 319 # No cached version available 320 #response.warning += "%s %s\n" % (url, 321 # T("not accessible - no cached version available!")) 322 # skip layer 323 return 324 else: 325 # Download was succesful 326 #db(query).update(modified_on=request.utcnow) 327 if "ParseError" in warning: 328 # @ToDo Parse detail 329 #response.warning += "%s: %s %s\n" % (T("Layer"), 330 # name, 331 # T("couldn't be parsed so NetworkLinks not followed.")) 332 pass 333 if "GroundOverlay" in warning or "ScreenOverlay" in warning: 334 #response.warning += "%s: %s %s\n" % (T("Layer"), 335 # name, 336 # T("includes a GroundOverlay or ScreenOverlay which aren't supported in OpenLayers yet, so it may not work properly.")) 337 # Code to support GroundOverlay: 338 # https://github.com/openlayers/openlayers/pull/759 339 pass
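
A sketch of how this download might be queued, following the async pattern in the docstring above; record_id and filename are placeholders, and the session arguments are assumptions based on the method signature:

    current.s3task.async("download_kml",
                         [record_id, filename,
                          current.response.session_id_name,
                          current.response.session_id,
                          ])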
  340   
  341      # ------------------------------------------------------------------------- 
  342      def fetch_kml(self, url, filepath, session_id_name, session_id): 
343 """ 344 Fetch a KML file: 345 - unzip it if-required 346 - follow NetworkLinks recursively if-required 347 348 Returns a file object 349 350 Designed as a helper function for download_kml() 351 """ 352 353 from gluon.tools import fetch 354 355 response = current.response 356 public_url = current.deployment_settings.get_base_public_url() 357 358 warning = "" 359 360 local = False 361 if not url.startswith("http"): 362 local = True 363 url = "%s%s" % (public_url, url) 364 elif len(url) > len(public_url) and url[:len(public_url)] == public_url: 365 local = True 366 if local: 367 # Keep Session for local URLs 368 import Cookie 369 cookie = Cookie.SimpleCookie() 370 cookie[session_id_name] = session_id 371 # For sync connections 372 current.session._unlock(response) 373 try: 374 file = fetch(url, cookie=cookie) 375 except urllib2.URLError: 376 warning = "URLError" 377 return warning 378 except urllib2.HTTPError: 379 warning = "HTTPError" 380 return warning 381 else: 382 try: 383 file = fetch(url) 384 except urllib2.URLError: 385 warning = "URLError" 386 return warning 387 except urllib2.HTTPError: 388 warning = "HTTPError" 389 return warning 390 391 filenames = [] 392 if file[:2] == "PK": 393 # Unzip 394 fp = StringIO(file) 395 import zipfile 396 myfile = zipfile.ZipFile(fp) 397 files = myfile.infolist() 398 main = None 399 candidates = [] 400 for _file in files: 401 filename = _file.filename 402 if filename == "doc.kml": 403 main = filename 404 elif filename[-4:] == ".kml": 405 candidates.append(filename) 406 if not main: 407 if candidates: 408 # Any better way than this to guess which KML file is the main one? 409 main = candidates[0] 410 else: 411 response.error = "KMZ contains no KML Files!" 412 return "" 413 # Write files to cache (other than the main one) 414 request = current.request 415 path = os.path.join(request.folder, "static", "cache", "kml") 416 if not os.path.exists(path): 417 os.makedirs(path) 418 for _file in files: 419 filename = _file.filename 420 if filename != main: 421 if "/" in filename: 422 _filename = filename.split("/") 423 dir = os.path.join(path, _filename[0]) 424 if not os.path.exists(dir): 425 os.mkdir(dir) 426 _filepath = os.path.join(path, *_filename) 427 else: 428 _filepath = os.path.join(path, filename) 429 430 try: 431 f = open(_filepath, "wb") 432 except: 433 # Trying to write the Folder 434 pass 435 else: 436 filenames.append(filename) 437 __file = myfile.read(filename) 438 f.write(__file) 439 f.close() 440 441 # Now read the main one (to parse) 442 file = myfile.read(main) 443 myfile.close() 444 445 # Check for NetworkLink 446 if "<NetworkLink>" in file: 447 try: 448 # Remove extraneous whitespace 449 parser = etree.XMLParser(recover=True, remove_blank_text=True) 450 tree = etree.XML(file, parser) 451 # Find contents of href tag (must be a better way?) 
452 url = "" 453 for element in tree.iter(): 454 if element.tag == "{%s}href" % KML_NAMESPACE: 455 url = element.text 456 if url: 457 # Follow NetworkLink (synchronously) 458 warning2 = self.fetch_kml(url, filepath) 459 warning += warning2 460 except (etree.XMLSyntaxError,): 461 e = sys.exc_info()[1] 462 warning += "<ParseError>%s %s</ParseError>" % (e.line, e.errormsg) 463 464 # Check for Overlays 465 if "<GroundOverlay>" in file: 466 warning += "GroundOverlay" 467 if "<ScreenOverlay>" in file: 468 warning += "ScreenOverlay" 469 470 for filename in filenames: 471 replace = "%s/%s" % (URL(c="static", f="cache", args=["kml"]), 472 filename) 473 # Rewrite all references to point to the correct place 474 # need to catch <Icon><href> (which could be done via lxml) 475 # & also <description><![CDATA[<img src=" (which can't) 476 file = file.replace(filename, replace) 477 478 # Write main file to cache 479 f = open(filepath, "w") 480 f.write(file) 481 f.close() 482 483 return warning
  484   
  485      # ------------------------------------------------------------------------- 
  486      @staticmethod 
  487      def geocode(address, postcode=None, Lx_ids=None, geocoder=None): 
488 """ 489 Geocode an Address 490 - used by S3LocationSelector 491 settings.get_gis_geocode_imported_addresses 492 493 @param address: street address 494 @param postcode: postcode 495 @param Lx_ids: list of ancestor IDs 496 @param geocoder: which geocoder service to use 497 """ 498 499 try: 500 from geopy import geocoders 501 except ImportError: 502 current.log.error("S3GIS unresolved dependency: geopy required for Geocoder support") 503 return "S3GIS unresolved dependency: geopy required for Geocoder support" 504 505 settings = current.deployment_settings 506 if geocoder is None: 507 geocoder = settings.get_gis_geocode_service() 508 509 if geocoder == "nominatim": 510 g = geocoders.Nominatim(user_agent = "Sahana Eden") 511 geocode_ = lambda names, g=g, **kwargs: g.geocode(names, **kwargs) 512 elif geocoder == "google": 513 api_key = settings.get_gis_api_google() 514 if not api_key: 515 current.log.error("Geocoder: No API Key") 516 return "No API Key" 517 g = geocoders.GoogleV3(api_key = api_key) 518 #if current.gis.google_geocode_retry: 519 # # Retry when reaching maximum requests per second 520 # import time 521 # from geopy.geocoders.googlev3 import GTooManyQueriesError 522 # def geocode_(names, g=g, **kwargs): 523 # attempts = 0 524 # while attempts < 3: 525 # try: 526 # result = g.geocode(names, **kwargs) 527 # except GTooManyQueriesError: 528 # if attempts == 2: 529 # # Daily limit reached 530 # current.gis.google_geocode_retry = False 531 # raise 532 # time.sleep(1) 533 # else: 534 # break 535 # attempts += 1 536 # return result 537 #else: 538 geocode_ = lambda names, g=g, **kwargs: g.geocode(names, **kwargs) 539 else: 540 # @ToDo 541 raise NotImplementedError 542 543 location = address 544 if postcode: 545 location = "%s,%s" % (location, postcode) 546 547 Lx = L5 = L4 = L3 = L2 = L1 = L0 = None 548 if Lx_ids: 549 # Convert Lx IDs to Names 550 table = current.s3db.gis_location 551 limit = len(Lx_ids) 552 if limit > 1: 553 query = (table.id.belongs(Lx_ids)) 554 else: 555 query = (table.id == Lx_ids[0]) 556 db = current.db 557 Lx = db(query).select(table.id, 558 table.name, 559 table.level, 560 table.gis_feature_type, 561 # Better as separate query 562 #table.lon_min, 563 #table.lat_min, 564 #table.lon_max, 565 #table.lat_max, 566 # Better as separate query 567 #table.wkt, 568 limitby=(0, limit), 569 orderby=~table.level 570 ) 571 if Lx: 572 Lx_names = ",".join([l.name for l in Lx]) 573 location = "%s,%s" % (location, Lx_names) 574 for l in Lx: 575 if l.level == "L0": 576 L0 = l.id 577 continue 578 elif l.level == "L1": 579 L1 = l.id 580 continue 581 elif l.level == "L2": 582 L2 = l.id 583 continue 584 elif l.level == "L3": 585 L3 = l.id 586 continue 587 elif l.level == "L4": 588 L4 = l.id 589 continue 590 elif l.level == "L5": 591 L5 = l.id 592 Lx = Lx.as_dict() 593 594 try: 595 results = geocode_(location, exactly_one=False) 596 except: 597 error = sys.exc_info()[1] 598 output = str(error) 599 else: 600 if results is None: 601 output = "No results found" 602 elif len(results) > 1: 603 output = "Multiple results found" 604 # @ToDo: Iterate through the results to see if just 1 is within the right bounds 605 else: 606 place, (lat, lon) = results[0] 607 if Lx: 608 output = None 609 # Check Results are for a specific address & not just that for the City 610 results = geocode_(Lx_names, exactly_one=False) 611 if not results: 612 output = "Can't check that these results are specific enough" 613 for result in results: 614 place2, (lat2, lon2) = result 615 if place == place2: 616 output = 
"We can only geocode to the Lx" 617 break 618 if not output: 619 # Check Results are within relevant bounds 620 L0_row = None 621 wkt = None 622 if L5 and Lx[L5]["gis_feature_type"] != 1: 623 wkt = db(table.id == L5).select(table.wkt, 624 limitby=(0, 1) 625 ).first().wkt 626 used_Lx = L5 627 elif L4 and Lx[L4]["gis_feature_type"] != 1: 628 wkt = db(table.id == L4).select(table.wkt, 629 limitby=(0, 1) 630 ).first().wkt 631 used_Lx = L4 632 elif L3 and Lx[L3]["gis_feature_type"] != 1: 633 wkt = db(table.id == L3).select(table.wkt, 634 limitby=(0, 1) 635 ).first().wkt 636 used_Lx = L3 637 elif L2 and Lx[L2]["gis_feature_type"] != 1: 638 wkt = db(table.id == L2).select(table.wkt, 639 limitby=(0, 1) 640 ).first().wkt 641 used_Lx = L2 642 elif L1 and Lx[L1]["gis_feature_type"] != 1: 643 wkt = db(table.id == L1).select(table.wkt, 644 limitby=(0, 1) 645 ).first().wkt 646 used_Lx = L1 647 elif L0: 648 L0_row = db(table.id == L0).select(table.wkt, 649 table.lon_min, 650 table.lat_min, 651 table.lon_max, 652 table.lat_max, 653 limitby=(0, 1) 654 ).first() 655 if not L0_row.wkt.startswith("POI"): # Point 656 wkt = L0_row.wkt 657 used_Lx = L0 658 if wkt: 659 from shapely.geometry import point 660 from shapely.wkt import loads as wkt_loads 661 try: 662 # Enable C-based speedups available from 1.2.10+ 663 from shapely import speedups 664 speedups.enable() 665 except: 666 current.log.info("S3GIS", 667 "Upgrade Shapely for Performance enhancements") 668 test = point.Point(lon, lat) 669 shape = wkt_loads(wkt) 670 ok = test.intersects(shape) 671 if not ok: 672 output = "Returned value not within %s" % Lx[used_Lx]["name"] 673 elif L0: 674 # Check within country at least 675 if not L0_row: 676 L0_row = db(table.id == L0).select(table.lon_min, 677 table.lat_min, 678 table.lon_max, 679 table.lat_max, 680 limitby=(0, 1) 681 ).first() 682 if lat < L0_row["lat_max"] and \ 683 lat > L0_row["lat_min"] and \ 684 lon < L0_row["lon_max"] and \ 685 lon > L0_row["lon_min"]: 686 ok = True 687 else: 688 ok = False 689 output = "Returned value not within %s" % Lx["name"] 690 else: 691 # We'll just have to trust it! 692 ok = True 693 if ok: 694 output = {"lat": lat, 695 "lon": lon, 696 } 697 else: 698 # We'll just have to trust it! 699 output = {"lat": lat, 700 "lon": lon, 701 } 702 703 return output
  704   
  705      # ------------------------------------------------------------------------- 
  706      @staticmethod 
  707      def geocode_r(lat, lon): 
708 """ 709 Reverse Geocode a Lat/Lon 710 - used by S3LocationSelector 711 """ 712 713 if not lat or not lon: 714 return "Need Lat & Lon" 715 716 results = "" 717 # Check vaguely valid 718 try: 719 lat = float(lat) 720 except ValueError: 721 results = "Latitude is Invalid!" 722 try: 723 lon = float(lon) 724 except ValueError: 725 results += "Longitude is Invalid!" 726 727 if not results: 728 if lon > 180 or lon < -180: 729 results = "Longitude must be between -180 & 180!" 730 elif lat > 90 or lat < -90: 731 results = "Latitude must be between -90 & 90!" 732 else: 733 table = current.s3db.gis_location 734 query = (table.level != None) & \ 735 (table.deleted != True) 736 if current.deployment_settings.get_gis_spatialdb(): 737 point = "POINT(%s %s)" % (lon, lat) 738 query &= (table.the_geom.st_intersects(point)) 739 rows = current.db(query).select(table.id, 740 table.level, 741 ) 742 results = {} 743 for row in rows: 744 results[row.level] = row.id 745 else: 746 # Oh dear, this is going to be slow :/ 747 # Filter to the BBOX initially 748 query &= (table.lat_min < lat) & \ 749 (table.lat_max > lat) & \ 750 (table.lon_min < lon) & \ 751 (table.lon_max > lon) 752 rows = current.db(query).select(table.id, 753 table.level, 754 table.wkt, 755 ) 756 from shapely.geometry import point 757 from shapely.wkt import loads as wkt_loads 758 test = point.Point(lon, lat) 759 results = {} 760 for row in rows: 761 shape = wkt_loads(row.wkt) 762 ok = test.intersects(shape) 763 if ok: 764 #sys.stderr.write("Level: %s, id: %s\n" % (row.level, row.id)) 765 results[row.level] = row.id 766 return results
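
A usage sketch: on success the method returns a dict keyed by hierarchy level, otherwise a validation message:

    levels = GIS.geocode_r(6.93, 79.85)
    if isinstance(levels, dict):
        country_id = levels.get("L0")
    else:
        current.log.warning(levels)    # e.g. "Latitude is Invalid!"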
  767   
  768      # ------------------------------------------------------------------------- 
  769      @staticmethod 
  770      def get_bearing(lat_start, lon_start, lat_end, lon_end): 
771 """ 772 Given a Start & End set of Coordinates, return a Bearing 773 Formula from: http://www.movable-type.co.uk/scripts/latlong.html 774 """ 775 776 import math 777 778 # shortcuts 779 cos = math.cos 780 sin = math.sin 781 782 delta_lon = lon_start - lon_end 783 bearing = math.atan2(sin(delta_lon) * cos(lat_end), 784 (cos(lat_start) * sin(lat_end)) - \ 785 (sin(lat_start) * cos(lat_end) * cos(delta_lon)) 786 ) 787 # Convert to a compass bearing 788 bearing = (bearing + 360) % 360 789 790 return bearing
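
For reference, a self-contained sketch of the movable-type bearing formula for inputs in decimal degrees, with explicit conversion to and from radians (an assumption about the intended units; the method above applies the trigonometry to its arguments as given):

    import math

    def bearing_degrees(lat_start, lon_start, lat_end, lon_end):
        # All inputs in decimal degrees; result is a compass bearing 0-360
        lat1, lat2 = math.radians(lat_start), math.radians(lat_end)
        delta_lon = math.radians(lon_end - lon_start)
        x = math.sin(delta_lon) * math.cos(lat2)
        y = math.cos(lat1) * math.sin(lat2) - \
            math.sin(lat1) * math.cos(lat2) * math.cos(delta_lon)
        return (math.degrees(math.atan2(x, y)) + 360) % 360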
  791   
  792      # ------------------------------------------------------------------------- 
  793      def get_bounds(self, 
  794                     features = None, 
  795                     bbox_min_size = None, 
  796                     bbox_inset = None): 
797 """ 798 Calculate the Bounds of a list of Point Features, suitable for 799 setting map bounds. If no features are supplied, the current map 800 configuration bounds will be returned. 801 e.g. When a map is displayed that focuses on a collection of points, 802 the map is zoomed to show just the region bounding the points. 803 e.g. To use in GPX export for correct zooming 804 ` 805 Ensure a minimum size of bounding box, and that the points 806 are inset from the border. 807 808 @param features: A list of point features 809 @param bbox_min_size: Minimum bounding box - gives a minimum width 810 and height in degrees for the region shown. 811 Without this, a map showing a single point would not show any 812 extent around that point. 813 @param bbox_inset: Bounding box insets - adds a small amount of 814 distance outside the points. 815 Without this, the outermost points would be on the bounding 816 box, and might not be visible. 817 @return: An appropriate map bounding box, as a dict: 818 dict(lon_min=lon_min, lat_min=lat_min, 819 lon_max=lon_max, lat_max=lat_max) 820 821 @ToDo: Support Polygons (separate function?) 822 """ 823 824 if features: 825 826 lon_min = 180 827 lat_min = 90 828 lon_max = -180 829 lat_max = -90 830 831 # Is this a simple feature set or the result of a join? 832 try: 833 lon = features[0].lon 834 simple = True 835 except (AttributeError, KeyError): 836 simple = False 837 838 # @ToDo: Optimised Geospatial routines rather than this crude hack 839 for feature in features: 840 841 try: 842 if simple: 843 lon = feature.lon 844 lat = feature.lat 845 else: 846 # A Join 847 lon = feature.gis_location.lon 848 lat = feature.gis_location.lat 849 except AttributeError: 850 # Skip any rows without the necessary lat/lon fields 851 continue 852 853 # Also skip those set to None. Note must use explicit test, 854 # as zero is a legal value. 855 if lon is None or lat is None: 856 continue 857 858 lon_min = min(lon, lon_min) 859 lat_min = min(lat, lat_min) 860 lon_max = max(lon, lon_max) 861 lat_max = max(lat, lat_max) 862 863 # Assure a reasonable-sized box. 864 settings = current.deployment_settings 865 bbox_min_size = bbox_min_size or settings.get_gis_bbox_inset() 866 delta_lon = (bbox_min_size - (lon_max - lon_min)) / 2.0 867 if delta_lon > 0: 868 lon_min -= delta_lon 869 lon_max += delta_lon 870 delta_lat = (bbox_min_size - (lat_max - lat_min)) / 2.0 871 if delta_lat > 0: 872 lat_min -= delta_lat 873 lat_max += delta_lat 874 875 # Move bounds outward by specified inset. 876 bbox_inset = bbox_inset or settings.get_gis_bbox_inset() 877 lon_min -= bbox_inset 878 lon_max += bbox_inset 879 lat_min -= bbox_inset 880 lat_max += bbox_inset 881 882 else: 883 # no features 884 config = GIS.get_config() 885 if config.lat_min is not None: 886 lat_min = config.lat_min 887 else: 888 lat_min = -90 889 if config.lon_min is not None: 890 lon_min = config.lon_min 891 else: 892 lon_min = -180 893 if config.lat_max is not None: 894 lat_max = config.lat_max 895 else: 896 lat_max = 90 897 if config.lon_max is not None: 898 lon_max = config.lon_max 899 else: 900 lon_max = 180 901 902 return {"lon_min": lon_min, 903 "lat_min": lat_min, 904 "lon_max": lon_max, 905 "lat_max": lat_max, 906 }
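
A usage sketch with two point features (values are illustrative):

    features = [Storage(lat=6.91, lon=79.86),
                Storage(lat=6.93, lon=79.85)]
    bounds = current.gis.get_bounds(features = features,
                                    bbox_min_size = 0.05,
                                    bbox_inset = 0.007)
    # -> {"lon_min": ..., "lat_min": ..., "lon_max": ..., "lat_max": ...}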
  907   
  908      # ------------------------------------------------------------------------- 
  909      def get_parent_bounds(self, parent=None): 
910 """ 911 Get bounds from the specified (parent) location and its ancestors. 912 This is used to validate lat, lon, and bounds for child locations. 913 914 Caution: This calls update_location_tree if the parent bounds are 915 not set. During prepopulate, update_location_tree is disabled, 916 so unless the parent contains its own bounds (i.e. they do not need 917 to be propagated down from its ancestors), this will not provide a 918 check on location nesting. Prepopulate data should be prepared to 919 be correct. A set of candidate prepopulate data can be tested by 920 importing after prepopulate is run. 921 922 @param parent: A location_id to provide bounds suitable 923 for validating child locations 924 @return: bounding box and parent location name, as a list: 925 [lat_min, lon_min, lat_max, lon_max, parent_name] 926 927 @ToDo: Support Polygons (separate function?) 928 """ 929 930 table = current.s3db.gis_location 931 db = current.db 932 parent = db(table.id == parent).select(table.id, 933 table.level, 934 table.name, 935 table.parent, 936 table.path, 937 table.lon, 938 table.lat, 939 table.lon_min, 940 table.lat_min, 941 table.lon_max, 942 table.lat_max).first() 943 if parent.lon_min is None or \ 944 parent.lon_max is None or \ 945 parent.lat_min is None or \ 946 parent.lat_max is None or \ 947 parent.lon_min == parent.lon_max or \ 948 parent.lat_min == parent.lat_max: 949 # This is unsuitable - try higher parent 950 if parent.level == "L1": 951 if parent.parent: 952 # We can trust that L0 should have the data from prepop 953 L0 = db(table.id == parent.parent).select(table.name, 954 table.lon_min, 955 table.lat_min, 956 table.lon_max, 957 table.lat_max).first() 958 return L0.lat_min, L0.lon_min, L0.lat_max, L0.lon_max, L0.name 959 if parent.path: 960 path = parent.path 961 else: 962 # This will return None during prepopulate. 963 path = GIS.update_location_tree({"id": parent.id, 964 "level": parent.level, 965 }) 966 if path: 967 path_list = map(int, path.split("/")) 968 rows = db(table.id.belongs(path_list)).select(table.level, 969 table.name, 970 table.lat, 971 table.lon, 972 table.lon_min, 973 table.lat_min, 974 table.lon_max, 975 table.lat_max, 976 orderby=table.level) 977 row_list = rows.as_list() 978 row_list.reverse() 979 ok = False 980 for row in row_list: 981 if row["lon_min"] is not None and row["lon_max"] is not None and \ 982 row["lat_min"] is not None and row["lat_max"] is not None and \ 983 row["lon"] != row["lon_min"] != row["lon_max"] and \ 984 row["lat"] != row["lat_min"] != row["lat_max"]: 985 ok = True 986 break 987 988 if ok: 989 # This level is suitable 990 return row["lat_min"], row["lon_min"], row["lat_max"], row["lon_max"], row["name"] 991 992 else: 993 # This level is suitable 994 return parent.lat_min, parent.lon_min, parent.lat_max, parent.lon_max, parent.name 995 996 # No ancestor bounds available -- use the active gis_config. 997 config = GIS.get_config() 998 if config: 999 return config.lat_min, config.lon_min, config.lat_max, config.lon_max, None 1000 1001 # Last resort -- fall back to no restriction. 1002 return -90, -180, 90, 180, None
 1003   
 1004      # ------------------------------------------------------------------------- 
 1005      @staticmethod 
 1006      def _lookup_parent_path(feature_id): 
1007 """ 1008 Helper that gets parent and path for a location. 1009 """ 1010 1011 db = current.db 1012 table = db.gis_location 1013 feature = db(table.id == feature_id).select(table.id, 1014 table.name, 1015 table.level, 1016 table.path, 1017 table.parent, 1018 limitby=(0, 1)).first() 1019 1020 return feature
 1021   
 1022      # ------------------------------------------------------------------------- 
 1023      @staticmethod 
 1024      def get_children(id, level=None): 
1025 """ 1026 Return a list of IDs of all GIS Features which are children of 1027 the requested feature, using Materialized path for retrieving 1028 the children 1029 1030 This has been chosen over Modified Preorder Tree Traversal for 1031 greater efficiency: 1032 http://eden.sahanafoundation.org/wiki/HaitiGISToDo#HierarchicalTrees 1033 1034 @param: level - optionally filter by level 1035 1036 @return: Rows object containing IDs & Names 1037 Note: This does NOT include the parent location itself 1038 """ 1039 1040 db = current.db 1041 try: 1042 table = db.gis_location 1043 except: 1044 # Being run from CLI for debugging 1045 table = current.s3db.gis_location 1046 query = (table.deleted == False) 1047 if level: 1048 query &= (table.level == level) 1049 term = str(id) 1050 path = table.path 1051 query &= ((path.like(term + "/%")) | \ 1052 (path.like("%/" + term + "/%"))) 1053 children = db(query).select(table.id, 1054 table.name) 1055 return children
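
A usage sketch (the id is illustrative), e.g. to list the L2 descendants of an L1 location:

    children = current.gis.get_children(L1_id, level="L2")
    child_names = [(child.id, child.name) for child in children]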
 1056   
 1057      # ------------------------------------------------------------------------- 
 1058      @staticmethod 
 1059      def get_parents(feature_id, feature=None, ids_only=False): 
1060 """ 1061 Returns a list containing ancestors of the requested feature. 1062 1063 If the caller already has the location row, including path and 1064 parent fields, they can supply it via feature to avoid a db lookup. 1065 1066 If ids_only is false, each element in the list is a gluon.sql.Row 1067 containing the gis_location record of an ancestor of the specified 1068 location. 1069 1070 If ids_only is true, just returns a list of ids of the parents. 1071 This avoids a db lookup for the parents if the specified feature 1072 has a path. 1073 1074 List elements are in the opposite order as the location path and 1075 exclude the specified location itself, i.e. element 0 is the parent 1076 and the last element is the most distant ancestor. 1077 1078 Assists lazy update of a database without location paths by calling 1079 update_location_tree to get the path. 1080 1081 Note that during prepopulate, update_location_tree is disabled, 1082 in which case this will only return the immediate parent. 1083 """ 1084 1085 if not feature or "path" not in feature or "parent" not in feature: 1086 feature = GIS._lookup_parent_path(feature_id) 1087 1088 if feature and (feature.path or feature.parent): 1089 if feature.path: 1090 path = feature.path 1091 else: 1092 path = GIS.update_location_tree(feature) 1093 1094 if path: 1095 path_list = map(int, path.split("/")) 1096 if len(path_list) == 1: 1097 # No parents - path contains only this feature. 1098 return None 1099 # Get only ancestors 1100 path_list = path_list[:-1] 1101 # Get path in the desired -- reversed -- order. 1102 path_list.reverse() 1103 elif feature.parent: 1104 path_list = [feature.parent] 1105 else: 1106 return None 1107 1108 # If only ids are wanted, stop here. 1109 if ids_only: 1110 return path_list 1111 1112 # Retrieve parents - order in which they're returned is arbitrary. 1113 s3db = current.s3db 1114 table = s3db.gis_location 1115 query = (table.id.belongs(path_list)) 1116 fields = [table.id, table.name, table.level, table.lat, table.lon] 1117 unordered_parents = current.db(query).select(cache=s3db.cache, 1118 *fields) 1119 1120 # Reorder parents in order of reversed path. 1121 unordered_ids = [row.id for row in unordered_parents] 1122 parents = [unordered_parents[unordered_ids.index(path_id)] 1123 for path_id in path_list if path_id in unordered_ids] 1124 1125 return parents 1126 1127 else: 1128 return None
 1129   
 1130      # ------------------------------------------------------------------------- 
 1131      def get_parent_per_level(self, results, feature_id, 
 1132                               feature=None, 
 1133                               ids=True, 
 1134                               names=True): 
1135 """ 1136 Adds ancestor of requested feature for each level to supplied dict. 1137 1138 If the caller already has the location row, including path and 1139 parent fields, they can supply it via feature to avoid a db lookup. 1140 1141 If a dict is not supplied in results, one is created. The results 1142 dict is returned in either case. 1143 1144 If ids=True and names=False (used by old S3LocationSelectorWidget): 1145 For each ancestor, an entry is added to results, like 1146 ancestor.level : ancestor.id 1147 1148 If ids=False and names=True (used by address_onvalidation): 1149 For each ancestor, an entry is added to results, like 1150 ancestor.level : ancestor.name 1151 1152 If ids=True and names=True (used by new S3LocationSelectorWidget): 1153 For each ancestor, an entry is added to results, like 1154 ancestor.level : {name : ancestor.name, id: ancestor.id} 1155 """ 1156 1157 if not results: 1158 results = {} 1159 1160 _id = feature_id 1161 # if we don't have a feature or a feature ID return the dict as-is 1162 if not feature_id and not feature: 1163 return results 1164 if not feature_id and "path" not in feature and "parent" in feature: 1165 # gis_location_onvalidation on a Create => no ID yet 1166 # Read the Parent's path instead 1167 feature = self._lookup_parent_path(feature.parent) 1168 _id = feature.id 1169 elif not feature or "path" not in feature or "parent" not in feature: 1170 feature = self._lookup_parent_path(feature_id) 1171 1172 if feature and (feature.path or feature.parent): 1173 if feature.path: 1174 path = feature.path 1175 else: 1176 path = self.update_location_tree(feature) 1177 1178 # Get ids of ancestors at each level. 1179 if feature.parent: 1180 strict = self.get_strict_hierarchy(feature.parent) 1181 else: 1182 strict = self.get_strict_hierarchy(_id) 1183 if path and strict and not names: 1184 # No need to do a db lookup for parents in this case -- we 1185 # know the levels of the parents from their position in path. 1186 # Note ids returned from db are ints, not strings, so be 1187 # consistent with that. 1188 path_ids = map(int, path.split("/")) 1189 # This skips the last path element, which is the supplied 1190 # location. 1191 for (i, _id) in enumerate(path_ids[:-1]): 1192 results["L%i" % i] = _id 1193 elif path: 1194 ancestors = self.get_parents(_id, feature=feature) 1195 if ancestors: 1196 for ancestor in ancestors: 1197 if ancestor.level and ancestor.level in self.hierarchy_level_keys: 1198 if names and ids: 1199 results[ancestor.level] = Storage() 1200 results[ancestor.level].name = ancestor.name 1201 results[ancestor.level].id = ancestor.id 1202 elif names: 1203 results[ancestor.level] = ancestor.name 1204 else: 1205 results[ancestor.level] = ancestor.id 1206 if not feature_id: 1207 # Add the Parent in (we only need the version required for gis_location onvalidation here) 1208 results[feature.level] = feature.name 1209 if names: 1210 # We need to have entries for all levels 1211 # (both for address onvalidation & new LocationSelector) 1212 hierarchy_level_keys = self.hierarchy_level_keys 1213 for key in hierarchy_level_keys: 1214 if not results.has_key(key): 1215 results[key] = None 1216 1217 return results
 1218   
 1219      # ------------------------------------------------------------------------- 
 1220      def update_table_hierarchy_labels(self, tablename=None): 
1221 """ 1222 Re-set table options that depend on location_hierarchy 1223 1224 Only update tables which are already defined 1225 """ 1226 1227 levels = ("L1", "L2", "L3", "L4", "L5") 1228 labels = self.get_location_hierarchy() 1229 1230 db = current.db 1231 if tablename and tablename in db: 1232 # Update the specific table which has just been defined 1233 table = db[tablename] 1234 if tablename == "gis_location": 1235 labels["L0"] = current.messages.COUNTRY 1236 table.level.requires = \ 1237 IS_EMPTY_OR(IS_IN_SET(labels)) 1238 else: 1239 for level in levels: 1240 table[level].label = labels[level] 1241 else: 1242 # Do all Tables which are already defined 1243 1244 # gis_location 1245 if "gis_location" in db: 1246 table = db.gis_location 1247 table.level.requires = \ 1248 IS_EMPTY_OR(IS_IN_SET(labels)) 1249 1250 # These tables store location hierarchy info for XSLT export. 1251 # Labels are used for PDF & XLS Reports 1252 tables = ["org_office", 1253 #"pr_person", 1254 "pr_address", 1255 "cr_shelter", 1256 "asset_asset", 1257 #"hms_hospital", 1258 ] 1259 1260 for tablename in tables: 1261 if tablename in db: 1262 table = db[tablename] 1263 for level in levels: 1264 table[level].label = labels[level]
 1265   
 1266      # ------------------------------------------------------------------------- 
 1267      @staticmethod 
 1268      def set_config(config_id=None, force_update_cache=False): 
1269 """ 1270 Reads the specified GIS config from the DB, caches it in response. 1271 1272 Passing in a false or non-existent id will cause the personal config, 1273 if any, to be used, else the site config (uuid SITE_DEFAULT), else 1274 their fallback values defined in this class. 1275 1276 If force_update_cache is true, the config will be read and cached in 1277 response even if the specified config is the same as what's already 1278 cached. Used when the config was just written. 1279 1280 The config itself will be available in response.s3.gis.config. 1281 Scalar fields from the gis_config record and its linked 1282 gis_projection record have the same names as the fields in their 1283 tables and can be accessed as response.s3.gis.<fieldname>. 1284 1285 Returns the id of the config it actually used, if any. 1286 1287 @param: config_id. use '0' to set the SITE_DEFAULT 1288 1289 @ToDo: Merge configs for Event 1290 """ 1291 1292 _gis = current.response.s3.gis 1293 1294 # If an id has been supplied, try it first. If it matches what's in 1295 # response, there's no work to do. 1296 if config_id and not force_update_cache and \ 1297 _gis.config and \ 1298 _gis.config.id == config_id: 1299 return 1300 1301 db = current.db 1302 s3db = current.s3db 1303 ctable = s3db.gis_config 1304 mtable = s3db.gis_marker 1305 ptable = s3db.gis_projection 1306 stable = s3db.gis_style 1307 fields = (ctable.id, 1308 ctable.default_location_id, 1309 ctable.region_location_id, 1310 ctable.geocoder, 1311 ctable.lat_min, 1312 ctable.lat_max, 1313 ctable.lon_min, 1314 ctable.lon_max, 1315 ctable.zoom, 1316 ctable.lat, 1317 ctable.lon, 1318 ctable.pe_id, 1319 ctable.wmsbrowser_url, 1320 ctable.wmsbrowser_name, 1321 ctable.zoom_levels, 1322 ctable.merge, 1323 mtable.image, 1324 mtable.height, 1325 mtable.width, 1326 ptable.epsg, 1327 ptable.proj4js, 1328 ptable.maxExtent, 1329 ptable.units, 1330 ) 1331 1332 cache = Storage() 1333 row = None 1334 rows = None 1335 if config_id: 1336 # Merge this one with the Site Default 1337 query = (ctable.id == config_id) | \ 1338 (ctable.uuid == "SITE_DEFAULT") 1339 # May well not be complete, so Left Join 1340 left = (ptable.on(ptable.id == ctable.projection_id), 1341 stable.on((stable.config_id == ctable.id) & \ 1342 (stable.layer_id == None)), 1343 mtable.on(mtable.id == stable.marker_id), 1344 ) 1345 rows = db(query).select(*fields, 1346 left=left, 1347 orderby=ctable.pe_type, 1348 limitby=(0, 2)) 1349 if len(rows) == 1: 1350 # The requested config must be invalid, so just use site default 1351 row = rows.first() 1352 1353 elif config_id is 0: 1354 # Use site default 1355 query = (ctable.uuid == "SITE_DEFAULT") 1356 # May well not be complete, so Left Join 1357 left = (ptable.on(ptable.id == ctable.projection_id), 1358 stable.on((stable.config_id == ctable.id) & \ 1359 (stable.layer_id == None)), 1360 mtable.on(mtable.id == stable.marker_id), 1361 ) 1362 row = db(query).select(*fields, 1363 left=left, 1364 limitby=(0, 1)).first() 1365 if not row: 1366 # No configs found at all 1367 _gis.config = cache 1368 return cache 1369 1370 # If no id supplied, extend the site config with any personal or OU configs 1371 if not rows and not row: 1372 auth = current.auth 1373 if auth.is_logged_in(): 1374 # Read personalised config, if available. 
1375 user = auth.user 1376 pe_id = user.get("pe_id") 1377 if pe_id: 1378 # Also look for OU configs 1379 pes = [] 1380 if user.organisation_id: 1381 # Add the user account's Org to the list 1382 # (Will take lower-priority than Personal) 1383 otable = s3db.org_organisation 1384 org = db(otable.id == user.organisation_id).select(otable.pe_id, 1385 limitby=(0, 1) 1386 ).first() 1387 try: 1388 pes.append(org.pe_id) 1389 except: 1390 current.log.warning("Unable to find Org %s" % user.organisation_id) 1391 if current.deployment_settings.get_org_branches(): 1392 # Also look for Parent Orgs 1393 ancestors = s3db.pr_get_ancestors(org.pe_id) 1394 pes += ancestors 1395 1396 if user.site_id: 1397 # Add the user account's Site to the list 1398 # (Will take lower-priority than Org/Personal) 1399 site_pe_id = s3db.pr_get_pe_id("org_site", user.site_id) 1400 if site_pe_id: 1401 pes.append(site_pe_id) 1402 1403 if user.org_group_id: 1404 # Add the user account's Org Group to the list 1405 # (Will take lower-priority than Site/Org/Personal) 1406 ogtable = s3db.org_group 1407 ogroup = db(ogtable.id == user.org_group_id).select(ogtable.pe_id, 1408 limitby=(0, 1) 1409 ).first() 1410 pes = list(pes) 1411 try: 1412 pes.append(ogroup.pe_id) 1413 except: 1414 current.log.warning("Unable to find Org Group %s" % user.org_group_id) 1415 1416 query = (ctable.uuid == "SITE_DEFAULT") | \ 1417 ((ctable.pe_id == pe_id) & \ 1418 (ctable.pe_default != False)) 1419 if len(pes) == 1: 1420 query |= (ctable.pe_id == pes[0]) 1421 else: 1422 query |= (ctable.pe_id.belongs(pes)) 1423 # Personal/OU may well not be complete, so Left Join 1424 left = (ptable.on(ptable.id == ctable.projection_id), 1425 stable.on((stable.config_id == ctable.id) & \ 1426 (stable.layer_id == None)), 1427 mtable.on(mtable.id == stable.marker_id), 1428 ) 1429 # Order by pe_type (defined in gis_config) 1430 # @ToDo: Sort orgs from the hierarchy? 1431 # (Currently we just have branch > non-branch in pe_type) 1432 rows = db(query).select(*fields, 1433 left=left, 1434 orderby=ctable.pe_type) 1435 if len(rows) == 1: 1436 row = rows.first() 1437 1438 if rows and not row: 1439 # Merge Configs 1440 merge = True 1441 cache["ids"] = [] 1442 for row in rows: 1443 if not merge: 1444 break 1445 config = row["gis_config"] 1446 if config.merge is False: # Backwards-compatibility 1447 merge = False 1448 if not config_id: 1449 config_id = config.id 1450 cache["ids"].append(config.id) 1451 for key in config: 1452 if key in ("delete_record", "gis_layer_config", "gis_menu", "update_record", "merge"): 1453 continue 1454 if key not in cache or cache[key] is None: 1455 cache[key] = config[key] 1456 if "epsg" not in cache or cache["epsg"] is None: 1457 projection = row["gis_projection"] 1458 for key in ["epsg", "units", "maxExtent", "proj4js"]: 1459 cache[key] = projection[key] if key in projection \ 1460 else None 1461 if "marker_image" not in cache or \ 1462 cache["marker_image"] is None: 1463 marker = row["gis_marker"] 1464 for key in ("image", "height", "width"): 1465 cache["marker_%s" % key] = marker[key] if key in marker \ 1466 else None 1467 # Add NULL values for any that aren't defined, to avoid KeyErrors 1468 for key in ("epsg", "units", "proj4js", "maxExtent", 1469 "marker_image", "marker_height", "marker_width", 1470 ): 1471 if key not in cache: 1472 cache[key] = None 1473 1474 if not row: 1475 # No personal config or not logged in. Use site default. 
1476 query = (ctable.uuid == "SITE_DEFAULT") & \ 1477 (mtable.id == stable.marker_id) & \ 1478 (stable.config_id == ctable.id) & \ 1479 (stable.layer_id == None) & \ 1480 (ptable.id == ctable.projection_id) 1481 row = db(query).select(*fields, 1482 limitby=(0, 1)).first() 1483 1484 if not row: 1485 # No configs found at all 1486 _gis.config = cache 1487 return cache 1488 1489 if not cache: 1490 # We had a single row 1491 config = row["gis_config"] 1492 config_id = config.id 1493 cache["ids"] = [config_id] 1494 projection = row["gis_projection"] 1495 marker = row["gis_marker"] 1496 for key in config: 1497 cache[key] = config[key] 1498 for key in ("epsg", "maxExtent", "proj4js", "units"): 1499 cache[key] = projection[key] if key in projection else None 1500 for key in ("image", "height", "width"): 1501 cache["marker_%s" % key] = marker[key] if key in marker \ 1502 else None 1503 1504 # Store the values 1505 _gis.config = cache 1506 return cache
 1507   
 1508      # ------------------------------------------------------------------------- 
 1509      @staticmethod 
 1510      def get_config(): 
1511 """ 1512 Returns the current GIS config structure. 1513 1514 @ToDo: Config() class 1515 """ 1516 1517 _gis = current.response.s3.gis 1518 1519 if not _gis.config: 1520 # Ask set_config to put the appropriate config in response. 1521 if current.session.s3.gis_config_id: 1522 GIS.set_config(current.session.s3.gis_config_id) 1523 else: 1524 GIS.set_config() 1525 1526 return _gis.config
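
A usage sketch: scalar fields from the merged gis_config/gis_projection records are available as attributes of the returned Storage:

    config = GIS.get_config()
    map_centre = (config.lat, config.lon)
    zoom = config.zoom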
 1527   
 1528      # ------------------------------------------------------------------------- 
 1529      def get_location_hierarchy(self, level=None, location=None): 
1530 """ 1531 Returns the location hierarchy and it's labels 1532 1533 @param: level - a specific level for which to lookup the label 1534 @param: location - the location_id to lookup the location for 1535 currently only the actual location is supported 1536 @ToDo: Do a search of parents to allow this 1537 lookup for any location 1538 """ 1539 1540 _levels = self.hierarchy_levels 1541 _location = location 1542 1543 if not location and _levels: 1544 # Use cached value 1545 if level: 1546 if level in _levels: 1547 return _levels[level] 1548 else: 1549 return level 1550 else: 1551 return _levels 1552 1553 COUNTRY = current.messages.COUNTRY 1554 1555 if level == "L0": 1556 return COUNTRY 1557 1558 db = current.db 1559 s3db = current.s3db 1560 table = s3db.gis_hierarchy 1561 1562 fields = (table.uuid, 1563 table.L1, 1564 table.L2, 1565 table.L3, 1566 table.L4, 1567 table.L5, 1568 ) 1569 1570 query = (table.uuid == "SITE_DEFAULT") 1571 if not location: 1572 config = GIS.get_config() 1573 location = config.region_location_id 1574 if location: 1575 # Try the Region, but ensure we have the fallback available in a single query 1576 query = query | (table.location_id == location) 1577 rows = db(query).select(cache=s3db.cache, 1578 *fields) 1579 if len(rows) > 1: 1580 # Remove the Site Default 1581 _filter = lambda row: row.uuid == "SITE_DEFAULT" 1582 rows.exclude(_filter) 1583 elif not rows: 1584 # prepop hasn't run yet 1585 if level: 1586 return level 1587 levels = OrderedDict() 1588 hierarchy_level_keys = self.hierarchy_level_keys 1589 for key in hierarchy_level_keys: 1590 if key == "L0": 1591 levels[key] = COUNTRY 1592 else: 1593 levels[key] = key 1594 return levels 1595 1596 T = current.T 1597 row = rows.first() 1598 if level: 1599 try: 1600 return T(row[level]) 1601 except: 1602 return level 1603 else: 1604 levels = OrderedDict() 1605 hierarchy_level_keys = self.hierarchy_level_keys 1606 for key in hierarchy_level_keys: 1607 if key == "L0": 1608 levels[key] = COUNTRY 1609 elif key in row and row[key]: 1610 # Only include rows with values 1611 levels[key] = str(T(row[key])) 1612 if not _location: 1613 # Cache the value 1614 self.hierarchy_levels = levels 1615 if level: 1616 return levels[level] 1617 else: 1618 return levels
 1619   
 1620      # ------------------------------------------------------------------------- 
 1621      def get_strict_hierarchy(self, location=None): 
1622 """ 1623 Returns the strict hierarchy value from the current config. 1624 1625 @param: location - the location_id of the record to check 1626 """ 1627 1628 s3db = current.s3db 1629 table = s3db.gis_hierarchy 1630 1631 # Read the system default 1632 # @ToDo: Check for an active gis_config region? 1633 query = (table.uuid == "SITE_DEFAULT") 1634 if location: 1635 # Try the Location's Country, but ensure we have the fallback available in a single query 1636 query = query | (table.location_id == self.get_parent_country(location)) 1637 rows = current.db(query).select(table.uuid, 1638 table.strict_hierarchy, 1639 cache=s3db.cache) 1640 if len(rows) > 1: 1641 # Remove the Site Default 1642 _filter = lambda row: row.uuid == "SITE_DEFAULT" 1643 rows.exclude(_filter) 1644 row = rows.first() 1645 if row: 1646 strict = row.strict_hierarchy 1647 else: 1648 # Pre-pop hasn't run yet 1649 return False 1650 1651 return strict
 1652   
 1653      # ------------------------------------------------------------------------- 
 1654      def get_max_hierarchy_level(self): 
1655 """ 1656 Returns the deepest level key (i.e. Ln) in the current hierarchy. 1657 - used by gis_location_onvalidation() 1658 """ 1659 1660 location_hierarchy = self.get_location_hierarchy() 1661 return max(location_hierarchy)
 1662   
 1663      # ------------------------------------------------------------------------- 
 1664      def get_all_current_levels(self, level=None): 
1665 """ 1666 Get the current hierarchy levels plus non-hierarchy levels. 1667 """ 1668 1669 all_levels = OrderedDict() 1670 all_levels.update(self.get_location_hierarchy()) 1671 #T = current.T 1672 #all_levels["GR"] = T("Location Group") 1673 #all_levels["XX"] = T("Imported") 1674 1675 if level: 1676 try: 1677 return all_levels[level] 1678 except Exception, e: 1679 return level 1680 else: 1681 return all_levels
 1682   
 1683      # ------------------------------------------------------------------------- 
 1684      def get_relevant_hierarchy_levels(self, as_dict=False): 
1685 """ 1686 Get current location hierarchy levels relevant for the user 1687 """ 1688 1689 levels = self.relevant_hierarchy_levels 1690 1691 if not levels: 1692 levels = OrderedDict(self.get_location_hierarchy()) 1693 if len(current.deployment_settings.get_gis_countries()) == 1 or \ 1694 current.response.s3.gis.config.region_location_id: 1695 levels.pop("L0", None) 1696 self.relevant_hierarchy_levels = levels 1697 1698 if not as_dict: 1699 return levels.keys() 1700 else: 1701 return levels
 1702   
 1703      # ------------------------------------------------------------------------- 
 1704      @staticmethod 
 1705      def get_countries(key_type="id"): 
1706 """ 1707 Returns country code or L0 location id versus name for all countries. 1708 1709 The lookup is cached in the session 1710 1711 If key_type is "code", these are returned as an OrderedDict with 1712 country code as the key. If key_type is "id", then the location id 1713 is the key. In all cases, the value is the name. 1714 """ 1715 1716 session = current.session 1717 if "gis" not in session: 1718 session.gis = Storage() 1719 gis = session.gis 1720 1721 if gis.countries_by_id: 1722 cached = True 1723 else: 1724 cached = False 1725 1726 if not cached: 1727 s3db = current.s3db 1728 table = s3db.gis_location 1729 ttable = s3db.gis_location_tag 1730 query = (table.level == "L0") & \ 1731 (ttable.tag == "ISO2") & \ 1732 (ttable.location_id == table.id) 1733 countries = current.db(query).select(table.id, 1734 table.name, 1735 ttable.value, 1736 orderby=table.name) 1737 if not countries: 1738 return [] 1739 1740 countries_by_id = OrderedDict() 1741 countries_by_code = OrderedDict() 1742 for row in countries: 1743 location = row["gis_location"] 1744 countries_by_id[location.id] = location.name 1745 countries_by_code[row["gis_location_tag"].value] = location.name 1746 1747 # Cache in the session 1748 gis.countries_by_id = countries_by_id 1749 gis.countries_by_code = countries_by_code 1750 1751 if key_type == "id": 1752 return countries_by_id 1753 else: 1754 return countries_by_code 1755 1756 elif key_type == "id": 1757 return gis.countries_by_id 1758 else: 1759 return gis.countries_by_code
 1760   
 1761      # ------------------------------------------------------------------------- 
 1762      @staticmethod 
 1763      def get_country(key, key_type="id"): 
1764 """ 1765 Returns country name for given code or id from L0 locations. 1766 1767 The key can be either location id or country code, as specified 1768 by key_type. 1769 """ 1770 1771 if key: 1772 if current.gis.get_countries(key_type): 1773 if key_type == "id": 1774 return current.session.gis.countries_by_id[key] 1775 else: 1776 return current.session.gis.countries_by_code[key] 1777 1778 return None
 1779   
 1780      # ------------------------------------------------------------------------- 
 1781      def get_parent_country(self, location, key_type="id"): 
1782 """ 1783 Returns the parent country for a given record 1784 1785 @param: location: the location or id to search for 1786 @param: key_type: whether to return an id or code 1787 1788 @ToDo: Optimise to not use try/except 1789 """ 1790 1791 if not location: 1792 return None 1793 db = current.db 1794 s3db = current.s3db 1795 1796 # @ToDo: Avoid try/except here! 1797 # - separate parameters best as even isinstance is expensive 1798 try: 1799 # location is passed as integer (location_id) 1800 table = s3db.gis_location 1801 location = db(table.id == location).select(table.id, 1802 table.path, 1803 table.level, 1804 limitby=(0, 1), 1805 cache=s3db.cache).first() 1806 except: 1807 # location is passed as record 1808 pass 1809 1810 if location.level == "L0": 1811 if key_type == "id": 1812 return location.id 1813 elif key_type == "code": 1814 ttable = s3db.gis_location_tag 1815 query = (ttable.tag == "ISO2") & \ 1816 (ttable.location_id == location.id) 1817 tag = db(query).select(ttable.value, 1818 limitby=(0, 1)).first() 1819 try: 1820 return tag.value 1821 except: 1822 return None 1823 else: 1824 parents = self.get_parents(location.id, 1825 feature=location) 1826 if parents: 1827 for row in parents: 1828 if row.level == "L0": 1829 if key_type == "id": 1830 return row.id 1831 elif key_type == "code": 1832 ttable = s3db.gis_location_tag 1833 query = (ttable.tag == "ISO2") & \ 1834 (ttable.location_id == row.id) 1835 tag = db(query).select(ttable.value, 1836 limitby=(0, 1)).first() 1837 try: 1838 return tag.value 1839 except: 1840 return None 1841 return None
1842 1843 # -------------------------------------------------------------------------
    def get_default_country(self, key_type="id"):
1845 """ 1846 Returns the default country for the active gis_config 1847 1848 @param: key_type: whether to return an id or code 1849 """ 1850 1851 config = GIS.get_config() 1852 1853 if config.default_location_id: 1854 return self.get_parent_country(config.default_location_id, 1855 key_type=key_type) 1856 1857 return None
1858 1859 # -------------------------------------------------------------------------
    def get_features_in_polygon(self, location, tablename=None, category=None):
1861 """ 1862 Returns a gluon.sql.Rows of Features within a Polygon. 1863 The Polygon can be either a WKT string or the ID of a record in the 1864 gis_location table 1865 1866 Currently unused. 1867 @ToDo: Optimise to not use try/except 1868 """ 1869 1870 from shapely.geos import ReadingError 1871 from shapely.wkt import loads as wkt_loads 1872 1873 try: 1874 # Enable C-based speedups available from 1.2.10+ 1875 from shapely import speedups 1876 speedups.enable() 1877 except: 1878 current.log.info("S3GIS", 1879 "Upgrade Shapely for Performance enhancements") 1880 1881 db = current.db 1882 s3db = current.s3db 1883 locations = s3db.gis_location 1884 1885 try: 1886 location_id = int(location) 1887 # Check that the location is a polygon 1888 location = db(locations.id == location_id).select(locations.wkt, 1889 locations.lon_min, 1890 locations.lon_max, 1891 locations.lat_min, 1892 locations.lat_max, 1893 limitby=(0, 1) 1894 ).first() 1895 if location: 1896 wkt = location.wkt 1897 if wkt and (wkt.startswith("POLYGON") or \ 1898 wkt.startswith("MULTIPOLYGON")): 1899 # ok 1900 lon_min = location.lon_min 1901 lon_max = location.lon_max 1902 lat_min = location.lat_min 1903 lat_max = location.lat_max 1904 1905 else: 1906 current.log.error("Location searched within isn't a Polygon!") 1907 return None 1908 except: # @ToDo: need specific exception 1909 wkt = location 1910 if (wkt.startswith("POLYGON") or wkt.startswith("MULTIPOLYGON")): 1911 # ok 1912 lon_min = None 1913 else: 1914 current.log.error("This isn't a Polygon!") 1915 return None 1916 1917 try: 1918 polygon = wkt_loads(wkt) 1919 except: # @ToDo: need specific exception 1920 current.log.error("Invalid Polygon!") 1921 return None 1922 1923 table = s3db[tablename] 1924 1925 if "location_id" not in table.fields(): 1926 # @ToDo: Add any special cases to be able to find the linked location 1927 current.log.error("This table doesn't have a location_id!") 1928 return None 1929 1930 query = (table.location_id == locations.id) 1931 if "deleted" in table.fields: 1932 query &= (table.deleted == False) 1933 # @ToDo: Check AAA (do this as a resource filter?) 1934 1935 features = db(query).select(locations.wkt, 1936 locations.lat, 1937 locations.lon, 1938 table.ALL) 1939 output = Rows() 1940 # @ToDo: provide option to use PostGIS/Spatialite 1941 # settings = current.deployment_settings 1942 # if settings.gis.spatialdb and settings.database.db_type == "postgres": 1943 if lon_min is None: 1944 # We have no BBOX so go straight to the full geometry check 1945 for row in features: 1946 _location = row.gis_location 1947 wkt = _location.wkt 1948 if wkt is None: 1949 lat = _location.lat 1950 lon = _location.lon 1951 if lat is not None and lon is not None: 1952 wkt = self.latlon_to_wkt(lat, lon) 1953 else: 1954 continue 1955 try: 1956 shape = wkt_loads(wkt) 1957 if shape.intersects(polygon): 1958 # Save Record 1959 output.records.append(row) 1960 except ReadingError: 1961 current.log.error("Error reading wkt of location with id", 1962 value=row.id) 1963 else: 1964 # 1st check for Features included within the bbox (faster) 1965 def in_bbox(row): 1966 _location = row.gis_location 1967 return (_location.lon > lon_min) & \ 1968 (_location.lon < lon_max) & \ 1969 (_location.lat > lat_min) & \ 1970 (_location.lat < lat_max)
1971 for row in features.find(lambda row: in_bbox(row)): 1972 # Search within this subset with a full geometry check 1973 # Uses Shapely. 1974 _location = row.gis_location 1975 wkt = _location.wkt 1976 if wkt is None: 1977 lat = _location.lat 1978 lon = _location.lon 1979 if lat is not None and lon is not None: 1980 wkt = self.latlon_to_wkt(lat, lon) 1981 else: 1982 continue 1983 try: 1984 shape = wkt_loads(wkt) 1985 if shape.intersects(polygon): 1986 # Save Record 1987 output.records.append(row) 1988 except ReadingError: 1989 current.log.error("Error reading wkt of location with id", 1990 value = row.id) 1991 return output
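Although currently unused, the intended call pattern appears to be along these lines (the table name and polygon are illustrative):

    # Features of a resource table whose locations fall inside gis_location #123
    rows = current.gis.get_features_in_polygon(123, tablename="hms_hospital")
    # ...or pass the polygon directly as WKT
    rows = current.gis.get_features_in_polygon(
               "POLYGON((30 10, 40 40, 20 40, 10 20, 30 10))",
               tablename="hms_hospital")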
1992 1993 # ------------------------------------------------------------------------- 1994 @staticmethod
    def get_polygon_from_bounds(bbox):
        """
            Given a gis_location record or a bounding box dict with keys
            lon_min, lon_max, lat_min, lat_max, construct a WKT polygon with
            points at the corners.
        """

        lon_min = bbox["lon_min"]
        lon_max = bbox["lon_max"]
        lat_min = bbox["lat_min"]
        lat_max = bbox["lat_max"]
        # Take the corner points in order, closing the ring back at the start
        points = [(lon_min, lat_min),
                  (lon_min, lat_max),
                  (lon_max, lat_max),
                  (lon_max, lat_min),
                  (lon_min, lat_min)]
        pairs = ["%s %s" % (p[0], p[1]) for p in points]
        wkt = "POLYGON ((%s))" % ", ".join(pairs)
        return wkt
2015 2016 # ------------------------------------------------------------------------- 2017 @staticmethod
    def get_bounds_from_radius(lat, lon, radius):
        """
            Compute a bounding box given a Radius (in km) of a LatLon Location

            Note the order of the parameters.

            @return a dict containing the bounds with keys lat_min, lat_max,
                    lon_min, lon_max

            See:
            http://janmatuschek.de/LatitudeLongitudeBoundingCoordinates
        """

        import math

        radians = math.radians
        degrees = math.degrees

        MIN_LAT = radians(-90)     # -PI/2
        MAX_LAT = radians(90)      # PI/2
        MIN_LON = radians(-180)    # -PI
        MAX_LON = radians(180)     # PI

        # Convert to radians for the calculation
        r = float(radius) / RADIUS_EARTH
        radLat = radians(lat)
        radLon = radians(lon)

        # Calculate the bounding box
        minLat = radLat - r
        maxLat = radLat + r

        if (minLat > MIN_LAT) and (maxLat < MAX_LAT):
            deltaLon = math.asin(math.sin(r) / math.cos(radLat))
            minLon = radLon - deltaLon
            if (minLon < MIN_LON):
                minLon += 2 * math.pi
            maxLon = radLon + deltaLon
            if (maxLon > MAX_LON):
                maxLon -= 2 * math.pi
        else:
            # Special care for Poles & 180 Meridian:
            # http://janmatuschek.de/LatitudeLongitudeBoundingCoordinates#PolesAnd180thMeridian
            minLat = max(minLat, MIN_LAT)
            maxLat = min(maxLat, MAX_LAT)
            minLon = MIN_LON
            maxLon = MAX_LON

        # Convert back to degrees
        minLat = degrees(minLat)
        minLon = degrees(minLon)
        maxLat = degrees(maxLat)
        maxLon = degrees(maxLon)

        return {"lat_min": minLat,
                "lat_max": maxLat,
                "lon_min": minLon,
                "lon_max": maxLon,
                }
2077 2078 # -------------------------------------------------------------------------
    def get_features_in_radius(self, lat, lon, radius, tablename=None, category=None):
2080 """ 2081 Returns Features within a Radius (in km) of a LatLon Location 2082 2083 Unused 2084 """ 2085 2086 import math 2087 2088 db = current.db 2089 settings = current.deployment_settings 2090 2091 if settings.get_gis_spatialdb() and \ 2092 settings.get_database_type() == "postgres": 2093 # Use PostGIS routine 2094 # The ST_DWithin function call will automatically include a bounding box comparison that will make use of any indexes that are available on the geometries. 2095 # @ToDo: Support optional Category (make this a generic filter?) 2096 2097 import psycopg2 2098 import psycopg2.extras 2099 2100 # Convert km to degrees (since we're using the_geom not the_geog) 2101 radius = math.degrees(float(radius) / RADIUS_EARTH) 2102 2103 dbstr = "dbname=%(database)s user=%(username)s " \ 2104 "password=%(password)s host=%(host)s port=%(port)s" % \ 2105 settings.db_params 2106 connection = psycopg2.connect(dbstr) 2107 2108 cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor) 2109 info_string = "SELECT column_name, udt_name FROM information_schema.columns WHERE table_name = 'gis_location' or table_name = '%s';" % tablename 2110 cursor.execute(info_string) 2111 # @ToDo: Look at more optimal queries for just those fields we need 2112 if tablename: 2113 # Lookup the resource 2114 query_string = cursor.mogrify("SELECT * FROM gis_location, %s WHERE %s.location_id = gis_location.id and ST_DWithin (ST_GeomFromText ('POINT (%s %s)', 4326), the_geom, %s);" % (tablename, tablename, lon, lat, radius)) 2115 else: 2116 # Lookup the raw Locations 2117 query_string = cursor.mogrify("SELECT * FROM gis_location WHERE ST_DWithin (ST_GeomFromText ('POINT (%s %s)', 4326), the_geom, %s);" % (lon, lat, radius)) 2118 2119 cursor.execute(query_string) 2120 # @ToDo: Export Rows? 
2121 features = [] 2122 for record in cursor: 2123 d = dict(record.items()) 2124 row = Storage() 2125 # @ToDo: Optional support for Polygons 2126 if tablename: 2127 row.gis_location = Storage() 2128 row.gis_location.id = d["id"] 2129 row.gis_location.lat = d["lat"] 2130 row.gis_location.lon = d["lon"] 2131 row.gis_location.lat_min = d["lat_min"] 2132 row.gis_location.lon_min = d["lon_min"] 2133 row.gis_location.lat_max = d["lat_max"] 2134 row.gis_location.lon_max = d["lon_max"] 2135 row[tablename] = Storage() 2136 row[tablename].id = d["id"] 2137 row[tablename].name = d["name"] 2138 else: 2139 row.name = d["name"] 2140 row.id = d["id"] 2141 row.lat = d["lat"] 2142 row.lon = d["lon"] 2143 row.lat_min = d["lat_min"] 2144 row.lon_min = d["lon_min"] 2145 row.lat_max = d["lat_max"] 2146 row.lon_max = d["lon_max"] 2147 features.append(row) 2148 2149 return features 2150 2151 #elif settings.database.db_type == "mysql": 2152 # Do the calculation in MySQL to pull back only the relevant rows 2153 # Raw MySQL Formula from: http://blog.peoplesdns.com/archives/24 2154 # PI = 3.141592653589793, mysql's pi() function returns 3.141593 2155 #pi = math.pi 2156 #query = """SELECT name, lat, lon, acos(SIN( PI()* 40.7383040 /180 )*SIN( PI()*lat/180 ))+(cos(PI()* 40.7383040 /180)*COS( PI()*lat/180) *COS(PI()*lon/180-PI()* -73.99319 /180))* 3963.191 2157 #AS distance 2158 #FROM gis_location 2159 #WHERE 1=1 2160 #AND 3963.191 * ACOS( (SIN(PI()* 40.7383040 /180)*SIN(PI() * lat/180)) + (COS(PI()* 40.7383040 /180)*cos(PI()*lat/180)*COS(PI() * lon/180-PI()* -73.99319 /180))) < = 1.5 2161 #ORDER BY 3963.191 * ACOS((SIN(PI()* 40.7383040 /180)*SIN(PI()*lat/180)) + (COS(PI()* 40.7383040 /180)*cos(PI()*lat/180)*COS(PI() * lon/180-PI()* -73.99319 /180)))""" 2162 # db.executesql(query) 2163 2164 else: 2165 # Calculate in Python 2166 # Pull back all the rows within a square bounding box (faster than checking all features manually) 2167 # Then check each feature within this subset 2168 # http://janmatuschek.de/LatitudeLongitudeBoundingCoordinates 2169 2170 # @ToDo: Support optional Category (make this a generic filter?) 
2171 2172 bbox = self.get_bounds_from_radius(lat, lon, radius) 2173 2174 # shortcut 2175 locations = db.gis_location 2176 2177 query = (locations.lat > bbox["lat_min"]) & \ 2178 (locations.lat < bbox["lat_max"]) & \ 2179 (locations.lon > bbox["lon_min"]) & \ 2180 (locations.lon < bbox["lon_max"]) 2181 deleted = (locations.deleted == False) 2182 empty = (locations.lat != None) & (locations.lon != None) 2183 query = deleted & empty & query 2184 2185 if tablename: 2186 # Lookup the resource 2187 table = current.s3db[tablename] 2188 query &= (table.location_id == locations.id) 2189 records = db(query).select(table.ALL, 2190 locations.id, 2191 locations.name, 2192 locations.level, 2193 locations.lat, 2194 locations.lon, 2195 locations.lat_min, 2196 locations.lon_min, 2197 locations.lat_max, 2198 locations.lon_max) 2199 else: 2200 # Lookup the raw Locations 2201 records = db(query).select(locations.id, 2202 locations.name, 2203 locations.level, 2204 locations.lat, 2205 locations.lon, 2206 locations.lat_min, 2207 locations.lon_min, 2208 locations.lat_max, 2209 locations.lon_max) 2210 features = Rows() 2211 for row in records: 2212 # Calculate the Great Circle distance 2213 if tablename: 2214 distance = self.greatCircleDistance(lat, 2215 lon, 2216 row["gis_location.lat"], 2217 row["gis_location.lon"]) 2218 else: 2219 distance = self.greatCircleDistance(lat, 2220 lon, 2221 row.lat, 2222 row.lon) 2223 if distance < radius: 2224 features.records.append(row) 2225 else: 2226 # skip 2227 continue 2228 2229 return features
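Although marked as unused, a call would look like this (the table name is illustrative):

    # All hospital features within 50 km of the given point
    features = current.gis.get_features_in_radius(51.5, -0.1, 50,
                                                  tablename="hms_hospital")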
2230 2231 # -------------------------------------------------------------------------
    def get_latlon(self, feature_id, filter=False):
2233 """ 2234 Returns the Lat/Lon for a Feature 2235 2236 used by display_feature() in gis controller 2237 2238 @param feature_id: the feature ID 2239 @param filter: Filter out results based on deployment_settings 2240 """ 2241 2242 db = current.db 2243 table = db.gis_location 2244 feature = db(table.id == feature_id).select(table.id, 2245 table.lat, 2246 table.lon, 2247 table.parent, 2248 table.path, 2249 limitby=(0, 1)).first() 2250 2251 # Zero is an allowed value, hence explicit test for None. 2252 if "lon" in feature and "lat" in feature and \ 2253 (feature.lat is not None) and (feature.lon is not None): 2254 return {"lon": feature.lon, 2255 "lat": feature.lat, 2256 } 2257 2258 else: 2259 # Step through ancestors to first with lon, lat. 2260 parents = self.get_parents(feature.id, feature=feature) 2261 if parents: 2262 for row in parents: 2263 lon = row.get("lon", None) 2264 lat = row.get("lat", None) 2265 if (lon is not None) and (lat is not None): 2266 return {"lon": lon, 2267 "lat": lat, 2268 } 2269 2270 # Invalid feature_id 2271 return None
2272 2273 # ------------------------------------------------------------------------- 2274 @staticmethod
    def get_locations(table,
                      query,
                      join = True,
                      geojson = True,
                      ):
2280 """ 2281 Returns the locations for an XML export 2282 - used by GIS.get_location_data() and S3PivotTable.geojson() 2283 2284 @ToDo: Support multiple locations for a single resource 2285 (e.g. a Project working in multiple Communities) 2286 """ 2287 2288 db = current.db 2289 tablename = table._tablename 2290 gtable = current.s3db.gis_location 2291 settings = current.deployment_settings 2292 tolerance = settings.get_gis_simplify_tolerance() 2293 2294 output = {} 2295 2296 if settings.get_gis_spatialdb(): 2297 if geojson: 2298 precision = settings.get_gis_precision() 2299 if tolerance: 2300 # Do the Simplify & GeoJSON direct from the DB 2301 web2py_installed_version = parse_version(global_settings.web2py_version) 2302 web2py_installed_datetime = web2py_installed_version[4] # datetime_index = 4 2303 if web2py_installed_datetime >= datetime.datetime(2015, 1, 17, 0, 7, 4): 2304 # Use http://www.postgis.org/docs/ST_SimplifyPreserveTopology.html 2305 rows = db(query).select(table.id, 2306 gtable.the_geom.st_simplifypreservetopology(tolerance).st_asgeojson(precision=precision).with_alias("geojson")) 2307 else: 2308 # Use http://www.postgis.org/docs/ST_Simplify.html 2309 rows = db(query).select(table.id, 2310 gtable.the_geom.st_simplify(tolerance).st_asgeojson(precision=precision).with_alias("geojson")) 2311 else: 2312 # Do the GeoJSON direct from the DB 2313 rows = db(query).select(table.id, 2314 gtable.the_geom.st_asgeojson(precision=precision).with_alias("geojson")) 2315 for row in rows: 2316 key = row[tablename].id 2317 if key in output: 2318 output[key].append(row.geojson) 2319 else: 2320 output[key] = [row.geojson] 2321 else: 2322 if tolerance: 2323 # Do the Simplify direct from the DB 2324 rows = db(query).select(table.id, 2325 gtable.the_geom.st_simplify(tolerance).st_astext().with_alias("wkt")) 2326 else: 2327 rows = db(query).select(table.id, 2328 gtable.the_geom.st_astext().with_alias("wkt")) 2329 for row in rows: 2330 key = row[tablename].id 2331 if key in output: 2332 output[key].append(row.wkt) 2333 else: 2334 output[key] = [row.wkt] 2335 else: 2336 rows = db(query).select(table.id, 2337 gtable.wkt) 2338 simplify = GIS.simplify 2339 if geojson: 2340 # Simplify the polygon to reduce download size 2341 if join: 2342 for row in rows: 2343 g = simplify(row["gis_location"].wkt, 2344 tolerance=tolerance, 2345 output="geojson") 2346 if g: 2347 key = row[tablename].id 2348 if key in output: 2349 output[key].append(g) 2350 else: 2351 output[key] = [g] 2352 else: 2353 # gis_location: always single 2354 for row in rows: 2355 g = simplify(row.wkt, 2356 tolerance=tolerance, 2357 output="geojson") 2358 if g: 2359 output[row.id] = g 2360 2361 else: 2362 if join: 2363 if tolerance: 2364 # Simplify the polygon to reduce download size 2365 # & also to work around the recursion limit in libxslt 2366 # http://blog.gmane.org/gmane.comp.python.lxml.devel/day=20120309 2367 for row in rows: 2368 wkt = simplify(row["gis_location"].wkt) 2369 if wkt: 2370 key = row[tablename].id 2371 if key in output: 2372 output[key].append(wkt) 2373 else: 2374 output[key] = [wkt] 2375 else: 2376 for row in rows: 2377 wkt = row["gis_location"].wkt 2378 if wkt: 2379 key = row[tablename].id 2380 if key in output: 2381 output[key].append(wkt) 2382 else: 2383 output[key] = [wkt] 2384 else: 2385 # gis_location: always single 2386 if tolerance: 2387 for row in rows: 2388 wkt = simplify(row.wkt) 2389 if wkt: 2390 output[row.id] = wkt 2391 else: 2392 for row in rows: 2393 wkt = row.wkt 2394 if wkt: 2395 output[row.id] = wkt 2396 
2397 return output
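For reference, the returned dict maps record ids to geometries: a list of GeoJSON (or WKT) strings per record when joined to a resource table, or a single geometry when the table is gis_location itself, e.g. (illustrative values):

    {4: ['{"type": "Polygon", "coordinates": [...]}'],
     7: ['{"type": "MultiPolygon", "coordinates": [...]}'],
     }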
2398 2399 # ------------------------------------------------------------------------- 2400 @staticmethod
    def get_location_data(resource, attr_fields=None, count=None):
2402 """ 2403 Returns the locations, markers and popup tooltips for an XML export 2404 e.g. Feature Layers or Search results (Feature Resources) 2405 e.g. Exports in KML, GeoRSS or GPX format 2406 2407 Called by S3REST: S3Resource.export_tree() 2408 @param: resource - S3Resource instance (required) 2409 @param: attr_fields - list of attr_fields to use instead of reading 2410 from get_vars or looking up in gis_layer_feature 2411 @param: count - total number of features 2412 (can actually be more if features have multiple locations) 2413 """ 2414 2415 tablename = resource.tablename 2416 if tablename == "gis_feature_query": 2417 # Requires no special handling: XSLT uses normal fields 2418 return {} 2419 2420 format = current.auth.permission.format 2421 geojson = format == "geojson" 2422 if geojson: 2423 if count and \ 2424 count > current.deployment_settings.get_gis_max_features(): 2425 headers = {"Content-Type": "application/json"} 2426 message = "Too Many Records" 2427 status = 509 2428 raise HTTP(status, 2429 body=current.xml.json_message(success=False, 2430 statuscode=status, 2431 message=message), 2432 web2py_error=message, 2433 **headers) 2434 # Lookups per layer not per record 2435 if len(tablename) > 19 and \ 2436 tablename.startswith("gis_layer_shapefile"): 2437 # GIS Shapefile Layer 2438 location_data = GIS.get_shapefile_geojson(resource) or {} 2439 return location_data 2440 elif tablename == "gis_theme_data": 2441 # GIS Theme Layer 2442 location_data = GIS.get_theme_geojson(resource) or {} 2443 return location_data 2444 else: 2445 # e.g. GIS Feature Layer 2446 # e.g. Search results 2447 # Lookup Data using this function 2448 pass 2449 elif format in ("georss", "kml", "gpx"): 2450 # Lookup Data using this function 2451 pass 2452 else: 2453 # @ToDo: Bulk lookup of LatLons for S3XML.latlon() 2454 return {} 2455 2456 NONE = current.messages["NONE"] 2457 #if DEBUG: 2458 # start = datetime.datetime.now() 2459 2460 db = current.db 2461 s3db = current.s3db 2462 request = current.request 2463 get_vars = request.get_vars 2464 2465 ftable = s3db.gis_layer_feature 2466 2467 layer = None 2468 2469 layer_id = get_vars.get("layer", None) 2470 if layer_id: 2471 # Feature Layer 2472 # e.g. Search results loaded as a Feature Resource layer 2473 layer = db(ftable.layer_id == layer_id).select(ftable.attr_fields, 2474 # @ToDo: Deprecate 2475 ftable.popup_fields, 2476 ftable.individual, 2477 ftable.points, 2478 ftable.trackable, 2479 limitby=(0, 1) 2480 ).first() 2481 2482 else: 2483 # e.g. KML, GeoRSS or GPX export 2484 # e.g. 
Volunteer Layer in Vulnerability module 2485 controller = request.controller 2486 function = request.function 2487 query = (ftable.controller == controller) & \ 2488 (ftable.function == function) 2489 layers = db(query).select(ftable.layer_id, 2490 ftable.attr_fields, 2491 ftable.popup_fields, # @ToDo: Deprecate 2492 ftable.style_default, # @ToDo: Rename as no longer really 'style' 2493 ftable.individual, 2494 ftable.points, 2495 ftable.trackable, 2496 ) 2497 if len(layers) > 1: 2498 layers.exclude(lambda row: row.style_default == False) 2499 if len(layers) > 1: 2500 # We can't provide details for the whole layer, but need to do a per-record check 2501 return None 2502 if layers: 2503 layer = layers.first() 2504 layer_id = layer.layer_id 2505 2506 if not attr_fields: 2507 # Try get_vars 2508 attr_fields = get_vars.get("attr", []) 2509 if attr_fields: 2510 attr_fields = attr_fields.split(",") 2511 popup_fields = get_vars.get("popup", []) 2512 if popup_fields: 2513 popup_fields = popup_fields.split(",") 2514 if layer: 2515 if not popup_fields: 2516 # Lookup from gis_layer_feature 2517 popup_fields = layer.popup_fields or [] 2518 if not attr_fields: 2519 # Lookup from gis_layer_feature 2520 # @ToDo: Consider parsing these from style.popup_format instead 2521 # - see S3Report.geojson() 2522 attr_fields = layer.attr_fields or [] 2523 individual = layer.individual 2524 points = layer.points 2525 trackable = layer.trackable 2526 else: 2527 if not popup_fields: 2528 popup_fields = ["name"] 2529 individual = False 2530 points = False 2531 trackable = False 2532 2533 table = resource.table 2534 pkey = table._id.name 2535 2536 attributes = {} 2537 markers = {} 2538 styles = {} 2539 _pkey = table[pkey] 2540 # Ensure there are no ID represents to confuse things 2541 _pkey.represent = None 2542 if geojson: 2543 # Build the Attributes now so that representations can be 2544 # looked-up in bulk rather than as a separate lookup per record 2545 if popup_fields: 2546 # Old-style 2547 attr_fields = list(set(popup_fields + attr_fields)) 2548 if attr_fields: 2549 attr = {} 2550 2551 # Make a copy for the pkey insertion 2552 fields = list(attr_fields) 2553 2554 if pkey not in fields: 2555 fields.insert(0, pkey) 2556 2557 data = resource.select(fields, 2558 limit = None, 2559 raw_data = True, 2560 represent = True, 2561 show_links = False) 2562 2563 attr_cols = {} 2564 for f in data["rfields"]: 2565 fname = f.fname 2566 selector = f.selector 2567 if fname in attr_fields or selector in attr_fields: 2568 fieldname = f.colname 2569 tname, fname = fieldname.split(".") 2570 try: 2571 ftype = db[tname][fname].type 2572 except AttributeError: 2573 # FieldMethod 2574 ftype = None 2575 except KeyError: 2576 current.log.debug("SGIS: Field %s doesn't exist in table %s" % (fname, tname)) 2577 continue 2578 attr_cols[fieldname] = (ftype, fname) 2579 2580 _pkey = str(_pkey) 2581 for row in data["rows"]: 2582 record_id = int(row[_pkey]) 2583 if attr_cols: 2584 attribute = {} 2585 for fieldname in attr_cols: 2586 represent = row[fieldname] 2587 if represent is not None and \ 2588 represent not in (NONE, ""): 2589 # Skip empty fields 2590 _attr = attr_cols[fieldname] 2591 ftype = _attr[0] 2592 if ftype == "integer": 2593 if isinstance(represent, lazyT): 2594 # Integer is just a lookup key 2595 represent = s3_str(represent) 2596 else: 2597 # Attributes should be numbers not strings 2598 # (@ToDo: Add a JS i18n formatter for the tooltips) 2599 # NB This also relies on decoding within geojson/export.xsl and S3XML.__element2json() 
2600 represent = row["_row"][fieldname] 2601 elif ftype in ("double", "float"): 2602 # Attributes should be numbers not strings 2603 # (@ToDo: Add a JS i18n formatter for the tooltips) 2604 represent = row["_row"][fieldname] 2605 else: 2606 represent = s3_str(represent) 2607 attribute[_attr[1]] = represent 2608 attr[record_id] = attribute 2609 2610 attributes[tablename] = attr 2611 2612 #if DEBUG: 2613 # end = datetime.datetime.now() 2614 # duration = end - start 2615 # duration = "{:.2f}".format(duration.total_seconds()) 2616 # if layer_id: 2617 # layer_name = db(ftable.id == layer_id).select(ftable.name, 2618 # limitby=(0, 1) 2619 # ).first().name 2620 # else: 2621 # layer_name = "Unknown" 2622 # _debug("Attributes lookup of layer %s completed in %s seconds", 2623 # layer_name, 2624 # duration, 2625 # ) 2626 2627 _markers = get_vars.get("markers", None) 2628 if _markers: 2629 # Add a per-feature Marker 2630 marker_fn = s3db.get_config(tablename, "marker_fn") 2631 if marker_fn: 2632 m = {} 2633 for record in resource: 2634 m[record[pkey]] = marker_fn(record) 2635 else: 2636 # No configuration found so use default marker for all 2637 c, f = tablename.split("_", 1) 2638 m = GIS.get_marker(c, f) 2639 2640 markers[tablename] = m 2641 2642 if individual: 2643 # Add a per-feature Style 2644 # Optionally restrict to a specific Config? 2645 #config = GIS.get_config() 2646 stable = s3db.gis_style 2647 query = (stable.deleted == False) & \ 2648 (stable.layer_id == layer_id) & \ 2649 (stable.record_id.belongs(resource._ids)) 2650 #((stable.config_id == config.id) | 2651 # (stable.config_id == None)) 2652 rows = db(query).select(stable.record_id, 2653 stable.style) 2654 for row in rows: 2655 styles[row.record_id] = json.dumps(row.style, separators=SEPARATORS) 2656 2657 styles[tablename] = styles 2658 2659 else: 2660 # KML, GeoRSS or GPX 2661 marker_fn = s3db.get_config(tablename, "marker_fn") 2662 if marker_fn: 2663 # Add a per-feature Marker 2664 for record in resource: 2665 markers[record[pkey]] = marker_fn(record) 2666 else: 2667 # No configuration found so use default marker for all 2668 c, f = tablename.split("_", 1) 2669 markers = GIS.get_marker(c, f) 2670 2671 markers[tablename] = markers 2672 2673 # Lookup the LatLons now so that it can be done as a single 2674 # query rather than per record 2675 #if DEBUG: 2676 # start = datetime.datetime.now() 2677 latlons = {} 2678 #wkts = {} 2679 geojsons = {} 2680 gtable = s3db.gis_location 2681 if trackable: 2682 # Use S3Track 2683 ids = resource._ids 2684 # Ensure IDs in ascending order 2685 ids.sort() 2686 try: 2687 tracker = S3Trackable(table, record_ids=ids) 2688 except SyntaxError: 2689 # This table isn't trackable 2690 pass 2691 else: 2692 _latlons = tracker.get_location(_fields=[gtable.lat, 2693 gtable.lon], 2694 empty = False, 2695 ) 2696 index = 0 2697 for _id in ids: 2698 _location = _latlons[index] 2699 latlons[_id] = (_location.lat, _location.lon) 2700 index += 1 2701 2702 if not latlons: 2703 join = True 2704 #custom = False 2705 if "location_id" in table.fields: 2706 query = (table.id.belongs(resource._ids)) & \ 2707 (table.location_id == gtable.id) 2708 elif "site_id" in table.fields: 2709 stable = s3db.org_site 2710 query = (table.id.belongs(resource._ids)) & \ 2711 (table.site_id == stable.site_id) & \ 2712 (stable.location_id == gtable.id) 2713 elif tablename == "gis_location": 2714 join = False 2715 query = (table.id.belongs(resource._ids)) 2716 else: 2717 # Look at the Context 2718 context = resource.get_config("context") 2719 if 
context: 2720 location_context = context.get("location") 2721 else: 2722 location_context = None 2723 if not location_context: 2724 # Can't display this resource on the Map 2725 return None 2726 # @ToDo: Proper system rather than this hack_which_works_for_current_usecase 2727 # Resolve selector (which automatically attaches any required component) 2728 rfield = resource.resolve_selector(location_context) 2729 if "." in location_context: 2730 # Component 2731 alias, cfield = location_context.split(".", 1) 2732 try: 2733 component = resource.components[alias] 2734 except KeyError: 2735 # Invalid alias 2736 # Can't display this resource on the Map 2737 return None 2738 ctablename = component.tablename 2739 ctable = s3db[ctablename] 2740 query = (table.id.belongs(resource._ids)) & \ 2741 rfield.join[ctablename] & \ 2742 (ctable[cfield] == gtable.id) 2743 #custom = True 2744 # @ToDo: 2745 #elif "$" in location_context: 2746 else: 2747 # Can't display this resource on the Map 2748 return None 2749 2750 if geojson and not points: 2751 geojsons[tablename] = GIS.get_locations(table, query, join, geojson) 2752 # @ToDo: Support Polygons in KML, GPX & GeoRSS 2753 #else: 2754 # wkts[tablename] = GIS.get_locations(table, query, join, geojson) 2755 else: 2756 # Points 2757 rows = db(query).select(table.id, 2758 gtable.lat, 2759 gtable.lon) 2760 #if custom: 2761 # # Add geoJSONs 2762 #elif join: 2763 if join: 2764 for row in rows: 2765 # @ToDo: Support records with multiple locations 2766 # (e.g. an Org with multiple Facs) 2767 _location = row["gis_location"] 2768 latlons[row[tablename].id] = (_location.lat, _location.lon) 2769 else: 2770 # gis_location: Always single 2771 for row in rows: 2772 latlons[row.id] = (row.lat, row.lon) 2773 2774 _latlons = {} 2775 if latlons: 2776 _latlons[tablename] = latlons 2777 2778 #if DEBUG: 2779 # end = datetime.datetime.now() 2780 # duration = end - start 2781 # duration = "{:.2f}".format(duration.total_seconds()) 2782 # _debug("latlons lookup of layer %s completed in %s seconds", 2783 # layer_name, 2784 # duration, 2785 # ) 2786 2787 # Used by S3XML's gis_encode() 2788 return {"geojsons": geojsons, 2789 "latlons": _latlons, 2790 #"wkts": wkts, 2791 "attributes": attributes, 2792 "markers": markers, 2793 "styles": styles, 2794 }
2795 2796 # ------------------------------------------------------------------------- 2797 @staticmethod
    def get_marker(controller=None,
                   function=None,
                   filter=None,
                   ):
2802 """ 2803 Returns a Marker dict 2804 - called by xml.gis_encode() for non-geojson resources 2805 - called by S3Map.widget() if no marker_fn supplied 2806 """ 2807 2808 marker = None 2809 if controller and function: 2810 # Lookup marker in the gis_style table 2811 db = current.db 2812 s3db = current.s3db 2813 ftable = s3db.gis_layer_feature 2814 stable = s3db.gis_style 2815 mtable = s3db.gis_marker 2816 config = GIS.get_config() 2817 query = (ftable.controller == controller) & \ 2818 (ftable.function == function) & \ 2819 (ftable.aggregate == False) 2820 left = (stable.on((stable.layer_id == ftable.layer_id) & \ 2821 (stable.record_id == None) & \ 2822 ((stable.config_id == config.id) | \ 2823 (stable.config_id == None))), 2824 mtable.on(mtable.id == stable.marker_id), 2825 ) 2826 if filter: 2827 query &= (ftable.filter == filter) 2828 if current.deployment_settings.get_database_type() == "postgres": 2829 # None is last 2830 orderby = stable.config_id 2831 else: 2832 # None is 1st 2833 orderby = ~stable.config_id 2834 layers = db(query).select(mtable.image, 2835 mtable.height, 2836 mtable.width, 2837 ftable.style_default, 2838 stable.gps_marker, 2839 left=left, 2840 orderby=orderby) 2841 if len(layers) > 1: 2842 layers.exclude(lambda row: row["gis_layer_feature.style_default"] == False) 2843 if len(layers) == 1: 2844 layer = layers.first() 2845 else: 2846 # Can't differentiate 2847 layer = None 2848 2849 if layer: 2850 _marker = layer["gis_marker"] 2851 if _marker.image: 2852 marker = {"image": _marker.image, 2853 "height": _marker.height, 2854 "width": _marker.width, 2855 "gps_marker": layer["gis_style"].gps_marker, 2856 } 2857 2858 if not marker: 2859 # Default 2860 marker = Marker().as_dict() 2861 2862 return marker
2863 2864 # ------------------------------------------------------------------------- 2865 @staticmethod
    def get_style(layer_id=None,
                  aggregate=None,
                  ):
2869 """ 2870 Returns a Style dict 2871 - called by S3Report.geojson() 2872 """ 2873 2874 style = None 2875 if layer_id: 2876 style = Style(layer_id=layer_id, 2877 aggregate=aggregate).as_dict() 2878 2879 if not style: 2880 # Default 2881 style = Style().as_dict() 2882 2883 return style
2884 2885 # ------------------------------------------------------------------------- 2886 @staticmethod
    def get_screenshot(config_id, temp=True, height=None, width=None):
2888 """ 2889 Save a Screenshot of a saved map 2890 2891 @requires: 2892 PhantomJS http://phantomjs.org 2893 Selenium https://pypi.python.org/pypi/selenium 2894 """ 2895 2896 # @ToDo: allow selection of map_id 2897 map_id = "default_map" 2898 2899 #from selenium import webdriver 2900 # We include a Custom version which is patched to access native PhantomJS functions from: 2901 # https://github.com/watsonmw/ghostdriver/commit/d9b65ed014ed9ff8a5e852cc40e59a0fd66d0cf1 2902 from webdriver import WebDriver 2903 from selenium.common.exceptions import TimeoutException, WebDriverException 2904 from selenium.webdriver.support.ui import WebDriverWait 2905 2906 request = current.request 2907 2908 cachepath = os.path.join(request.folder, "static", "cache", "jpg") 2909 2910 if not os.path.exists(cachepath): 2911 try: 2912 os.mkdir(cachepath) 2913 except OSError, os_error: 2914 error = "GIS: JPEG files cannot be saved: %s %s" % \ 2915 (cachepath, os_error) 2916 current.log.error(error) 2917 current.session.error = error 2918 redirect(URL(c="gis", f="index", vars={"config": config_id})) 2919 2920 # Copy the current working directory to revert back to later 2921 cwd = os.getcwd() 2922 # Change to the Cache folder (can't render directly there from execute_phantomjs) 2923 os.chdir(cachepath) 2924 2925 #driver = webdriver.PhantomJS() 2926 # Disable Proxy for Win32 Network Latency issue 2927 driver = WebDriver(service_args=["--proxy-type=none"]) 2928 2929 # Change back for other parts 2930 os.chdir(cwd) 2931 2932 settings = current.deployment_settings 2933 if height is None: 2934 # Set the size of the browser to match the map 2935 height = settings.get_gis_map_height() 2936 if width is None: 2937 width = settings.get_gis_map_width() 2938 # For Screenshots 2939 #height = 410 2940 #width = 820 2941 driver.set_window_size(width + 5, height + 20) 2942 2943 # Load the homepage 2944 # (Cookie needs to be set on same domain as it takes effect) 2945 base_url = "%s/%s" % (settings.get_base_public_url(), 2946 request.application) 2947 driver.get(base_url) 2948 2949 response = current.response 2950 session_id = response.session_id 2951 if not current.auth.override: 2952 # Reuse current session to allow access to ACL-controlled resources 2953 driver.add_cookie({"name": response.session_id_name, 2954 "value": session_id, 2955 "path": "/", 2956 }) 2957 # For sync connections 2958 current.session._unlock(response) 2959 2960 # Load the map 2961 url = "%s/gis/map_viewing_client?print=1&config=%s" % (base_url, 2962 config_id) 2963 driver.get(url) 2964 2965 # Wait for map to load (including it's layers) 2966 # Alternative approach: https://raw.githubusercontent.com/ariya/phantomjs/master/examples/waitfor.js 2967 def map_loaded(driver): 2968 test = '''return S3.gis.maps['%s'].s3.loaded''' % map_id 2969 try: 2970 result = driver.execute_script(test) 2971 except WebDriverException, e: 2972 result = False 2973 return result
2974 2975 try: 2976 # Wait for up to 100s (large screenshots take a long time for layers to load) 2977 WebDriverWait(driver, 100).until(map_loaded) 2978 except TimeoutException, e: 2979 driver.quit() 2980 current.log.error("Timeout: %s" % e) 2981 return None 2982 2983 # Save the Output 2984 # @ToDo: Can we use StringIO instead of cluttering filesystem? 2985 # @ToDo: Allow option of PDF (as well as JPG) 2986 # https://github.com/ariya/phantomjs/blob/master/examples/rasterize.js 2987 if temp: 2988 filename = "%s.jpg" % session_id 2989 else: 2990 filename = "config_%s.jpg" % config_id 2991 2992 # Cannot control file size (no access to clipRect) or file format 2993 #driver.save_screenshot(os.path.join(cachepath, filename)) 2994 2995 #driver.page.clipRect = {"top": 10, 2996 # "left": 5, 2997 # "width": width, 2998 # "height": height 2999 # } 3000 #driver.page.render(filename, {"format": "jpeg", "quality": "100"}) 3001 3002 script = ''' 3003 var page = this; 3004 page.clipRect = {top: 10, 3005 left: 5, 3006 width: %(width)s, 3007 height: %(height)s 3008 }; 3009 page.render('%(filename)s', {format: 'jpeg', quality: '100'});''' % \ 3010 {"width": width, 3011 "height": height, 3012 "filename": filename, 3013 } 3014 try: 3015 result = driver.execute_phantomjs(script) 3016 except WebDriverException, e: 3017 driver.quit() 3018 current.log.error("WebDriver crashed: %s" % e) 3019 return None 3020 3021 driver.quit() 3022 3023 if temp: 3024 # This was a temporary config for creating the screenshot, then delete it now 3025 ctable = current.s3db.gis_config 3026 the_set = current.db(ctable.id == config_id) 3027 config = the_set.select(ctable.temp, 3028 limitby=(0, 1) 3029 ).first() 3030 try: 3031 if config.temp: 3032 the_set.delete() 3033 except: 3034 # Record not found? 3035 pass 3036 3037 # Pass the result back to the User 3038 return filename 3039 3040 # ------------------------------------------------------------------------- 3041 @staticmethod
    def get_shapefile_geojson(resource):
3043 """ 3044 Lookup Shapefile Layer polygons once per layer and not per-record 3045 3046 Called by S3REST: S3Resource.export_tree() 3047 3048 @ToDo: Vary simplification level & precision by Zoom level 3049 - store this in the style? 3050 """ 3051 3052 db = current.db 3053 #tablename = "gis_layer_shapefile_%s" % resource._ids[0] 3054 tablename = resource.tablename 3055 table = db[tablename] 3056 query = resource.get_query() 3057 fields = [] 3058 fappend = fields.append 3059 for f in table.fields: 3060 if f not in ("layer_id", "lat", "lon"): 3061 fappend(f) 3062 3063 attributes = {} 3064 geojsons = {} 3065 settings = current.deployment_settings 3066 tolerance = settings.get_gis_simplify_tolerance() 3067 if settings.get_gis_spatialdb(): 3068 # Do the Simplify & GeoJSON direct from the DB 3069 fields.remove("the_geom") 3070 fields.remove("wkt") 3071 _fields = [table[f] for f in fields] 3072 rows = db(query).select(table.the_geom.st_simplify(tolerance).st_asgeojson(precision=4).with_alias("geojson"), 3073 *_fields) 3074 for row in rows: 3075 _row = row[tablename] 3076 _id = _row.id 3077 geojsons[_id] = row.geojson 3078 _attributes = {} 3079 for f in fields: 3080 if f not in ("id"): 3081 _attributes[f] = _row[f] 3082 attributes[_id] = _attributes 3083 else: 3084 _fields = [table[f] for f in fields] 3085 rows = db(query).select(*_fields) 3086 simplify = GIS.simplify 3087 for row in rows: 3088 # Simplify the polygon to reduce download size 3089 geojson = simplify(row.wkt, tolerance=tolerance, 3090 output="geojson") 3091 _id = row.id 3092 if geojson: 3093 geojsons[_id] = geojson 3094 _attributes = {} 3095 for f in fields: 3096 if f not in ("id", "wkt"): 3097 _attributes[f] = row[f] 3098 attributes[_id] = _attributes 3099 3100 _attributes = {} 3101 _attributes[tablename] = attributes 3102 _geojsons = {} 3103 _geojsons[tablename] = geojsons 3104 3105 # return 'locations' 3106 return {"attributes": _attributes, 3107 "geojsons": _geojsons, 3108 }
3109 3110 # ------------------------------------------------------------------------- 3111 @staticmethod
    def get_theme_geojson(resource):
3113 """ 3114 Lookup Theme Layer polygons once per layer and not per-record 3115 3116 Called by S3REST: S3Resource.export_tree() 3117 3118 @ToDo: Vary precision by Lx 3119 - store this (& tolerance map) in the style? 3120 """ 3121 3122 s3db = current.s3db 3123 tablename = "gis_theme_data" 3124 table = s3db.gis_theme_data 3125 gtable = s3db.gis_location 3126 query = (table.id.belongs(resource._ids)) & \ 3127 (table.location_id == gtable.id) 3128 3129 geojsons = {} 3130 # @ToDo: How to get the tolerance to vary by level? 3131 # - add Stored Procedure? 3132 #if current.deployment_settings.get_gis_spatialdb(): 3133 # # Do the Simplify & GeoJSON direct from the DB 3134 # rows = current.db(query).select(table.id, 3135 # gtable.the_geom.st_simplify(0.01).st_asgeojson(precision=4).with_alias("geojson")) 3136 # for row in rows: 3137 # geojsons[row["gis_theme_data.id"]] = row.geojson 3138 #else: 3139 rows = current.db(query).select(table.id, 3140 gtable.level, 3141 gtable.wkt) 3142 simplify = GIS.simplify 3143 tolerance = {"L0": 0.01, 3144 "L1": 0.005, 3145 "L2": 0.00125, 3146 "L3": 0.000625, 3147 "L4": 0.0003125, 3148 "L5": 0.00015625, 3149 } 3150 for row in rows: 3151 grow = row.gis_location 3152 # Simplify the polygon to reduce download size 3153 geojson = simplify(grow.wkt, 3154 tolerance=tolerance[grow.level], 3155 output="geojson") 3156 if geojson: 3157 geojsons[row["gis_theme_data.id"]] = geojson 3158 3159 _geojsons = {} 3160 _geojsons[tablename] = geojsons 3161 3162 # Return 'locations' 3163 return {"geojsons": _geojsons, 3164 }
3165 3166 # ------------------------------------------------------------------------- 3167 @staticmethod
    def greatCircleDistance(lat1, lon1, lat2, lon2, quick=True):
3169 """ 3170 Calculate the shortest distance (in km) over the earth's sphere between 2 points 3171 Formulae from: http://www.movable-type.co.uk/scripts/latlong.html 3172 (NB We could also use PostGIS functions, where possible, instead of this query) 3173 """ 3174 3175 import math 3176 3177 # shortcuts 3178 cos = math.cos 3179 sin = math.sin 3180 radians = math.radians 3181 3182 if quick: 3183 # Spherical Law of Cosines (accurate down to around 1m & computationally quick) 3184 lat1 = radians(lat1) 3185 lat2 = radians(lat2) 3186 lon1 = radians(lon1) 3187 lon2 = radians(lon2) 3188 distance = math.acos(sin(lat1) * sin(lat2) + cos(lat1) * cos(lat2) * cos(lon2 - lon1)) * RADIUS_EARTH 3189 return distance 3190 3191 else: 3192 # Haversine 3193 #asin = math.asin 3194 sqrt = math.sqrt 3195 pow = math.pow 3196 dLat = radians(lat2 - lat1) 3197 dLon = radians(lon2 - lon1) 3198 a = pow(sin(dLat / 2), 2) + cos(radians(lat1)) * cos(radians(lat2)) * pow(sin(dLon / 2), 2) 3199 c = 2 * math.atan2(sqrt(a), sqrt(1 - a)) 3200 #c = 2 * asin(sqrt(a)) # Alternate version 3201 # Convert radians to kilometers 3202 distance = RADIUS_EARTH * c 3203 return distance
3204 3205 # ------------------------------------------------------------------------- 3206 @staticmethod
    def create_poly(feature):
3208 """ 3209 Create a .poly file for OpenStreetMap exports 3210 http://wiki.openstreetmap.org/wiki/Osmosis/Polygon_Filter_File_Format 3211 """ 3212 3213 from shapely.wkt import loads as wkt_loads 3214 3215 try: 3216 # Enable C-based speedups available from 1.2.10+ 3217 from shapely import speedups 3218 speedups.enable() 3219 except: 3220 current.log.info("S3GIS", 3221 "Upgrade Shapely for Performance enhancements") 3222 3223 name = feature.name 3224 3225 if "wkt" in feature: 3226 wkt = feature.wkt 3227 else: 3228 # WKT not included by default in feature, so retrieve this now 3229 table = current.s3db.gis_location 3230 wkt = current.db(table.id == feature.id).select(table.wkt, 3231 limitby=(0, 1) 3232 ).first().wkt 3233 3234 try: 3235 shape = wkt_loads(wkt) 3236 except: 3237 error = "Invalid WKT: %s" % name 3238 current.log.error(error) 3239 return error 3240 3241 geom_type = shape.geom_type 3242 if geom_type == "MultiPolygon": 3243 polygons = shape.geoms 3244 elif geom_type == "Polygon": 3245 polygons = [shape] 3246 else: 3247 error = "Unsupported Geometry: %s, %s" % (name, geom_type) 3248 current.log.error(error) 3249 return error 3250 if os.path.exists(os.path.join(os.getcwd(), "temp")): # use web2py/temp 3251 TEMP = os.path.join(os.getcwd(), "temp") 3252 else: 3253 import tempfile 3254 TEMP = tempfile.gettempdir() 3255 filename = "%s.poly" % name 3256 filepath = os.path.join(TEMP, filename) 3257 File = open(filepath, "w") 3258 File.write("%s\n" % filename) 3259 count = 1 3260 for polygon in polygons: 3261 File.write("%s\n" % count) 3262 points = polygon.exterior.coords 3263 for point in points: 3264 File.write("\t%s\t%s\n" % (point[0], point[1])) 3265 File.write("END\n") 3266 count += 1 3267 File.write("END\n") 3268 File.close() 3269 3270 return None
3271 3272 # ------------------------------------------------------------------------- 3273 @staticmethod
    def export_admin_areas(countries=[],
                           levels=("L0", "L1", "L2"),
                           format="geojson",
                           simplify=0.01,
                           precision=4,
                           ):
3280 """ 3281 Export admin areas to /static/cache for use by interactive web-mapping services 3282 - designed for use by the Vulnerability Mapping 3283 3284 @param countries: list of ISO2 country codes 3285 @param levels: list of which Lx levels to export 3286 @param format: Only GeoJSON supported for now (may add KML &/or OSM later) 3287 @param simplify: tolerance for the simplification algorithm. False to disable simplification 3288 @param precision: number of decimal points to include in the coordinates 3289 """ 3290 3291 db = current.db 3292 s3db = current.s3db 3293 table = s3db.gis_location 3294 ifield = table.id 3295 if countries: 3296 ttable = s3db.gis_location_tag 3297 cquery = (table.level == "L0") & \ 3298 (table.end_date == None) & \ 3299 (ttable.location_id == ifield) & \ 3300 (ttable.tag == "ISO2") & \ 3301 (ttable.value.belongs(countries)) 3302 else: 3303 # All countries 3304 cquery = (table.level == "L0") & \ 3305 (table.end_date == None) & \ 3306 (table.deleted != True) 3307 3308 if current.deployment_settings.get_gis_spatialdb(): 3309 spatial = True 3310 _field = table.the_geom 3311 if simplify: 3312 # Do the Simplify & GeoJSON direct from the DB 3313 field = _field.st_simplify(simplify).st_asgeojson(precision=precision).with_alias("geojson") 3314 else: 3315 # Do the GeoJSON direct from the DB 3316 field = _field.st_asgeojson(precision=precision).with_alias("geojson") 3317 else: 3318 spatial = False 3319 field = table.wkt 3320 if simplify: 3321 _simplify = GIS.simplify 3322 else: 3323 from shapely.wkt import loads as wkt_loads 3324 from ..geojson import dumps 3325 try: 3326 # Enable C-based speedups available from 1.2.10+ 3327 from shapely import speedups 3328 speedups.enable() 3329 except: 3330 current.log.info("S3GIS", 3331 "Upgrade Shapely for Performance enhancements") 3332 3333 folder = os.path.join(current.request.folder, "static", "cache") 3334 3335 features = [] 3336 append = features.append 3337 3338 if "L0" in levels: 3339 # Reduce the decimals in output by 1 3340 _decimals = precision -1 3341 if spatial: 3342 if simplify: 3343 field = _field.st_simplify(simplify).st_asgeojson(precision=_decimals).with_alias("geojson") 3344 else: 3345 field = _field.st_asgeojson(precision=_decimals).with_alias("geojson") 3346 3347 countries = db(cquery).select(ifield, 3348 field) 3349 for row in countries: 3350 if spatial: 3351 id = row["gis_location"].id 3352 geojson = row.geojson 3353 elif simplify: 3354 id = row.id 3355 wkt = row.wkt 3356 if wkt: 3357 geojson = _simplify(wkt, tolerance=simplify, 3358 precision=_decimals, 3359 output="geojson") 3360 else: 3361 name = db(table.id == id).select(table.name, 3362 limitby=(0, 1)).first().name 3363 sys.stderr.write("No WKT: L0 %s %s\n" % (name, id)) 3364 continue 3365 else: 3366 id = row.id 3367 shape = wkt_loads(row.wkt) 3368 # Compact Encoding 3369 geojson = dumps(shape, separators=SEPARATORS) 3370 if geojson: 3371 f = {"type": "Feature", 3372 "properties": {"id": id}, 3373 "geometry": json.loads(geojson), 3374 } 3375 append(f) 3376 3377 if features: 3378 data = {"type": "FeatureCollection", 3379 "features": features, 3380 } 3381 # Output to file 3382 filename = os.path.join(folder, "countries.geojson") 3383 File = open(filename, "w") 3384 File.write(json.dumps(data, separators=SEPARATORS)) 3385 File.close() 3386 3387 q1 = (table.level == "L1") & \ 3388 (table.deleted != True) & \ 3389 (table.end_date == None) 3390 q2 = (table.level == "L2") & \ 3391 (table.deleted != True) & \ 3392 (table.end_date == None) 3393 q3 = (table.level 
== "L3") & \ 3394 (table.deleted != True) & \ 3395 (table.end_date == None) 3396 q4 = (table.level == "L4") & \ 3397 (table.deleted != True) & \ 3398 (table.end_date == None) 3399 3400 if "L1" in levels: 3401 if "L0" not in levels: 3402 countries = db(cquery).select(ifield) 3403 if simplify: 3404 # We want greater precision when zoomed-in more 3405 simplify = simplify / 2 # 0.005 with default setting 3406 if spatial: 3407 field = _field.st_simplify(simplify).st_asgeojson(precision=precision).with_alias("geojson") 3408 for country in countries: 3409 if not spatial or "L0" not in levels: 3410 _id = country.id 3411 else: 3412 _id = country["gis_location"].id 3413 query = q1 & (table.parent == _id) 3414 features = [] 3415 append = features.append 3416 rows = db(query).select(ifield, 3417 field) 3418 for row in rows: 3419 if spatial: 3420 id = row["gis_location"].id 3421 geojson = row.geojson 3422 elif simplify: 3423 id = row.id 3424 wkt = row.wkt 3425 if wkt: 3426 geojson = _simplify(wkt, tolerance=simplify, 3427 precision=precision, 3428 output="geojson") 3429 else: 3430 name = db(table.id == id).select(table.name, 3431 limitby=(0, 1)).first().name 3432 sys.stderr.write("No WKT: L1 %s %s\n" % (name, id)) 3433 continue 3434 else: 3435 id = row.id 3436 shape = wkt_loads(row.wkt) 3437 # Compact Encoding 3438 geojson = dumps(shape, separators=SEPARATORS) 3439 if geojson: 3440 f = {"type": "Feature", 3441 "properties": {"id": id}, 3442 "geometry": json.loads(geojson) 3443 } 3444 append(f) 3445 3446 if features: 3447 data = {"type": "FeatureCollection", 3448 "features": features 3449 } 3450 # Output to file 3451 filename = os.path.join(folder, "1_%s.geojson" % _id) 3452 File = open(filename, "w") 3453 File.write(json.dumps(data, separators=SEPARATORS)) 3454 File.close() 3455 else: 3456 current.log.debug("No L1 features in %s" % _id) 3457 3458 if "L2" in levels: 3459 if "L0" not in levels and "L1" not in levels: 3460 countries = db(cquery).select(ifield) 3461 if simplify: 3462 # We want greater precision when zoomed-in more 3463 simplify = simplify / 4 # 0.00125 with default setting 3464 if spatial: 3465 field = _field.st_simplify(simplify).st_asgeojson(precision=precision).with_alias("geojson") 3466 for country in countries: 3467 if not spatial or "L0" not in levels: 3468 id = country.id 3469 else: 3470 id = country["gis_location"].id 3471 query = q1 & (table.parent == id) 3472 l1s = db(query).select(ifield) 3473 for l1 in l1s: 3474 query = q2 & (table.parent == l1.id) 3475 features = [] 3476 append = features.append 3477 rows = db(query).select(ifield, 3478 field) 3479 for row in rows: 3480 if spatial: 3481 id = row["gis_location"].id 3482 geojson = row.geojson 3483 elif simplify: 3484 id = row.id 3485 wkt = row.wkt 3486 if wkt: 3487 geojson = _simplify(wkt, tolerance=simplify, 3488 precision=precision, 3489 output="geojson") 3490 else: 3491 name = db(table.id == id).select(table.name, 3492 limitby=(0, 1)).first().name 3493 sys.stderr.write("No WKT: L2 %s %s\n" % (name, id)) 3494 continue 3495 else: 3496 id = row.id 3497 shape = wkt_loads(row.wkt) 3498 # Compact Encoding 3499 geojson = dumps(shape, separators=SEPARATORS) 3500 if geojson: 3501 f = {"type": "Feature", 3502 "properties": {"id": id}, 3503 "geometry": json.loads(geojson), 3504 } 3505 append(f) 3506 3507 if features: 3508 data = {"type": "FeatureCollection", 3509 "features": features, 3510 } 3511 # Output to file 3512 filename = os.path.join(folder, "2_%s.geojson" % l1.id) 3513 File = open(filename, "w") 3514 File.write(json.dumps(data, 
separators=SEPARATORS)) 3515 File.close() 3516 else: 3517 current.log.debug("No L2 features in %s" % l1.id) 3518 3519 if "L3" in levels: 3520 if "L0" not in levels and "L1" not in levels and "L2" not in levels: 3521 countries = db(cquery).select(ifield) 3522 if simplify: 3523 # We want greater precision when zoomed-in more 3524 simplify = simplify / 2 # 0.000625 with default setting 3525 if spatial: 3526 field = _field.st_simplify(simplify).st_asgeojson(precision=precision).with_alias("geojson") 3527 for country in countries: 3528 if not spatial or "L0" not in levels: 3529 id = country.id 3530 else: 3531 id = country["gis_location"].id 3532 query = q1 & (table.parent == id) 3533 l1s = db(query).select(ifield) 3534 for l1 in l1s: 3535 query = q2 & (table.parent == l1.id) 3536 l2s = db(query).select(ifield) 3537 for l2 in l2s: 3538 query = q3 & (table.parent == l2.id) 3539 features = [] 3540 append = features.append 3541 rows = db(query).select(ifield, 3542 field) 3543 for row in rows: 3544 if spatial: 3545 id = row["gis_location"].id 3546 geojson = row.geojson 3547 elif simplify: 3548 id = row.id 3549 wkt = row.wkt 3550 if wkt: 3551 geojson = _simplify(wkt, tolerance=simplify, 3552 precision=precision, 3553 output="geojson") 3554 else: 3555 name = db(table.id == id).select(table.name, 3556 limitby=(0, 1)).first().name 3557 sys.stderr.write("No WKT: L3 %s %s\n" % (name, id)) 3558 continue 3559 else: 3560 id = row.id 3561 shape = wkt_loads(row.wkt) 3562 # Compact Encoding 3563 geojson = dumps(shape, separators=SEPARATORS) 3564 if geojson: 3565 f = {"type": "Feature", 3566 "properties": {"id": id}, 3567 "geometry": json.loads(geojson), 3568 } 3569 append(f) 3570 3571 if features: 3572 data = {"type": "FeatureCollection", 3573 "features": features, 3574 } 3575 # Output to file 3576 filename = os.path.join(folder, "3_%s.geojson" % l2.id) 3577 File = open(filename, "w") 3578 File.write(json.dumps(data, separators=SEPARATORS)) 3579 File.close() 3580 else: 3581 current.log.debug("No L3 features in %s" % l2.id) 3582 3583 if "L4" in levels: 3584 if "L0" not in levels and "L1" not in levels and "L2" not in levels and "L3" not in levels: 3585 countries = db(cquery).select(ifield) 3586 if simplify: 3587 # We want greater precision when zoomed-in more 3588 simplify = simplify / 2 # 0.0003125 with default setting 3589 if spatial: 3590 field = _field.st_simplify(simplify).st_asgeojson(precision=precision).with_alias("geojson") 3591 for country in countries: 3592 if not spatial or "L0" not in levels: 3593 id = country.id 3594 else: 3595 id = country["gis_location"].id 3596 query = q1 & (table.parent == id) 3597 l1s = db(query).select(ifield) 3598 for l1 in l1s: 3599 query = q2 & (table.parent == l1.id) 3600 l2s = db(query).select(ifield) 3601 for l2 in l2s: 3602 query = q3 & (table.parent == l2.id) 3603 l3s = db(query).select(ifield) 3604 for l3 in l3s: 3605 query = q4 & (table.parent == l3.id) 3606 features = [] 3607 append = features.append 3608 rows = db(query).select(ifield, 3609 field) 3610 for row in rows: 3611 if spatial: 3612 id = row["gis_location"].id 3613 geojson = row.geojson 3614 elif simplify: 3615 id = row.id 3616 wkt = row.wkt 3617 if wkt: 3618 geojson = _simplify(wkt, tolerance=simplify, 3619 precision=precision, 3620 output="geojson") 3621 else: 3622 name = db(table.id == id).select(table.name, 3623 limitby=(0, 1)).first().name 3624 sys.stderr.write("No WKT: L4 %s %s\n" % (name, id)) 3625 continue 3626 else: 3627 id = row.id 3628 shape = wkt_loads(row.wkt) 3629 # Compact Encoding 3630 
geojson = dumps(shape, separators=SEPARATORS) 3631 if geojson: 3632 f = {"type": "Feature", 3633 "properties": {"id": id}, 3634 "geometry": json.loads(geojson), 3635 } 3636 append(f) 3637 3638 if features: 3639 data = {"type": "FeatureCollection", 3640 "features": features, 3641 } 3642 # Output to file 3643 filename = os.path.join(folder, "4_%s.geojson" % l3.id) 3644 File = open(filename, "w") 3645 File.write(json.dumps(data, separators=SEPARATORS)) 3646 File.close() 3647 else: 3648 current.log.debug("No L4 features in %s" % l3.id)
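A usage sketch (country codes illustrative):

    # Export simplified L0-L2 boundaries for Kenya and Uganda to static/cache/*.geojson
    GIS.export_admin_areas(countries=["KE", "UG"], levels=("L0", "L1", "L2"))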
3649 3650 # -------------------------------------------------------------------------
    def import_admin_areas(self,
                           source="gadmv1",
                           countries=[],
                           levels=["L0", "L1", "L2"]
                           ):
        """
            Import Admin Boundaries into the Locations table

            @param source - Source to get the data from ("gadmv1" or "gadmv2").
                            Currently only GADM is supported: http://gadm.org
            @param countries - List of ISO2 countrycodes to download data for
                               defaults to all countries
            @param levels - Which levels of the hierarchy to import.
                            defaults to all 3 supported levels
        """

        if source == "gadmv1":
            try:
                from osgeo import ogr
            except:
                current.log.error("Unable to import ogr. Please install python-gdal bindings: GDAL-1.8.1+")
                return

            if "L0" in levels:
                self.import_gadm1_L0(ogr, countries=countries)
            if "L1" in levels:
                self.import_gadm1(ogr, "L1", countries=countries)
            if "L2" in levels:
                self.import_gadm1(ogr, "L2", countries=countries)

            current.log.debug("All done!")

        elif source == "gadmv2":
            try:
                from osgeo import ogr
            except:
                current.log.error("Unable to import ogr. Please install python-gdal bindings: GDAL-1.8.1+")
                return

            if "L0" in levels:
                self.import_gadm2(ogr, "L0", countries=countries)
            if "L1" in levels:
                self.import_gadm2(ogr, "L1", countries=countries)
            if "L2" in levels:
                self.import_gadm2(ogr, "L2", countries=countries)

            current.log.debug("All done!")

        else:
            current.log.warning("Only GADM is currently supported")
            return

        return
3704 3705 # ------------------------------------------------------------------------- 3706 @staticmethod
3707 - def import_gadm1_L0(ogr, countries=[]):
3708 """ 3709 Import L0 Admin Boundaries into the Locations table from GADMv1 3710 - designed to be called from import_admin_areas() 3711 - assumes that basic prepop has been done, so that no new records need to be created 3712 3713 @param ogr - The OGR Python module 3714 @param countries - List of ISO2 countrycodes to download data for 3715 defaults to all countries 3716 """ 3717 3718 db = current.db 3719 s3db = current.s3db 3720 ttable = s3db.gis_location_tag 3721 table = db.gis_location 3722 3723 layer = { 3724 "url" : "http://gadm.org/data/gadm_v1_lev0_shp.zip", 3725 "zipfile" : "gadm_v1_lev0_shp.zip", 3726 "shapefile" : "gadm1_lev0", 3727 "codefield" : "ISO2", # This field is used to uniquely identify the L0 for updates 3728 "code2field" : "ISO" # This field is used to uniquely identify the L0 for parenting the L1s 3729 } 3730 3731 # Copy the current working directory to revert back to later 3732 cwd = os.getcwd() 3733 3734 # Create the working directory 3735 TEMP = os.path.join(cwd, "temp") 3736 if not os.path.exists(TEMP): # use web2py/temp/GADMv1 as a cache 3737 import tempfile 3738 TEMP = tempfile.gettempdir() 3739 tempPath = os.path.join(TEMP, "GADMv1") 3740 if not os.path.exists(tempPath): 3741 try: 3742 os.mkdir(tempPath) 3743 except OSError: 3744 current.log.error("Unable to create temp folder %s!" % tempPath) 3745 return 3746 3747 # Set the current working directory 3748 os.chdir(tempPath) 3749 3750 layerName = layer["shapefile"] 3751 3752 # Check if file has already been downloaded 3753 fileName = layer["zipfile"] 3754 if not os.path.isfile(fileName): 3755 # Download the file 3756 from gluon.tools import fetch 3757 url = layer["url"] 3758 current.log.debug("Downloading %s" % url) 3759 try: 3760 file = fetch(url) 3761 except urllib2.URLError, exception: 3762 current.log.error(exception) 3763 return 3764 fp = StringIO(file) 3765 else: 3766 current.log.debug("Using existing file %s" % fileName) 3767 fp = open(fileName) 3768 3769 # Unzip it 3770 current.log.debug("Unzipping %s" % layerName) 3771 import zipfile 3772 myfile = zipfile.ZipFile(fp) 3773 for ext in ("dbf", "prj", "sbn", "sbx", "shp", "shx"): 3774 fileName = "%s.%s" % (layerName, ext) 3775 file = myfile.read(fileName) 3776 f = open(fileName, "w") 3777 f.write(file) 3778 f.close() 3779 myfile.close() 3780 3781 # Use OGR to read Shapefile 3782 current.log.debug("Opening %s.shp" % layerName) 3783 ds = ogr.Open("%s.shp" % layerName) 3784 if ds is None: 3785 current.log.error("Open failed.\n") 3786 return 3787 3788 lyr = ds.GetLayerByName(layerName) 3789 3790 lyr.ResetReading() 3791 3792 codeField = layer["codefield"] 3793 code2Field = layer["code2field"] 3794 for feat in lyr: 3795 code = feat.GetField(codeField) 3796 if not code: 3797 # Skip the entries which aren't countries 3798 continue 3799 if countries and code not in countries: 3800 # Skip the countries which we're not interested in 3801 continue 3802 3803 geom = feat.GetGeometryRef() 3804 if geom is not None: 3805 if geom.GetGeometryType() == ogr.wkbPoint: 3806 pass 3807 else: 3808 query = (table.id == ttable.location_id) & \ 3809 (ttable.tag == "ISO2") & \ 3810 (ttable.value == code) 3811 wkt = geom.ExportToWkt() 3812 if wkt.startswith("LINESTRING"): 3813 gis_feature_type = 2 3814 elif wkt.startswith("POLYGON"): 3815 gis_feature_type = 3 3816 elif wkt.startswith("MULTIPOINT"): 3817 gis_feature_type = 4 3818 elif wkt.startswith("MULTILINESTRING"): 3819 gis_feature_type = 5 3820 elif wkt.startswith("MULTIPOLYGON"): 3821 gis_feature_type = 6 3822 elif 
wkt.startswith("GEOMETRYCOLLECTION"): 3823 gis_feature_type = 7 3824 code2 = feat.GetField(code2Field) 3825 #area = feat.GetField("Shape_Area") 3826 try: 3827 id = db(query).select(table.id, 3828 limitby=(0, 1)).first().id 3829 query = (table.id == id) 3830 db(query).update(gis_feature_type=gis_feature_type, 3831 wkt=wkt) 3832 ttable.insert(location_id = id, 3833 tag = "ISO3", 3834 value = code2) 3835 #ttable.insert(location_id = location_id, 3836 # tag = "area", 3837 # value = area) 3838 except db._adapter.driver.OperationalError, e: 3839 current.log.error(sys.exc_info[1]) 3840 3841 else: 3842 current.log.debug("No geometry\n") 3843 3844 # Close the shapefile 3845 ds.Destroy() 3846 3847 db.commit() 3848 3849 # Revert back to the working directory as before. 3850 os.chdir(cwd) 3851 3852 return
3853 3854 # -------------------------------------------------------------------------
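import_gadm1() below parents each imported L1/L2 on an existing location found through a gis_location_tag lookup (for L1s, the ISO3 tag written by import_gadm1_L0()). A minimal sketch of that lookup with an explicit join, assuming a prepopulated database; the ISO3 value is illustrative:

    from gluon import current

    db = current.db
    s3db = current.s3db
    table = s3db.gis_location
    ttable = s3db.gis_location_tag

    # Find the L0 record tagged ISO3 = "LKA" to parent its L1s on
    query = (table.level == "L0") & \
            (ttable.location_id == table.id) & \
            (ttable.tag == "ISO3") & \
            (ttable.value == "LKA")
    parent = db(query).select(table.id, limitby=(0, 1)).first()
    if parent:
        print("Parent location id: %s" % parent.id)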
3855 - def import_gadm1(self, ogr, level="L1", countries=[]):
3856 """ 3857 Import L1 Admin Boundaries into the Locations table from GADMv1 3858 - designed to be called from import_admin_areas() 3859 - assumes a fresh database with just Countries imported 3860 3861 @param ogr - The OGR Python module 3862 @param level - "L1" or "L2" 3863 @param countries - List of ISO2 countrycodes to download data for 3864 defaults to all countries 3865 """ 3866 3867 if level == "L1": 3868 layer = { 3869 "url" : "http://gadm.org/data/gadm_v1_lev1_shp.zip", 3870 "zipfile" : "gadm_v1_lev1_shp.zip", 3871 "shapefile" : "gadm1_lev1", 3872 "namefield" : "NAME_1", 3873 # Uniquely identify the L1 for updates 3874 "sourceCodeField" : "ID_1", 3875 "edenCodeField" : "GADM1", 3876 # Uniquely identify the L0 for parenting the L1s 3877 "parent" : "L0", 3878 "parentSourceCodeField" : "ISO", 3879 "parentEdenCodeField" : "ISO3", 3880 } 3881 elif level == "L2": 3882 layer = { 3883 "url" : "http://biogeo.ucdavis.edu/data/gadm/gadm_v1_lev2_shp.zip", 3884 "zipfile" : "gadm_v1_lev2_shp.zip", 3885 "shapefile" : "gadm_v1_lev2", 3886 "namefield" : "NAME_2", 3887 # Uniquely identify the L2 for updates 3888 "sourceCodeField" : "ID_2", 3889 "edenCodeField" : "GADM2", 3890 # Uniquely identify the L0 for parenting the L1s 3891 "parent" : "L1", 3892 "parentSourceCodeField" : "ID_1", 3893 "parentEdenCodeField" : "GADM1", 3894 } 3895 else: 3896 current.log.warning("Level %s not supported!" % level) 3897 return 3898 3899 import csv 3900 import shutil 3901 import zipfile 3902 3903 db = current.db 3904 s3db = current.s3db 3905 cache = s3db.cache 3906 table = s3db.gis_location 3907 ttable = s3db.gis_location_tag 3908 3909 csv.field_size_limit(2**20 * 100) # 100 megs 3910 3911 # Not all the data is encoded like this 3912 # (unable to determine encoding - appears to be damaged in source): 3913 # Azerbaijan L1 3914 # Vietnam L1 & L2 3915 ENCODING = "cp1251" 3916 3917 # from http://docs.python.org/library/csv.html#csv-examples 3918 def latin_csv_reader(unicode_csv_data, dialect=csv.excel, **kwargs): 3919 for row in csv.reader(unicode_csv_data): 3920 yield [unicode(cell, ENCODING) for cell in row]
3921 3922 def latin_dict_reader(data, dialect=csv.excel, **kwargs): 3923 reader = latin_csv_reader(data, dialect=dialect, **kwargs) 3924 headers = reader.next() 3925 for r in reader: 3926 yield dict(zip(headers, r)) 3927 3928 # Copy the current working directory to revert back to later 3929 cwd = os.getcwd() 3930 3931 # Create the working directory 3932 TEMP = os.path.join(cwd, "temp") 3933 if not os.path.exists(TEMP): # use web2py/temp/GADMv1 as a cache 3934 import tempfile 3935 TEMP = tempfile.gettempdir() 3936 tempPath = os.path.join(TEMP, "GADMv1") 3937 if not os.path.exists(tempPath): 3938 try: 3939 os.mkdir(tempPath) 3940 except OSError: 3941 current.log.error("Unable to create temp folder %s!" % tempPath) 3942 return 3943 3944 # Set the current working directory 3945 os.chdir(tempPath) 3946 3947 # Remove any existing CSV folder to allow the new one to be created 3948 try: 3949 shutil.rmtree("CSV") 3950 except OSError: 3951 # Folder doesn't exist, so should be creatable 3952 pass 3953 3954 layerName = layer["shapefile"] 3955 3956 # Check if file has already been downloaded 3957 fileName = layer["zipfile"] 3958 if not os.path.isfile(fileName): 3959 # Download the file 3960 from gluon.tools import fetch 3961 url = layer["url"] 3962 current.log.debug("Downloading %s" % url) 3963 try: 3964 file = fetch(url) 3965 except urllib2.URLError, exception: 3966 current.log.error(exception) 3967 # Revert back to the working directory as before. 3968 os.chdir(cwd) 3969 return 3970 fp = StringIO(file) 3971 else: 3972 current.log.debug("Using existing file %s" % fileName) 3973 fp = open(fileName) 3974 3975 # Unzip it 3976 current.log.debug("Unzipping %s" % layerName) 3977 myfile = zipfile.ZipFile(fp) 3978 for ext in ("dbf", "prj", "sbn", "sbx", "shp", "shx"): 3979 fileName = "%s.%s" % (layerName, ext) 3980 file = myfile.read(fileName) 3981 f = open(fileName, "w") 3982 f.write(file) 3983 f.close() 3984 myfile.close() 3985 3986 # Convert to CSV 3987 current.log.debug("Converting %s.shp to CSV" % layerName) 3988 # Simplified version of generic Shapefile Importer: 3989 # http://svn.osgeo.org/gdal/trunk/gdal/swig/python/samples/ogr2ogr.py 3990 bSkipFailures = False 3991 nGroupTransactions = 200 3992 nFIDToFetch = ogr.NullFID 3993 inputFileName = "%s.shp" % layerName 3994 inputDS = ogr.Open(inputFileName, False) 3995 outputFileName = "CSV" 3996 outputDriver = ogr.GetDriverByName("CSV") 3997 outputDS = outputDriver.CreateDataSource(outputFileName, options=[]) 3998 # GADM only has 1 layer/source 3999 inputLayer = inputDS.GetLayer(0) 4000 inputFDefn = inputLayer.GetLayerDefn() 4001 # Create the output Layer 4002 outputLayer = outputDS.CreateLayer(layerName) 4003 # Copy all Fields 4004 #papszFieldTypesToString = [] 4005 inputFieldCount = inputFDefn.GetFieldCount() 4006 panMap = [-1 for i in range(inputFieldCount)] 4007 outputFDefn = outputLayer.GetLayerDefn() 4008 nDstFieldCount = 0 4009 if outputFDefn is not None: 4010 nDstFieldCount = outputFDefn.GetFieldCount() 4011 for iField in range(inputFieldCount): 4012 inputFieldDefn = inputFDefn.GetFieldDefn(iField) 4013 oFieldDefn = ogr.FieldDefn(inputFieldDefn.GetNameRef(), 4014 inputFieldDefn.GetType()) 4015 oFieldDefn.SetWidth(inputFieldDefn.GetWidth()) 4016 oFieldDefn.SetPrecision(inputFieldDefn.GetPrecision()) 4017 # The field may have been already created at layer creation 4018 iDstField = -1; 4019 if outputFDefn is not None: 4020 iDstField = outputFDefn.GetFieldIndex(oFieldDefn.GetNameRef()) 4021 if iDstField >= 0: 4022 panMap[iField] = iDstField 4023 elif 
outputLayer.CreateField(oFieldDefn) == 0: 4024 # now that we've created a field, GetLayerDefn() won't return NULL 4025 if outputFDefn is None: 4026 outputFDefn = outputLayer.GetLayerDefn() 4027 panMap[iField] = nDstFieldCount 4028 nDstFieldCount = nDstFieldCount + 1 4029 # Transfer features 4030 nFeaturesInTransaction = 0 4031 #iSrcZField = -1 4032 inputLayer.ResetReading() 4033 if nGroupTransactions > 0: 4034 outputLayer.StartTransaction() 4035 while True: 4036 poDstFeature = None 4037 if nFIDToFetch != ogr.NullFID: 4038 # Only fetch feature on first pass. 4039 if nFeaturesInTransaction == 0: 4040 poFeature = inputLayer.GetFeature(nFIDToFetch) 4041 else: 4042 poFeature = None 4043 else: 4044 poFeature = inputLayer.GetNextFeature() 4045 if poFeature is None: 4046 break 4047 nParts = 0 4048 nIters = 1 4049 for iPart in range(nIters): 4050 nFeaturesInTransaction = nFeaturesInTransaction + 1 4051 if nFeaturesInTransaction == nGroupTransactions: 4052 outputLayer.CommitTransaction() 4053 outputLayer.StartTransaction() 4054 nFeaturesInTransaction = 0 4055 poDstFeature = ogr.Feature(outputLayer.GetLayerDefn()) 4056 if poDstFeature.SetFromWithMap(poFeature, 1, panMap) != 0: 4057 if nGroupTransactions > 0: 4058 outputLayer.CommitTransaction() 4059 current.log.error("Unable to translate feature %d from layer %s" % \ 4060 (poFeature.GetFID(), inputFDefn.GetName())) 4061 # Revert back to the working directory as before. 4062 os.chdir(cwd) 4063 return 4064 poDstGeometry = poDstFeature.GetGeometryRef() 4065 if poDstGeometry is not None: 4066 if nParts > 0: 4067 # For -explodecollections, extract the iPart(th) of the geometry 4068 poPart = poDstGeometry.GetGeometryRef(iPart).Clone() 4069 poDstFeature.SetGeometryDirectly(poPart) 4070 poDstGeometry = poPart 4071 if outputLayer.CreateFeature(poDstFeature) != 0 and \ 4072 not bSkipFailures: 4073 if nGroupTransactions > 0: 4074 outputLayer.RollbackTransaction() 4075 # Revert back to the working directory as before. 4076 os.chdir(cwd) 4077 return 4078 if nGroupTransactions > 0: 4079 outputLayer.CommitTransaction() 4080 # Cleanup 4081 outputDS.Destroy() 4082 inputDS.Destroy() 4083 4084 fileName = "%s.csv" % layerName 4085 filePath = os.path.join("CSV", fileName) 4086 os.rename(filePath, fileName) 4087 os.removedirs("CSV") 4088 4089 # Use OGR to read SHP for geometry 4090 current.log.debug("Opening %s.shp" % layerName) 4091 ds = ogr.Open("%s.shp" % layerName) 4092 if ds is None: 4093 current.log.debug("Open failed.\n") 4094 # Revert back to the working directory as before. 
4095 os.chdir(cwd) 4096 return 4097 4098 lyr = ds.GetLayerByName(layerName) 4099 4100 lyr.ResetReading() 4101 4102 # Use CSV for Name 4103 current.log.debug("Opening %s.csv" % layerName) 4104 rows = latin_dict_reader(open("%s.csv" % layerName)) 4105 4106 nameField = layer["namefield"] 4107 sourceCodeField = layer["sourceCodeField"] 4108 edenCodeField = layer["edenCodeField"] 4109 parentSourceCodeField = layer["parentSourceCodeField"] 4110 parentLevel = layer["parent"] 4111 parentEdenCodeField = layer["parentEdenCodeField"] 4112 parentCodeQuery = (ttable.tag == parentEdenCodeField) 4113 count = 0 4114 for row in rows: 4115 # Read Attributes 4116 feat = lyr[count] 4117 4118 parentCode = feat.GetField(parentSourceCodeField) 4119 query = (table.level == parentLevel) & \ 4120 parentCodeQuery & \ 4121 (ttable.value == parentCode) 4122 parent = db(query).select(table.id, 4123 ttable.value, 4124 limitby=(0, 1), 4125 cache=cache).first() 4126 if not parent: 4127 # Skip locations for which we don't have a valid parent 4128 current.log.warning("Skipping - cannot find parent with key: %s, value: %s" % \ 4129 (parentEdenCodeField, parentCode)) 4130 count += 1 4131 continue 4132 4133 if countries: 4134 # Skip the countries which we're not interested in 4135 if level == "L1": 4136 if parent["gis_location_tag"].value not in countries: 4137 #current.log.warning("Skipping %s as not in countries list" % parent["gis_location_tag"].value) 4138 count += 1 4139 continue 4140 else: 4141 # Check grandparent 4142 country = self.get_parent_country(parent.id, 4143 key_type="code") 4144 if country not in countries: 4145 count += 1 4146 continue 4147 4148 # This is got from CSV in order to be able to handle the encoding 4149 name = row.pop(nameField) 4150 name.encode("utf8") 4151 4152 code = feat.GetField(sourceCodeField) 4153 #area = feat.GetField("Shape_Area") 4154 4155 geom = feat.GetGeometryRef() 4156 if geom is not None: 4157 if geom.GetGeometryType() == ogr.wkbPoint: 4158 lat = geom.GetX() 4159 lon = geom.GetY() 4160 id = table.insert(name=name, 4161 level=level, 4162 gis_feature_type=1, 4163 lat=lat, 4164 lon=lon, 4165 parent=parent.id) 4166 ttable.insert(location_id = id, 4167 tag = edenCodeField, 4168 value = code) 4169 # ttable.insert(location_id = id, 4170 # tag = "area", 4171 # value = area) 4172 else: 4173 wkt = geom.ExportToWkt() 4174 if wkt.startswith("LINESTRING"): 4175 gis_feature_type = 2 4176 elif wkt.startswith("POLYGON"): 4177 gis_feature_type = 3 4178 elif wkt.startswith("MULTIPOINT"): 4179 gis_feature_type = 4 4180 elif wkt.startswith("MULTILINESTRING"): 4181 gis_feature_type = 5 4182 elif wkt.startswith("MULTIPOLYGON"): 4183 gis_feature_type = 6 4184 elif wkt.startswith("GEOMETRYCOLLECTION"): 4185 gis_feature_type = 7 4186 id = table.insert(name=name, 4187 level=level, 4188 gis_feature_type=gis_feature_type, 4189 wkt=wkt, 4190 parent=parent.id) 4191 ttable.insert(location_id = id, 4192 tag = edenCodeField, 4193 value = code) 4194 # ttable.insert(location_id = id, 4195 # tag = "area", 4196 # value = area) 4197 else: 4198 current.log.debug("No geometry\n") 4199 4200 count += 1 4201 4202 # Close the shapefile 4203 ds.Destroy() 4204 4205 db.commit() 4206 4207 current.log.debug("Updating Location Tree...") 4208 try: 4209 self.update_location_tree() 4210 except MemoryError: 4211 # If doing all L2s, it can break memory limits 4212 # @ToDo: Check now that we're doing by level 4213 current.log.critical("Memory error when trying to update_location_tree()!") 4214 4215 db.commit() 4216 4217 # Revert back to 
the working directory as before. 4218 os.chdir(cwd) 4219 4220 return 4221 4222 # ------------------------------------------------------------------------- 4223 @staticmethod
4224 - def import_gadm2(ogr, level="L0", countries=[]):
4225 """ 4226 Import Admin Boundaries into the Locations table from GADMv2 4227 - designed to be called from import_admin_areas() 4228 - assumes that basic prepop has been done, so that no new L0 records need to be created 4229 4230 @param ogr - The OGR Python module 4231 @param level - The OGR Python module 4232 @param countries - List of ISO2 countrycodes to download data for 4233 defaults to all countries 4234 4235 @ToDo: Complete this 4236 - not currently possible to get all data from the 1 file easily 4237 - no ISO2 4238 - needs updating for gis_location_tag model 4239 - only the lowest available levels accessible 4240 - use GADMv1 for L0, L1, L2 & GADMv2 for specific lower? 4241 """ 4242 4243 if level == "L0": 4244 codeField = "ISO2" # This field is used to uniquely identify the L0 for updates 4245 code2Field = "ISO" # This field is used to uniquely identify the L0 for parenting the L1s 4246 elif level == "L1": 4247 #nameField = "NAME_1" 4248 codeField = "ID_1" # This field is used to uniquely identify the L1 for updates 4249 code2Field = "ISO" # This field is used to uniquely identify the L0 for parenting the L1s 4250 #parent = "L0" 4251 #parentCode = "code2" 4252 elif level == "L2": 4253 #nameField = "NAME_2" 4254 codeField = "ID_2" # This field is used to uniquely identify the L2 for updates 4255 code2Field = "ID_1" # This field is used to uniquely identify the L1 for parenting the L2s 4256 #parent = "L1" 4257 #parentCode = "code" 4258 else: 4259 current.log.error("Level %s not supported!" % level) 4260 return 4261 4262 db = current.db 4263 s3db = current.s3db 4264 table = s3db.gis_location 4265 4266 url = "http://gadm.org/data2/gadm_v2_shp.zip" 4267 zipfile = "gadm_v2_shp.zip" 4268 shapefile = "gadm2" 4269 4270 # Copy the current working directory to revert back to later 4271 old_working_directory = os.getcwd() 4272 4273 # Create the working directory 4274 if os.path.exists(os.path.join(os.getcwd(), "temp")): # use web2py/temp/GADMv2 as a cache 4275 TEMP = os.path.join(os.getcwd(), "temp") 4276 else: 4277 import tempfile 4278 TEMP = tempfile.gettempdir() 4279 tempPath = os.path.join(TEMP, "GADMv2") 4280 try: 4281 os.mkdir(tempPath) 4282 except OSError: 4283 # Folder already exists - reuse 4284 pass 4285 4286 # Set the current working directory 4287 os.chdir(tempPath) 4288 4289 layerName = shapefile 4290 4291 # Check if file has already been downloaded 4292 fileName = zipfile 4293 if not os.path.isfile(fileName): 4294 # Download the file 4295 from gluon.tools import fetch 4296 current.log.debug("Downloading %s" % url) 4297 try: 4298 file = fetch(url) 4299 except urllib2.URLError, exception: 4300 current.log.error(exception) 4301 return 4302 fp = StringIO(file) 4303 else: 4304 current.log.debug("Using existing file %s" % fileName) 4305 fp = open(fileName) 4306 4307 # Unzip it 4308 current.log.debug("Unzipping %s" % layerName) 4309 import zipfile 4310 myfile = zipfile.ZipFile(fp) 4311 for ext in ("dbf", "prj", "sbn", "sbx", "shp", "shx"): 4312 fileName = "%s.%s" % (layerName, ext) 4313 file = myfile.read(fileName) 4314 f = open(fileName, "w") 4315 f.write(file) 4316 f.close() 4317 myfile.close() 4318 4319 # Use OGR to read Shapefile 4320 current.log.debug("Opening %s.shp" % layerName) 4321 ds = ogr.Open("%s.shp" % layerName) 4322 if ds is None: 4323 current.log.debug("Open failed.\n") 4324 return 4325 4326 lyr = ds.GetLayerByName(layerName) 4327 4328 lyr.ResetReading() 4329 4330 for feat in lyr: 4331 code = feat.GetField(codeField) 4332 if not code: 4333 # Skip the entries which 
aren't countries 4334 continue 4335 if countries and code not in countries: 4336 # Skip the countries which we're not interested in 4337 continue 4338 4339 geom = feat.GetGeometryRef() 4340 if geom is not None: 4341 if geom.GetGeometryType() == ogr.wkbPoint: 4342 pass 4343 else: 4344 ## FIXME 4345 ##query = (table.code == code) 4346 wkt = geom.ExportToWkt() 4347 if wkt.startswith("LINESTRING"): 4348 gis_feature_type = 2 4349 elif wkt.startswith("POLYGON"): 4350 gis_feature_type = 3 4351 elif wkt.startswith("MULTIPOINT"): 4352 gis_feature_type = 4 4353 elif wkt.startswith("MULTILINESTRING"): 4354 gis_feature_type = 5 4355 elif wkt.startswith("MULTIPOLYGON"): 4356 gis_feature_type = 6 4357 elif wkt.startswith("GEOMETRYCOLLECTION"): 4358 gis_feature_type = 7 4359 #code2 = feat.GetField(code2Field) 4360 #area = feat.GetField("Shape_Area") 4361 try: 4362 ## FIXME 4363 db(query).update(gis_feature_type=gis_feature_type, 4364 wkt=wkt) 4365 #code2=code2, 4366 #area=area 4367 except db._adapter.driver.OperationalError, exception: 4368 current.log.error(exception) 4369 4370 else: 4371 current.log.debug("No geometry\n") 4372 4373 # Close the shapefile 4374 ds.Destroy() 4375 4376 db.commit() 4377 4378 # Revert back to the working directory as before. 4379 os.chdir(old_working_directory) 4380 4381 return
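All three GADM importers share the same cache-or-download pattern: reuse the zip if it is already in the temp folder, otherwise fetch and unzip it. A standalone sketch of that pattern, using urllib2 directly in place of gluon.tools.fetch; the URL and member names mirror the gadm2 layer above:

    import os
    import urllib2
    import zipfile
    from cStringIO import StringIO

    url = "http://gadm.org/data2/gadm_v2_shp.zip"
    fileName = "gadm_v2_shp.zip"

    if os.path.isfile(fileName):
        # Re-use the cached download
        fp = open(fileName, "rb")
    else:
        try:
            fp = StringIO(urllib2.urlopen(url).read())
        except urllib2.URLError as exception:
            print("Download failed: %s" % exception)
            fp = None

    if fp:
        myfile = zipfile.ZipFile(fp)
        for ext in ("dbf", "prj", "shp", "shx"):
            member = "gadm2.%s" % ext
            with open(member, "wb") as f:
                f.write(myfile.read(member))
        myfile.close()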
4382 4383 # -------------------------------------------------------------------------
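import_geonames() below consumes the tab-separated country dumps documented at http://download.geonames.org/export/dump/readme.txt. A minimal sketch of that record layout, using an abbreviated, purely illustrative line:

    # One (shortened, illustrative) record from a Geonames country dump
    line = "\t".join(["1234567",            # geonameid
                      "Example Town",       # name
                      "Example Town",       # asciiname
                      "",                   # alternatenames
                      "6.9",                # lat
                      "79.8",               # lon
                      "P",                  # feature_class
                      "PPL",                # feature_code
                      "LK",                 # country_code
                      "", "", "", "", "",   # cc2, admin1-4 codes
                      "1000",               # population
                      "", "",               # elevation, gtopo30
                      "Asia/Colombo",       # timezone
                      "2012-01-01",         # modification_date
                      ])

    fields = line.split("\t")
    geonameid, name = fields[0], fields[1]
    lat, lon = float(fields[4]), float(fields[5])
    feature_code = fields[7]

    if feature_code == "PPL":   # the importer filters on ADM1-4 or PPL
        print("%s (%s): %s, %s" % (name, geonameid, lat, lon))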
4384 - def import_geonames(self, country, level=None):
4385 """ 4386 Import Locations from the Geonames database 4387 4388 @param country: the 2-letter country code 4389 @param level: the ADM level to import 4390 4391 Designed to be run from the CLI 4392 Levels should be imported sequentially. 4393 It is assumed that L0 exists in the DB already 4394 L1-L3 may have been imported from Shapefiles with Polygon info 4395 Geonames can then be used to populate the lower levels of hierarchy 4396 """ 4397 4398 import codecs 4399 4400 from shapely.geometry import point 4401 from shapely.geos import ReadingError 4402 from shapely.wkt import loads as wkt_loads 4403 4404 try: 4405 # Enable C-based speedups available from 1.2.10+ 4406 from shapely import speedups 4407 speedups.enable() 4408 except: 4409 current.log.info("S3GIS", 4410 "Upgrade Shapely for Performance enhancements") 4411 4412 db = current.db 4413 s3db = current.s3db 4414 #cache = s3db.cache 4415 request = current.request 4416 #settings = current.deployment_settings 4417 table = s3db.gis_location 4418 ttable = s3db.gis_location_tag 4419 4420 url = "http://download.geonames.org/export/dump/" + country + ".zip" 4421 4422 cachepath = os.path.join(request.folder, "cache") 4423 filename = country + ".txt" 4424 filepath = os.path.join(cachepath, filename) 4425 if os.access(filepath, os.R_OK): 4426 cached = True 4427 else: 4428 cached = False 4429 if not os.access(cachepath, os.W_OK): 4430 current.log.error("Folder not writable", cachepath) 4431 return 4432 4433 if not cached: 4434 # Download File 4435 from gluon.tools import fetch 4436 try: 4437 f = fetch(url) 4438 except (urllib2.URLError,): 4439 e = sys.exc_info()[1] 4440 current.log.error("URL Error", e) 4441 return 4442 except (urllib2.HTTPError,): 4443 e = sys.exc_info()[1] 4444 current.log.error("HTTP Error", e) 4445 return 4446 4447 # Unzip File 4448 if f[:2] == "PK": 4449 # Unzip 4450 fp = StringIO(f) 4451 import zipfile 4452 myfile = zipfile.ZipFile(fp) 4453 try: 4454 # Python 2.6+ only :/ 4455 # For now, 2.5 users need to download/unzip manually to cache folder 4456 myfile.extract(filename, cachepath) 4457 myfile.close() 4458 except IOError: 4459 current.log.error("Zipfile contents don't seem correct!") 4460 myfile.close() 4461 return 4462 4463 f = codecs.open(filepath, encoding="utf-8") 4464 # Downloaded file is worth keeping 4465 #os.remove(filepath) 4466 4467 if level == "L1": 4468 fc = "ADM1" 4469 parent_level = "L0" 4470 elif level == "L2": 4471 fc = "ADM2" 4472 parent_level = "L1" 4473 elif level == "L3": 4474 fc = "ADM3" 4475 parent_level = "L2" 4476 elif level == "L4": 4477 fc = "ADM4" 4478 parent_level = "L3" 4479 else: 4480 # 5 levels of hierarchy or 4? 4481 # @ToDo make more extensible still 4482 #gis_location_hierarchy = self.get_location_hierarchy() 4483 try: 4484 #label = gis_location_hierarchy["L5"] 4485 level = "L5" 4486 parent_level = "L4" 4487 except: 4488 # ADM4 data in Geonames isn't always good (e.g. 
PK bad) 4489 level = "L4" 4490 parent_level = "L3" 4491 finally: 4492 fc = "PPL" 4493 4494 deleted = (table.deleted == False) 4495 query = deleted & (table.level == parent_level) 4496 # Do the DB query once (outside loop) 4497 all_parents = db(query).select(table.wkt, 4498 table.lon_min, 4499 table.lon_max, 4500 table.lat_min, 4501 table.lat_max, 4502 table.id) 4503 if not all_parents: 4504 # No locations in the parent level found 4505 # - use the one higher instead 4506 parent_level = "L" + str(int(parent_level[1:]) + 1) 4507 query = deleted & (table.level == parent_level) 4508 all_parents = db(query).select(table.wkt, 4509 table.lon_min, 4510 table.lon_max, 4511 table.lat_min, 4512 table.lat_max, 4513 table.id) 4514 4515 # Parse File 4516 current_row = 0 4517 for line in f: 4518 current_row += 1 4519 # Format of file: http://download.geonames.org/export/dump/readme.txt 4520 geonameid, \ 4521 name, \ 4522 asciiname, \ 4523 alternatenames, \ 4524 lat, \ 4525 lon, \ 4526 feature_class, \ 4527 feature_code, \ 4528 country_code, \ 4529 cc2, \ 4530 admin1_code, \ 4531 admin2_code, \ 4532 admin3_code, \ 4533 admin4_code, \ 4534 population, \ 4535 elevation, \ 4536 gtopo30, \ 4537 timezone, \ 4538 modification_date = line.split("\t") 4539 4540 if feature_code == fc: 4541 # Add WKT 4542 lat = float(lat) 4543 lon = float(lon) 4544 wkt = self.latlon_to_wkt(lat, lon) 4545 4546 shape = point.Point(lon, lat) 4547 4548 # Add Bounds 4549 lon_min = lon_max = lon 4550 lat_min = lat_max = lat 4551 4552 # Locate Parent 4553 parent = "" 4554 # 1st check for Parents whose bounds include this location (faster) 4555 def in_bbox(row): 4556 return (row.lon_min < lon_min) & \ 4557 (row.lon_max > lon_max) & \ 4558 (row.lat_min < lat_min) & \ 4559 (row.lat_max > lat_max)
4560 for row in all_parents.find(lambda row: in_bbox(row)): 4561 # Search within this subset with a full geometry check 4562 # Uses Shapely. 4563 # @ToDo provide option to use PostGIS/Spatialite 4564 try: 4565 parent_shape = wkt_loads(row.wkt) 4566 if parent_shape.intersects(shape): 4567 parent = row.id 4568 # Should be just a single parent 4569 break 4570 except ReadingError: 4571 current.log.error("Error reading wkt of location with id", row.id) 4572 4573 # Add entry to database 4574 new_id = table.insert(name=name, 4575 level=level, 4576 parent=parent, 4577 lat=lat, 4578 lon=lon, 4579 wkt=wkt, 4580 lon_min=lon_min, 4581 lon_max=lon_max, 4582 lat_min=lat_min, 4583 lat_max=lat_max) 4584 ttable.insert(location_id=new_id, 4585 tag="geonames", 4586 value=geonameid) 4587 else: 4588 continue 4589 4590 current.log.debug("All done!") 4591 return 4592 4593 # ------------------------------------------------------------------------- 4594 @staticmethod
4595 - def latlon_to_wkt(lat, lon):
4596 """ 4597 Convert a LatLon to a WKT string 4598 4599 >>> s3gis.latlon_to_wkt(6, 80) 4600 'POINT(80 6)' 4601 """ 4602 WKT = "POINT(%f %f)" % (lon, lat) 4603 return WKT
4604 4605 # ------------------------------------------------------------------------- 4606 @staticmethod
4607 - def parse_location(wkt, lon=None, lat=None):
4608 """ 4609 Parses a location from wkt, returning wkt, lat, lon, bounding box and type. 4610 For points, wkt may be None if lat and lon are provided; wkt will be generated. 4611 For lines and polygons, the lat, lon returned represent the shape's centroid. 4612 Centroid and bounding box will be None if Shapely is not available. 4613 """ 4614 4615 if not wkt: 4616 if not lon is not None and lat is not None: 4617 raise RuntimeError, "Need wkt or lon+lat to parse a location" 4618 wkt = "POINT(%f %f)" % (lon, lat) 4619 geom_type = GEOM_TYPES["point"] 4620 bbox = (lon, lat, lon, lat) 4621 else: 4622 try: 4623 from shapely.wkt import loads as wkt_loads 4624 SHAPELY = True 4625 except: 4626 SHAPELY = False 4627 4628 if SHAPELY: 4629 shape = wkt_loads(wkt) 4630 centroid = shape.centroid 4631 lat = centroid.y 4632 lon = centroid.x 4633 geom_type = GEOM_TYPES[shape.type.lower()] 4634 bbox = shape.bounds 4635 else: 4636 lat = None 4637 lon = None 4638 geom_type = GEOM_TYPES[wkt.split("(")[0].lower()] 4639 bbox = None 4640 4641 res = {"wkt": wkt, "lat": lat, "lon": lon, "gis_feature_type": geom_type} 4642 if bbox: 4643 res["lon_min"], res["lat_min"], res["lon_max"], res["lat_max"] = bbox 4644 4645 return res
4646 4647 # ------------------------------------------------------------------------- 4648 @staticmethod
4649 - def update_location_tree(feature=None, all_locations=False, propagating=False):
4650 """ 4651 Update GIS Locations' Materialized path, Lx locations, Lat/Lon & the_geom 4652 4653 @param feature: a feature dict to update the tree for 4654 - if not provided then update the whole tree 4655 @param all_locations: passed to recursive calls to indicate that this 4656 is an update of the whole tree. Used to avoid repeated attempts to 4657 update hierarchy locations with missing data (e.g. lacking some 4658 ancestor level). 4659 @param propagating: passed to recursive calls to indicate that this 4660 is a propagation update. Used to avoid repeated attempts to 4661 update hierarchy locations with missing data (e.g. lacking some 4662 ancestor level). 4663 4664 returns the path of the feature 4665 4666 Called onaccept for locations (async, where-possible) 4667 """ 4668 4669 # During prepopulate, for efficiency, we don't update the location 4670 # tree, but rather leave that til after prepopulate is complete. 4671 if GIS.disable_update_location_tree: 4672 return None 4673 4674 db = current.db 4675 try: 4676 table = db.gis_location 4677 except: 4678 table = current.s3db.gis_location 4679 update_location_tree = GIS.update_location_tree 4680 wkt_centroid = GIS.wkt_centroid 4681 4682 fields = (table.id, 4683 table.name, 4684 table.level, 4685 table.path, 4686 table.parent, 4687 table.L0, 4688 table.L1, 4689 table.L2, 4690 table.L3, 4691 table.L4, 4692 table.L5, 4693 table.lat, 4694 table.lon, 4695 table.wkt, 4696 table.inherited 4697 ) 4698 4699 # --------------------------------------------------------------------- 4700 def fixup(feature): 4701 """ 4702 Fix all the issues with a Feature, assuming that 4703 - the corrections are in the feature 4704 - or they are Bounds / Centroid / WKT / the_geom issues 4705 """ 4706 4707 form = Storage() 4708 form.vars = form_vars = feature 4709 form.errors = Storage() 4710 if not form_vars.get("wkt"): 4711 # Point 4712 form_vars.update(gis_feature_type="1") 4713 4714 # Calculate Bounds / Centroid / WKT / the_geom 4715 wkt_centroid(form) 4716 4717 if form.errors: 4718 current.log.error("S3GIS: %s" % form.errors) 4719 else: 4720 wkt = form_vars.wkt 4721 if wkt and not wkt.startswith("POI"): 4722 # Polygons aren't inherited 4723 form_vars.update(inherited = False) 4724 if "update_record" in form_vars: 4725 # Must be a Row 4726 new_vars = {} 4727 table_fields = table.fields 4728 for v in form_vars: 4729 if v in table_fields: 4730 new_vars[v] = form_vars[v] 4731 form_vars = new_vars 4732 4733 try: 4734 db(table.id == feature.id).update(**form_vars) 4735 except MemoryError: 4736 current.log.error("S3GIS: Unable to set bounds & centroid for feature %s: MemoryError" % feature.id)
4737 4738 # --------------------------------------------------------------------- 4739 def propagate(parent): 4740 """ 4741 Propagate Lat/Lon down to any Features which inherit from this one 4742 4743 @param parent: gis_location id of parent 4744 """ 4745 4746 # No need to filter out deleted since the parent FK is None for these records 4747 query = (table.parent == parent) & \ 4748 (table.inherited == True) 4749 rows = db(query).select(*fields) 4750 for row in rows: 4751 try: 4752 update_location_tree(row, propagating=True) 4753 except RuntimeError: 4754 current.log.error("Cannot propagate inherited latlon to child %s of location ID %s: too much recursion" % \ 4755 (row.id, parent)) 4756 4757 4758 if not feature: 4759 # We are updating all locations. 4760 all_locations = True 4761 # Do in chunks to save memory and also do in correct order 4762 all_fields = (table.id, table.name, table.gis_feature_type, 4763 table.L0, table.L1, table.L2, table.L3, table.L4, 4764 table.lat, table.lon, table.wkt, table.inherited, 4765 # Handle Countries which start with Bounds set, yet are Points 4766 table.lat_min, table.lon_min, table.lat_max, table.lon_max, 4767 table.path, table.parent) 4768 for level in ("L0", "L1", "L2", "L3", "L4", "L5", None): 4769 query = (table.level == level) & (table.deleted == False) 4770 try: 4771 features = db(query).select(*all_fields) 4772 except MemoryError: 4773 current.log.error("S3GIS: Unable to update Location Tree for level %s: MemoryError" % level) 4774 else: 4775 for feature in features: 4776 feature["level"] = level 4777 wkt = feature["wkt"] 4778 if wkt and not wkt.startswith("POI"): 4779 # Polygons aren't inherited 4780 feature["inherited"] = False 4781 update_location_tree(feature) # all_locations is False here 4782 # All Done! 4783 return 4784 4785 4786 # Single Feature 4787 id = str(feature["id"]) if "id" in feature else None 4788 if not id: 4789 # Nothing we can do 4790 raise ValueError 4791 4792 feature_get = feature.get 4793 4794 # L0 4795 level = feature_get("level", False) 4796 name = feature_get("name", False) 4797 path = feature_get("path", False) 4798 # If we're processing all locations, and this is a hierarchy location, 4799 # and has already been processed (as evidenced by having a path) do not 4800 # process it again. Locations with a gap in their ancestor levels will 4801 # be regarded as missing data and sent through update_location_tree 4802 # recursively, but that missing data will not be filled in after the 4803 # location is processed once during the all-locations call. 4804 if all_locations and path and level: 4805 # This hierarchy location is already finalized. 
4806 return path 4807 lat = feature_get("lat", False) 4808 lon = feature_get("lon", False) 4809 wkt = feature_get("wkt", False) 4810 L0 = feature_get("L0", False) 4811 if level == "L0": 4812 if name is False or path is False or lat is False or lon is False or \ 4813 wkt is False or L0 is False: 4814 # Get the whole feature 4815 feature = db(table.id == id).select(table.id, 4816 table.name, 4817 table.path, 4818 table.lat, 4819 table.lon, 4820 table.wkt, 4821 table.L0, 4822 limitby=(0, 1)).first() 4823 name = feature.name 4824 path = feature.path 4825 lat = feature.lat 4826 lon = feature.lon 4827 wkt = feature.wkt 4828 L0 = feature.L0 4829 4830 if path != id or L0 != name or not wkt or lat is None: 4831 # Fix everything up 4832 path = id 4833 if lat is False: 4834 lat = None 4835 if lon is False: 4836 lon = None 4837 fix_vars = {"inherited": False, 4838 "path": path, 4839 "lat": lat, 4840 "lon": lon, 4841 "wkt": wkt or None, 4842 "L0": name, 4843 "L1": None, 4844 "L2": None, 4845 "L3": None, 4846 "L4": None, 4847 "L5": None, 4848 } 4849 feature.update(**fix_vars) 4850 fixup(feature) 4851 4852 if not all_locations: 4853 # Ensure that any locations which inherit their latlon from this one get updated 4854 propagate(id) 4855 4856 return path 4857 4858 4859 fixup_required = False 4860 4861 # L1 4862 inherited = feature_get("inherited", None) 4863 parent = feature_get("parent", False) 4864 L1 = feature_get("L1", False) 4865 if level == "L1": 4866 if inherited is None or name is False or parent is False or path is False or \ 4867 lat is False or lon is False or wkt is False or \ 4868 L0 is False or L1 is False: 4869 # Get the whole feature 4870 feature = db(table.id == id).select(table.id, 4871 table.inherited, 4872 table.name, 4873 table.parent, 4874 table.path, 4875 table.lat, 4876 table.lon, 4877 table.wkt, 4878 table.L0, 4879 table.L1, 4880 limitby=(0, 1)).first() 4881 inherited = feature.inherited 4882 name = feature.name 4883 parent = feature.parent 4884 path = feature.path 4885 lat = feature.lat 4886 lon = feature.lon 4887 wkt = feature.wkt 4888 L0 = feature.L0 4889 L1 = feature.L1 4890 4891 if parent: 4892 _path = "%s/%s" % (parent, id) 4893 _L0 = db(table.id == parent).select(table.name, 4894 table.lat, 4895 table.lon, 4896 limitby=(0, 1)).first() 4897 L0_name = _L0.name 4898 L0_lat = _L0.lat 4899 L0_lon = _L0.lon 4900 else: 4901 _path = id 4902 L0_name = None 4903 L0_lat = None 4904 L0_lon = None 4905 4906 if inherited or lat is None or lon is None: 4907 fixup_required = True 4908 inherited = True 4909 lat = L0_lat 4910 lon = L0_lon 4911 elif path != _path or L0 != L0_name or L1 != name or not wkt: 4912 fixup_required = True 4913 4914 if fixup_required: 4915 # Fix everything up 4916 if lat is False: 4917 lat = None 4918 if lon is False: 4919 lon = None 4920 fix_vars = {"inherited": inherited, 4921 "path": _path, 4922 "lat": lat, 4923 "lon": lon, 4924 "wkt": wkt or None, 4925 "L0": L0_name, 4926 "L1": name, 4927 "L2": None, 4928 "L3": None, 4929 "L4": None, 4930 "L5": None, 4931 } 4932 feature.update(**fix_vars) 4933 fixup(feature) 4934 4935 if not all_locations: 4936 # Ensure that any locations which inherit their latlon from this one get updated 4937 propagate(id) 4938 4939 return _path 4940 4941 4942 # L2 4943 L2 = feature_get("L2", False) 4944 if level == "L2": 4945 if inherited is None or name is False or parent is False or path is False or \ 4946 lat is False or lon is False or wkt is False or \ 4947 L0 is False or L1 is False or L2 is False: 4948 # Get the whole feature 4949 
feature = db(table.id == id).select(table.id, 4950 table.inherited, 4951 table.name, 4952 table.parent, 4953 table.path, 4954 table.lat, 4955 table.lon, 4956 table.wkt, 4957 table.L0, 4958 table.L1, 4959 table.L2, 4960 limitby=(0, 1)).first() 4961 inherited = feature.inherited 4962 name = feature.name 4963 parent = feature.parent 4964 path = feature.path 4965 lat = feature.lat 4966 lon = feature.lon 4967 wkt = feature.wkt 4968 L0 = feature.L0 4969 L1 = feature.L1 4970 L2 = feature.L2 4971 4972 if parent: 4973 Lx = db(table.id == parent).select(table.name, 4974 table.level, 4975 table.parent, 4976 table.lat, 4977 table.lon, 4978 limitby=(0, 1)).first() 4979 if Lx.level == "L1": 4980 L1_name = Lx.name 4981 _parent = Lx.parent 4982 if _parent: 4983 _path = "%s/%s/%s" % (_parent, parent, id) 4984 L0_name = db(table.id == _parent).select(table.name, 4985 limitby=(0, 1), 4986 cache=current.s3db.cache 4987 ).first().name 4988 else: 4989 _path = "%s/%s" % (parent, id) 4990 L0_name = None 4991 elif Lx.level == "L0": 4992 _path = "%s/%s" % (parent, id) 4993 L0_name = Lx.name 4994 L1_name = None 4995 else: 4996 current.log.error("Parent of L2 Location ID %s has invalid level: %s is %s" % \ 4997 (id, parent, Lx.level)) 4998 #raise ValueError 4999 return "%s/%s" % (parent, id) 5000 Lx_lat = Lx.lat 5001 Lx_lon = Lx.lon 5002 else: 5003 _path = id 5004 L0_name = None 5005 L1_name = None 5006 Lx_lat = None 5007 Lx_lon = None 5008 5009 if inherited or lat is None or lon is None: 5010 fixup_required = True 5011 inherited = True 5012 lat = Lx_lat 5013 lon = Lx_lon 5014 wkt = None 5015 elif path != _path or L0 != L0_name or L1 != L1_name or L2 != name or not wkt: 5016 fixup_required = True 5017 5018 if fixup_required: 5019 # Fix everything up 5020 if lat is False: 5021 lat = None 5022 if lon is False: 5023 lon = None 5024 fix_vars = {"inherited": inherited, 5025 "path": _path, 5026 "lat": lat, 5027 "lon": lon, 5028 "wkt": wkt or None, 5029 "L0": L0_name, 5030 "L1": L1_name, 5031 "L2": name, 5032 "L3": None, 5033 "L4": None, 5034 "L5": None, 5035 } 5036 feature.update(**fix_vars) 5037 fixup(feature) 5038 5039 if not all_locations: 5040 # Ensure that any locations which inherit their latlon from this one get updated 5041 propagate(id) 5042 5043 return _path 5044 5045 5046 # L3 5047 L3 = feature_get("L3", False) 5048 if level == "L3": 5049 if inherited is None or name is False or parent is False or path is False or \ 5050 lat is False or lon is False or wkt is False or \ 5051 L0 is False or L1 is False or L2 is False or L3 is False: 5052 # Get the whole feature 5053 feature = db(table.id == id).select(table.id, 5054 table.inherited, 5055 table.name, 5056 table.parent, 5057 table.path, 5058 table.lat, 5059 table.lon, 5060 table.wkt, 5061 table.L0, 5062 table.L1, 5063 table.L2, 5064 table.L3, 5065 limitby=(0, 1)).first() 5066 inherited = feature.inherited 5067 name = feature.name 5068 parent = feature.parent 5069 path = feature.path 5070 lat = feature.lat 5071 lon = feature.lon 5072 wkt = feature.wkt 5073 L0 = feature.L0 5074 L1 = feature.L1 5075 L2 = feature.L2 5076 L3 = feature.L3 5077 5078 if parent: 5079 Lx = db(table.id == parent).select(table.id, 5080 table.name, 5081 table.level, 5082 table.parent, 5083 table.path, 5084 table.lat, 5085 table.lon, 5086 table.L0, 5087 table.L1, 5088 limitby=(0, 1)).first() 5089 if Lx.level == "L2": 5090 L0_name = Lx.L0 5091 L1_name = Lx.L1 5092 L2_name = Lx.name 5093 _path = Lx.path 5094 # Don't try to fixup ancestors when we're coming from a propagate 5095 if propagating or 
(_path and L0_name and L1_name): 5096 _path = "%s/%s" % (_path, id) 5097 else: 5098 # This feature needs to be updated 5099 _path = update_location_tree(Lx, all_locations) 5100 _path = "%s/%s" % (_path, id) 5101 # Query again 5102 Lx = db(table.id == parent).select(table.L0, 5103 table.L1, 5104 table.lat, 5105 table.lon, 5106 limitby=(0, 1) 5107 ).first() 5108 L0_name = Lx.L0 5109 L1_name = Lx.L1 5110 elif Lx.level == "L1": 5111 L0_name = Lx.L0 5112 L1_name = Lx.name 5113 L2_name = None 5114 _path = Lx.path 5115 # Don't try to fixup ancestors when we're coming from a propagate 5116 if propagating or (_path and L0_name): 5117 _path = "%s/%s" % (_path, id) 5118 else: 5119 # This feature needs to be updated 5120 _path = update_location_tree(Lx, all_locations) 5121 _path = "%s/%s" % (_path, id) 5122 # Query again 5123 Lx = db(table.id == parent).select(table.L0, 5124 table.lat, 5125 table.lon, 5126 limitby=(0, 1) 5127 ).first() 5128 L0_name = Lx.L0 5129 elif Lx.level == "L0": 5130 _path = "%s/%s" % (parent, id) 5131 L0_name = Lx.name 5132 L1_name = None 5133 L2_name = None 5134 else: 5135 current.log.error("Parent of L3 Location ID %s has invalid level: %s is %s" % \ 5136 (id, parent, Lx.level)) 5137 #raise ValueError 5138 return "%s/%s" % (parent, id) 5139 Lx_lat = Lx.lat 5140 Lx_lon = Lx.lon 5141 else: 5142 _path = id 5143 L0_name = None 5144 L1_name = None 5145 L2_name = None 5146 Lx_lat = None 5147 Lx_lon = None 5148 5149 if inherited or lat is None or lon is None: 5150 fixup_required = True 5151 inherited = True 5152 lat = Lx_lat 5153 lon = Lx_lon 5154 wkt = None 5155 elif path != _path or L0 != L0_name or L1 != L1_name or L2 != L2_name or L3 != name or not wkt: 5156 fixup_required = True 5157 5158 if fixup_required: 5159 # Fix everything up 5160 if lat is False: 5161 lat = None 5162 if lon is False: 5163 lon = None 5164 fix_vars = {"inherited": inherited, 5165 "path": _path, 5166 "lat": lat, 5167 "lon": lon, 5168 "wkt": wkt or None, 5169 "L0": L0_name, 5170 "L1": L1_name, 5171 "L2": L2_name, 5172 "L3": name, 5173 "L4": None, 5174 "L5": None, 5175 } 5176 feature.update(**fix_vars) 5177 fixup(feature) 5178 5179 if not all_locations: 5180 # Ensure that any locations which inherit their latlon from this one get updated 5181 propagate(id) 5182 5183 return _path 5184 5185 5186 # L4 5187 L4 = feature_get("L4", False) 5188 if level == "L4": 5189 if inherited is None or name is False or parent is False or path is False or \ 5190 lat is False or lon is False or wkt is False or \ 5191 L0 is False or L1 is False or L2 is False or L3 is False or L4 is False: 5192 # Get the whole feature 5193 feature = db(table.id == id).select(table.id, 5194 table.inherited, 5195 table.name, 5196 table.parent, 5197 table.path, 5198 table.lat, 5199 table.lon, 5200 table.wkt, 5201 table.L0, 5202 table.L1, 5203 table.L2, 5204 table.L3, 5205 table.L4, 5206 limitby=(0, 1)).first() 5207 inherited = feature.inherited 5208 name = feature.name 5209 parent = feature.parent 5210 path = feature.path 5211 lat = feature.lat 5212 lon = feature.lon 5213 wkt = feature.wkt 5214 L0 = feature.L0 5215 L1 = feature.L1 5216 L2 = feature.L2 5217 L3 = feature.L3 5218 L4 = feature.L4 5219 5220 if parent: 5221 Lx = db(table.id == parent).select(table.id, 5222 table.name, 5223 table.level, 5224 table.parent, 5225 table.path, 5226 table.lat, 5227 table.lon, 5228 table.L0, 5229 table.L1, 5230 table.L2, 5231 limitby=(0, 1)).first() 5232 if Lx.level == "L3": 5233 L0_name = Lx.L0 5234 L1_name = Lx.L1 5235 L2_name = Lx.L2 5236 L3_name = Lx.name 5237 
_path = Lx.path 5238 # Don't try to fixup ancestors when we're coming from a propagate 5239 if propagating or (_path and L0_name and L1_name and L2_name): 5240 _path = "%s/%s" % (_path, id) 5241 else: 5242 # This feature needs to be updated 5243 _path = update_location_tree(Lx, all_locations) 5244 _path = "%s/%s" % (_path, id) 5245 # Query again 5246 Lx = db(table.id == parent).select(table.L0, 5247 table.L1, 5248 table.L2, 5249 table.lat, 5250 table.lon, 5251 limitby=(0, 1) 5252 ).first() 5253 L0_name = Lx.L0 5254 L1_name = Lx.L1 5255 L2_name = Lx.L2 5256 elif Lx.level == "L2": 5257 L0_name = Lx.L0 5258 L1_name = Lx.L1 5259 L2_name = Lx.name 5260 L3_name = None 5261 _path = Lx.path 5262 # Don't try to fixup ancestors when we're coming from a propagate 5263 if propagating or (_path and L0_name and L1_name): 5264 _path = "%s/%s" % (_path, id) 5265 else: 5266 # This feature needs to be updated 5267 _path = update_location_tree(Lx, all_locations) 5268 _path = "%s/%s" % (_path, id) 5269 # Query again 5270 Lx = db(table.id == parent).select(table.L0, 5271 table.L1, 5272 table.lat, 5273 table.lon, 5274 limitby=(0, 1) 5275 ).first() 5276 L0_name = Lx.L0 5277 L1_name = Lx.L1 5278 elif Lx.level == "L1": 5279 L0_name = Lx.L0 5280 L1_name = Lx.name 5281 L2_name = None 5282 L3_name = None 5283 _path = Lx.path 5284 # Don't try to fixup ancestors when we're coming from a propagate 5285 if propagating or (_path and L0_name): 5286 _path = "%s/%s" % (_path, id) 5287 else: 5288 # This feature needs to be updated 5289 _path = update_location_tree(Lx, all_locations) 5290 _path = "%s/%s" % (_path, id) 5291 # Query again 5292 Lx = db(table.id == parent).select(table.L0, 5293 table.lat, 5294 table.lon, 5295 limitby=(0, 1) 5296 ).first() 5297 L0_name = Lx.L0 5298 elif Lx.level == "L0": 5299 _path = "%s/%s" % (parent, id) 5300 L0_name = Lx.name 5301 L1_name = None 5302 L2_name = None 5303 L3_name = None 5304 else: 5305 current.log.error("Parent of L3 Location ID %s has invalid level: %s is %s" % \ 5306 (id, parent, Lx.level)) 5307 #raise ValueError 5308 return "%s/%s" % (parent, id) 5309 Lx_lat = Lx.lat 5310 Lx_lon = Lx.lon 5311 else: 5312 _path = id 5313 L0_name = None 5314 L1_name = None 5315 L2_name = None 5316 L3_name = None 5317 Lx_lat = None 5318 Lx_lon = None 5319 5320 if inherited or lat is None or lon is None: 5321 fixup_required = True 5322 inherited = True 5323 lat = Lx_lat 5324 lon = Lx_lon 5325 wkt = None 5326 elif path != _path or L0 != L0_name or L1 != L1_name or L2 != L2_name or L3 != L3_name or L4 != name or not wkt: 5327 fixup_required = True 5328 5329 if fixup_required: 5330 # Fix everything up 5331 if lat is False: 5332 lat = None 5333 if lon is False: 5334 lon = None 5335 fix_vars = {"inherited": inherited, 5336 "path": _path, 5337 "lat": lat, 5338 "lon": lon, 5339 "wkt": wkt or None, 5340 "L0": L0_name, 5341 "L1": L1_name, 5342 "L2": L2_name, 5343 "L3": L3_name, 5344 "L4": name, 5345 "L5": None, 5346 } 5347 feature.update(**fix_vars) 5348 fixup(feature) 5349 5350 if not all_locations: 5351 # Ensure that any locations which inherit their latlon from this one get updated 5352 propagate(id) 5353 5354 return _path 5355 5356 5357 # L5 5358 L5 = feature_get("L5", False) 5359 if level == "L5": 5360 if inherited is None or name is False or parent is False or path is False or \ 5361 lat is False or lon is False or wkt is False or \ 5362 L0 is False or L1 is False or L2 is False or L3 is False or L4 is False or L5 is False: 5363 # Get the whole feature 5364 feature = db(table.id == id).select(table.id, 
5365 table.inherited, 5366 table.name, 5367 table.parent, 5368 table.path, 5369 table.lat, 5370 table.lon, 5371 table.wkt, 5372 table.L0, 5373 table.L1, 5374 table.L2, 5375 table.L3, 5376 table.L4, 5377 table.L5, 5378 limitby=(0, 1)).first() 5379 inherited = feature.inherited 5380 name = feature.name 5381 parent = feature.parent 5382 path = feature.path 5383 lat = feature.lat 5384 lon = feature.lon 5385 wkt = feature.wkt 5386 L0 = feature.L0 5387 L1 = feature.L1 5388 L2 = feature.L2 5389 L3 = feature.L3 5390 L4 = feature.L4 5391 L5 = feature.L5 5392 5393 if parent: 5394 Lx = db(table.id == parent).select(table.id, 5395 table.name, 5396 table.level, 5397 table.parent, 5398 table.path, 5399 table.lat, 5400 table.lon, 5401 table.L0, 5402 table.L1, 5403 table.L2, 5404 table.L3, 5405 limitby=(0, 1)).first() 5406 if Lx.level == "L4": 5407 L0_name = Lx.L0 5408 L1_name = Lx.L1 5409 L2_name = Lx.L2 5410 L3_name = Lx.L3 5411 L4_name = Lx.name 5412 _path = Lx.path 5413 # Don't try to fixup ancestors when we're coming from a propagate 5414 if propagating or (_path and L0_name and L1_name and L2_name and L3_name): 5415 _path = "%s/%s" % (_path, id) 5416 else: 5417 # This feature needs to be updated 5418 _path = update_location_tree(Lx, all_locations) 5419 _path = "%s/%s" % (_path, id) 5420 # Query again 5421 Lx = db(table.id == parent).select(table.L0, 5422 table.L1, 5423 table.L2, 5424 table.L3, 5425 table.lat, 5426 table.lon, 5427 limitby=(0, 1) 5428 ).first() 5429 L0_name = Lx.L0 5430 L1_name = Lx.L1 5431 L2_name = Lx.L2 5432 L3_name = Lx.L3 5433 elif Lx.level == "L3": 5434 L0_name = Lx.L0 5435 L1_name = Lx.L1 5436 L2_name = Lx.L2 5437 L3_name = Lx.name 5438 L4_name = None 5439 _path = Lx.path 5440 # Don't try to fixup ancestors when we're coming from a propagate 5441 if propagating or (_path and L0_name and L1_name and L2_name): 5442 _path = "%s/%s" % (_path, id) 5443 else: 5444 # This feature needs to be updated 5445 _path = update_location_tree(Lx, all_locations) 5446 _path = "%s/%s" % (_path, id) 5447 # Query again 5448 Lx = db(table.id == parent).select(table.L0, 5449 table.L1, 5450 table.L2, 5451 table.lat, 5452 table.lon, 5453 limitby=(0, 1) 5454 ).first() 5455 L0_name = Lx.L0 5456 L1_name = Lx.L1 5457 L2_name = Lx.L2 5458 elif Lx.level == "L2": 5459 L0_name = Lx.L0 5460 L1_name = Lx.L1 5461 L2_name = Lx.name 5462 L3_name = None 5463 L4_name = None 5464 _path = Lx.path 5465 # Don't try to fixup ancestors when we're coming from a propagate 5466 if propagating or (_path and L0_name and L1_name): 5467 _path = "%s/%s" % (_path, id) 5468 else: 5469 # This feature needs to be updated 5470 _path = update_location_tree(Lx, all_locations) 5471 _path = "%s/%s" % (_path, id) 5472 # Query again 5473 Lx = db(table.id == parent).select(table.L0, 5474 table.L1, 5475 table.lat, 5476 table.lon, 5477 limitby=(0, 1) 5478 ).first() 5479 L0_name = Lx.L0 5480 L1_name = Lx.L1 5481 elif Lx.level == "L1": 5482 L0_name = Lx.L0 5483 L1_name = Lx.name 5484 L2_name = None 5485 L3_name = None 5486 L4_name = None 5487 _path = Lx.path 5488 # Don't try to fixup ancestors when we're coming from a propagate 5489 if propagating or (_path and L0_name): 5490 _path = "%s/%s" % (_path, id) 5491 else: 5492 # This feature needs to be updated 5493 _path = update_location_tree(Lx, all_locations) 5494 _path = "%s/%s" % (_path, id) 5495 # Query again 5496 Lx = db(table.id == parent).select(table.L0, 5497 table.lat, 5498 table.lon, 5499 limitby=(0, 1) 5500 ).first() 5501 L0_name = Lx.L0 5502 elif Lx.level == "L0": 5503 _path = "%s/%s" % 
(parent, id) 5504 L0_name = Lx.name 5505 L1_name = None 5506 L2_name = None 5507 L3_name = None 5508 L4_name = None 5509 else: 5510 current.log.error("Parent of L3 Location ID %s has invalid level: %s is %s" % \ 5511 (id, parent, Lx.level)) 5512 #raise ValueError 5513 return "%s/%s" % (parent, id) 5514 Lx_lat = Lx.lat 5515 Lx_lon = Lx.lon 5516 else: 5517 _path = id 5518 L0_name = None 5519 L1_name = None 5520 L2_name = None 5521 L3_name = None 5522 L4_name = None 5523 Lx_lat = None 5524 Lx_lon = None 5525 5526 if inherited or lat is None or lon is None: 5527 fixup_required = True 5528 inherited = True 5529 lat = Lx_lat 5530 lon = Lx_lon 5531 wkt = None 5532 elif path != _path or L0 != L0_name or L1 != L1_name or L2 != L2_name or L3 != L3_name or L4 != L4_name or L5 != name or not wkt: 5533 fixup_required = True 5534 5535 if fixup_required: 5536 # Fix everything up 5537 if lat is False: 5538 lat = None 5539 if lon is False: 5540 lon = None 5541 fix_vars = {"inherited": inherited, 5542 "path": _path, 5543 "lat": lat, 5544 "lon": lon, 5545 "wkt": wkt or None, 5546 "L0": L0_name, 5547 "L1": L1_name, 5548 "L2": L2_name, 5549 "L3": L3_name, 5550 "L4": L4_name, 5551 "L5": name, 5552 } 5553 feature.update(**fix_vars) 5554 fixup(feature) 5555 5556 if not all_locations: 5557 # Ensure that any locations which inherit their latlon from this one get updated 5558 propagate(id) 5559 5560 return _path 5561 5562 5563 # Specific Location 5564 # - or unspecified (which we should avoid happening as inefficient) 5565 if inherited is None or level is False or name is False or parent is False or path is False or \ 5566 lat is False or lon is False or wkt is False or \ 5567 L0 is False or L1 is False or L2 is False or L3 is False or L4 is False or L5 is False: 5568 # Get the whole feature 5569 feature = db(table.id == id).select(table.id, 5570 table.inherited, 5571 table.level, 5572 table.name, 5573 table.parent, 5574 table.path, 5575 table.lat, 5576 table.lon, 5577 table.wkt, 5578 table.L0, 5579 table.L1, 5580 table.L2, 5581 table.L3, 5582 table.L4, 5583 table.L5, 5584 limitby=(0, 1)).first() 5585 inherited = feature.inherited 5586 level = feature.level 5587 name = feature.name 5588 parent = feature.parent 5589 path = feature.path 5590 lat = feature.lat 5591 lon = feature.lon 5592 wkt = feature.wkt 5593 L0 = feature.L0 5594 L1 = feature.L1 5595 L2 = feature.L2 5596 L3 = feature.L3 5597 L4 = feature.L4 5598 L5 = feature.L5 5599 5600 L0_name = name if level == "L0" else None 5601 L1_name = name if level == "L1" else None 5602 L2_name = name if level == "L2" else None 5603 L3_name = name if level == "L3" else None 5604 L4_name = name if level == "L4" else None 5605 L5_name = name if level == "L5" else None 5606 5607 if parent: 5608 Lx = db(table.id == parent).select(table.id, 5609 table.name, 5610 table.level, 5611 table.parent, 5612 table.path, 5613 table.lat, 5614 table.lon, 5615 table.L0, 5616 table.L1, 5617 table.L2, 5618 table.L3, 5619 table.L4, 5620 limitby=(0, 1)).first() 5621 if Lx.level == "L5": 5622 L0_name = Lx.L0 5623 L1_name = Lx.L1 5624 L2_name = Lx.L2 5625 L3_name = Lx.L3 5626 L4_name = Lx.L4 5627 L5_name = Lx.name 5628 _path = Lx.path 5629 # Don't try to fixup ancestors when we're coming from a propagate 5630 if propagating or (_path and L0_name and L1_name and L2_name and L3_name and L4_name): 5631 _path = "%s/%s" % (_path, id) 5632 else: 5633 # This feature needs to be updated 5634 _path = update_location_tree(Lx, all_locations) 5635 _path = "%s/%s" % (_path, id) 5636 # Query again 5637 Lx = 
db(table.id == parent).select(table.L0, 5638 table.L1, 5639 table.L2, 5640 table.L3, 5641 table.L4, 5642 table.lat, 5643 table.lon, 5644 limitby=(0, 1) 5645 ).first() 5646 L0_name = Lx.L0 5647 L1_name = Lx.L1 5648 L2_name = Lx.L2 5649 L3_name = Lx.L3 5650 L4_name = Lx.L4 5651 elif Lx.level == "L4": 5652 L0_name = Lx.L0 5653 L1_name = Lx.L1 5654 L2_name = Lx.L2 5655 L3_name = Lx.L3 5656 L4_name = Lx.name 5657 _path = Lx.path 5658 # Don't try to fixup ancestors when we're coming from a propagate 5659 if propagating or (_path and L0_name and L1_name and L2_name and L3_name): 5660 _path = "%s/%s" % (_path, id) 5661 else: 5662 # This feature needs to be updated 5663 _path = update_location_tree(Lx, all_locations) 5664 _path = "%s/%s" % (_path, id) 5665 # Query again 5666 Lx = db(table.id == parent).select(table.L0, 5667 table.L1, 5668 table.L2, 5669 table.L3, 5670 table.lat, 5671 table.lon, 5672 limitby=(0, 1) 5673 ).first() 5674 L0_name = Lx.L0 5675 L1_name = Lx.L1 5676 L2_name = Lx.L2 5677 L3_name = Lx.L3 5678 elif Lx.level == "L3": 5679 L0_name = Lx.L0 5680 L1_name = Lx.L1 5681 L2_name = Lx.L2 5682 L3_name = Lx.name 5683 _path = Lx.path 5684 # Don't try to fixup ancestors when we're coming from a propagate 5685 if propagating or (_path and L0_name and L1_name and L2_name): 5686 _path = "%s/%s" % (_path, id) 5687 else: 5688 # This feature needs to be updated 5689 _path = update_location_tree(Lx, all_locations) 5690 _path = "%s/%s" % (_path, id) 5691 # Query again 5692 Lx = db(table.id == parent).select(table.L0, 5693 table.L1, 5694 table.L2, 5695 table.lat, 5696 table.lon, 5697 limitby=(0, 1) 5698 ).first() 5699 L0_name = Lx.L0 5700 L1_name = Lx.L1 5701 L2_name = Lx.L2 5702 elif Lx.level == "L2": 5703 L0_name = Lx.L0 5704 L1_name = Lx.L1 5705 L2_name = Lx.name 5706 _path = Lx.path 5707 # Don't try to fixup ancestors when we're coming from a propagate 5708 if propagating or (_path and L0_name and L1_name): 5709 _path = "%s/%s" % (_path, id) 5710 else: 5711 # This feature needs to be updated 5712 _path = update_location_tree(Lx, all_locations) 5713 _path = "%s/%s" % (_path, id) 5714 # Query again 5715 Lx = db(table.id == parent).select(table.L0, 5716 table.L1, 5717 table.lat, 5718 table.lon, 5719 limitby=(0, 1) 5720 ).first() 5721 L0_name = Lx.L0 5722 L1_name = Lx.L1 5723 elif Lx.level == "L1": 5724 L0_name = Lx.L0 5725 L1_name = Lx.name 5726 _path = Lx.path 5727 # Don't try to fixup ancestors when we're coming from a propagate 5728 if propagating or (_path and L0_name): 5729 _path = "%s/%s" % (_path, id) 5730 else: 5731 # This feature needs to be updated 5732 _path = update_location_tree(Lx, all_locations) 5733 _path = "%s/%s" % (_path, id) 5734 # Query again 5735 Lx = db(table.id == parent).select(table.L0, 5736 table.lat, 5737 table.lon, 5738 limitby=(0, 1) 5739 ).first() 5740 L0_name = Lx.L0 5741 elif Lx.level == "L0": 5742 _path = "%s/%s" % (parent, id) 5743 L0_name = Lx.name 5744 else: 5745 current.log.error("Parent of L3 Location ID %s has invalid level: %s is %s" % \ 5746 (id, parent, Lx.level)) 5747 #raise ValueError 5748 return "%s/%s" % (parent, id) 5749 Lx_lat = Lx.lat 5750 Lx_lon = Lx.lon 5751 else: 5752 _path = id 5753 Lx_lat = None 5754 Lx_lon = None 5755 5756 if inherited or lat is None or lon is None: 5757 fixup_required = True 5758 inherited = True 5759 lat = Lx_lat 5760 lon = Lx_lon 5761 wkt = None 5762 elif path != _path or L0 != L0_name or L1 != L1_name or L2 != L2_name or L3 != L3_name or L4 != L4_name or L5 != L5_name or not wkt: 5763 fixup_required = True 5764 5765 if 
fixup_required: 5766 # Fix everything up 5767 if lat is False: 5768 lat = None 5769 if lon is False: 5770 lon = None 5771 fix_vars = {"inherited": inherited, 5772 "path": _path, 5773 "lat": lat, 5774 "lon": lon, 5775 "wkt": wkt or None, 5776 "L0": L0_name, 5777 "L1": L1_name, 5778 "L2": L2_name, 5779 "L3": L3_name, 5780 "L4": L4_name, 5781 "L5": L5_name, 5782 } 5783 feature.update(**fix_vars) 5784 fixup(feature) 5785 5786 if not all_locations: 5787 # Ensure that any locations which inherit their latlon from this one get updated 5788 propagate(id) 5789 5790 return _path 5791 5792 # ------------------------------------------------------------------------- 5793 @staticmethod
5794      def wkt_centroid(form): 
5795          """ 
5796              OnValidation callback: 
5797                  If a WKT is defined: validate the format, 
5798                      calculate the LonLat of the Centroid, and set bounds 
5799                  Else if a LonLat is defined: calculate the WKT for the Point. 
5800          """ 
5801   
5802          form_vars = form.vars 
5803   
5804          if form_vars.get("gis_feature_type", None) == "1": 
5805              # Point 
5806              lat = form_vars.get("lat", None) 
5807              lon = form_vars.get("lon", None) 
5808              if (lon is None and lat is None) or \ 
5809                 (lon == "" and lat == ""): 
5810                  # No Geometry available 
5811                  # Don't clobber existing records (e.g. in Prepop) 
5812                  #form_vars.gis_feature_type = "0" 
5813                  # Cannot create WKT, so Skip 
5814                  return 
5815              elif lat is None or lat == "": 
5816                  # Can't just have lon without lat 
5817                  form.errors["lat"] = current.messages.lat_empty 
5818              elif lon is None or lon == "": 
5819                  form.errors["lon"] = current.messages.lon_empty 
5820              else: 
5821                  form_vars.wkt = "POINT(%(lon)s %(lat)s)" % form_vars 
5822                  radius = form_vars.get("radius", None) 
5823                  if radius: 
5824                      bbox = GIS.get_bounds_from_radius(lat, lon, radius) 
5825                      form_vars.lat_min = bbox["lat_min"] 
5826                      form_vars.lon_min = bbox["lon_min"] 
5827                      form_vars.lat_max = bbox["lat_max"] 
5828                      form_vars.lon_max = bbox["lon_max"] 
5829                  else: 
5830                      if "lon_min" not in form_vars or form_vars.lon_min is None: 
5831                          form_vars.lon_min = lon 
5832                      if "lon_max" not in form_vars or form_vars.lon_max is None: 
5833                          form_vars.lon_max = lon 
5834                      if "lat_min" not in form_vars or form_vars.lat_min is None: 
5835                          form_vars.lat_min = lat 
5836                      if "lat_max" not in form_vars or form_vars.lat_max is None: 
5837                          form_vars.lat_max = lat 
5838   
5839          else: 
5840              wkt = form_vars.get("wkt", None) 
5841              if wkt: 
5842                  if wkt[0] == "{": 
5843                      # This is a GeoJSON geometry 
5844                      from shapely.geometry import shape as shape_loads 
5845                      try: 
5846                          js = json.loads(wkt) 
5847                          shape = shape_loads(js) 
5848                      except: 
5849                          form.errors["wkt"] = current.messages.invalid_wkt 
5850                          return 
5851                      else: 
5852                          form_vars.wkt = shape.wkt 
5853                  else: 
5854                      # Assume WKT 
5855                      warning = None 
5856                      from shapely.wkt import loads as wkt_loads 
5857                      try: 
5858                          shape = wkt_loads(wkt) 
5859                      except: 
5860                          try: 
5861                              # Perhaps this is really a LINESTRING (e.g. OSM import of an unclosed Way) 
5862                              linestring = "LINESTRING%s" % wkt[8:-1] 
5863                              shape = wkt_loads(linestring) 
5864                              form_vars.wkt = linestring 
5865                          except: 
5866                              form.errors["wkt"] = current.messages.invalid_wkt 
5867                              return 
5868                      else: 
5869                          if shape.wkt != form_vars.wkt: # If this is too heavy a check for some deployments, add a deployment_setting to disable the check & just do it silently 
5870                              # Use Shapely to clean up the defective WKT (e.g. trailing chars) 
5871                              warning = s3_str(current.T("Source WKT has been cleaned by Shapely")) 
5872                              form_vars.wkt = shape.wkt 
5873   
5874                      if shape.has_z: 
5875                          # Shapely export of WKT is 2D only 
5876                          if warning: 
5877                              warning = "%s, %s" % (warning, s3_str(current.T("Only 2D geometry stored as PostGIS cannot handle 3D geometries"))) 
5878                          else: 
5879                              warning = s3_str(current.T("Only 2D geometry stored as PostGIS cannot handle 3D geometries")) 
5880   
5881                      if warning: 
5882                          current.session.warning = warning 
5883   
5884                  gis_feature_type = shape.type 
5885                  if gis_feature_type == "Point": 
5886                      form_vars.gis_feature_type = 1 
5887                  elif gis_feature_type == "LineString": 
5888                      form_vars.gis_feature_type = 2 
5889                  elif gis_feature_type == "Polygon": 
5890                      form_vars.gis_feature_type = 3 
5891                  elif gis_feature_type == "MultiPoint": 
5892                      form_vars.gis_feature_type = 4 
5893                  elif gis_feature_type == "MultiLineString": 
5894                      form_vars.gis_feature_type = 5 
5895                  elif gis_feature_type == "MultiPolygon": 
5896                      form_vars.gis_feature_type = 6 
5897                  elif gis_feature_type == "GeometryCollection": 
5898                      form_vars.gis_feature_type = 7 
5899                  try: 
5900                      centroid_point = shape.centroid 
5901                      form_vars.lon = centroid_point.x 
5902                      form_vars.lat = centroid_point.y 
5903                      bounds = shape.bounds 
5904                      if gis_feature_type != "Point" or \ 
5905                         "lon_min" not in form_vars or form_vars.lon_min is None or \ 
5906                         form_vars.lon_min == form_vars.lon_max: 
5907                          # Update bounds unless we have a 'Point' which has already got wider Bounds specified (such as a country) 
5908                          form_vars.lon_min = bounds[0] 
5909                          form_vars.lat_min = bounds[1] 
5910                          form_vars.lon_max = bounds[2] 
5911                          form_vars.lat_max = bounds[3] 
5912                  except: 
5913                      form.errors.gis_feature_type = current.messages.centroid_error 
5914   
5915              else: 
5916                  lat = form_vars.get("lat", None) 
5917                  lon = form_vars.get("lon", None) 
5918                  if (lon is None and lat is None) or \ 
5919                     (lon == "" and lat == ""): 
5920                      # No Geometry available 
5921                      # Don't clobber existing records (e.g. in Prepop) 
5922                      #form_vars.gis_feature_type = "0" 
5923                      # Cannot create WKT, so Skip 
5924                      return 
5925                  else: 
5926                      # Point 
5927                      form_vars.gis_feature_type = "1" 
5928                      if lat is None or lat == "": 
5929                          form.errors["lat"] = current.messages.lat_empty 
5930                      elif lon is None or lon == "": 
5931                          form.errors["lon"] = current.messages.lon_empty 
5932                      else: 
5933                          form_vars.wkt = "POINT(%(lon)s %(lat)s)" % form_vars 
5934                          if "lon_min" not in form_vars or form_vars.lon_min is None: 
5935                              form_vars.lon_min = lon 
5936                          if "lon_max" not in form_vars or form_vars.lon_max is None: 
5937                              form_vars.lon_max = lon 
5938                          if "lat_min" not in form_vars or form_vars.lat_min is None: 
5939                              form_vars.lat_min = lat 
5940                          if "lat_max" not in form_vars or form_vars.lat_max is None: 
5941                              form_vars.lat_max = lat 
5942   
5943          if current.deployment_settings.get_gis_spatialdb(): 
5944              # Also populate the spatial field 
5945              form_vars.the_geom = form_vars.wkt 
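For reference, the Shapely half of wkt_centroid() can be exercised on its own. A minimal standalone sketch (not part of the module; the sample polygon and the feature_types mapping name are illustrative):

    from shapely.wkt import loads as wkt_loads

    # Mirror of the gis_feature_type codes assigned in wkt_centroid() above
    feature_types = {"Point": 1, "LineString": 2, "Polygon": 3,
                     "MultiPoint": 4, "MultiLineString": 5,
                     "MultiPolygon": 6, "GeometryCollection": 7}

    wkt = "POLYGON((0 0, 4 0, 4 3, 0 3, 0 0))"          # sample geometry
    shape = wkt_loads(wkt)                               # raises on malformed WKT
    lon, lat = shape.centroid.x, shape.centroid.y        # stored as lon/lat
    lon_min, lat_min, lon_max, lat_max = shape.bounds    # stored as the bounds
    gis_feature_type = feature_types[shape.geom_type]    # 3 for a Polygon
    print(lon, lat, gis_feature_type)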
5946   
5947      # ------------------------------------------------------------------------- 
5948      @staticmethod 
5949      def query_features_by_bbox(lon_min, lat_min, lon_max, lat_max): 
5950          """ 
5951              Returns a query of all Locations inside the given bounding box 
5952          """ 
5953   
5954          table = current.s3db.gis_location 
5955          query = (table.lat_min <= lat_max) & \ 
5956                  (table.lat_max >= lat_min) & \ 
5957                  (table.lon_min <= lon_max) & \ 
5958                  (table.lon_max >= lon_min) 
5959          return query 
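The query above encodes the standard interval-overlap test. A pure-Python equivalent, useful as a mental model (the helper below is illustrative, not part of the module):

    def bbox_overlaps(loc, lon_min, lat_min, lon_max, lat_max):
        # Two boxes overlap when neither lies wholly beyond the other on
        # either axis - the same four comparisons as the DAL query.
        return (loc["lat_min"] <= lat_max and loc["lat_max"] >= lat_min and
                loc["lon_min"] <= lon_max and loc["lon_max"] >= lon_min)

    print(bbox_overlaps({"lon_min": 10, "lat_min": 5, "lon_max": 12, "lat_max": 8},
                        lon_min=11, lat_min=4, lon_max=20, lat_max=6))   # True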
5960   
5961      # ------------------------------------------------------------------------- 
5962      @staticmethod 
5963      def get_features_by_bbox(lon_min, lat_min, lon_max, lat_max): 
5964          """ 
5965              Returns Rows of Locations whose shape intersects the given bbox. 
5966          """ 
5967   
5968          query = current.gis.query_features_by_bbox(lon_min, 
5969                                                      lat_min, 
5970                                                      lon_max, 
5971                                                      lat_max) 
5972          return current.db(query).select() 
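Note that select() with no field list fetches every column; where only a few fields are needed, the query can be reused directly. A hypothetical variant (requires the web2py/Eden request environment; the bbox values are illustrative):

    table = current.s3db.gis_location
    query = current.gis.query_features_by_bbox(-10.0, 49.0, 2.0, 61.0)
    rows = current.db(query).select(table.id, table.name, table.lat, table.lon)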
5973   
5974      # ------------------------------------------------------------------------- 
5975      @staticmethod 
5976      def get_features_by_shape(shape): 
5977          """ 
5978              Returns Rows of locations which intersect the given shape. 
5979   
5980              Relies on Shapely for wkt parsing and intersection. 
5981              @ToDo: provide an option to use PostGIS/Spatialite 
5982          """ 
5983   
5984          from shapely.geos import ReadingError 
5985          from shapely.wkt import loads as wkt_loads 
5986   
5987          try: 
5988              # Enable C-based speedups available from 1.2.10+ 
5989              from shapely import speedups 
5990              speedups.enable() 
5991          except: 
5992              current.log.info("S3GIS", 
5993                               "Upgrade Shapely for Performance enhancements") 
5994   
5995          table = current.s3db.gis_location 
5996          in_bbox = current.gis.query_features_by_bbox(*shape.bounds) 
5997          has_wkt = (table.wkt != None) & (table.wkt != "") 
5998   
5999          for loc in current.db(in_bbox & has_wkt).select(): 
6000              try: 
6001                  location_shape = wkt_loads(loc.wkt) 
6002                  if location_shape.intersects(shape): 
6003                      yield loc 
6004              except ReadingError: 
6005                  current.log.error("Error reading wkt of location with id", loc.id) 
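A hypothetical call, assuming a running Eden instance (the area of interest is illustrative); the generator only yields locations whose stored WKT actually intersects the shape, after the cheap bbox pre-filter:

    from shapely.geometry import box

    aoi = box(11.5, 3.5, 12.5, 4.5)     # lon_min, lat_min, lon_max, lat_max
    for loc in current.gis.get_features_by_shape(aoi):
        print(loc.id, loc.name)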
6006   
6007      # ------------------------------------------------------------------------- 
6008      @staticmethod 
6009      def get_features_by_latlon(lat, lon): 
6010          """ 
6011              Returns a generator of locations whose shape intersects the given LatLon. 
6012   
6013              Relies on Shapely. 
6014              @todo: provide an option to use PostGIS/Spatialite 
6015          """ 
6016   
6017          from shapely.geometry import point 
6018   
6019          return current.gis.get_features_by_shape(point.Point(lon, lat)) 
6020   
6021      # ------------------------------------------------------------------------- 
6022      @staticmethod 
6023      def get_features_by_feature(feature): 
6024          """ 
6025              Returns all Locations whose geometry intersects the given feature. 
6026   
6027              Relies on Shapely. 
6028              @ToDo: provide an option to use PostGIS/Spatialite 
6029          """ 
6030   
6031          from shapely.wkt import loads as wkt_loads 
6032   
6033          shape = wkt_loads(feature.wkt) 
6034          return current.gis.get_features_by_shape(shape) 
6035   
6036      # ------------------------------------------------------------------------- 
6037      @staticmethod 
6038      def set_all_bounds(): 
6039          """ 
6040              Sets bounds for all locations without them. 
6041   
6042              If Shapely is present, and a location has wkt, bounds of the geometry 
6043              are used. Otherwise, the (lat, lon) are used as bounds. 
6044          """ 
6045   
6046          try: 
6047              from shapely.wkt import loads as wkt_loads 
6048              SHAPELY = True 
6049          except: 
6050              SHAPELY = False 
6051   
6052          db = current.db 
6053          table = current.s3db.gis_location 
6054   
6055          # Query to find all locations without bounds set 
6056          no_bounds = (table.lon_min == None) & \ 
6057                      (table.lat_min == None) & \ 
6058                      (table.lon_max == None) & \ 
6059                      (table.lat_max == None) & \ 
6060                      (table.lat != None) & \ 
6061                      (table.lon != None) 
6062          if SHAPELY: 
6063              # Refine to those locations with a WKT field 
6064              wkt_no_bounds = no_bounds & (table.wkt != None) & (table.wkt != "") 
6065              for location in db(wkt_no_bounds).select(table.id, table.wkt): 
6066                  try: 
6067                      shape = wkt_loads(location.wkt) 
6068                  except: 
6069                      current.log.error("Error reading WKT", location.wkt) 
6070                      continue 
6071                  bounds = shape.bounds 
6072                  table[location.id] = {"lon_min": bounds[0], 
6073                                        "lat_min": bounds[1], 
6074                                        "lon_max": bounds[2], 
6075                                        "lat_max": bounds[3], 
6076                                        } 
6077   
6078          # Anything left, we assume is a Point, so set the bounds to be the same 
6079          db(no_bounds).update(lon_min=table.lon, 
6080                               lat_min=table.lat, 
6081                               lon_max=table.lon, 
6082                               lat_max=table.lat) 
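The per-record logic reduces to "use the geometry's bounds if WKT is present, else collapse the bounds onto the point". A standalone sketch of that rule (illustrative values, not part of the module):

    from shapely.wkt import loads as wkt_loads

    def derive_bounds(lat, lon, wkt=None):
        if wkt:
            lon_min, lat_min, lon_max, lat_max = wkt_loads(wkt).bounds
        else:
            lon_min = lon_max = lon
            lat_min = lat_max = lat
        return {"lon_min": lon_min, "lat_min": lat_min,
                "lon_max": lon_max, "lat_max": lat_max}

    print(derive_bounds(4.0, 12.0))                              # point fallback
    print(derive_bounds(4.0, 12.0, "LINESTRING(11 3, 13 5)"))    # from geometry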
6083 6084 # ------------------------------------------------------------------------- 6085 @staticmethod
6086 - def simplify(wkt, 6087 tolerance=None, 6088 preserve_topology=True, 6089 output="wkt", 6090 precision=None 6091 ):
6092 """ 6093 Simplify a complex Polygon using the Douglas-Peucker algorithm 6094 - NB This uses Python, better performance will be gained by doing 6095 this direct from the database if you are using PostGIS: 6096 ST_Simplify() is available as 6097 db(query).select(table.the_geom.st_simplify(tolerance).st_astext().with_alias('wkt')).first().wkt 6098 db(query).select(table.the_geom.st_simplify(tolerance).st_asgeojson().with_alias('geojson')).first().geojson 6099 6100 @param wkt: the WKT string to be simplified (usually coming from a gis_location record) 6101 @param tolerance: how aggressive a simplification to perform 6102 @param preserve_topology: whether the simplified geometry should be maintained 6103 @param output: whether to output as WKT or GeoJSON format 6104 @param precision: the number of decimal places to include in the output 6105 """ 6106 6107 from shapely.geometry import Point, LineString, Polygon, MultiPolygon 6108 from shapely.wkt import loads as wkt_loads 6109 6110 try: 6111 # Enable C-based speedups available from 1.2.10+ 6112 from shapely import speedups 6113 speedups.enable() 6114 except: 6115 current.log.info("S3GIS", 6116 "Upgrade Shapely for Performance enhancements") 6117 6118 try: 6119 shape = wkt_loads(wkt) 6120 except: 6121 wkt = wkt[10] if wkt else wkt 6122 current.log.error("Invalid Shape: %s" % wkt) 6123 return None 6124 6125 settings = current.deployment_settings 6126 6127 if not precision: 6128 precision = settings.get_gis_precision() 6129 6130 if tolerance is None: 6131 tolerance = settings.get_gis_simplify_tolerance() 6132 6133 if tolerance: 6134 shape = shape.simplify(tolerance, preserve_topology) 6135 6136 # Limit the number of decimal places 6137 formatter = ".%sf" % precision 6138 def shrink_polygon(shape): 6139 """ Helper Function """ 6140 points = shape.exterior.coords 6141 coords = [] 6142 cappend = coords.append 6143 for point in points: 6144 x = float(format(point[0], formatter)) 6145 y = float(format(point[1], formatter)) 6146 cappend((x, y)) 6147 return Polygon(LineString(coords))
6148 6149 geom_type = shape.geom_type 6150 if geom_type == "MultiPolygon": 6151 polygons = shape.geoms 6152 p = [] 6153 pappend = p.append 6154 for polygon in polygons: 6155 pappend(shrink_polygon(polygon)) 6156 shape = MultiPolygon([s for s in p]) 6157 elif geom_type == "Polygon": 6158 shape = shrink_polygon(shape) 6159 elif geom_type == "LineString": 6160 points = shape.coords 6161 coords = [] 6162 cappend = coords.append 6163 for point in points: 6164 x = float(format(point[0], formatter)) 6165 y = float(format(point[1], formatter)) 6166 cappend((x, y)) 6167 shape = LineString(coords) 6168 elif geom_type == "Point": 6169 x = float(format(shape.x, formatter)) 6170 y = float(format(shape.y, formatter)) 6171 shape = Point(x, y) 6172 else: 6173 current.log.info("Cannot yet shrink Geometry: %s" % geom_type) 6174 6175 # Output 6176 if output == "wkt": 6177 output = shape.to_wkt() 6178 elif output == "geojson": 6179 from ..geojson import dumps 6180 # Compact Encoding 6181 output = dumps(shape, separators=SEPARATORS) 6182 6183 return output 6184 6185 # -------------------------------------------------------------------------
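Referring back to simplify() above: the Shapely part of it can be reproduced standalone. A minimal sketch (tolerance and precision values are illustrative; the real method reads both from deployment_settings):

    from shapely.wkt import loads as wkt_loads

    wkt = "LINESTRING(0 0, 1 0.01, 2 -0.01, 3 0, 4 0)"
    shape = wkt_loads(wkt).simplify(0.05, preserve_topology=True)
    precision = 4
    coords = [(round(x, precision), round(y, precision)) for x, y in shape.coords]
    print(coords)   # near-collinear vertices are dropped, the end points survive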
6186 - def show_map(self, 6187 id = "default_map", 6188 height = None, 6189 width = None, 6190 bbox = {}, 6191 lat = None, 6192 lon = None, 6193 zoom = None, 6194 projection = None, 6195 add_feature = False, 6196 add_feature_active = False, 6197 add_line = False, 6198 add_line_active = False, 6199 add_polygon = False, 6200 add_polygon_active = False, 6201 add_circle = False, 6202 add_circle_active = False, 6203 features = None, 6204 feature_queries = None, 6205 feature_resources = None, 6206 wms_browser = {}, 6207 catalogue_layers = False, 6208 legend = False, 6209 toolbar = False, 6210 area = False, 6211 color_picker = False, 6212 clear_layers = None, 6213 nav = None, 6214 print_control = None, 6215 print_mode = False, 6216 save = False, 6217 search = False, 6218 mouse_position = None, 6219 overview = None, 6220 permalink = None, 6221 scaleline = None, 6222 zoomcontrol = None, 6223 zoomWheelEnabled = True, 6224 mgrs = {}, 6225 window = False, 6226 window_hide = False, 6227 closable = True, 6228 maximizable = True, 6229 collapsed = False, 6230 callback = "DEFAULT", 6231 plugins = None, 6232 ):
6233 """ 6234 Returns the HTML to display a map 6235 6236 Normally called in the controller as: map = gis.show_map() 6237 In the view, put: {{=XML(map)}} 6238 6239 @param id: ID to uniquely identify this map if there are several on a page 6240 @param height: Height of viewport (if not provided then the default deployment setting is used) 6241 @param width: Width of viewport (if not provided then the default deployment setting is used) 6242 @param bbox: default Bounding Box of viewport (if not provided then the Lat/Lon/Zoom are used) (Dict): 6243 {"lon_min" : float, 6244 "lat_min" : float, 6245 "lon_max" : float, 6246 "lat_max" : float, 6247 } 6248 @param lat: default Latitude of viewport (if not provided then the default setting from the Map Service Catalogue is used) 6249 @param lon: default Longitude of viewport (if not provided then the default setting from the Map Service Catalogue is used) 6250 @param zoom: default Zoom level of viewport (if not provided then the default setting from the Map Service Catalogue is used) 6251 @param projection: EPSG code for the Projection to use (if not provided then the default setting from the Map Service Catalogue is used) 6252 @param add_feature: Whether to include a DrawFeature control to allow adding a marker to the map 6253 @param add_feature_active: Whether the DrawFeature control should be active by default 6254 @param add_polygon: Whether to include a DrawFeature control to allow drawing a polygon over the map 6255 @param add_polygon_active: Whether the DrawFeature control should be active by default 6256 @param add_circle: Whether to include a DrawFeature control to allow drawing a circle over the map 6257 @param add_circle_active: Whether the DrawFeature control should be active by default 6258 @param features: Simple Features to overlay on Map (no control over appearance & not interactive) 6259 [wkt] 6260 @param feature_queries: Feature Queries to overlay onto the map & their options (List of Dicts): 6261 [{"name" : T("MyLabel"), # A string: the label for the layer 6262 "query" : query, # A gluon.sql.Rows of gis_locations, which can be from a simple query or a Join. 6263 # Extra fields can be added for 'popup_url', 'popup_label' & either 6264 # 'marker' (url/height/width) or 'shape' (with optional 'colour' & 'size') 6265 "active" : True, # Is the feed displayed upon load or needs ticking to load afterwards? 6266 "marker" : None, # Optional: A per-Layer marker query or marker_id for the icon used to display the feature 6267 "opacity" : 1, # Optional 6268 "cluster_attribute", # Optional 6269 "cluster_distance", # Optional 6270 "cluster_threshold" # Optional 6271 }] 6272 @param feature_resources: REST URLs for (filtered) resources to overlay onto the map & their options (List of Dicts): 6273 [{"name" : T("MyLabel"), # A string: the label for the layer 6274 "id" : "search", # A string: the id for the layer (for manipulation by JavaScript) 6275 "active" : True, # Is the feed displayed upon load or needs ticking to load afterwards? 
6276 EITHER: 6277 "layer_id" : 1, # An integer: the layer_id to load (optional alternative to specifying URL/tablename/marker) 6278 "filter" : "filter", # A string: an optional URL filter which *replaces* any in the layer 6279 OR: 6280 "tablename" : "module_resource", # A string: the tablename (used to determine whether to locate via location_id or site_id) 6281 "url" : "/eden/module/resource.geojson?filter", # A URL to load the resource 6282 6283 "marker" : None, # Optional: A per-Layer marker dict for the icon used to display the feature (overrides layer_id if-set) 6284 "opacity" : 1, # Optional (overrides layer_id if-set) 6285 "cluster_attribute", # Optional (overrides layer_id if-set) 6286 "cluster_distance", # Optional (overrides layer_id if-set) 6287 "cluster_threshold", # Optional (overrides layer_id if-set) 6288 "dir", # Optional (overrides layer_id if-set) 6289 "style", # Optional (overrides layer_id if-set) 6290 }] 6291 @param wms_browser: WMS Server's GetCapabilities & options (dict) 6292 {"name": T("MyLabel"), # Name for the Folder in LayerTree 6293 "url": string # URL of GetCapabilities 6294 } 6295 @param catalogue_layers: Show all the enabled Layers from the GIS Catalogue 6296 Defaults to False: Just show the default Base layer 6297 @param legend: True: Show the GeoExt Legend panel, False: No Panel, "float": New floating Legend Panel 6298 @param toolbar: Show the Icon Toolbar of Controls 6299 @param area: Show the Area tool on the Toolbar 6300 @param color_picker: Show the Color Picker tool on the Toolbar (used for S3LocationSelector...pick up in postprocess) 6301 If a style is provided then this is used as the default style 6302 @param nav: Show the Navigation controls on the Toolbar 6303 @param save: Show the Save tool on the Toolbar 6304 @param search: Show the Geonames search box (requires a username to be configured) 6305 @param mouse_position: Show the current coordinates in the bottom-right of the map. 3 Options: 'normal', 'mgrs', False (defaults to checking deployment_settings, which defaults to 'normal') 6306 @param overview: Show the Overview Map (defaults to checking deployment_settings, which defaults to True) 6307 @param permalink: Show the Permalink control (defaults to checking deployment_settings, which defaults to True) 6308 @param scaleline: Show the ScaleLine control (defaults to checking deployment_settings, which defaults to True) 6309 @param zoomcontrol: Show the Zoom control (defaults to checking deployment_settings, which defaults to True) 6310 @param mgrs: Use the MGRS Control to select PDFs 6311 {"name": string, # Name for the Control 6312 "url": string # URL of PDF server 6313 } 6314 @ToDo: Also add MGRS Search support: http://gxp.opengeo.org/master/examples/mgrs.html 6315 @param window: Have viewport pop out of page into a resizable window 6316 @param window_hide: Have the window hidden by default, ready to appear (e.g. 
on clicking a button) 6317 @param closable: In Window mode, whether the window is closable or not 6318 @param collapsed: Start the Tools panel (West region) collapsed 6319 @param callback: Code to run once the Map JavaScript has loaded 6320 @param plugins: an iterable of objects which support the following methods: 6321 .extend_gis_map(map) 6322 Client-side portion suppoprts the following methods: 6323 .addToMapWindow(items) 6324 .setup(map) 6325 6326 """ 6327 6328 return MAP(id = id, 6329 height = height, 6330 width = width, 6331 bbox = bbox, 6332 lat = lat, 6333 lon = lon, 6334 zoom = zoom, 6335 projection = projection, 6336 add_feature = add_feature, 6337 add_feature_active = add_feature_active, 6338 add_line = add_line, 6339 add_line_active = add_line_active, 6340 add_polygon = add_polygon, 6341 add_polygon_active = add_polygon_active, 6342 add_circle = add_circle, 6343 add_circle_active = add_circle_active, 6344 features = features, 6345 feature_queries = feature_queries, 6346 feature_resources = feature_resources, 6347 wms_browser = wms_browser, 6348 catalogue_layers = catalogue_layers, 6349 legend = legend, 6350 toolbar = toolbar, 6351 area = area, 6352 color_picker = color_picker, 6353 clear_layers = clear_layers, 6354 nav = nav, 6355 print_control = print_control, 6356 print_mode = print_mode, 6357 save = save, 6358 search = search, 6359 mouse_position = mouse_position, 6360 overview = overview, 6361 permalink = permalink, 6362 scaleline = scaleline, 6363 zoomcontrol = zoomcontrol, 6364 zoomWheelEnabled = zoomWheelEnabled, 6365 mgrs = mgrs, 6366 window = window, 6367 window_hide = window_hide, 6368 closable = closable, 6369 maximizable = maximizable, 6370 collapsed = collapsed, 6371 callback = callback, 6372 plugins = plugins, 6373 )
6374
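A hypothetical web2py controller using show_map() as described in the docstring above (all argument values are illustrative):

    def index():
        map = current.gis.show_map(height = 400,
                                   width = 600,
                                   bbox = {"lon_min": -10.0, "lat_min": 49.0,
                                           "lon_max": 2.0, "lat_max": 61.0},
                                   toolbar = True,
                                   legend = True,
                                   catalogue_layers = True,
                                   )
        return dict(map=map)

    # and in the view: {{=XML(map)}}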
6375 # ============================================================================= 6376 -class MAP(DIV):
6377 """ 6378 HTML Helper to render a Map 6379 - allows the Map to be generated only when being rendered 6380 - used by gis.show_map() 6381 """ 6382
6383 - def __init__(self, **opts):
6384 """ 6385 :param **opts: options to pass to the Map for server-side processing 6386 """ 6387 6388 # We haven't yet run _setup() 6389 self.setup = False 6390 self.callback = None 6391 6392 # Options for server-side processing 6393 self.opts = opts 6394 self.id = map_id = opts.get("id", "default_map") 6395 6396 # Options for client-side processing 6397 self.options = {} 6398 6399 # Components 6400 # Map (Embedded not Window) 6401 components = [DIV(DIV(_class="map_loader"), 6402 _id="%s_panel" % map_id) 6403 ] 6404 6405 self.components = components 6406 for c in components: 6407 self._setnode(c) 6408 6409 # Adapt CSS to size of Map 6410 _class = "map_wrapper" 6411 if opts.get("window"): 6412 _class = "%s fullscreen" % _class 6413 if opts.get("print_mode"): 6414 _class = "%s print" % _class 6415 self.attributes = {"_class": _class, 6416 "_id": map_id, 6417 } 6418 self.parent = None 6419 6420 # Show Color Picker? 6421 if opts.get("color_picker"): 6422 # Can't be done in _setup() as usually run from xml() and hence we've already passed this part of the layout.html 6423 s3 = current.response.s3 6424 if s3.debug: 6425 style = "plugins/spectrum.css" 6426 else: 6427 style = "plugins/spectrum.min.css" 6428 if style not in s3.stylesheets: 6429 s3.stylesheets.append(style)
6430 6431 # -------------------------------------------------------------------------
6432 - def _setup(self):
6433 """ 6434 Setup the Map 6435 - not done during init() to be as Lazy as possible 6436 - separated from xml() in order to be able to read options to put 6437 into scripts (callback or otherwise) 6438 """ 6439 6440 # Read configuration 6441 config = GIS.get_config() 6442 if not config: 6443 # No prepop - Bail 6444 current.session.error = current.T("Map cannot display without prepop data!") 6445 redirect(URL(c="default", f="index")) 6446 6447 opts = self.opts 6448 6449 T = current.T 6450 db = current.db 6451 auth = current.auth 6452 s3db = current.s3db 6453 request = current.request 6454 response = current.response 6455 if not response.warning: 6456 response.warning = "" 6457 s3 = response.s3 6458 ctable = db.gis_config 6459 settings = current.deployment_settings 6460 MAP_ADMIN = auth.s3_has_role(current.session.s3.system_roles.MAP_ADMIN) 6461 6462 # Support bookmarks (such as from the control) 6463 # - these over-ride the arguments 6464 get_vars = request.get_vars 6465 6466 # JS Globals 6467 js_globals = {} 6468 6469 # Map Options for client-side processing 6470 options = {} 6471 6472 # Strings used by all Maps 6473 i18n = {"gis_base_layers": T("Base Layers"), 6474 "gis_overlays": T(settings.get_gis_label_overlays()), 6475 "gis_layers": T(settings.get_gis_layers_label()), 6476 "gis_draft_layer": T("Draft Features"), 6477 "gis_cluster_multiple": T("There are multiple records at this location"), 6478 "gis_loading": T("Loading"), 6479 "gis_requires_login": T("Requires Login"), 6480 "gis_too_many_features": T("There are too many features, please Zoom In or Filter"), 6481 "gis_zoomin": T("Zoom In"), 6482 } 6483 6484 ########## 6485 # Viewport 6486 ########## 6487 6488 height = opts.get("height", None) 6489 if height: 6490 map_height = height 6491 else: 6492 map_height = settings.get_gis_map_height() 6493 options["map_height"] = map_height 6494 width = opts.get("width", None) 6495 if width: 6496 map_width = width 6497 else: 6498 map_width = settings.get_gis_map_width() 6499 options["map_width"] = map_width 6500 6501 zoom = get_vars.get("zoom", None) 6502 if zoom is not None: 6503 zoom = int(zoom) 6504 else: 6505 zoom = opts.get("zoom", None) 6506 if not zoom: 6507 zoom = config.zoom 6508 options["zoom"] = zoom or 1 6509 6510 # Bounding Box or Center/Zoom 6511 bbox = opts.get("bbox", None) 6512 if (bbox 6513 and (-90 <= bbox["lat_max"] <= 90) 6514 and (-90 <= bbox["lat_min"] <= 90) 6515 and (-180 <= bbox["lon_max"] <= 180) 6516 and (-180 <= bbox["lon_min"] <= 180) 6517 ): 6518 # We have sane Bounds provided, so we should use them 6519 pass 6520 elif zoom is None: 6521 # Build Bounds from Config 6522 bbox = config 6523 else: 6524 # No bounds or we've been passed bounds which aren't sane 6525 bbox = None 6526 # Use Lat/Lon/Zoom to center instead 6527 lat = get_vars.get("lat", None) 6528 if lat is not None: 6529 lat = float(lat) 6530 else: 6531 lat = opts.get("lat", None) 6532 if lat is None or lat == "": 6533 lat = config.lat 6534 lon = get_vars.get("lon", None) 6535 if lon is not None: 6536 lon = float(lon) 6537 else: 6538 lon = opts.get("lon", None) 6539 if lon is None or lon == "": 6540 lon = config.lon 6541 6542 if bbox: 6543 # Calculate from Bounds 6544 options["bbox"] = [bbox["lon_min"], # left 6545 bbox["lat_min"], # bottom 6546 bbox["lon_max"], # right 6547 bbox["lat_max"], # top 6548 ] 6549 else: 6550 options["lat"] = lat 6551 options["lon"] = lon 6552 6553 options["numZoomLevels"] = config.zoom_levels 6554 6555 options["restrictedExtent"] = (config.lon_min, 6556 config.lat_min, 6557 
config.lon_max, 6558 config.lat_max, 6559 ) 6560 6561 ############ 6562 # Projection 6563 ############ 6564 6565 projection = opts.get("projection", None) 6566 if not projection: 6567 projection = config.epsg 6568 options["projection"] = projection 6569 if projection not in (900913, 4326): 6570 # Test for Valid Projection file in Proj4JS library 6571 projpath = os.path.join( 6572 request.folder, "static", "scripts", "gis", "proj4js", \ 6573 "lib", "defs", "EPSG%s.js" % projection 6574 ) 6575 try: 6576 f = open(projpath, "r") 6577 f.close() 6578 except: 6579 if projection: 6580 proj4js = config.proj4js 6581 if proj4js: 6582 # Create it 6583 try: 6584 f = open(projpath, "w") 6585 except IOError, e: 6586 response.error = \ 6587 T("Map not available: Cannot write projection file - %s") % e 6588 else: 6589 f.write('''Proj4js.defs["EPSG:4326"]="%s"''' % proj4js) 6590 f.close() 6591 else: 6592 response.warning = \ 6593 T("Map not available: Projection %(projection)s not supported - please add definition to %(path)s") % \ 6594 {"projection": "'%s'" % projection, 6595 "path": "/static/scripts/gis/proj4js/lib/defs", 6596 } 6597 else: 6598 response.error = \ 6599 T("Map not available: No Projection configured") 6600 return None 6601 options["maxExtent"] = config.maxExtent 6602 options["units"] = config.units 6603 6604 ######## 6605 # Marker 6606 ######## 6607 6608 if config.marker_image: 6609 options["marker_default"] = {"i": config.marker_image, 6610 "h": config.marker_height, 6611 "w": config.marker_width, 6612 } 6613 # @ToDo: show_map() opts with fallback to settings 6614 # Keep these in sync with scaleImage() in s3.gis.js 6615 marker_max_height = settings.get_gis_marker_max_height() 6616 if marker_max_height != 35: 6617 options["max_h"] = marker_max_height 6618 marker_max_width = settings.get_gis_marker_max_width() 6619 if marker_max_width != 30: 6620 options["max_w"] = marker_max_width 6621 6622 ######### 6623 # Colours 6624 ######### 6625 6626 # Keep these in sync with s3.gis.js 6627 cluster_fill = settings.get_gis_cluster_fill() 6628 if cluster_fill and cluster_fill != '8087ff': 6629 options["cluster_fill"] = cluster_fill 6630 cluster_stroke = settings.get_gis_cluster_stroke() 6631 if cluster_stroke and cluster_stroke != '2b2f76': 6632 options["cluster_stroke"] = cluster_stroke 6633 select_fill = settings.get_gis_select_fill() 6634 if select_fill and select_fill != 'ffdc33': 6635 options["select_fill"] = select_fill 6636 select_stroke = settings.get_gis_select_stroke() 6637 if select_stroke and select_stroke != 'ff9933': 6638 options["select_stroke"] = select_stroke 6639 if not settings.get_gis_cluster_label(): 6640 options["cluster_label"] = False 6641 6642 ######## 6643 # Layout 6644 ######## 6645 6646 if not opts.get("closable", False): 6647 options["windowNotClosable"] = True 6648 if opts.get("window", False): 6649 options["window"] = True 6650 if opts.get("window_hide", False): 6651 options["windowHide"] = True 6652 6653 if opts.get("maximizable", False): 6654 options["maximizable"] = True 6655 else: 6656 options["maximizable"] = False 6657 6658 # Collapsed 6659 if opts.get("collapsed", False): 6660 options["west_collapsed"] = True 6661 6662 # LayerTree 6663 if not settings.get_gis_layer_tree_base(): 6664 options["hide_base"] = True 6665 if not settings.get_gis_layer_tree_overlays(): 6666 options["hide_overlays"] = True 6667 if not settings.get_gis_layer_tree_expanded(): 6668 options["folders_closed"] = True 6669 if settings.get_gis_layer_tree_radio(): 6670 options["folders_radio"] = True 
6671 6672 ####### 6673 # Tools 6674 ####### 6675 6676 # Toolbar 6677 if opts.get("toolbar", False): 6678 options["toolbar"] = True 6679 6680 i18n["gis_length_message"] = T("The length is") 6681 i18n["gis_length_tooltip"] = T("Measure Length: Click the points along the path & end with a double-click") 6682 i18n["gis_zoomfull"] = T("Zoom to maximum map extent") 6683 6684 if settings.get_gis_geolocate_control(): 6685 # Presence of label turns feature on in s3.gis.js 6686 # @ToDo: Provide explicit option to support multiple maps in a page with different options 6687 i18n["gis_geoLocate"] = T("Zoom to Current Location") 6688 6689 # Search 6690 if opts.get("search", False): 6691 geonames_username = settings.get_gis_geonames_username() 6692 if geonames_username: 6693 # Presence of username turns feature on in s3.gis.js 6694 options["geonames"] = geonames_username 6695 # Presence of label adds support JS in Loader 6696 i18n["gis_search"] = T("Search location in Geonames") 6697 #i18n["gis_search_no_internet"] = T("Geonames.org search requires Internet connectivity!") 6698 6699 # Show NAV controls? 6700 # e.g. removed within S3LocationSelector[Widget] 6701 nav = opts.get("nav", None) 6702 if nav is None: 6703 nav = settings.get_gis_nav_controls() 6704 if nav: 6705 i18n["gis_zoominbutton"] = T("Zoom In: click in the map or use the left mouse button and drag to create a rectangle") 6706 i18n["gis_zoomout"] = T("Zoom Out: click in the map or use the left mouse button and drag to create a rectangle") 6707 i18n["gis_pan"] = T("Pan Map: keep the left mouse button pressed and drag the map") 6708 i18n["gis_navPrevious"] = T("Previous View") 6709 i18n["gis_navNext"] = T("Next View") 6710 else: 6711 options["nav"] = False 6712 6713 # Show Area control? 6714 if opts.get("area", False): 6715 options["area"] = True 6716 i18n["gis_area_message"] = T("The area is") 6717 i18n["gis_area_tooltip"] = T("Measure Area: Click the points around the polygon & end with a double-click") 6718 6719 # Show Color Picker? 6720 color_picker = opts.get("color_picker", False) 6721 if color_picker: 6722 options["color_picker"] = True 6723 if color_picker is not True: 6724 options["draft_style"] = json.loads(color_picker) 6725 #i18n["gis_color_picker_tooltip"] = T("Select Color") 6726 i18n["gis_cancelText"] = T("cancel") 6727 i18n["gis_chooseText"] = T("choose") 6728 i18n["gis_togglePaletteMoreText"] = T("more") 6729 i18n["gis_togglePaletteLessText"] = T("less") 6730 i18n["gis_clearText"] = T("Clear Color Selection") 6731 i18n["gis_noColorSelectedText"] = T("No Color Selected") 6732 6733 # Show Print control? 6734 print_control = opts.get("print_control") is not False and settings.get_gis_print() 6735 if print_control: 6736 # @ToDo: Use internal Printing or External Service 6737 # http://eden.sahanafoundation.org/wiki/BluePrint/GIS/Printing 6738 #print_service = settings.get_gis_print_service() 6739 #if print_service: 6740 # print_tool = {"url": string, # URL of print service (e.g. http://localhost:8080/geoserver/pdf/) 6741 # "mapTitle": string, # Title for the Printed Map (optional) 6742 # "subTitle": string # subTitle for the Printed Map (optional) 6743 # } 6744 options["print"] = True 6745 i18n["gis_print"] = T("Print") 6746 i18n["gis_paper_size"] = T("Paper Size") 6747 i18n["gis_print_tip"] = T("Take a screenshot of the map which can be printed") 6748 6749 # Show Save control? 6750 # e.g. 
removed within S3LocationSelector[Widget] 6751 if opts.get("save") is True and auth.s3_logged_in(): 6752 options["save"] = True 6753 i18n["gis_save"] = T("Save: Default Lat, Lon & Zoom for the Viewport") 6754 if MAP_ADMIN or (config.pe_id == auth.user.pe_id): 6755 # Personal config or MapAdmin, so Save Button does Updates 6756 options["config_id"] = config.id 6757 6758 # OSM Authoring 6759 pe_id = auth.user.pe_id if auth.s3_logged_in() else None 6760 if pe_id and s3db.auth_user_options_get_osm(pe_id): 6761 # Presence of label turns feature on in s3.gis.js 6762 # @ToDo: Provide explicit option to support multiple maps in a page with different options 6763 i18n["gis_potlatch"] = T("Edit the OpenStreetMap data for this area") 6764 i18n["gis_osm_zoom_closer"] = T("Zoom in closer to Edit OpenStreetMap layer") 6765 6766 # MGRS PDF Browser 6767 mgrs = opts.get("mgrs", None) 6768 if mgrs: 6769 options["mgrs_name"] = mgrs["name"] 6770 options["mgrs_url"] = mgrs["url"] 6771 else: 6772 # No toolbar 6773 if opts.get("save") is True: 6774 opts["save"] = "float" 6775 6776 # Show Save control? 6777 # e.g. removed within S3LocationSelector[Widget] 6778 if opts.get("save") == "float" and auth.s3_logged_in(): 6779 permit = auth.s3_has_permission 6780 if permit("create", ctable): 6781 options["save"] = "float" 6782 i18n["gis_save_map"] = T("Save Map") 6783 i18n["gis_new_map"] = T("Save as New Map?") 6784 i18n["gis_name_map"] = T("Name of Map") 6785 i18n["save"] = T("Save") 6786 i18n["saved"] = T("Saved") 6787 config_id = config.id 6788 _config = db(ctable.id == config_id).select(ctable.uuid, 6789 ctable.name, 6790 limitby=(0, 1), 6791 ).first() 6792 if MAP_ADMIN: 6793 i18n["gis_my_maps"] = T("Saved Maps") 6794 else: 6795 options["pe_id"] = auth.user.pe_id 6796 i18n["gis_my_maps"] = T("My Maps") 6797 if permit("update", ctable, record_id=config_id): 6798 options["config_id"] = config_id 6799 options["config_name"] = _config.name 6800 elif _config.uuid != "SITE_DEFAULT": 6801 options["config_name"] = _config.name 6802 6803 # Legend panel 6804 legend = opts.get("legend", False) 6805 if legend: 6806 i18n["gis_legend"] = T("Legend") 6807 if legend == "float": 6808 options["legend"] = "float" 6809 if settings.get_gis_layer_metadata(): 6810 options["metadata"] = True 6811 # MAP_ADMIN better for simpler deployments 6812 #if auth.s3_has_permission("create", "cms_post_layer"): 6813 if MAP_ADMIN: 6814 i18n["gis_metadata_create"] = T("Create 'More Info'") 6815 i18n["gis_metadata_edit"] = T("Edit 'More Info'") 6816 else: 6817 i18n["gis_metadata"] = T("More Info") 6818 else: 6819 options["legend"] = True 6820 6821 # Draw Feature Controls 6822 if opts.get("add_feature", False): 6823 i18n["gis_draw_feature"] = T("Add Point") 6824 if opts.get("add_feature_active", False): 6825 options["draw_feature"] = "active" 6826 else: 6827 options["draw_feature"] = "inactive" 6828 6829 if opts.get("add_line", False): 6830 i18n["gis_draw_line"] = T("Add Line") 6831 if opts.get("add_line_active", False): 6832 options["draw_line"] = "active" 6833 else: 6834 options["draw_line"] = "inactive" 6835 6836 if opts.get("add_polygon", False): 6837 i18n["gis_draw_polygon"] = T("Add Polygon") 6838 i18n["gis_draw_polygon_clear"] = T("Clear Polygon") 6839 if opts.get("add_polygon_active", False): 6840 options["draw_polygon"] = "active" 6841 else: 6842 options["draw_polygon"] = "inactive" 6843 6844 if opts.get("add_circle", False): 6845 i18n["gis_draw_circle"] = T("Add Circle") 6846 if opts.get("add_circle_active", False): 6847 options["draw_circle"] = 
"active" 6848 else: 6849 options["draw_circle"] = "inactive" 6850 6851 # Clear Layers 6852 clear_layers = opts.get("clear_layers") is not False and settings.get_gis_clear_layers() 6853 if clear_layers: 6854 options["clear_layers"] = clear_layers 6855 i18n["gis_clearlayers"] = T("Clear all Layers") 6856 6857 # Layer Properties 6858 if settings.get_gis_layer_properties(): 6859 # Presence of label turns feature on in s3.gis.js 6860 i18n["gis_properties"] = T("Layer Properties") 6861 6862 # Upload Layer 6863 if settings.get_gis_geoserver_password(): 6864 # Presence of label adds support JS in Loader and turns feature on in s3.gis.js 6865 # @ToDo: Provide explicit option to support multiple maps in a page with different options 6866 i18n["gis_uploadlayer"] = T("Upload Shapefile") 6867 6868 # WMS Browser 6869 wms_browser = opts.get("wms_browser", None) 6870 if wms_browser: 6871 options["wms_browser_name"] = wms_browser["name"] 6872 # urlencode the URL 6873 options["wms_browser_url"] = urllib.quote(wms_browser["url"]) 6874 6875 # Mouse Position 6876 # 'normal', 'mgrs' or 'off' 6877 mouse_position = opts.get("mouse_position", None) 6878 if mouse_position is None: 6879 mouse_position = settings.get_gis_mouse_position() 6880 if mouse_position == "mgrs": 6881 options["mouse_position"] = "mgrs" 6882 # Tell loader to load support scripts 6883 js_globals["mgrs"] = True 6884 elif mouse_position: 6885 options["mouse_position"] = True 6886 6887 # Overview Map 6888 overview = opts.get("overview", None) 6889 if overview is None: 6890 overview = settings.get_gis_overview() 6891 if not overview: 6892 options["overview"] = False 6893 6894 # Permalink 6895 permalink = opts.get("permalink", None) 6896 if permalink is None: 6897 permalink = settings.get_gis_permalink() 6898 if not permalink: 6899 options["permalink"] = False 6900 6901 # ScaleLine 6902 scaleline = opts.get("scaleline", None) 6903 if scaleline is None: 6904 scaleline = settings.get_gis_scaleline() 6905 if not scaleline: 6906 options["scaleline"] = False 6907 6908 # Zoom control 6909 zoomcontrol = opts.get("zoomcontrol", None) 6910 if zoomcontrol is None: 6911 zoomcontrol = settings.get_gis_zoomcontrol() 6912 if not zoomcontrol: 6913 options["zoomcontrol"] = False 6914 6915 zoomWheelEnabled = opts.get("zoomWheelEnabled", True) 6916 if not zoomWheelEnabled: 6917 options["no_zoom_wheel"] = True 6918 6919 ######## 6920 # Layers 6921 ######## 6922 6923 # Duplicate Features to go across the dateline? 6924 # @ToDo: Action this again (e.g. 
for DRRPP) 6925 if settings.get_gis_duplicate_features(): 6926 options["duplicate_features"] = True 6927 6928 # Features 6929 features = opts.get("features", None) 6930 if features: 6931 options["features"] = addFeatures(features) 6932 6933 # Feature Queries 6934 feature_queries = opts.get("feature_queries", None) 6935 if feature_queries: 6936 options["feature_queries"] = addFeatureQueries(feature_queries) 6937 6938 # Feature Resources 6939 feature_resources = opts.get("feature_resources", None) 6940 if feature_resources: 6941 options["feature_resources"] = addFeatureResources(feature_resources) 6942 6943 # Layers 6944 db = current.db 6945 ltable = db.gis_layer_config 6946 etable = db.gis_layer_entity 6947 query = (ltable.deleted == False) 6948 join = [etable.on(etable.layer_id == ltable.layer_id)] 6949 fields = [etable.instance_type, 6950 ltable.layer_id, 6951 ltable.enabled, 6952 ltable.visible, 6953 ltable.base, 6954 ltable.dir, 6955 ] 6956 6957 if opts.get("catalogue_layers", False): 6958 # Add all enabled Layers from the Catalogue 6959 stable = db.gis_style 6960 mtable = db.gis_marker 6961 query &= (ltable.config_id.belongs(config.ids)) 6962 join.append(ctable.on(ctable.id == ltable.config_id)) 6963 fields.extend((stable.style, 6964 stable.cluster_distance, 6965 stable.cluster_threshold, 6966 stable.opacity, 6967 stable.popup_format, 6968 mtable.image, 6969 mtable.height, 6970 mtable.width, 6971 ctable.pe_type)) 6972 left = [stable.on((stable.layer_id == etable.layer_id) & \ 6973 (stable.record_id == None) & \ 6974 ((stable.config_id == ctable.id) | \ 6975 (stable.config_id == None))), 6976 mtable.on(mtable.id == stable.marker_id), 6977 ] 6978 limitby = None 6979 # @ToDo: Need to fix this?: make the style lookup a different call 6980 if settings.get_database_type() == "postgres": 6981 # None is last 6982 orderby = [ctable.pe_type, stable.config_id] 6983 else: 6984 # None is 1st 6985 orderby = [ctable.pe_type, ~stable.config_id] 6986 if settings.get_gis_layer_metadata(): 6987 cptable = s3db.cms_post_layer 6988 left.append(cptable.on(cptable.layer_id == etable.layer_id)) 6989 fields.append(cptable.post_id) 6990 else: 6991 # Add just the default Base Layer 6992 query &= (ltable.base == True) & \ 6993 (ltable.config_id == config.id) 6994 # Base layer doesn't need a style 6995 left = None 6996 limitby = (0, 1) 6997 orderby = None 6998 6999 layer_types = [] 7000 lappend = layer_types.append 7001 layers = db(query).select(join=join, 7002 left=left, 7003 limitby=limitby, 7004 orderby=orderby, 7005 *fields) 7006 if not layers: 7007 # Use Site Default base layer 7008 # (Base layer doesn't need a style) 7009 query = (etable.id == ltable.layer_id) & \ 7010 (ltable.config_id == ctable.id) & \ 7011 (ctable.uuid == "SITE_DEFAULT") & \ 7012 (ltable.base == True) & \ 7013 (ltable.enabled == True) 7014 layers = db(query).select(*fields, 7015 limitby=(0, 1)) 7016 if not layers: 7017 # Just show EmptyLayer 7018 layer_types = [LayerEmpty] 7019 7020 for layer in layers: 7021 layer_type = layer["gis_layer_entity.instance_type"] 7022 if layer_type == "gis_layer_openstreetmap": 7023 lappend(LayerOSM) 7024 elif layer_type == "gis_layer_google": 7025 # NB v3 doesn't work when initially hidden 7026 lappend(LayerGoogle) 7027 elif layer_type == "gis_layer_arcrest": 7028 lappend(LayerArcREST) 7029 elif layer_type == "gis_layer_bing": 7030 lappend(LayerBing) 7031 elif layer_type == "gis_layer_tms": 7032 lappend(LayerTMS) 7033 elif layer_type == "gis_layer_wms": 7034 lappend(LayerWMS) 7035 elif layer_type == 
"gis_layer_xyz": 7036 lappend(LayerXYZ) 7037 elif layer_type == "gis_layer_empty": 7038 lappend(LayerEmpty) 7039 elif layer_type == "gis_layer_js": 7040 lappend(LayerJS) 7041 elif layer_type == "gis_layer_theme": 7042 lappend(LayerTheme) 7043 elif layer_type == "gis_layer_geojson": 7044 lappend(LayerGeoJSON) 7045 elif layer_type == "gis_layer_gpx": 7046 lappend(LayerGPX) 7047 elif layer_type == "gis_layer_coordinate": 7048 lappend(LayerCoordinate) 7049 elif layer_type == "gis_layer_georss": 7050 lappend(LayerGeoRSS) 7051 elif layer_type == "gis_layer_kml": 7052 lappend(LayerKML) 7053 elif layer_type == "gis_layer_openweathermap": 7054 lappend(LayerOpenWeatherMap) 7055 elif layer_type == "gis_layer_shapefile": 7056 lappend(LayerShapefile) 7057 elif layer_type == "gis_layer_wfs": 7058 lappend(LayerWFS) 7059 elif layer_type == "gis_layer_feature": 7060 lappend(LayerFeature) 7061 7062 # Make unique 7063 layer_types = set(layer_types) 7064 scripts = [] 7065 scripts_append = scripts.append 7066 for LayerType in layer_types: 7067 try: 7068 # Instantiate the Class 7069 layer = LayerType(layers) 7070 layer.as_dict(options) 7071 for script in layer.scripts: 7072 scripts_append(script) 7073 except Exception, exception: 7074 error = "%s not shown: %s" % (LayerType.__name__, exception) 7075 current.log.error(error) 7076 if s3.debug: 7077 raise HTTP(500, error) 7078 else: 7079 response.warning += error 7080 7081 # WMS getFeatureInfo 7082 # (loads conditionally based on whether queryable WMS Layers have been added) 7083 if s3.gis.get_feature_info and settings.get_gis_getfeature_control(): 7084 # Presence of label turns feature on 7085 # @ToDo: Provide explicit option to support multiple maps in a page 7086 # with different options 7087 i18n["gis_get_feature_info"] = T("Get Feature Info") 7088 i18n["gis_feature_info"] = T("Feature Info") 7089 7090 # Callback can be set before _setup() 7091 if not self.callback: 7092 self.callback = opts.get("callback", "DEFAULT") 7093 # These can be read/modified after _setup() & before xml() 7094 self.options = options 7095 7096 self.globals = js_globals 7097 self.i18n = i18n 7098 self.scripts = scripts 7099 7100 # Set up map plugins 7101 # - currently just used by Climate 7102 # @ToDo: Get these working with new loader 7103 # This, and any code it generates, is done last 7104 # However, map plugin should not assume this. 7105 self.plugin_callbacks = [] 7106 plugins = opts.get("plugins", None) 7107 if plugins: 7108 for plugin in plugins: 7109 plugin.extend_gis_map(self) 7110 7111 # Flag to xml() that we've already been run 7112 self.setup = True 7113 7114 return options
7115 7116 # -------------------------------------------------------------------------
7117 - def xml(self):
7118 """ 7119 Render the Map 7120 - this is primarily done by inserting a lot of JavaScript 7121 - CSS loaded as-standard to avoid delays in page loading 7122 - HTML added in init() as a component 7123 """ 7124 7125 if not self.setup: 7126 result = self._setup() 7127 if result is None: 7128 return "" 7129 7130 # Add ExtJS 7131 # @ToDo: Do this conditionally on whether Ext UI is used 7132 s3_include_ext() 7133 7134 dumps = json.dumps 7135 s3 = current.response.s3 7136 7137 js_global = s3.js_global 7138 js_global_append = js_global.append 7139 7140 i18n_dict = self.i18n 7141 i18n = [] 7142 i18n_append = i18n.append 7143 for key, val in i18n_dict.items(): 7144 line = '''i18n.%s="%s"''' % (key, val) 7145 if line not in i18n: 7146 i18n_append(line) 7147 i18n = '''\n'''.join(i18n) 7148 if i18n not in js_global: 7149 js_global_append(i18n) 7150 7151 globals_dict = self.globals 7152 js_globals = [] 7153 for key, val in globals_dict.items(): 7154 line = '''S3.gis.%s=%s''' % (key, dumps(val, separators=SEPARATORS)) 7155 if line not in js_globals: 7156 js_globals.append(line) 7157 js_globals = '''\n'''.join(js_globals) 7158 if js_globals not in js_global: 7159 js_global_append(js_globals) 7160 7161 # Underscore for Popup Templates 7162 s3_include_underscore() 7163 7164 debug = s3.debug 7165 scripts = s3.scripts 7166 7167 if self.opts.get("color_picker", False): 7168 if debug: 7169 script = URL(c="static", f="scripts/spectrum.js") 7170 else: 7171 script = URL(c="static", f="scripts/spectrum.min.js") 7172 if script not in scripts: 7173 scripts.append(script) 7174 7175 if debug: 7176 script = URL(c="static", f="scripts/S3/s3.gis.loader.js") 7177 else: 7178 script = URL(c="static", f="scripts/S3/s3.gis.loader.min.js") 7179 if script not in scripts: 7180 scripts.append(script) 7181 7182 callback = self.callback 7183 map_id = self.id 7184 options = self.options 7185 projection = options["projection"] 7186 try: 7187 options = dumps(options, separators=SEPARATORS) 7188 except Exception, exception: 7189 current.log.error("Map %s failed to initialise" % map_id, exception) 7190 plugin_callbacks = '''\n'''.join(self.plugin_callbacks) 7191 if callback: 7192 if callback == "DEFAULT": 7193 if map_id == "default_map": 7194 callback = '''S3.gis.show_map(null,%s)''' % options 7195 else: 7196 callback = '''S3.gis.show_map(%s,%s)''' % (map_id, options) 7197 else: 7198 # Store options where they can be read by a later show_map() 7199 js_global_append('''S3.gis.options["%s"]=%s''' % (map_id, 7200 options)) 7201 script = URL(c="static", f="scripts/yepnope.1.5.4-min.js") 7202 if script not in scripts: 7203 scripts.append(script) 7204 if plugin_callbacks: 7205 callback = '''%s\n%s''' % (callback, plugin_callbacks) 7206 callback = '''function(){%s}''' % callback 7207 else: 7208 # Store options where they can be read by a later show_map() 7209 js_global_append('''S3.gis.options["%s"]=%s''' % (map_id, options)) 7210 if plugin_callbacks: 7211 callback = '''function(){%s}''' % plugin_callbacks 7212 else: 7213 callback = '''null''' 7214 loader = \ 7215 '''s3_gis_loadjs(%(debug)s,%(projection)s,%(callback)s,%(scripts)s)''' \ 7216 % {"debug": "true" if debug else "false", 7217 "projection": projection, 7218 "callback": callback, 7219 "scripts": self.scripts, 7220 } 7221 jquery_ready = s3.jquery_ready 7222 if loader not in jquery_ready: 7223 jquery_ready.append(loader) 7224 7225 # Return the HTML 7226 return super(MAP, self).xml()
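For reference, the i18n / S3.gis string assembly in xml() above boils down to building plain JavaScript assignments and handing them to the loader. A standalone sketch with illustrative values (not tied to a real request):

    import json

    i18n_dict = {"gis_loading": "Loading", "gis_legend": "Legend"}
    js_lines = ['''i18n.%s="%s"''' % (key, val) for key, val in i18n_dict.items()]
    options = {"map_height": 400, "zoom": 6}
    js_lines.append('''S3.gis.options["default_map"]=%s''' % json.dumps(options))
    print("\n".join(js_lines))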
7227
7228 # ============================================================================= 7229 -def addFeatures(features):
7230 """ 7231 Add Simple Features to the Draft layer 7232 - used by S3LocationSelectorWidget 7233 7234 @todo: obsolete? 7235 """ 7236 7237 simplify = GIS.simplify 7238 _f = [] 7239 append = _f.append 7240 for feature in features: 7241 geojson = simplify(feature, output="geojson") 7242 if geojson: 7243 f = {"type": "Feature", 7244 "geometry": json.loads(geojson), 7245 } 7246 append(f) 7247 return _f
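addFeatures() (and hence show_map(features=...)) expects nothing more than a list of WKT strings; each is simplified and wrapped as a GeoJSON Feature. A hypothetical input (geometries are illustrative; GIS.simplify() needs the Eden environment):

    features = ["POINT(12.0 4.0)",
                "POLYGON((11 3, 13 3, 13 5, 11 5, 11 3))",
                ]
    draft_features = addFeatures(features)   # -> list of GeoJSON Feature dicts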
7248
7249 # ============================================================================= 7250 -def addFeatureQueries(feature_queries):
7251 """ 7252 Add Feature Queries to the map 7253 - These can be Rows or Storage() 7254 NB These considerations need to be taken care of before arriving here: 7255 Security of data 7256 Localisation of name/popup_label 7257 """ 7258 7259 db = current.db 7260 s3db = current.s3db 7261 cache = s3db.cache 7262 request = current.request 7263 controller = request.controller 7264 function = request.function 7265 fqtable = s3db.gis_feature_query 7266 mtable = s3db.gis_marker 7267 7268 auth = current.auth 7269 auth_user = auth.user 7270 if auth_user: 7271 created_by = auth_user.id 7272 s3_make_session_owner = auth.s3_make_session_owner 7273 else: 7274 # Anonymous 7275 # @ToDo: A deployment with many Anonymous Feature Queries being 7276 # accessed will need to change this design - e.g. use session ID instead 7277 created_by = None 7278 7279 layers_feature_query = [] 7280 append = layers_feature_query.append 7281 for layer in feature_queries: 7282 name = str(layer["name"]) 7283 _layer = {"name": name} 7284 name_safe = re.sub("\W", "_", name) 7285 7286 # Lat/Lon via Join or direct? 7287 try: 7288 layer["query"][0].gis_location.lat 7289 join = True 7290 except: 7291 join = False 7292 7293 # Push the Features into a temporary table in order to have them accessible via GeoJSON 7294 # @ToDo: Maintenance Script to clean out old entries (> 24 hours?) 7295 cname = "%s_%s_%s" % (name_safe, 7296 controller, 7297 function) 7298 # Clear old records 7299 query = (fqtable.name == cname) & \ 7300 (fqtable.created_by == created_by) 7301 db(query).delete() 7302 for row in layer["query"]: 7303 rowdict = {"name" : cname} 7304 if join: 7305 rowdict["lat"] = row.gis_location.lat 7306 rowdict["lon"] = row.gis_location.lon 7307 else: 7308 rowdict["lat"] = row["lat"] 7309 rowdict["lon"] = row["lon"] 7310 if "popup_url" in row: 7311 rowdict["popup_url"] = row["popup_url"] 7312 if "popup_label" in row: 7313 rowdict["popup_label"] = row["popup_label"] 7314 if "marker" in row: 7315 rowdict["marker_url"] = URL(c="static", f="img", 7316 args=["markers", 7317 row["marker"].image]) 7318 rowdict["marker_height"] = row["marker"].height 7319 rowdict["marker_width"] = row["marker"].width 7320 else: 7321 if "marker_url" in row: 7322 rowdict["marker_url"] = row["marker_url"] 7323 if "marker_height" in row: 7324 rowdict["marker_height"] = row["marker_height"] 7325 if "marker_width" in row: 7326 rowdict["marker_width"] = row["marker_width"] 7327 if "shape" in row: 7328 rowdict["shape"] = row["shape"] 7329 if "size" in row: 7330 rowdict["size"] = row["size"] 7331 if "colour" in row: 7332 rowdict["colour"] = row["colour"] 7333 if "opacity" in row: 7334 rowdict["opacity"] = row["opacity"] 7335 record_id = fqtable.insert(**rowdict) 7336 if not created_by: 7337 s3_make_session_owner(fqtable, record_id) 7338 7339 # URL to retrieve the data 7340 url = "%s.geojson?feature_query.name=%s&feature_query.created_by=%s" % \ 7341 (URL(c="gis", f="feature_query"), 7342 cname, 7343 created_by) 7344 _layer["url"] = url 7345 7346 if "active" in layer and not layer["active"]: 7347 _layer["visibility"] = False 7348 7349 if "marker" in layer: 7350 # per-Layer Marker 7351 marker = layer["marker"] 7352 if isinstance(marker, int): 7353 # integer (marker_id) not row 7354 marker = db(mtable.id == marker).select(mtable.image, 7355 mtable.height, 7356 mtable.width, 7357 limitby=(0, 1), 7358 cache=cache 7359 ).first() 7360 if marker: 7361 # @ToDo: Single option as Marker.as_json_dict() 7362 _layer["marker_url"] = marker["image"] 7363 _layer["marker_height"] = 
marker["height"] 7364 _layer["marker_width"] = marker["width"] 7365 7366 if "opacity" in layer and layer["opacity"] != 1: 7367 _layer["opacity"] = "%.1f" % layer["opacity"] 7368 if "cluster_attribute" in layer and \ 7369 layer["cluster_attribute"] != CLUSTER_ATTRIBUTE: 7370 _layer["cluster_attribute"] = layer["cluster_attribute"] 7371 if "cluster_distance" in layer and \ 7372 layer["cluster_distance"] != CLUSTER_DISTANCE: 7373 _layer["cluster_distance"] = layer["cluster_distance"] 7374 if "cluster_threshold" in layer and \ 7375 layer["cluster_threshold"] != CLUSTER_THRESHOLD: 7376 _layer["cluster_threshold"] = layer["cluster_threshold"] 7377 append(_layer) 7378 7379 return layers_feature_query
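A hypothetical feature_queries entry, matching the structure documented in show_map() above (the query and label are illustrative; the Rows may equally come from a join with gis_location):

    ltable = current.s3db.gis_location
    rows = current.db(ltable.level == "L3").select(ltable.lat, ltable.lon)
    feature_queries = [{"name": current.T("Districts"),
                        "query": rows,
                        "active": True,
                        "opacity": 0.8,
                        }]
    layer_defs = addFeatureQueries(feature_queries)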
7380
7381 # ============================================================================= 7382 -def addFeatureResources(feature_resources):
7383 """ 7384 Add Feature Resources to the map 7385 - REST URLs to back-end resources 7386 """ 7387 7388 T = current.T 7389 db = current.db 7390 s3db = current.s3db 7391 ftable = s3db.gis_layer_feature 7392 ltable = s3db.gis_layer_config 7393 # Better to do a separate query 7394 #mtable = s3db.gis_marker 7395 stable = db.gis_style 7396 config = GIS.get_config() 7397 config_id = config.id 7398 postgres = current.deployment_settings.get_database_type() == "postgres" 7399 7400 layers_feature_resource = [] 7401 append = layers_feature_resource.append 7402 for layer in feature_resources: 7403 name = s3_str(layer["name"]) 7404 _layer = {"name": name} 7405 _id = layer.get("id") 7406 if _id: 7407 _id = str(_id) 7408 else: 7409 _id = name 7410 _id = re.sub("\W", "_", _id) 7411 _layer["id"] = _id 7412 7413 # Are we loading a Catalogue Layer or a simple URL? 7414 layer_id = layer.get("layer_id", None) 7415 if layer_id: 7416 query = (ftable.layer_id == layer_id) 7417 left = [ltable.on((ltable.layer_id == layer_id) & \ 7418 (ltable.config_id == config_id)), 7419 stable.on((stable.layer_id == layer_id) & \ 7420 ((stable.config_id == config_id) | \ 7421 (stable.config_id == None)) & \ 7422 (stable.record_id == None) & \ 7423 (stable.aggregate == False)), 7424 # Better to do a separate query 7425 #mtable.on(mtable.id == stable.marker_id), 7426 ] 7427 # @ToDo: Need to fix this?: make the style lookup a different call 7428 if postgres: 7429 # None is last 7430 orderby = stable.config_id 7431 else: 7432 # None is 1st 7433 orderby = ~stable.config_id 7434 row = db(query).select(ftable.layer_id, 7435 ftable.controller, 7436 ftable.function, 7437 ftable.filter, 7438 ftable.aggregate, 7439 ftable.trackable, 7440 ftable.use_site, 7441 # @ToDo: Deprecate Legacy 7442 ftable.popup_fields, 7443 # @ToDo: Deprecate Legacy 7444 ftable.popup_label, 7445 ftable.cluster_attribute, 7446 ltable.dir, 7447 # Better to do a separate query 7448 #mtable.image, 7449 #mtable.height, 7450 #mtable.width, 7451 stable.marker_id, 7452 stable.opacity, 7453 stable.popup_format, 7454 # @ToDo: If-required 7455 #stable.url_format, 7456 stable.cluster_distance, 7457 stable.cluster_threshold, 7458 stable.style, 7459 left=left, 7460 limitby=(0, 1), 7461 orderby=orderby, 7462 ).first() 7463 _dir = layer.get("dir", row["gis_layer_config.dir"]) 7464 # Better to do a separate query 7465 #_marker = row["gis_marker"] 7466 _style = row["gis_style"] 7467 row = row["gis_layer_feature"] 7468 if row.use_site: 7469 maxdepth = 1 7470 else: 7471 maxdepth = 0 7472 opacity = layer.get("opacity", _style.opacity) or 1 7473 cluster_attribute = layer.get("cluster_attribute", 7474 row.cluster_attribute) or \ 7475 CLUSTER_ATTRIBUTE 7476 cluster_distance = layer.get("cluster_distance", 7477 _style.cluster_distance) or \ 7478 CLUSTER_DISTANCE 7479 cluster_threshold = layer.get("cluster_threshold", 7480 _style.cluster_threshold) 7481 if cluster_threshold is None: 7482 cluster_threshold = CLUSTER_THRESHOLD 7483 style = layer.get("style", None) 7484 if style: 7485 try: 7486 # JSON Object? 
7487 style = json.loads(style) 7488 except: 7489 current.log.error("Invalid Style: %s" % style) 7490 style = None 7491 else: 7492 style = _style.style 7493 #url_format = _style.url_format 7494 7495 aggregate = layer.get("aggregate", row.aggregate) 7496 if aggregate: 7497 url = "%s.geojson?layer=%i&show_ids=true" % \ 7498 (URL(c=row.controller, f=row.function, args="report"), 7499 row.layer_id) 7500 #if not url_format: 7501 # Use gis/location controller in all reports 7502 url_format = "%s/{id}.plain" % URL(c="gis", f="location") 7503 else: 7504 _url = URL(c=row.controller, f=row.function) 7505 url = "%s.geojson?layer=%i&components=None&show_ids=true&maxdepth=%s" % \ 7506 (_url, 7507 row.layer_id, 7508 maxdepth) 7509 #if not url_format: 7510 url_format = "%s/{id}.plain" % _url 7511 7512 # Use specified filter or fallback to the one in the layer 7513 _filter = layer.get("filter", row.filter) 7514 if _filter: 7515 url = "%s&%s" % (url, _filter) 7516 if row.trackable: 7517 url = "%s&track=1" % url 7518 if not style: 7519 marker = layer.get("marker") 7520 if marker: 7521 marker = Marker(marker).as_json_dict() 7522 elif _style.marker_id: 7523 marker = Marker(marker_id=_style.marker_id).as_json_dict() 7524 7525 popup_format = _style.popup_format 7526 if not popup_format: 7527 # Old-style 7528 popup_fields = row["popup_fields"] 7529 if popup_fields: 7530 popup_label = row["popup_label"] 7531 if popup_label: 7532 popup_format = "{%s} (%s)" % (popup_fields[0], 7533 current.T(popup_label)) 7534 else: 7535 popup_format = "%s" % popup_fields[0] 7536 for f in popup_fields[1:]: 7537 popup_format = "%s<br />{%s}" % (popup_format, f) 7538 7539 else: 7540 # URL to retrieve the data 7541 url = layer["url"] 7542 tablename = layer["tablename"] 7543 table = s3db[tablename] 7544 # Optimise the query 7545 if "location_id" in table.fields: 7546 maxdepth = 0 7547 elif "site_id" in table.fields: 7548 maxdepth = 1 7549 elif tablename == "gis_location": 7550 maxdepth = 0 7551 else: 7552 # Not much we can do! 7553 # @ToDo: Use Context 7554 continue 7555 options = "components=None&maxdepth=%s&show_ids=true" % maxdepth 7556 if "?" in url: 7557 url = "%s&%s" % (url, options) 7558 else: 7559 url = "%s?%s" % (url, options) 7560 opacity = layer.get("opacity", 1) 7561 cluster_attribute = layer.get("cluster_attribute", 7562 CLUSTER_ATTRIBUTE) 7563 cluster_distance = layer.get("cluster_distance", 7564 CLUSTER_DISTANCE) 7565 cluster_threshold = layer.get("cluster_threshold", 7566 CLUSTER_THRESHOLD) 7567 _dir = layer.get("dir", None) 7568 style = layer.get("style", None) 7569 if style: 7570 try: 7571 # JSON Object? 
7572 style = json.loads(style) 7573 except: 7574 current.log.error("Invalid Style: %s" % style) 7575 style = None 7576 if not style: 7577 marker = layer.get("marker", None) 7578 if marker: 7579 marker = Marker(marker).as_json_dict() 7580 popup_format = layer.get("popup_format") 7581 url_format = layer.get("url_format") 7582 7583 if "active" in layer and not layer["active"]: 7584 _layer["visibility"] = False 7585 if opacity != 1: 7586 _layer["opacity"] = "%.1f" % opacity 7587 if popup_format: 7588 if "T(" in popup_format: 7589 # i18n 7590 items = regex_translate.findall(popup_format) 7591 for item in items: 7592 titem = str(T(item[1:-1])) 7593 popup_format = popup_format.replace("T(%s)" % item, 7594 titem) 7595 _layer["popup_format"] = popup_format 7596 if url_format: 7597 _layer["url_format"] = url_format 7598 if cluster_attribute != CLUSTER_ATTRIBUTE: 7599 _layer["cluster_attribute"] = cluster_attribute 7600 if cluster_distance != CLUSTER_DISTANCE: 7601 _layer["cluster_distance"] = cluster_distance 7602 if cluster_threshold != CLUSTER_THRESHOLD: 7603 _layer["cluster_threshold"] = cluster_threshold 7604 if _dir: 7605 _layer["dir"] = _dir 7606 7607 if style: 7608 _layer["style"] = style 7609 elif marker: 7610 # Per-layer Marker 7611 _layer["marker"] = marker 7612 else: 7613 # Request the server to provide per-feature Markers 7614 url = "%s&markers=1" % url 7615 _layer["url"] = url 7616 append(_layer) 7617 7618 return layers_feature_resource
7619
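# Hypothetical usage sketch for addFeatureResources(): the resource name, table
# and URL below are illustrative assumptions, not values from this module.
def _example_add_feature_resources():
    feature_resources = [{"name": "Shelters",           # label shown in the layer tree
                          "id": "shelters",             # becomes the client-side layer id
                          "tablename": "cr_shelter",    # assumed table with a location_id
                          "url": URL(c="cr", f="shelter", extension="geojson"),
                          "active": False,              # activated later by the client
                          "opacity": 0.8,
                          }]
    # Returns a list of layer dicts ready to be passed to the map client (S3.gis)
    return addFeatureResources(feature_resources)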
# =============================================================================
class Layer(object):
    """
        Abstract base class for Layers from Catalogue
    """

    def __init__(self, all_layers):

        sublayers = []
        append = sublayers.append
        # List of Scripts to load async with the Map JavaScript
        self.scripts = []

        s3_has_role = current.auth.s3_has_role

        tablename = self.tablename
        table = current.s3db[tablename]
        fields = table.fields
        metafields = s3_all_meta_field_names()
        fields = [table[f] for f in fields if f not in metafields]
        layer_ids = [row["gis_layer_config.layer_id"] for row in all_layers if \
                     row["gis_layer_entity.instance_type"] == tablename]
        query = (table.layer_id.belongs(set(layer_ids)))
        rows = current.db(query).select(*fields)

        SubLayer = self.SubLayer
        # Flag to show whether we've set the default baselayer
        # (otherwise a config higher in the hierarchy can overrule one lower down)
        base = True
        # Layers requested to be visible via URL (e.g. embedded map)
        visible = current.request.get_vars.get("layers", None)
        if visible:
            visible = visible.split(".")
        else:
            visible = []
        metadata = current.deployment_settings.get_gis_layer_metadata()
        styled = self.style

        for record in rows:
            layer_id = record.layer_id

            # Find the 1st row in all_layers which matches this
            for row in all_layers:
                if row["gis_layer_config.layer_id"] == layer_id:
                    layer_config = row["gis_layer_config"]
                    break

            # Check if layer is enabled
            if layer_config.enabled is False:
                continue

            # Check user is allowed to access the layer
            role_required = record.role_required
            if role_required and not s3_has_role(role_required):
                continue

            # All OK - add SubLayer
            record["visible"] = layer_config.visible or str(layer_id) in visible
            if base and layer_config.base:
                # var name can't conflict with OSM/WMS/ArcREST layers
                record["_base"] = True
                base = False
            else:
                record["_base"] = False

            record["dir"] = layer_config.dir

            if styled:
                style = row.get("gis_style", None)
                if style:
                    style_dict = style.style
                    if isinstance(style_dict, basestring):
                        # Matryoshka (=double-serialized JSON)?
                        # - should no longer happen, but a (now-fixed) bug
                        #   regularly produced double-serialized JSON, so
                        #   catching it here to keep it working with legacy
                        #   databases:
                        try:
                            style_dict = json.loads(style_dict)
                        except ValueError:
                            pass
                    if style_dict:
                        record["style"] = style_dict
                    else:
                        record["style"] = None
                        marker = row.get("gis_marker", None)
                        if marker:
                            record["marker"] = Marker(marker)
                        #if style.marker_id:
                        #    record["marker"] = Marker(marker_id=style.marker_id)
                        else:
                            # Default Marker?
                            record["marker"] = Marker(tablename=tablename)
                    record["opacity"] = style.opacity or 1
                    record["popup_format"] = style.popup_format
                    record["cluster_distance"] = style.cluster_distance or CLUSTER_DISTANCE
                    if style.cluster_threshold != None:
                        record["cluster_threshold"] = style.cluster_threshold
                    else:
                        record["cluster_threshold"] = CLUSTER_THRESHOLD
                else:
                    record["style"] = None
                    record["opacity"] = 1
                    record["popup_format"] = None
                    record["cluster_distance"] = CLUSTER_DISTANCE
                    record["cluster_threshold"] = CLUSTER_THRESHOLD
                    # Default Marker?
                    record["marker"] = Marker(tablename=tablename)

            if metadata:
                post_id = row.get("cms_post_layer.post_id", None)
                record["post_id"] = post_id

            if tablename in ("gis_layer_bing", "gis_layer_google"):
                # SubLayers handled differently
                append(record)
            else:
                append(SubLayer(record))

        # Alphasort layers
        # - client will only sort within their type: s3.gis.layers.js
        self.sublayers = sorted(sublayers, key=lambda row: row.name)

    # -------------------------------------------------------------------------
    def as_dict(self, options=None):
        """
            Output the Layers as a Python dict
        """

        sublayer_dicts = []
        append = sublayer_dicts.append
        sublayers = self.sublayers
        for sublayer in sublayers:
            # Read the output dict for this sublayer
            sublayer_dict = sublayer.as_dict()
            if sublayer_dict:
                # Add this layer to the list of layers for this layer type
                append(sublayer_dict)

        if sublayer_dicts:
            if options:
                # Used by Map._setup()
                options[self.dictname] = sublayer_dicts
            else:
                # Used by as_json() and hence as_javascript()
                return sublayer_dicts

    # -------------------------------------------------------------------------
    def as_json(self):
        """
            Output the Layers as JSON
        """

        result = self.as_dict()
        if result:
            #return json.dumps(result, indent=4, separators=(",", ": "), sort_keys=True)
            return json.dumps(result, separators=SEPARATORS)

    # -------------------------------------------------------------------------
    def as_javascript(self):
        """
            Output the Layers as global Javascript
            - suitable for inclusion in the HTML page
        """

        result = self.as_json()
        if result:
            return '''S3.gis.%s=%s\n''' % (self.dictname, result)

    # -------------------------------------------------------------------------
    class SubLayer(object):
        def __init__(self, record):
            # Ensure all attributes available (even if Null)
            self.__dict__.update(record)
            del record
            if current.deployment_settings.get_L10n_translate_gis_layer():
                self.safe_name = re.sub('[\\"]', "", s3_str(current.T(self.name)))
            else:
                self.safe_name = re.sub('[\\"]', "", self.name)

            if hasattr(self, "projection_id"):
                self.projection = Projection(self.projection_id)

        def setup_clustering(self, output):
            if hasattr(self, "cluster_attribute"):
                cluster_attribute = self.cluster_attribute
            else:
                cluster_attribute = None
            cluster_distance = self.cluster_distance
            cluster_threshold = self.cluster_threshold
            if cluster_attribute and \
               cluster_attribute != CLUSTER_ATTRIBUTE:
                output["cluster_attribute"] = cluster_attribute
            if cluster_distance != CLUSTER_DISTANCE:
                output["cluster_distance"] = cluster_distance
            if cluster_threshold != CLUSTER_THRESHOLD:
                output["cluster_threshold"] = cluster_threshold

        def setup_folder(self, output):
            if self.dir:
                output["dir"] = s3_str(current.T(self.dir))

        def setup_folder_and_visibility(self, output):
            if not self.visible:
                output["visibility"] = False
            if self.dir:
                output["dir"] = s3_str(current.T(self.dir))

        def setup_folder_visibility_and_opacity(self, output):
            if not self.visible:
                output["visibility"] = False
            if self.opacity != 1:
                output["opacity"] = "%.1f" % self.opacity
            if self.dir:
                output["dir"] = s3_str(current.T(self.dir))

        # ---------------------------------------------------------------------
        @staticmethod
        def add_attributes_if_not_default(output, **values_and_defaults):
            # could also write values in debug mode, to check if defaults ignored.
            # could also check values are not being overwritten.
            for key, (value, defaults) in values_and_defaults.iteritems():
                if value not in defaults:
                    output[key] = value

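# Hypothetical subclass sketch: the concrete layer types below only declare
# tablename/dictname/style and override SubLayer.as_dict(); "gis_layer_example"
# and "layers_example" are illustrative names, not real tables in this module.
#
#   class LayerExample(Layer):
#       tablename = "gis_layer_example"     # catalogue table to read sublayers from
#       dictname = "layers_example"         # emitted as S3.gis.layers_example=[...]
#       style = False
#
#       class SubLayer(Layer.SubLayer):
#           def as_dict(self):
#               output = {"id": self.layer_id,
#                         "name": self.safe_name,
#                         "url": self.url,
#                         }
#               self.setup_folder_and_visibility(output)
#               return output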
# -----------------------------------------------------------------------------
class LayerArcREST(Layer):
    """
        ArcGIS REST Layers from Catalogue
    """

    tablename = "gis_layer_arcrest"
    dictname = "layers_arcrest"
    style = False

    # -------------------------------------------------------------------------
    class SubLayer(Layer.SubLayer):
        def as_dict(self):
            # Mandatory attributes
            output = {"id": self.layer_id,
                      "type": "arcrest",
                      "name": self.safe_name,
                      "url": self.url,
                      }

            # Attributes which are defaulted client-side if not set
            self.setup_folder_and_visibility(output)
            self.add_attributes_if_not_default(
                output,
                layers = (self.layers, ([0],)),
                transparent = (self.transparent, (True,)),
                base = (self.base, (False,)),
                _base = (self._base, (False,)),
                format = (self.img_format, ("png",)),
            )

            return output

7878 # ----------------------------------------------------------------------------- 7879 -class LayerBing(Layer):
7880 """ 7881 Bing Layers from Catalogue 7882 """ 7883 7884 tablename = "gis_layer_bing" 7885 dictname = "Bing" 7886 style = False 7887 7888 # -------------------------------------------------------------------------
7889 - def as_dict(self, options=None):
7890 sublayers = self.sublayers 7891 if sublayers: 7892 if Projection().epsg != 900913: 7893 raise Exception("Cannot display Bing layers unless we're using the Spherical Mercator Projection\n") 7894 apikey = current.deployment_settings.get_gis_api_bing() 7895 if not apikey: 7896 raise Exception("Cannot display Bing layers unless we have an API key\n") 7897 # Mandatory attributes 7898 ldict = {"ApiKey": apikey 7899 } 7900 7901 for sublayer in sublayers: 7902 # Attributes which are defaulted client-side if not set 7903 if sublayer._base: 7904 # Set default Base layer 7905 ldict["Base"] = sublayer.type 7906 if sublayer.type == "aerial": 7907 ldict["Aerial"] = {"name": sublayer.name or "Bing Satellite", 7908 "id": sublayer.layer_id} 7909 elif sublayer.type == "road": 7910 ldict["Road"] = {"name": sublayer.name or "Bing Roads", 7911 "id": sublayer.layer_id} 7912 elif sublayer.type == "hybrid": 7913 ldict["Hybrid"] = {"name": sublayer.name or "Bing Hybrid", 7914 "id": sublayer.layer_id} 7915 if options: 7916 # Used by Map._setup() 7917 options[self.dictname] = ldict 7918 else: 7919 # Used by as_json() and hence as_javascript() 7920 return ldict
7921
# -----------------------------------------------------------------------------
class LayerCoordinate(Layer):
    """
        Coordinate Layer from Catalogue
        - there should only be one of these
    """

    tablename = "gis_layer_coordinate"
    dictname = "CoordinateGrid"
    style = False

    # -------------------------------------------------------------------------
    def as_dict(self, options=None):
        sublayers = self.sublayers
        if sublayers:
            sublayer = sublayers[0]
            name_safe = re.sub("'", "", sublayer.name)
            ldict = {"name": name_safe,
                     "visibility": sublayer.visible,
                     "id": sublayer.layer_id,
                     }
            if options:
                # Used by Map._setup()
                options[self.dictname] = ldict
            else:
                # Used by as_json() and hence as_javascript()
                return ldict

7950 # ----------------------------------------------------------------------------- 7951 -class LayerEmpty(Layer):
7952 """ 7953 Empty Layer from Catalogue 7954 - there should only be one of these 7955 """ 7956 7957 tablename = "gis_layer_empty" 7958 dictname = "EmptyLayer" 7959 style = False 7960 7961 # -------------------------------------------------------------------------
7962 - def as_dict(self, options=None):
7963 sublayers = self.sublayers 7964 if sublayers: 7965 sublayer = sublayers[0] 7966 name = s3_str(current.T(sublayer.name)) 7967 name_safe = re.sub("'", "", name) 7968 ldict = {"name": name_safe, 7969 "id": sublayer.layer_id, 7970 } 7971 if sublayer._base: 7972 ldict["base"] = True 7973 if options: 7974 # Used by Map._setup() 7975 options[self.dictname] = ldict 7976 else: 7977 # Used by as_json() and hence as_javascript() 7978 return ldict
7979
7980 # ----------------------------------------------------------------------------- 7981 -class LayerFeature(Layer):
7982 """ 7983 Feature Layers from Catalogue 7984 """ 7985 7986 tablename = "gis_layer_feature" 7987 dictname = "layers_feature" 7988 style = True 7989 7990 # -------------------------------------------------------------------------
7991 - class SubLayer(Layer.SubLayer):
7992 - def __init__(self, record):
7993 controller = record.controller 7994 self.skip = False 7995 if controller is not None: 7996 if controller not in current.deployment_settings.modules: 7997 # Module is disabled 7998 self.skip = True 7999 if not current.auth.permission.has_permission("read", 8000 c=controller, 8001 f=record.function): 8002 # User has no permission to this resource (in ACL) 8003 self.skip = True 8004 else: 8005 error = "Feature Layer Record '%s' has no controller" % \ 8006 record.name 8007 raise Exception(error) 8008 super(LayerFeature.SubLayer, self).__init__(record)
8009
8010 - def as_dict(self):
8011 if self.skip: 8012 # Skip layer 8013 return 8014 # @ToDo: Option to force all filters via POST? 8015 if self.aggregate: 8016 # id is used for url_format 8017 url = "%s.geojson?layer=%i&show_ids=true" % \ 8018 (URL(c=self.controller, f=self.function, args="report"), 8019 self.layer_id) 8020 # Use gis/location controller in all reports 8021 url_format = "%s/{id}.plain" % URL(c="gis", f="location") 8022 else: 8023 if self.use_site: 8024 maxdepth = 1 8025 else: 8026 maxdepth = 0 8027 _url = URL(self.controller, self.function) 8028 # id is used for url_format 8029 url = "%s.geojson?layer=%i&components=None&maxdepth=%s&show_ids=true" % \ 8030 (_url, 8031 self.layer_id, 8032 maxdepth) 8033 url_format = "%s/{id}.plain" % _url 8034 if self.filter: 8035 url = "%s&%s" % (url, self.filter) 8036 if self.trackable: 8037 url = "%s&track=1" % url 8038 8039 # Mandatory attributes 8040 output = {"id": self.layer_id, 8041 # Defaults client-side if not-provided 8042 #"type": "feature", 8043 "name": self.safe_name, 8044 "url_format": url_format, 8045 "url": url, 8046 } 8047 8048 popup_format = self.popup_format 8049 if popup_format: 8050 # New-style 8051 if "T(" in popup_format: 8052 # i18n 8053 T = current.T 8054 items = regex_translate.findall(popup_format) 8055 for item in items: 8056 titem = str(T(item[1:-1])) 8057 popup_format = popup_format.replace("T(%s)" % item, 8058 titem) 8059 output["popup_format"] = popup_format 8060 else: 8061 # @ToDo: Deprecate 8062 popup_fields = self.popup_fields 8063 if popup_fields: 8064 # Old-style 8065 popup_label = self.popup_label 8066 if popup_label: 8067 popup_format = "{%s} (%s)" % (popup_fields[0], 8068 current.T(popup_label)) 8069 else: 8070 popup_format = "%s" % popup_fields[0] 8071 for f in popup_fields[1:]: 8072 popup_format = "%s<br/>{%s}" % (popup_format, f) 8073 output["popup_format"] = popup_format or "" 8074 8075 # Attributes which are defaulted client-side if not set 8076 self.setup_folder_visibility_and_opacity(output) 8077 self.setup_clustering(output) 8078 if self.aggregate: 8079 # Enable the Cluster Strategy, so that it can be enabled/disabled 8080 # depending on the zoom level & hence Points or Polygons 8081 output["cluster"] = 1 8082 if not popup_format: 8083 # Need this to differentiate from e.g. FeatureQueries 8084 output["no_popups"] = 1 8085 if self.style: 8086 output["style"] = self.style 8087 else: 8088 self.marker.add_attributes_to_output(output) 8089 8090 return output
8091
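# Note on popup_format (illustrative values): these strings combine {field}
# placeholders, substituted per-feature on the client, with T(...) tokens which
# are translated server-side above, e.g. a hypothetical
#     popup_format = '{name} (T("Status"))'
# reaches the client with the T(...) token replaced by its translation, while
# {name} is left for the client to fill from the feature attributes.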
8092 # ----------------------------------------------------------------------------- 8093 -class LayerGeoJSON(Layer):
8094 """ 8095 GeoJSON Layers from Catalogue 8096 """ 8097 8098 tablename = "gis_layer_geojson" 8099 dictname = "layers_geojson" 8100 style = True 8101 8102 # -------------------------------------------------------------------------
8103 - class SubLayer(Layer.SubLayer):
8104 - def as_dict(self):
8105 # Mandatory attributes 8106 output = {"id": self.layer_id, 8107 "type": "geojson", 8108 "name": self.safe_name, 8109 "url": self.url, 8110 } 8111 8112 # Attributes which are defaulted client-side if not set 8113 projection = self.projection 8114 if projection.epsg != 4326: 8115 output["projection"] = projection.epsg 8116 self.setup_folder_visibility_and_opacity(output) 8117 self.setup_clustering(output) 8118 if self.style: 8119 output["style"] = self.style 8120 else: 8121 self.marker.add_attributes_to_output(output) 8122 8123 popup_format = self.popup_format 8124 if popup_format: 8125 if "T(" in popup_format: 8126 # i18n 8127 T = current.T 8128 items = regex_translate.findall(popup_format) 8129 for item in items: 8130 titem = str(T(item[1:-1])) 8131 popup_format = popup_format.replace("T(%s)" % item, 8132 titem) 8133 output["popup_format"] = popup_format 8134 8135 return output
8136
8137 # ----------------------------------------------------------------------------- 8138 -class LayerGeoRSS(Layer):
8139 """ 8140 GeoRSS Layers from Catalogue 8141 """ 8142 8143 tablename = "gis_layer_georss" 8144 dictname = "layers_georss" 8145 style = True 8146
8147 - def __init__(self, all_layers):
8148 super(LayerGeoRSS, self).__init__(all_layers) 8149 LayerGeoRSS.SubLayer.cachetable = current.s3db.gis_cache
8150 8151 # -------------------------------------------------------------------------
8152 - class SubLayer(Layer.SubLayer):
8153 - def as_dict(self):
8154 db = current.db 8155 request = current.request 8156 response = current.response 8157 cachetable = self.cachetable 8158 8159 url = self.url 8160 # Check to see if we should Download layer to the cache 8161 download = True 8162 query = (cachetable.source == url) 8163 existing_cached_copy = db(query).select(cachetable.modified_on, 8164 limitby=(0, 1)).first() 8165 refresh = self.refresh or 900 # 15 minutes set if we have no data (legacy DB) 8166 if existing_cached_copy: 8167 modified_on = existing_cached_copy.modified_on 8168 cutoff = modified_on + datetime.timedelta(seconds=refresh) 8169 if request.utcnow < cutoff: 8170 download = False 8171 if download: 8172 # Download layer to the Cache 8173 from gluon.tools import fetch 8174 # @ToDo: Call directly without going via HTTP 8175 # @ToDo: Make this async by using S3Task (also use this for the refresh time) 8176 fields = "" 8177 if self.data: 8178 fields = "&data_field=%s" % self.data 8179 if self.image: 8180 fields = "%s&image_field=%s" % (fields, self.image) 8181 _url = "%s%s/update.georss?fetchurl=%s%s" % (current.deployment_settings.get_base_public_url(), 8182 URL(c="gis", f="cache_feed"), 8183 url, 8184 fields) 8185 # Keep Session for local URLs 8186 import Cookie 8187 cookie = Cookie.SimpleCookie() 8188 cookie[response.session_id_name] = response.session_id 8189 current.session._unlock(response) 8190 try: 8191 # @ToDo: Need to commit to not have DB locked with SQLite? 8192 fetch(_url, cookie=cookie) 8193 if existing_cached_copy: 8194 # Clear old selfs which are no longer active 8195 query = (cachetable.source == url) & \ 8196 (cachetable.modified_on < cutoff) 8197 db(query).delete() 8198 except Exception, exception: 8199 current.log.error("GeoRSS %s download error" % url, exception) 8200 # Feed down 8201 if existing_cached_copy: 8202 # Use cached copy 8203 # Should we Update timestamp to prevent every 8204 # subsequent request attempting the download? 8205 #query = (cachetable.source == url) 8206 #db(query).update(modified_on=request.utcnow) 8207 pass 8208 else: 8209 response.warning += "%s down & no cached copy available" % url 8210 8211 name_safe = self.safe_name 8212 8213 # Pass the GeoJSON URL to the client 8214 # Filter to the source of this feed 8215 url = "%s.geojson?cache.source=%s" % (URL(c="gis", f="cache_feed"), 8216 url) 8217 8218 # Mandatory attributes 8219 output = {"id": self.layer_id, 8220 "type": "georss", 8221 "name": name_safe, 8222 "url": url, 8223 } 8224 self.marker.add_attributes_to_output(output) 8225 8226 # Attributes which are defaulted client-side if not set 8227 if self.refresh != 900: 8228 output["refresh"] = self.refresh 8229 self.setup_folder_visibility_and_opacity(output) 8230 self.setup_clustering(output) 8231 8232 return output
8233
8234 # ----------------------------------------------------------------------------- 8235 -class LayerGoogle(Layer):
8236 """ 8237 Google Layers/Tools from Catalogue 8238 """ 8239 8240 tablename = "gis_layer_google" 8241 dictname = "Google" 8242 style = False 8243 8244 # -------------------------------------------------------------------------
8245 - def as_dict(self, options=None):
8246 sublayers = self.sublayers 8247 if sublayers: 8248 T = current.T 8249 spherical_mercator = (Projection().epsg == 900913) 8250 settings = current.deployment_settings 8251 apikey = settings.get_gis_api_google() 8252 s3 = current.response.s3 8253 debug = s3.debug 8254 # Google scripts use document.write so cannot be loaded async via yepnope.js 8255 s3_scripts = s3.scripts 8256 8257 ldict = {} 8258 8259 if spherical_mercator: 8260 # Earth was the only layer which can run in non-Spherical Mercator 8261 # @ToDo: Warning? 8262 for sublayer in sublayers: 8263 # Attributes which are defaulted client-side if not set 8264 #if sublayer.type == "earth": 8265 # # Deprecated: 8266 # # https://maps-apis.googleblog.com/2014/12/announcing-deprecation-of-google-earth.html 8267 # ldict["Earth"] = str(T("Switch to 3D")) 8268 # #{"modules":[{"name":"earth","version":"1"}]} 8269 # script = "//www.google.com/jsapi?key=" + apikey + "&autoload=%7B%22modules%22%3A%5B%7B%22name%22%3A%22earth%22%2C%22version%22%3A%221%22%7D%5D%7D" 8270 # if script not in s3_scripts: 8271 # s3_scripts.append(script) 8272 # # Dynamic Loading not supported: https://developers.google.com/loader/#Dynamic 8273 # #s3.jquery_ready.append('''try{google.load('earth','1')catch(e){}''') 8274 # if debug: 8275 # self.scripts.append("gis/gxp/widgets/GoogleEarthPanel.js") 8276 # else: 8277 # self.scripts.append("gis/gxp/widgets/GoogleEarthPanel.min.js") 8278 # s3.js_global.append('''S3.public_url="%s"''' % settings.get_base_public_url()) 8279 if sublayer._base: 8280 # Set default Base layer 8281 ldict["Base"] = sublayer.type 8282 if sublayer.type == "satellite": 8283 ldict["Satellite"] = {"name": sublayer.name or "Google Satellite", 8284 "id": sublayer.layer_id} 8285 elif sublayer.type == "maps": 8286 ldict["Maps"] = {"name": sublayer.name or "Google Maps", 8287 "id": sublayer.layer_id} 8288 elif sublayer.type == "hybrid": 8289 ldict["Hybrid"] = {"name": sublayer.name or "Google Hybrid", 8290 "id": sublayer.layer_id} 8291 elif sublayer.type == "streetview": 8292 ldict["StreetviewButton"] = "Click where you want to open Streetview" 8293 elif sublayer.type == "terrain": 8294 ldict["Terrain"] = {"name": sublayer.name or "Google Terrain", 8295 "id": sublayer.layer_id} 8296 elif sublayer.type == "mapmaker": 8297 ldict["MapMaker"] = {"name": sublayer.name or "Google MapMaker", 8298 "id": sublayer.layer_id} 8299 elif sublayer.type == "mapmakerhybrid": 8300 ldict["MapMakerHybrid"] = {"name": sublayer.name or "Google MapMaker Hybrid", 8301 "id": sublayer.layer_id} 8302 8303 if "MapMaker" in ldict or "MapMakerHybrid" in ldict: 8304 # Need to use v2 API 8305 # This should be able to be fixed in OpenLayers now since Google have fixed in v3 API: 8306 # http://code.google.com/p/gmaps-api-issues/issues/detail?id=2349#c47 8307 script = "//maps.google.com/maps?file=api&v=2&key=%s" % apikey 8308 if script not in s3_scripts: 8309 s3_scripts.append(script) 8310 else: 8311 # v3 API (3.0 gives us the latest frozen version, currently 3.30) 8312 # Note that it does give a warning: "Google Maps API warning: RetiredVersion" 8313 # https://developers.google.com/maps/documentation/javascript/versions 8314 script = "//maps.google.com/maps/api/js?v=3.0&key=%s" % apikey 8315 if script not in s3_scripts: 8316 s3_scripts.append(script) 8317 if "StreetviewButton" in ldict: 8318 # Streetview doesn't work with v2 API 8319 ldict["StreetviewButton"] = str(T("Click where you want to open Streetview")) 8320 ldict["StreetviewTitle"] = str(T("Street View")) 8321 if debug: 8322 
self.scripts.append("gis/gxp/widgets/GoogleStreetViewPanel.js") 8323 else: 8324 self.scripts.append("gis/gxp/widgets/GoogleStreetViewPanel.min.js") 8325 8326 if options: 8327 # Used by Map._setup() 8328 options[self.dictname] = ldict 8329 else: 8330 # Used by as_json() and hence as_javascript() 8331 return ldict
8332
8333 # ----------------------------------------------------------------------------- 8334 -class LayerGPX(Layer):
8335 """ 8336 GPX Layers from Catalogue 8337 """ 8338 8339 tablename = "gis_layer_gpx" 8340 dictname = "layers_gpx" 8341 style = True 8342 8343 # -------------------------------------------------------------------------
8344 - class SubLayer(Layer.SubLayer):
8345 - def as_dict(self):
8346 url = URL(c="default", f="download", 8347 args=self.track) 8348 8349 # Mandatory attributes 8350 output = {"id": self.layer_id, 8351 "name": self.safe_name, 8352 "url": url, 8353 } 8354 8355 # Attributes which are defaulted client-side if not set 8356 self.marker.add_attributes_to_output(output) 8357 self.add_attributes_if_not_default( 8358 output, 8359 waypoints = (self.waypoints, (True,)), 8360 tracks = (self.tracks, (True,)), 8361 routes = (self.routes, (True,)), 8362 ) 8363 self.setup_folder_visibility_and_opacity(output) 8364 self.setup_clustering(output) 8365 return output
8366
8367 # ----------------------------------------------------------------------------- 8368 -class LayerJS(Layer):
8369 """ 8370 JS Layers from Catalogue 8371 - these are raw Javascript layers for use by expert OpenLayers people 8372 to quickly add/configure new data sources without needing support 8373 from back-end Sahana programmers 8374 """ 8375 8376 tablename = "gis_layer_js" 8377 dictname = "layers_js" 8378 style = False 8379 8380 # -------------------------------------------------------------------------
8381 - def as_dict(self, options=None):
8382 sublayers = self.sublayers 8383 if sublayers: 8384 sublayer_dicts = [] 8385 append = sublayer_dicts.append 8386 for sublayer in sublayers: 8387 append(sublayer.code) 8388 if options: 8389 # Used by Map._setup() 8390 options[self.dictname] = sublayer_dicts 8391 else: 8392 # Used by as_json() and hence as_javascript() 8393 return sublayer_dicts
8394
8395 # ----------------------------------------------------------------------------- 8396 -class LayerKML(Layer):
8397 """ 8398 KML Layers from Catalogue 8399 """ 8400 8401 tablename = "gis_layer_kml" 8402 dictname = "layers_kml" 8403 style = True 8404 8405 # -------------------------------------------------------------------------
8406 - def __init__(self, all_layers, init=True):
8407 "Set up the KML cache, should be done once per request" 8408 8409 super(LayerKML, self).__init__(all_layers) 8410 8411 # Can we cache downloaded KML feeds? 8412 # Needed for unzipping & filtering as well 8413 # @ToDo: Should we move this folder to static to speed up access to cached content? 8414 # Do we need to secure it? 8415 request = current.request 8416 cachepath = os.path.join(request.folder, 8417 "uploads", 8418 "gis_cache") 8419 8420 if os.path.exists(cachepath): 8421 cacheable = os.access(cachepath, os.W_OK) 8422 else: 8423 try: 8424 os.mkdir(cachepath) 8425 except OSError, os_error: 8426 current.log.error("GIS: KML layers cannot be cached: %s %s" % \ 8427 (cachepath, os_error)) 8428 cacheable = False 8429 else: 8430 cacheable = True 8431 # @ToDo: Migrate to gis_cache 8432 LayerKML.cachetable = current.s3db.gis_cache2 8433 LayerKML.cacheable = cacheable 8434 LayerKML.cachepath = cachepath
8435 8436 # -------------------------------------------------------------------------
8437 - class SubLayer(Layer.SubLayer):
8438 - def as_dict(self):
8439 db = current.db 8440 request = current.request 8441 8442 cachetable = LayerKML.cachetable 8443 cacheable = LayerKML.cacheable 8444 #cachepath = LayerKML.cachepath 8445 8446 name = self.name 8447 if cacheable: 8448 _name = urllib2.quote(name) 8449 _name = _name.replace("%", "_") 8450 filename = "%s.file.%s.kml" % (cachetable._tablename, 8451 _name) 8452 8453 8454 # Should we download a fresh copy of the source file? 8455 download = True 8456 query = (cachetable.name == name) 8457 cached = db(query).select(cachetable.modified_on, 8458 limitby=(0, 1)).first() 8459 refresh = self.refresh or 900 # 15 minutes set if we have no data (legacy DB) 8460 if cached: 8461 modified_on = cached.modified_on 8462 cutoff = modified_on + datetime.timedelta(seconds=refresh) 8463 if request.utcnow < cutoff: 8464 download = False 8465 8466 if download: 8467 # Download file (async, if workers alive) 8468 response = current.response 8469 session_id_name = response.session_id_name 8470 session_id = response.session_id 8471 current.s3task.async("gis_download_kml", 8472 args=[self.id, filename, session_id_name, session_id]) 8473 if cached: 8474 db(query).update(modified_on=request.utcnow) 8475 else: 8476 cachetable.insert(name=name, file=filename) 8477 8478 url = URL(c="default", f="download", 8479 args=[filename]) 8480 else: 8481 # No caching possible (e.g. GAE), display file direct from remote (using Proxy) 8482 # (Requires OpenLayers.Layer.KML to be available) 8483 url = self.url 8484 8485 # Mandatory attributes 8486 output = {"id": self.layer_id, 8487 "name": self.safe_name, 8488 "url": url, 8489 } 8490 8491 # Attributes which are defaulted client-side if not set 8492 self.add_attributes_if_not_default( 8493 output, 8494 title = (self.title, ("name", None, "")), 8495 body = (self.body, ("description", None)), 8496 refresh = (self.refresh, (900,)), 8497 ) 8498 self.setup_folder_visibility_and_opacity(output) 8499 self.setup_clustering(output) 8500 if self.style: 8501 output["style"] = self.style 8502 else: 8503 self.marker.add_attributes_to_output(output) 8504 8505 return output
8506
8507 # ----------------------------------------------------------------------------- 8508 -class LayerOSM(Layer):
8509 """ 8510 OpenStreetMap Layers from Catalogue 8511 8512 @ToDo: Provide a catalogue of standard layers which are fully-defined 8513 in static & can just have name over-ridden, as well as 8514 fully-custom layers. 8515 """ 8516 8517 tablename = "gis_layer_openstreetmap" 8518 dictname = "layers_osm" 8519 style = False 8520 8521 # -------------------------------------------------------------------------
8522 - class SubLayer(Layer.SubLayer):
8523 - def as_dict(self):
8524 8525 if Projection().epsg != 900913: 8526 # Cannot display OpenStreetMap layers unless we're using the Spherical Mercator Projection 8527 return {} 8528 8529 # Mandatory attributes 8530 output = {"id": self.layer_id, 8531 "name": self.safe_name, 8532 "url1": self.url1, 8533 } 8534 8535 # Attributes which are defaulted client-side if not set 8536 self.add_attributes_if_not_default( 8537 output, 8538 base = (self.base, (True,)), 8539 _base = (self._base, (False,)), 8540 url2 = (self.url2, ("",)), 8541 url3 = (self.url3, ("",)), 8542 zoomLevels = (self.zoom_levels, (9,)), 8543 attribution = (self.attribution, (None,)), 8544 ) 8545 self.setup_folder_and_visibility(output) 8546 return output
8547
8548 # ----------------------------------------------------------------------------- 8549 -class LayerOpenWeatherMap(Layer):
8550 """ 8551 OpenWeatherMap Layers from Catalogue 8552 """ 8553 8554 tablename = "gis_layer_openweathermap" 8555 dictname = "OWM" 8556 style = False 8557 8558 # -------------------------------------------------------------------------
8559 - def as_dict(self, options=None):
8560 sublayers = self.sublayers 8561 if sublayers: 8562 if current.response.s3.debug: 8563 self.scripts.append("gis/OWM.OpenLayers.js") 8564 else: 8565 self.scripts.append("gis/OWM.OpenLayers.min.js") 8566 ldict = {} 8567 for sublayer in sublayers: 8568 if sublayer.type == "station": 8569 ldict["station"] = {"name": sublayer.name or "Weather Stations", 8570 "id": sublayer.layer_id, 8571 "dir": sublayer.dir, 8572 "visibility": sublayer.visible 8573 } 8574 elif sublayer.type == "city": 8575 ldict["city"] = {"name": sublayer.name or "Current Weather", 8576 "id": sublayer.layer_id, 8577 "dir": sublayer.dir, 8578 "visibility": sublayer.visible 8579 } 8580 if options: 8581 # Used by Map._setup() 8582 options[self.dictname] = ldict 8583 else: 8584 # Used by as_json() and hence as_javascript() 8585 return ldict
8586
8587 # ----------------------------------------------------------------------------- 8588 -class LayerShapefile(Layer):
8589 """ 8590 Shapefile Layers from Catalogue 8591 """ 8592 8593 tablename = "gis_layer_shapefile" 8594 dictname = "layers_shapefile" 8595 style = True 8596 8597 # -------------------------------------------------------------------------
8598 - class SubLayer(Layer.SubLayer):
8599 - def as_dict(self):
8600 url = "%s/%s/data.geojson" % \ 8601 (URL(c="gis", f="layer_shapefile"), self.id) 8602 if self.filter: 8603 url = "%s?layer_shapefile_%s.%s" % (url, self.id, self.filter) 8604 8605 # Mandatory attributes 8606 output = {"id": self.layer_id, 8607 "type": "shapefile", 8608 "name": self.safe_name, 8609 "url": url, 8610 # Shapefile layers don't alter their contents, so don't refresh 8611 "refresh": 0, 8612 } 8613 8614 # Attributes which are defaulted client-side if not set 8615 self.add_attributes_if_not_default( 8616 output, 8617 desc = (self.description, (None, "")), 8618 src = (self.source_name, (None, "")), 8619 src_url = (self.source_url, (None, "")), 8620 ) 8621 # We convert on-upload to have BBOX handling work properly 8622 #projection = self.projection 8623 #if projection.epsg != 4326: 8624 # output["projection"] = projection.epsg 8625 self.setup_folder_visibility_and_opacity(output) 8626 self.setup_clustering(output) 8627 if self.style: 8628 output["style"] = self.style 8629 else: 8630 self.marker.add_attributes_to_output(output) 8631 8632 return output
8633
8634 # ----------------------------------------------------------------------------- 8635 -class LayerTheme(Layer):
8636 """ 8637 Theme Layers from Catalogue 8638 """ 8639 8640 tablename = "gis_layer_theme" 8641 dictname = "layers_theme" 8642 style = True 8643 8644 # -------------------------------------------------------------------------
8645 - class SubLayer(Layer.SubLayer):
8646 - def as_dict(self):
8647 url = "%s.geojson?theme_data.layer_theme_id=%i&polygons=1&maxdepth=0" % \ 8648 (URL(c="gis", f="theme_data"), self.id) 8649 8650 # Mandatory attributes 8651 output = {"id": self.layer_id, 8652 "type": "theme", 8653 "name": self.safe_name, 8654 "url": url, 8655 } 8656 8657 # Attributes which are defaulted client-side if not set 8658 self.setup_folder_visibility_and_opacity(output) 8659 self.setup_clustering(output) 8660 style = self.style 8661 if style: 8662 output["style"] = style 8663 8664 return output
8665
8666 # ----------------------------------------------------------------------------- 8667 -class LayerTMS(Layer):
8668 """ 8669 TMS Layers from Catalogue 8670 """ 8671 8672 tablename = "gis_layer_tms" 8673 dictname = "layers_tms" 8674 style = False 8675 8676 # -------------------------------------------------------------------------
8677 - class SubLayer(Layer.SubLayer):
8678 - def as_dict(self):
8679 # Mandatory attributes 8680 output = {"id": self.layer_id, 8681 "type": "tms", 8682 "name": self.safe_name, 8683 "url": self.url, 8684 "layername": self.layername 8685 } 8686 8687 # Attributes which are defaulted client-side if not set 8688 self.add_attributes_if_not_default( 8689 output, 8690 _base = (self._base, (False,)), 8691 url2 = (self.url2, (None,)), 8692 url3 = (self.url3, (None,)), 8693 format = (self.img_format, ("png", None)), 8694 zoomLevels = (self.zoom_levels, (19,)), 8695 attribution = (self.attribution, (None,)), 8696 ) 8697 self.setup_folder(output) 8698 return output
8699
8700 # ----------------------------------------------------------------------------- 8701 -class LayerWFS(Layer):
8702 """ 8703 WFS Layers from Catalogue 8704 """ 8705 8706 tablename = "gis_layer_wfs" 8707 dictname = "layers_wfs" 8708 style = True 8709 8710 # -------------------------------------------------------------------------
8711 - class SubLayer(Layer.SubLayer):
8712 - def as_dict(self):
8713 # Mandatory attributes 8714 output = {"id": self.layer_id, 8715 "name": self.safe_name, 8716 "url": self.url, 8717 "title": self.title, 8718 "featureType": self.featureType, 8719 } 8720 8721 # Attributes which are defaulted client-side if not set 8722 self.add_attributes_if_not_default( 8723 output, 8724 version = (self.version, ("1.1.0",)), 8725 featureNS = (self.featureNS, (None, "")), 8726 geometryName = (self.geometryName, ("the_geom",)), 8727 schema = (self.wfs_schema, (None, "")), 8728 username = (self.username, (None, "")), 8729 password = (self.password, (None, "")), 8730 projection = (self.projection.epsg, (4326,)), 8731 desc = (self.description, (None, "")), 8732 src = (self.source_name, (None, "")), 8733 src_url = (self.source_url, (None, "")), 8734 refresh = (self.refresh, (0,)), 8735 #editable 8736 ) 8737 self.setup_folder_visibility_and_opacity(output) 8738 self.setup_clustering(output) 8739 if self.style: 8740 output["style"] = self.style 8741 else: 8742 self.marker.add_attributes_to_output(output) 8743 8744 return output
8745
8746 # ----------------------------------------------------------------------------- 8747 -class LayerWMS(Layer):
8748 """ 8749 WMS Layers from Catalogue 8750 """ 8751 8752 tablename = "gis_layer_wms" 8753 dictname = "layers_wms" 8754 style = False 8755 8756 # -------------------------------------------------------------------------
8757 - def __init__(self, all_layers):
8758 super(LayerWMS, self).__init__(all_layers) 8759 if self.sublayers: 8760 if current.response.s3.debug: 8761 self.scripts.append("gis/gxp/plugins/WMSGetFeatureInfo.js") 8762 else: 8763 self.scripts.append("gis/gxp/plugins/WMSGetFeatureInfo.min.js")
8764 8765 # -------------------------------------------------------------------------
8766 - class SubLayer(Layer.SubLayer):
8767 - def as_dict(self):
8768 if self.queryable: 8769 current.response.s3.gis.get_feature_info = True 8770 # Mandatory attributes 8771 output = {"id": self.layer_id, 8772 "name": self.safe_name, 8773 "url": self.url, 8774 "layers": self.layers, 8775 } 8776 8777 # Attributes which are defaulted client-side if not set 8778 legend_url = self.legend_url 8779 if legend_url and not legend_url.startswith("http"): 8780 legend_url = "%s/%s%s" % \ 8781 (current.deployment_settings.get_base_public_url(), 8782 current.request.application, 8783 legend_url) 8784 attr = {"transparent": (self.transparent, (True,)), 8785 "version": (self.version, ("1.1.1",)), 8786 "format": (self.img_format, ("image/png",)), 8787 "map": (self.map, (None, "")), 8788 "username": (self.username, (None, "")), 8789 "password": (self.password, (None, "")), 8790 "buffer": (self.buffer, (0,)), 8791 "base": (self.base, (False,)), 8792 "_base": (self._base, (False,)), 8793 "style": (self.style, (None, "")), 8794 "bgcolor": (self.bgcolor, (None, "")), 8795 "tiled": (self.tiled, (False,)), 8796 "legendURL": (legend_url, (None, "")), 8797 "queryable": (self.queryable, (False,)), 8798 "desc": (self.description, (None, "")), 8799 } 8800 8801 if current.deployment_settings.get_gis_layer_metadata(): 8802 # Use CMS to add info about sources 8803 attr["post_id"] = (self.post_id, (None, "")) 8804 else: 8805 # Link direct to sources 8806 attr.update(src = (self.source_name, (None, "")), 8807 src_url = (self.source_url, (None, "")), 8808 ) 8809 8810 self.add_attributes_if_not_default(output, **attr) 8811 self.setup_folder_visibility_and_opacity(output) 8812 8813 return output
8814
8815 # ----------------------------------------------------------------------------- 8816 -class LayerXYZ(Layer):
8817 """ 8818 XYZ Layers from Catalogue 8819 """ 8820 8821 tablename = "gis_layer_xyz" 8822 dictname = "layers_xyz" 8823 style = False 8824 8825 # -------------------------------------------------------------------------
8826 - class SubLayer(Layer.SubLayer):
8827 - def as_dict(self):
8828 # Mandatory attributes 8829 output = {"id": self.layer_id, 8830 "name": self.safe_name, 8831 "url": self.url 8832 } 8833 8834 # Attributes which are defaulted client-side if not set 8835 self.add_attributes_if_not_default( 8836 output, 8837 _base = (self._base, (False,)), 8838 url2 = (self.url2, (None,)), 8839 url3 = (self.url3, (None,)), 8840 format = (self.img_format, ("png", None)), 8841 zoomLevels = (self.zoom_levels, (19,)), 8842 attribution = (self.attribution, (None,)), 8843 ) 8844 self.setup_folder(output) 8845 return output
8846
# =============================================================================
class Marker(object):
    """
        Represents a Map Marker

        @ToDo: Support Markers in Themes
    """

    def __init__(self,
                 marker=None,
                 marker_id=None,
                 layer_id=None,
                 tablename=None):
        """
            @param marker: Storage object with image/height/width (looked-up in bulk)
            @param marker_id: id of record in gis_marker
            @param layer_id: layer_id to lookup marker in gis_style (unused)
            @param tablename: used to identify whether to provide a default marker as fallback
        """

        no_default = False
        if not marker:
            db = current.db
            s3db = current.s3db
            mtable = s3db.gis_marker
            config = None
            if marker_id:
                # Lookup the Marker details from its ID
                marker = db(mtable.id == marker_id).select(mtable.image,
                                                           mtable.height,
                                                           mtable.width,
                                                           limitby=(0, 1),
                                                           cache=s3db.cache
                                                           ).first()
            elif layer_id:
                # Check if we have a Marker defined for this Layer
                config = GIS.get_config()
                stable = s3db.gis_style
                query = (stable.layer_id == layer_id) & \
                        ((stable.config_id == config.id) | \
                         (stable.config_id == None)) & \
                        (stable.marker_id == mtable.id) & \
                        (stable.record_id == None)
                marker = db(query).select(mtable.image,
                                          mtable.height,
                                          mtable.width,
                                          limitby=(0, 1)).first()

            if not marker:
                # Check to see if we're a Polygon/LineString
                # (& hence shouldn't use a default marker)
                if tablename == "gis_layer_shapefile":
                    table = db.gis_layer_shapefile
                    query = (table.layer_id == layer_id)
                    layer = db(query).select(table.gis_feature_type,
                                             limitby=(0, 1)).first()
                    if layer and layer.gis_feature_type != 1:
                        no_default = True
                #elif tablename == "gis_layer_feature":
                #    table = db.gis_layer_feature
                #    query = (table.layer_id == layer_id)
                #    layer = db(query).select(table.polygons,
                #                             limitby=(0, 1)).first()
                #    if layer and layer.polygons:
                #        no_default = True

        if marker:
            self.image = marker["image"]
            self.height = marker["height"]
            self.width = marker["width"]
        elif no_default:
            self.image = None
        else:
            # Default Marker
            if not config:
                config = GIS.get_config()
            self.image = config.marker_image
            self.height = config.marker_height
            self.width = config.marker_width

    # -------------------------------------------------------------------------
    def add_attributes_to_output(self, output):
        """
            Called by Layer.as_dict()
        """

        if self.image:
            output["marker"] = self.as_json_dict()

    # -------------------------------------------------------------------------
    def as_dict(self):
        """
            Called by gis.get_marker(), feature_resources & s3profile
        """

        if self.image:
            marker = Storage(image = self.image,
                             height = self.height,
                             width = self.width,
                             )
        else:
            marker = None
        return marker

    # -------------------------------------------------------------------------
    #def as_json(self):
    #    """
    #        Called by nothing
    #    """

    #    output = dict(i = self.image,
    #                  h = self.height,
    #                  w = self.width,
    #                  )
    #    return json.dumps(output, separators=SEPARATORS)

    # -------------------------------------------------------------------------
    def as_json_dict(self):
        """
            Called by Style.as_dict() and add_attributes_to_output()
        """

        if self.image:
            marker = {"i": self.image,
                      "h": self.height,
                      "w": self.width,
                      }
        else:
            marker = None
        return marker

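# Hypothetical usage sketch for Marker: the record id below is an assumption.
def _example_marker_json():
    # Resolve a Marker from an explicit gis_marker record; without an id/layer
    # the active config's default marker is used instead
    marker = Marker(marker_id=42)
    # dict like {"i": <image>, "h": <height>, "w": <width>}, or None if no image
    return marker.as_json_dict()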
# =============================================================================
class Projection(object):
    """
        Represents a Map Projection
    """

    def __init__(self, projection_id=None):

        if projection_id:
            s3db = current.s3db
            table = s3db.gis_projection
            query = (table.id == projection_id)
            projection = current.db(query).select(table.epsg,
                                                  limitby=(0, 1),
                                                  cache=s3db.cache).first()
        else:
            # Default projection
            config = GIS.get_config()
            projection = Storage(epsg = config.epsg)

        self.epsg = projection.epsg

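# Usage sketch: Projection() without arguments reads the EPSG code of the
# active gis_config; layer classes above use this to skip or refuse rendering
# outside Spherical Mercator (e.g. LayerBing, LayerOSM).
def _example_spherical_mercator():
    return Projection().epsg == 900913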
9000 # ============================================================================= 9001 -class Style(object):
9002 """ 9003 Represents a Map Style 9004 """ 9005
9006 - def __init__(self, 9007 style_id=None, 9008 layer_id=None, 9009 aggregate=None):
9010 9011 db = current.db 9012 s3db = current.s3db 9013 table = s3db.gis_style 9014 fields = [table.marker_id, 9015 table.opacity, 9016 table.popup_format, 9017 # @ToDo: if-required 9018 #table.url_format, 9019 table.cluster_distance, 9020 table.cluster_threshold, 9021 table.style, 9022 ] 9023 9024 if style_id: 9025 query = (table.id == style_id) 9026 limitby = (0, 1) 9027 9028 elif layer_id: 9029 config = GIS.get_config() 9030 # @ToDo: if record_id: 9031 query = (table.layer_id == layer_id) & \ 9032 (table.record_id == None) & \ 9033 ((table.config_id == config.id) | \ 9034 (table.config_id == None)) 9035 if aggregate is not None: 9036 query &= (table.aggregate == aggregate) 9037 fields.append(table.config_id) 9038 limitby = (0, 2) 9039 9040 else: 9041 # Default style for this config 9042 # - falling back to Default config 9043 config = GIS.get_config() 9044 ctable = db.gis_config 9045 query = (table.config_id == ctable.id) & \ 9046 ((ctable.id == config.id) | \ 9047 (ctable.uuid == "SITE_DEFAULT")) & \ 9048 (table.layer_id == None) 9049 fields.append(ctable.uuid) 9050 limitby = (0, 2) 9051 9052 styles = db(query).select(*fields, 9053 limitby=limitby) 9054 9055 if len(styles) > 1: 9056 if layer_id: 9057 # Remove the general one 9058 _filter = lambda row: row.config_id == None 9059 else: 9060 # Remove the Site Default 9061 _filter = lambda row: row["gis_config.uuid"] == "SITE_DEFAULT" 9062 styles.exclude(_filter) 9063 9064 if styles: 9065 style = styles.first() 9066 if not layer_id and "gis_style" in style: 9067 style = style["gis_style"] 9068 else: 9069 current.log.error("Style not found!") 9070 style = None 9071 9072 if style: 9073 if style.marker_id: 9074 style.marker = Marker(marker_id=style.marker_id) 9075 if aggregate is True: 9076 # Use gis/location controller in all reports 9077 style.url_format = "%s/{id}.plain" % URL(c="gis", f="location") 9078 elif layer_id: 9079 # Build from controller/function 9080 ftable = s3db.gis_layer_feature 9081 layer = db(ftable.layer_id == layer_id).select(ftable.controller, 9082 ftable.function, 9083 limitby=(0, 1) 9084 ).first() 9085 if layer: 9086 style.url_format = "%s/{id}.plain" % \ 9087 URL(c=layer.controller, f=layer.function) 9088 9089 self.style = style
9090 9091 # -------------------------------------------------------------------------
9092 - def as_dict(self):
9093 """ 9094 9095 """ 9096 9097 # Not JSON-serializable 9098 #return self.style 9099 style = self.style 9100 output = Storage() 9101 if not style: 9102 return output 9103 if hasattr(style, "marker"): 9104 output.marker = style.marker.as_json_dict() 9105 opacity = style.opacity 9106 if opacity and opacity not in (1, 1.0): 9107 output.opacity = style.opacity 9108 if style.popup_format: 9109 output.popup_format = style.popup_format 9110 if style.url_format: 9111 output.url_format = style.url_format 9112 cluster_distance = style.cluster_distance 9113 if cluster_distance is not None and \ 9114 cluster_distance != CLUSTER_DISTANCE: 9115 output.cluster_distance = cluster_distance 9116 cluster_threshold = style.cluster_threshold 9117 if cluster_threshold is not None and \ 9118 cluster_threshold != CLUSTER_THRESHOLD: 9119 output.cluster_threshold = cluster_threshold 9120 if style.style: 9121 if isinstance(style.style, basestring): 9122 # Native JSON 9123 try: 9124 style.style = json.loads(style.style) 9125 except: 9126 current.log.error("Unable to decode Style: %s" % style.style) 9127 style.style = None 9128 output.style = style.style 9129 return output
9130
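# Hypothetical usage sketch for Style: the layer_id below is an assumption.
def _example_style_dict():
    # Lookup the style for a catalogue layer, preferring a config-specific
    # record over the general one
    style = Style(layer_id=5)
    # Storage which may contain: marker, opacity, popup_format, url_format,
    # cluster_distance, cluster_threshold, style (the feature style rules)
    return style.as_dict()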
9131 # ============================================================================= 9132 -class S3Map(S3Method):
9133 """ 9134 Class to generate a Map linked to Search filters 9135 """ 9136 9137 # -------------------------------------------------------------------------
9138 - def apply_method(self, r, **attr):
9139 """ 9140 Entry point to apply map method to S3Requests 9141 - produces a full page with S3FilterWidgets above a Map 9142 9143 @param r: the S3Request instance 9144 @param attr: controller attributes for the request 9145 9146 @return: output object to send to the view 9147 """ 9148 9149 if r.http == "GET": 9150 representation = r.representation 9151 if representation == "html": 9152 return self.page(r, **attr) 9153 9154 else: 9155 r.error(405, current.ERROR.BAD_METHOD)
9156 9157 # -------------------------------------------------------------------------
9158 - def page(self, r, **attr):
9159 """ 9160 Map page 9161 9162 @param r: the S3Request instance 9163 @param attr: controller attributes for the request 9164 """ 9165 9166 if r.representation in ("html", "iframe"): 9167 9168 response = current.response 9169 resource = self.resource 9170 get_config = resource.get_config 9171 tablename = resource.tablename 9172 9173 widget_id = "default_map" 9174 9175 output = {} 9176 9177 title = self.crud_string(tablename, "title_map") 9178 output["title"] = title 9179 9180 # Filter widgets 9181 filter_widgets = get_config("filter_widgets", None) 9182 if filter_widgets and not self.hide_filter: 9183 advanced = False 9184 for widget in filter_widgets: 9185 if "hidden" in widget.opts and widget.opts.hidden: 9186 advanced = resource.get_config("map_advanced", True) 9187 break 9188 9189 request = self.request 9190 from s3filter import S3FilterForm 9191 # Apply filter defaults (before rendering the data!) 9192 S3FilterForm.apply_filter_defaults(r, resource) 9193 filter_formstyle = get_config("filter_formstyle", None) 9194 submit = resource.get_config("map_submit", True) 9195 filter_form = S3FilterForm(filter_widgets, 9196 formstyle=filter_formstyle, 9197 advanced=advanced, 9198 submit=submit, 9199 ajax=True, 9200 # URL to update the Filter Widget Status 9201 ajaxurl=r.url(method="filter", 9202 vars={}, 9203 representation="options"), 9204 _class="filter-form", 9205 _id="%s-filter-form" % widget_id, 9206 ) 9207 get_vars = request.get_vars 9208 filter_form = filter_form.html(resource, get_vars=get_vars, target=widget_id) 9209 else: 9210 # Render as empty string to avoid the exception in the view 9211 filter_form = "" 9212 9213 output["form"] = filter_form 9214 9215 # Map 9216 output["map"] = self.widget(r, widget_id=widget_id, 9217 callback='''S3.search.s3map()''', **attr) 9218 9219 # View 9220 response.view = self._view(r, "map.html") 9221 9222 return output 9223 9224 else: 9225 r.error(415, current.ERROR.BAD_FORMAT)
9226 9227 # -------------------------------------------------------------------------
9228 - def widget(self, 9229 r, 9230 method="map", 9231 widget_id=None, 9232 visible=True, 9233 callback=None, 9234 **attr):
9235 """ 9236 Render a Map widget suitable for use in an S3Filter-based page 9237 such as S3Summary 9238 9239 @param r: the S3Request 9240 @param method: the widget method 9241 @param widget_id: the widget ID 9242 @param callback: None by default in case DIV is hidden 9243 @param visible: whether the widget is initially visible 9244 @param attr: controller attributes 9245 """ 9246 9247 if not widget_id: 9248 widget_id = "default_map" 9249 9250 gis = current.gis 9251 tablename = self.tablename 9252 9253 ftable = current.s3db.gis_layer_feature 9254 9255 def lookup_layer(prefix, name): 9256 query = (ftable.controller == prefix) & \ 9257 (ftable.function == name) 9258 layers = current.db(query).select(ftable.layer_id, 9259 ftable.style_default, 9260 ) 9261 if len(layers) > 1: 9262 layers.exclude(lambda row: row.style_default == False) 9263 if len(layers) == 1: 9264 layer_id = layers.first().layer_id 9265 else: 9266 # We can't distinguish 9267 layer_id = None 9268 return layer_id
9269 9270 prefix = r.controller 9271 name = r.function 9272 layer_id = lookup_layer(prefix, name) 9273 if not layer_id: 9274 # Try the tablename 9275 prefix, name = tablename.split("_", 1) 9276 layer_id = lookup_layer(prefix, name) 9277 9278 url = URL(extension="geojson", args=None, vars=r.get_vars) 9279 9280 # @ToDo: Support maps with multiple layers (Dashboards) 9281 #_id = "search_results_%s" % widget_id 9282 _id = "search_results" 9283 feature_resources = [{"name" : current.T("Search Results"), 9284 "id" : _id, 9285 "layer_id" : layer_id, 9286 "tablename" : tablename, 9287 "url" : url, 9288 # We activate in callback after ensuring URL is updated for current filter status 9289 "active" : False, 9290 }] 9291 settings = current.deployment_settings 9292 catalogue_layers = settings.get_gis_widget_catalogue_layers() 9293 legend = settings.get_gis_legend() 9294 search = settings.get_gis_search_geonames() 9295 toolbar = settings.get_gis_toolbar() 9296 wms_browser = settings.get_gis_widget_wms_browser() 9297 if wms_browser: 9298 config = gis.get_config() 9299 if config.wmsbrowser_url: 9300 wms_browser = wms_browser = {"name" : config.wmsbrowser_name, 9301 "url" : config.wmsbrowser_url, 9302 } 9303 else: 9304 wms_browser = None 9305 9306 map = gis.show_map(id = widget_id, 9307 feature_resources = feature_resources, 9308 catalogue_layers = catalogue_layers, 9309 collapsed = True, 9310 legend = legend, 9311 toolbar = toolbar, 9312 save = False, 9313 search = search, 9314 wms_browser = wms_browser, 9315 callback = callback, 9316 ) 9317 return map

# =============================================================================
class S3ExportPOI(S3Method):
9321 """ Export point-of-interest resources for a location """ 9322 9323 # -------------------------------------------------------------------------
    def apply_method(self, r, **attr):
9325 """ 9326 Apply method. 9327 9328 @param r: the S3Request 9329 @param attr: controller options for this request 9330 """ 9331 9332 output = {} 9333 9334 if r.http == "GET": 9335 output = self.export(r, **attr) 9336 else: 9337 r.error(405, current.ERROR.BAD_METHOD) 9338 9339 return output

    # -------------------------------------------------------------------------
    def export(self, r, **attr):
9343 """ 9344 Export POI resources. 9345 9346 URL options: 9347 9348 - "resources" list of tablenames to export records from 9349 9350 - "msince" datetime in ISO format, "auto" to use the 9351 feed's last update 9352 9353 - "update_feed" 0 to skip the update of the feed's last 9354 update datetime, useful for trial exports 9355 9356 Supported formats: 9357 9358 .xml S3XML 9359 .osm OSM XML Format 9360 .kml Google KML 9361 9362 (other formats can be requested, but may give unexpected results) 9363 9364 @param r: the S3Request 9365 @param attr: controller options for this request 9366 """ 9367 9368 # Determine request Lx 9369 current_lx = r.record 9370 if not current_lx: # or not current_lx.level: 9371 # Must have a location 9372 r.error(400, current.ERROR.BAD_REQUEST) 9373 else: 9374 self.lx = current_lx.id 9375 9376 tables = [] 9377 # Parse the ?resources= parameter 9378 if "resources" in r.get_vars: 9379 resources = r.get_vars["resources"] 9380 else: 9381 # Fallback to deployment_setting 9382 resources = current.deployment_settings.get_gis_poi_export_resources() 9383 if not isinstance(resources, list): 9384 resources = [resources] 9385 [tables.extend(t.split(",")) for t in resources] 9386 9387 # Parse the ?update_feed= parameter 9388 update_feed = True 9389 if "update_feed" in r.get_vars: 9390 _update_feed = r.get_vars["update_feed"] 9391 if _update_feed == "0": 9392 update_feed = False 9393 9394 # Parse the ?msince= parameter 9395 msince = None 9396 if "msince" in r.get_vars: 9397 msince = r.get_vars["msince"] 9398 if msince.lower() == "auto": 9399 msince = "auto" 9400 else: 9401 msince = s3_parse_datetime(msince) 9402 9403 # Export a combined tree 9404 tree = self.export_combined_tree(tables, 9405 msince=msince, 9406 update_feed=update_feed) 9407 9408 xml = current.xml 9409 9410 # Set response headers 9411 response = current.response 9412 s3 = response.s3 9413 headers = response.headers 9414 representation = r.representation 9415 if r.representation in s3.json_formats: 9416 as_json = True 9417 default = "application/json" 9418 else: 9419 as_json = False 9420 default = "text/xml" 9421 headers["Content-Type"] = s3.content_type.get(representation, 9422 default) 9423 9424 # Find XSLT stylesheet and transform 9425 stylesheet = r.stylesheet() 9426 if tree and stylesheet is not None: 9427 args = Storage(domain=xml.domain, 9428 base_url=s3.base_url, 9429 utcnow=s3_format_datetime()) 9430 tree = xml.transform(tree, stylesheet, **args) 9431 if tree: 9432 if as_json: 9433 output = xml.tree2json(tree, pretty_print=True) 9434 else: 9435 output = xml.tostring(tree, pretty_print=True) 9436 9437 return output

    # -------------------------------------------------------------------------
    def export_combined_tree(self, tables, msince=None, update_feed=True):
9441 """ 9442 Export a combined tree of all records in tables, which 9443 are in Lx, and have been updated since msince. 9444 9445 @param tables: list of table names 9446 @param msince: minimum modified_on datetime, "auto" for 9447 automatic from feed data, None to turn it off 9448 @param update_feed: update the last_update datetime in the feed 9449 """ 9450 9451 db = current.db 9452 s3db = current.s3db 9453 ftable = s3db.gis_poi_feed 9454 9455 lx = self.lx 9456 9457 elements = [] 9458 for tablename in tables: 9459 9460 # Define the resource 9461 try: 9462 resource = s3db.resource(tablename, components=[]) 9463 except AttributeError: 9464 # Table not defined (module deactivated?) 9465 continue 9466 9467 # Check 9468 if "location_id" not in resource.fields: 9469 # Hardly a POI resource without location_id 9470 continue 9471 9472 # Add Lx filter 9473 self._add_lx_filter(resource, lx) 9474 9475 # Get the feed data 9476 query = (ftable.tablename == tablename) & \ 9477 (ftable.location_id == lx) 9478 feed = db(query).select(limitby=(0, 1)).first() 9479 if msince == "auto": 9480 if feed is None: 9481 _msince = None 9482 else: 9483 _msince = feed.last_update 9484 else: 9485 _msince = msince 9486 9487 # Export the tree and append its element to the element list 9488 tree = resource.export_tree(msince=_msince, 9489 references=["location_id"]) 9490 9491 # Update the feed data 9492 if update_feed: 9493 muntil = resource.muntil 9494 if feed is None: 9495 ftable.insert(location_id = lx, 9496 tablename = tablename, 9497 last_update = muntil) 9498 else: 9499 feed.update_record(last_update = muntil) 9500 9501 elements.extend([c for c in tree.getroot()]) 9502 9503 # Combine all elements in one tree and return it 9504 tree = current.xml.tree(elements, results=len(elements)) 9505 return tree

    # -------------------------------------------------------------------------
    @staticmethod
    def _add_lx_filter(resource, lx):
9510 """ 9511 Add a Lx filter for the current location to this 9512 resource. 9513 9514 @param resource: the resource 9515 """ 9516 9517 from s3query import FS 9518 query = (FS("location_id$path").contains("/%s/" % lx)) | \ 9519 (FS("location_id$path").like("%s/%%" % lx)) 9520 resource.add_filter(query)

# =============================================================================
class S3ImportPOI(S3Method):
9524 """ 9525 Import point-of-interest resources for a location 9526 """ 9527 9528 # ------------------------------------------------------------------------- 9529 @staticmethod
    def apply_method(r, **attr):
9531 """ 9532 Apply method. 9533 9534 @param r: the S3Request 9535 @param attr: controller options for this request 9536 """ 9537 9538 if r.representation == "html": 9539 9540 T = current.T 9541 s3db = current.s3db 9542 request = current.request 9543 response = current.response 9544 settings = current.deployment_settings 9545 s3 = current.response.s3 9546 9547 title = T("Import from OpenStreetMap") 9548 9549 resources_list = settings.get_gis_poi_export_resources() 9550 uploadpath = os.path.join(request.folder,"uploads/") 9551 from s3utils import s3_yes_no_represent 9552 9553 fields = [Field("text1", # Dummy Field to add text inside the Form 9554 label = "", 9555 default = T("Can read PoIs either from an OpenStreetMap file (.osm) or mirror."), 9556 writable = False), 9557 Field("file", "upload", 9558 length = current.MAX_FILENAME_LENGTH, 9559 uploadfolder = uploadpath, 9560 label = T("File")), 9561 Field("text2", # Dummy Field to add text inside the Form 9562 label = "", 9563 default = "Or", 9564 writable = False), 9565 Field("host", 9566 default = "localhost", 9567 label = T("Host")), 9568 Field("database", 9569 default = "osm", 9570 label = T("Database")), 9571 Field("user", 9572 default = "osm", 9573 label = T("User")), 9574 Field("password", "string", 9575 default = "planet", 9576 label = T("Password")), 9577 Field("ignore_errors", "boolean", 9578 label = T("Ignore Errors?"), 9579 represent = s3_yes_no_represent), 9580 Field("resources", 9581 label = T("Select resources to import"), 9582 requires = IS_IN_SET(resources_list, multiple=True), 9583 default = resources_list, 9584 widget = SQLFORM.widgets.checkboxes.widget) 9585 ] 9586 9587 if not r.id: 9588 from s3validators import IS_LOCATION 9589 from s3widgets import S3LocationAutocompleteWidget 9590 # dummy field 9591 field = s3db.org_office.location_id 9592 field.requires = IS_EMPTY_OR(IS_LOCATION()) 9593 field.widget = S3LocationAutocompleteWidget() 9594 fields.insert(3, field) 9595 9596 from s3utils import s3_mark_required 9597 labels, required = s3_mark_required(fields, ["file", "location_id"]) 9598 s3.has_required = True 9599 9600 form = SQLFORM.factory(*fields, 9601 formstyle = settings.get_ui_formstyle(), 9602 submit_button = T("Import"), 9603 labels = labels, 9604 separator = "", 9605 table_name = "import_poi" # Dummy table name 9606 ) 9607 9608 response.view = "create.html" 9609 output = {"title": title, 9610 "form": form, 9611 } 9612 9613 if form.accepts(request.vars, current.session): 9614 form_vars = form.vars 9615 if form_vars.file != "": 9616 File = open(uploadpath + form_vars.file, "r") 9617 else: 9618 # Create .poly file 9619 if r.record: 9620 record = r.record 9621 elif not form_vars.location_id: 9622 form.errors["location_id"] = T("Location is Required!") 9623 return output 9624 else: 9625 gtable = s3db.gis_location 9626 record = current.db(gtable.id == form_vars.location_id).select(gtable.name, 9627 gtable.wkt, 9628 limitby=(0, 1) 9629 ).first() 9630 if record.wkt is None: 9631 form.errors["location_id"] = T("Location needs to have WKT!") 9632 return output 9633 error = GIS.create_poly(record) 9634 if error: 9635 current.session.error = error 9636 redirect(URL(args=r.id)) 9637 # Use Osmosis to extract an .osm file using this .poly 9638 name = record.name 9639 if os.path.exists(os.path.join(os.getcwd(), "temp")): # use web2py/temp 9640 TEMP = os.path.join(os.getcwd(), "temp") 9641 else: 9642 import tempfile 9643 TEMP = tempfile.gettempdir() 9644 filename = os.path.join(TEMP, "%s.osm" % name) 9645 cmd = 
["/home/osm/osmosis/bin/osmosis", # @ToDo: deployment_setting 9646 "--read-pgsql", 9647 "host=%s" % form_vars.host, 9648 "database=%s" % form_vars.database, 9649 "user=%s" % form_vars.user, 9650 "password=%s" % form_vars.password, 9651 "--dataset-dump", 9652 "--bounding-polygon", 9653 "file=%s" % os.path.join(TEMP, "%s.poly" % name), 9654 "--write-xml", 9655 "file=%s" % filename, 9656 ] 9657 import subprocess 9658 try: 9659 #result = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) 9660 subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) 9661 except subprocess.CalledProcessError, e: 9662 current.session.error = T("OSM file generation failed: %s") % e.output 9663 redirect(URL(args=r.id)) 9664 except AttributeError: 9665 # Python < 2.7 9666 error = subprocess.call(cmd, shell=True) 9667 if error: 9668 current.log.debug(cmd) 9669 current.session.error = T("OSM file generation failed!") 9670 redirect(URL(args=r.id)) 9671 try: 9672 File = open(filename, "r") 9673 except: 9674 current.session.error = T("Cannot open created OSM file!") 9675 redirect(URL(args=r.id)) 9676 9677 stylesheet = os.path.join(request.folder, "static", "formats", 9678 "osm", "import.xsl") 9679 ignore_errors = form_vars.get("ignore_errors", None) 9680 xml = current.xml 9681 tree = xml.parse(File) 9682 define_resource = s3db.resource 9683 response.error = "" 9684 import_count = 0 9685 9686 import_res = list(set(form_vars["resources"]) & \ 9687 set(resources_list)) 9688 9689 for tablename in import_res: 9690 try: 9691 s3db[tablename] 9692 except: 9693 # Module disabled 9694 continue 9695 resource = define_resource(tablename) 9696 s3xml = xml.transform(tree, stylesheet_path=stylesheet, 9697 name=resource.name) 9698 try: 9699 resource.import_xml(s3xml, 9700 ignore_errors=ignore_errors) 9701 import_count += resource.import_count 9702 except: 9703 response.error += str(sys.exc_info()[1]) 9704 if import_count: 9705 response.confirmation = "%s %s" % \ 9706 (import_count, 9707 T("PoIs successfully imported.")) 9708 else: 9709 response.information = T("No PoIs available.") 9710 9711 return output 9712 9713 else: 9714 raise HTTP(405, current.ERROR.BAD_METHOD)

# END =========================================================================