Treeview now loads in QGIS
timlinux committed Sep 14, 2024
1 parent c2e32fc commit 4d781da
Showing 21 changed files with 2,198 additions and 7 deletions.
12 changes: 8 additions & 4 deletions geest/__init__.py
@@ -28,9 +28,9 @@

#from .geest import Geest
#from .core import RenderQueue, setting
from .core import setting
from .core import setting, JSONValidator
from .utilities import resources_path
from .gui import GeestOptionsFactory
from .gui import GeestOptionsFactory, GeestDock


def classFactory(iface): # pylint: disable=missing-function-docstring
@@ -53,12 +53,16 @@ def __init__(self, iface):
def initGui(self): # pylint: disable=missing-function-docstring

#self.render_queue = RenderQueue()
icon = QIcon(resources_path("icons", "geest-main.svg"))
icon = QIcon(resources_path("resources", "geest-main.svg"))

# Validate our json schema first
#validator = JSONValidator('resources/schema.json', 'resources/model.json')
#validator.validate_json()

self.run_action = QAction(icon, "GEEST", self.iface.mainWindow())
self.run_action.triggered.connect(self.run)
self.iface.addToolBarIcon(self.run_action)
self.dock_widget = QDockWidget("GEEST", self.iface.mainWindow())
self.dock_widget = GeestDock(parent=self.iface.mainWindow(), json_file=resources_path("resources", "model.json"))
self.dock_widget.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea)
#self.dock_widget.setWidget(Geest(self.iface.mainWindow(), self.iface))
self.dock_widget.setFloating(False)
1 change: 1 addition & 0 deletions geest/core/__init__.py
@@ -11,5 +11,6 @@
# InvalidAnimationParametersException,
#)
from .default_settings import default_settings
from .json_validator import JSONValidator
#from .movie_creator import MovieFormat, MovieCommandGenerator, MovieCreationTask
#from .render_queue import RenderJob, RenderQueue
111 changes: 111 additions & 0 deletions geest/core/generate_model.py
@@ -0,0 +1,111 @@
#!/usr/bin/env python

import pandas as pd
import json
import os

class SpreadsheetToJsonParser:
def __init__(self, spreadsheet_path):
"""
Constructor for SpreadsheetToJsonParser class.
Takes in the path to an ODS spreadsheet file.
"""
self.spreadsheet_path = spreadsheet_path
self.dataframe = None
self.result = {"dimensions": []}

def load_spreadsheet(self):
"""
Load the spreadsheet and preprocess it.
"""
# Load the ODS spreadsheet
self.dataframe = pd.read_excel(self.spreadsheet_path, engine='odf', skiprows=1)

# Select only the relevant columns
self.dataframe = self.dataframe[['DIMENSION', 'FACTOR', 'Layer', 'Source', 'Indicator', 'Query', 'Text',
'Default Weighting', 'Use Aggregate', 'Default Index Score', 'Index Score',
'Use default Idex Score', 'Rasterise Raster', 'Rasterise Polygon',
'Rasterise Polyline', 'Rasterise Point', 'Default Buffer Distances',
'Use Buffer point', 'Default pixel', 'Use Create Grid', 'Default Mode',
'Default Measurement', 'Default Increments', 'Use Mode of Travel']]

# Fill NaN values in 'DIMENSION' and 'FACTOR' columns to propagate their values downwards for hierarchical grouping
self.dataframe['DIMENSION'] = self.dataframe['DIMENSION'].ffill()
self.dataframe['FACTOR'] = self.dataframe['FACTOR'].ffill()

def parse_to_json(self):
"""
Parse the dataframe into the hierarchical JSON structure.
"""
dimension_map = {}

for _, row in self.dataframe.iterrows():
dimension = row['DIMENSION']
factor = row['FACTOR']
layer_data = {
"layer": row['Layer'],
'Text': row['Source'] if not pd.isna(row['Source']) else "",
'Default Weighting': row['Default Weighting'] if not pd.isna(row['Default Weighting']) else "",
'Use Aggregate': row['Use Aggregate'] if not pd.isna(row['Use Aggregate']) else "",
'Default Index Score': row['Default Index Score'] if not pd.isna(row['Default Index Score']) else "",
'Index Score': row['Index Score'] if not pd.isna(row['Index Score']) else "",
'Use default Idex Score': row['Use default Idex Score'] if not pd.isna(row['Use default Idex Score']) else "",
'Rasterise Raster': row['Rasterise Raster'] if not pd.isna(row['Rasterise Raster']) else "",
'Rasterise Polygon': row['Rasterise Polygon'] if not pd.isna(row['Rasterise Polygon']) else "",
'Rasterise Polyline': row['Rasterise Polyline'] if not pd.isna(row['Rasterise Polyline']) else "",
'Rasterise Point': row['Rasterise Point'] if not pd.isna(row['Rasterise Point']) else "",
'Default Buffer Distances': row['Default Buffer Distances'] if not pd.isna(row['Default Buffer Distances']) else "",
'Use Buffer point': row['Use Buffer point'] if not pd.isna(row['Use Buffer point']) else "",
'Default pixel': row['Default pixel'] if not pd.isna(row['Default pixel']) else "",
'Use Create Grid': row['Use Create Grid'] if not pd.isna(row['Use Create Grid']) else "",
'Default Mode': row['Default Mode'] if not pd.isna(row['Default Mode']) else "",
'Default Measurement': row['Default Measurement'] if not pd.isna(row['Default Measurement']) else "",
'Default Increments': row['Default Increments'] if not pd.isna(row['Default Increments']) else "",
'Use Mode of Travel': row['Use Mode of Travel'] if not pd.isna(row['Use Mode of Travel']) else "",
"source": row['Source'] if not pd.isna(row['Source']) else "",
"indicator": row['Indicator'] if not pd.isna(row['Indicator']) else "",
"query": row['Query'] if not pd.isna(row['Query']) else ""
}

# If the dimension doesn't exist yet, create it
if dimension not in dimension_map:
new_dimension = {
"name": dimension,
"factors": []
}
self.result["dimensions"].append(new_dimension)
dimension_map[dimension] = new_dimension

# If the factor doesn't exist in the current dimension, add it
factor_map = {f['name']: f for f in dimension_map[dimension]["factors"]}
if factor not in factor_map:
new_factor = {
"name": factor,
"layers": []
}
dimension_map[dimension]["factors"].append(new_factor)
factor_map[factor] = new_factor

# Add layer data to the current factor
factor_map[factor]["layers"].append(layer_data)

def get_json(self):
"""
Return the parsed JSON structure.
"""
return self.result

def save_json_to_file(self, output_json_path='model.json'):
"""
Save the parsed JSON structure to a file.
"""
with open(output_json_path, 'w') as json_file:
json.dump(self.result, json_file, indent=4)
print(f"JSON data has been saved to {output_json_path}")

# Example usage:
# parser = SpreadsheetToJsonParser('geest2.ods')
# parser.load_spreadsheet()
# parser.parse_to_json()
# json_data = parser.get_json()
# parser.save_json_to_file('output.json')
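
For orientation, parse_to_json() nests layers under factors under dimensions. A minimal sketch of the resulting document follows; every name and value in it is an illustrative placeholder, not content taken from the real spreadsheet.

# Illustrative shape of the generated JSON (placeholder values only):
example_output = {
    "dimensions": [
        {
            "name": "Example Dimension",
            "factors": [
                {
                    "name": "Example Factor",
                    "layers": [
                        {
                            "layer": "Example Layer",
                            "source": "Example Source",
                            "indicator": "Example Indicator",
                            "query": "",
                            # ...plus the remaining spreadsheet-driven keys,
                            # e.g. 'Default Weighting', 'Use Aggregate', etc.
                        }
                    ]
                }
            ]
        }
    ]
}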
43 changes: 43 additions & 0 deletions geest/core/json_validator.py
@@ -0,0 +1,43 @@
#!/usr/bin/env python
import json
import jsonschema
from jsonschema import validate

class JSONValidator:
def __init__(self, json_schema_path, json_data_path):
"""
Constructor for the JSONValidator class.
Takes paths for the JSON schema and the JSON document to be validated.
"""
self.json_schema_path = json_schema_path
self.json_data_path = json_data_path
self.json_schema = self.load_json(json_schema_path)
self.json_data = self.load_json(json_data_path)

def load_json(self, file_path):
"""
Load JSON from the given file path.
"""
try:
with open(file_path, 'r') as file:
return json.load(file)
except Exception as e:
print(f"Error loading JSON file: {file_path}")
print(f"Details: {e}")
return None

def validate_json(self):
"""
Validate the JSON data against the JSON schema.
"""
try:
# Perform validation
validate(instance=self.json_data, schema=self.json_schema)
print("Validation successful: The JSON document is valid.")
except jsonschema.exceptions.ValidationError as err:
print("Validation error: The JSON document is invalid.")
print(f"Error details: {err.message}")

# Example usage:
# validator = JSONValidator('schema.json', 'model.json')
# validator.validate_json()
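
As a minimal, standalone sketch of what the wrapped validate() call reports (using a tiny inline schema rather than the schema.json and model.json files shipped with the plugin), a document missing a required property fails like this:

# Standalone sketch only -- toy inline schema, not the plugin's real schema.json.
from jsonschema import validate, exceptions

toy_schema = {
    "type": "object",
    "properties": {"dimensions": {"type": "array"}},
    "required": ["dimensions"],
}

try:
    # "dimension" (singular) is deliberately wrong, so validation fails
    validate(instance={"dimension": []}, schema=toy_schema)
except exceptions.ValidationError as err:
    print(f"Validation error: {err.message}")  # -> 'dimensions' is a required property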
133 changes: 133 additions & 0 deletions geest/core/osm_data_downloader.py
@@ -0,0 +1,133 @@
import xml.etree.ElementTree as ET
from qgis.core import (
    QgsProject, QgsVectorLayer, QgsFeature, QgsGeometry,
    QgsPointXY, QgsPolygon, QgsFields, QgsField, QgsCoordinateReferenceSystem,
    QgsVectorFileWriter, QgsApplication, QgsBlockingNetworkRequest
)
from qgis.PyQt.QtCore import QByteArray, QUrl, QObject, QVariant
from qgis.PyQt.QtNetwork import QNetworkRequest

# See https://gis.stackexchange.com/questions/343126/performing-sync-or-async-network-request-in-pyqgis
# for notes on when to use the synchronous QgsBlockingNetworkRequest versus an asynchronous request
class OsmDataDownloader(QObject):
def __init__(self, query: str = "", output_path: str = "", parent=None):
"""
:param query: Overpass API query as a string
:param output_path: File path for saving the output shapefile
"""
super().__init__(parent)
self.query = query
self.output_path = output_path

def send_query(self):
"""
Sends the Overpass API query using QgsBlockingNetworkRequest to fetch OSM data synchronously.
"""
        url = QUrl("http://overpass-api.de/api/interpreter")
        request = QNetworkRequest(url)
        request.setHeader(QNetworkRequest.ContentTypeHeader, "application/x-www-form-urlencoded")

        # Send the POST request synchronously using QgsBlockingNetworkRequest
        blocking_request = QgsBlockingNetworkRequest()
        error_code = blocking_request.post(request, QByteArray(self.query.encode('utf-8')))

        # Check for errors before reading the reply
        if error_code != QgsBlockingNetworkRequest.NoError:
            print(f"Network Error: {blocking_request.errorMessage()}")
            return None

        # Return the response body decoded as text
        return blocking_request.reply().content().data().decode('utf-8')

def download_line_data(self):
"""
Processes line-based OSM data (e.g., footpaths) and saves it as a shapefile.
"""
data = self.send_query()
if not data:
return

# Parse the XML
root = ET.fromstring(data)

# Create a new layer to store the line-based data
crs = QgsCoordinateReferenceSystem(4326) # WGS 84
layer = QgsVectorLayer("LineString?crs=EPSG:4326", "Lines", "memory")
pr = layer.dataProvider()

# Add attributes
pr.addAttributes([QgsField("osm_id", QVariant.String)])
layer.updateFields()

# Iterate over the ways and extract coordinates
for way in root.findall(".//way"):
osm_id = way.get('id')
coords = []
for nd in way.findall("nd"):
ref = nd.get('ref')
node = root.find(f".//node[@id='{ref}']")
lat = float(node.get('lat'))
lon = float(node.get('lon'))
coords.append(QgsPointXY(lon, lat))

# Create a feature
feature = QgsFeature()
feature.setGeometry(QgsGeometry.fromPolylineXY(coords))
feature.setAttributes([osm_id])
pr.addFeatures([feature])

# Add the layer to the QGIS project
QgsProject.instance().addMapLayer(layer)

# Save to a shapefile
QgsVectorFileWriter.writeAsVectorFormat(layer, self.output_path, "UTF-8", crs, "ESRI Shapefile")
print(f"Line-based shapefile saved to {self.output_path}")

def download_polygon_data(self):
"""
Processes polygon-based OSM data (e.g., buildings) and saves it as a shapefile.
"""
data = self.send_query()
if not data:
return

# Parse the XML
root = ET.fromstring(data)

# Create a new layer to store the polygon-based data
crs = QgsCoordinateReferenceSystem(4326) # WGS 84
layer = QgsVectorLayer("Polygon?crs=EPSG:4326", "Polygons", "memory")
pr = layer.dataProvider()

# Add attributes
pr.addAttributes([QgsField("osm_id", QVariant.String)])
layer.updateFields()

# Iterate over the ways and extract coordinates (forming polygons)
for way in root.findall(".//way"):
osm_id = way.get('id')
coords = []
for nd in way.findall("nd"):
ref = nd.get('ref')
node = root.find(f".//node[@id='{ref}']")
lat = float(node.get('lat'))
lon = float(node.get('lon'))
coords.append(QgsPointXY(lon, lat))

# Close the polygon (by connecting the first and last points)
if coords[0] != coords[-1]:
coords.append(coords[0])

# Create a feature
feature = QgsFeature()
feature.setGeometry(QgsGeometry.fromPolygonXY([coords]))
feature.setAttributes([osm_id])
pr.addFeatures([feature])

# Add the layer to the QGIS project
QgsProject.instance().addMapLayer(layer)

# Save to a shapefile
QgsVectorFileWriter.writeAsVectorFormat(layer, self.output_path, "UTF-8", crs, "ESRI Shapefile")
print(f"Polygon-based shapefile saved to {self.output_path}")
Binary file added geest/geest2.ods
Binary file not shown.
2 changes: 2 additions & 0 deletions geest/gui/__init__.py
@@ -3,3 +3,5 @@
"""

from .geest_settings import GeestOptionsFactory
from .geest_dock import GeestDock
from .geest_treeview import JsonTreeItem, JsonTreeModel, CustomTreeView
