Compare commits

...

15 Commits

56 changed files with 1353 additions and 328 deletions

1
.gitignore vendored
View File

@ -22,7 +22,6 @@ eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/

View File

@ -1,4 +1,3 @@
Django backend, vue.js frontend
Tagging support (tag roads, trails, etc)
Sharing (share individual items or select items or tags to include)
Organization by folder

View File

@ -1 +1,2 @@
Recall
Centerline

View File

@ -0,0 +1,4 @@
```sql
GRANT ALL ON SCHEMA public TO geobackend;
GRANT ALL ON SCHEMA public TO public;
```

View File

@ -0,0 +1,3 @@
Test Accounts:
admin1:hei8iWae

View File

@ -1,4 +1,5 @@
import logging
import sys
import threading
import time
@ -7,14 +8,21 @@ from geo_lib.daemon.workers.importer import import_worker
from geo_lib.redis import flush_redis
logging.basicConfig(level=logging.INFO) # TODO: config level
_logger = logging.getLogger("DAEMON")
flush_redis()
if __name__ == "__main__":
flush_redis()
# TODO: config
Database.initialise(minconn=1, maxconn=100, host='h.postgres.nb', database='geobackend', user='geobackend', password='juu1waigu1pookee1ohcierahMoofie3')
# TODO: config
Database.initialise(minconn=1, maxconn=100, host='h.postgres.nb', database='geobackend', user='geobackend', password='juu1waigu1pookee1ohcierahMoofie3')
import_thread = threading.Thread(target=import_worker)
import_thread.start()
import_thread = threading.Thread(target=import_worker)
import_thread.start()
_logger.info('Started importer')
while True:
time.sleep(3600)
while True:
try:
time.sleep(3600)
except KeyboardInterrupt:
# TODO: shut down workers
sys.exit(0)

View File

@ -5,9 +5,19 @@ from django.db import models
class ImportQueue(models.Model):
id = models.AutoField(primary_key=True)
user = models.ForeignKey(get_user_model(), on_delete=models.CASCADE)
geojson = models.JSONField(default=dict)
imported = models.BooleanField(default=False)
geofeatures = models.JSONField(default=list)
original_filename = models.TextField()
raw_kml = models.TextField()
raw_kml_hash = models.CharField(max_length=64, unique=True)
data = models.JSONField(default=dict)
log = models.JSONField(default=list)
timestamp = models.DateTimeField(auto_now_add=True)
class FeatureStore(models.Model):
id = models.AutoField(primary_key=True)
user = models.ForeignKey(get_user_model(), on_delete=models.CASCADE)
source = models.ForeignKey(ImportQueue, on_delete=models.SET_NULL, null=True)
geojson = models.JSONField(null=False)
timestamp = models.DateTimeField(auto_now_add=True)

View File

@ -1,10 +1,14 @@
from django.urls import path
from data.views.import_item import upload_item, fetch_import_queue, fetch_queued, delete_import_queue
from data.views.import_item import upload_item, fetch_import_queue, fetch_import_waiting, delete_import_item, update_import_item, fetch_import_history, fetch_import_history_item, import_to_featurestore
urlpatterns = [
path('item/import/upload/', upload_item, name='upload_file'),
path('item/import/get/<int:id>', fetch_import_queue, name='fetch_import_queue'),
path('item/import/get/mine', fetch_queued, name='fetch_queued'),
path('item/import/delete/<int:id>', delete_import_queue, name='delete_import_queue'),
path('item/import/upload', upload_item),
path('item/import/get/<int:item_id>', fetch_import_queue),
path('item/import/get', fetch_import_waiting),
path('item/import/get/history', fetch_import_history),
path('item/import/get/history/<int:item_id>', fetch_import_history_item),
path('item/import/delete/<int:id>', delete_import_item),
path('item/import/update/<int:item_id>', update_import_item),
path('item/import/perform/<int:item_id>', import_to_featurestore),
]

View File

@ -6,9 +6,14 @@ from django import forms
from django.core.serializers.json import DjangoJSONEncoder
from django.db import IntegrityError
from django.http import HttpResponse, JsonResponse
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.http import require_http_methods
from data.models import ImportQueue
from geo_lib.spatial.kml import kmz_to_kml
from data.models import ImportQueue, FeatureStore
from geo_lib.daemon.database.locking import DBLockManager
from geo_lib.daemon.workers.workers_lib.importer.kml import kmz_to_kml
from geo_lib.daemon.workers.workers_lib.importer.tagging import generate_auto_tags
from geo_lib.types.feature import GeoPoint, GeoLineString, GeoPolygon
from geo_lib.website.auth import login_required_401
@ -33,7 +38,7 @@ def upload_item(request):
try:
kml_doc = kmz_to_kml(file_data)
except Exception as e:
except:
print(traceback.format_exc()) # TODO: logging
return JsonResponse({'success': False, 'msg': 'failed to parse KML/KMZ', 'id': None}, status=400)
@ -45,7 +50,6 @@ def upload_item(request):
import_queue = ImportQueue.objects.get(
raw_kml=kml_doc,
raw_kml_hash=_hash_kml(kml_doc),
# original_filename=file_name,
user=request.user
)
msg = 'upload successful'
@ -62,34 +66,57 @@ def upload_item(request):
@login_required_401
def fetch_import_queue(request, id):
if id is None:
def fetch_import_queue(request, item_id):
if item_id is None:
return JsonResponse({'success': False, 'msg': 'ID not provided', 'code': 400}, status=400)
lock_manager = DBLockManager()
try:
queue = ImportQueue.objects.get(id=id)
if queue.user_id != request.user.id:
return JsonResponse({'success': False, 'msg': 'not authorized to view this item', 'code': 403}, status=400)
if len(queue.geojson):
return JsonResponse({'success': True, 'geojson': queue.geojson}, status=200)
return JsonResponse({'success': True, 'geojson': {}, 'msg': 'uploaded data still processing'}, status=200)
item = ImportQueue.objects.get(id=item_id)
if item.user_id != request.user.id:
return JsonResponse({'success': False, 'processing': False, 'msg': 'not authorized to view this item', 'code': 403}, status=400)
if item.imported:
return JsonResponse({'success': False, 'processing': False, 'msg': 'item already imported', 'code': 400}, status=400)
if not lock_manager.is_locked('data_importqueue', item.id) and (len(item.geofeatures) or len(item.log)):
return JsonResponse({'success': True, 'processing': False, 'geofeatures': item.geofeatures, 'log': item.log, 'msg': None, 'original_filename': item.original_filename}, status=200)
return JsonResponse({'success': True, 'processing': True, 'geofeatures': [], 'log': [], 'msg': 'uploaded data still processing'}, status=200)
except ImportQueue.DoesNotExist:
return JsonResponse({'success': False, 'msg': 'ID does not exist', 'code': 404}, status=400)
@login_required_401
def fetch_queued(request):
user_items = ImportQueue.objects.filter(user=request.user).values('id', 'geojson', 'original_filename', 'raw_kml_hash', 'data', 'timestamp')
def fetch_import_waiting(request):
user_items = ImportQueue.objects.exclude(data__contains='[]').filter(user=request.user, imported=False).values('id', 'geofeatures', 'original_filename', 'raw_kml_hash', 'data', 'log', 'timestamp', 'imported')
data = json.loads(json.dumps(list(user_items), cls=DjangoJSONEncoder))
lock_manager = DBLockManager()
for i, item in enumerate(data):
count = len(item['geojson'].get('features', []))
item['processing'] = len(item['geojson']) == 0
count = len(item['geofeatures'])
item['processing'] = not (len(item['geofeatures']) and len(item['log'])) and lock_manager.is_locked('data_importqueue', item['id'])
item['feature_count'] = count
del item['geojson']
del item['geofeatures']
return JsonResponse({'data': data, 'msg': None})
@login_required_401
def fetch_import_history(request):
user_items = ImportQueue.objects.filter(imported=True).values('id', 'original_filename', 'timestamp')
data = json.loads(json.dumps(list(user_items), cls=DjangoJSONEncoder))
return JsonResponse({'data': data})
@login_required_401
def delete_import_queue(request, id):
def fetch_import_history_item(request, item_id: int):
item = ImportQueue.objects.get(id=item_id)
if item.user_id != request.user.id:
return JsonResponse({'success': False, 'msg': 'not authorized to view this item', 'code': 403}, status=400)
response = HttpResponse(item.raw_kml, content_type='application/octet-stream')
response['Content-Disposition'] = 'attachment; filename="%s"' % item.original_filename
return response
@login_required_401
def delete_import_item(request, id):
if request.method == 'DELETE':
try:
queue = ImportQueue.objects.get(id=id)
@ -100,6 +127,84 @@ def delete_import_queue(request, id):
return HttpResponse(status=405)
@login_required_401
@csrf_protect # TODO: put this on all routes
@require_http_methods(["PUT"])
def update_import_item(request, item_id):
try:
queue = ImportQueue.objects.get(id=item_id)
except ImportQueue.DoesNotExist:
return JsonResponse({'success': False, 'msg': 'ID does not exist', 'code': 404}, status=400)
if queue.user_id != request.user.id:
return JsonResponse({'success': False, 'msg': 'not authorized to edit this item', 'code': 403}, status=403)
try:
data = json.loads(request.body)
if not isinstance(data, list):
raise ValueError('Invalid data format. Expected a list.')
except (json.JSONDecodeError, ValueError) as e:
return JsonResponse({'success': False, 'msg': str(e), 'code': 400}, status=400)
parsed_data = []
for feature in data:
match feature['type'].lower():
case 'point':
c = GeoPoint(**feature)
case 'linestring':
c = GeoLineString(**feature)
case 'polygon':
c = GeoPolygon(**feature)
case _:
continue
# Generate the tags after the user has made their changes.
c.properties.tags = generate_auto_tags(c)
parsed_data.append(json.loads(c.model_dump_json()))
# Update the data column with the new data
queue.data = parsed_data
queue.save()
return JsonResponse({'success': True, 'msg': 'Item updated successfully'})
@login_required_401
@csrf_protect # TODO: put this on all routes
@require_http_methods(["POST"])
def import_to_featurestore(request, item_id):
try:
import_item = ImportQueue.objects.get(id=item_id)
except ImportQueue.DoesNotExist:
return JsonResponse({'success': False, 'msg': 'ID does not exist', 'code': 404}, status=400)
if import_item.user_id != request.user.id:
return JsonResponse({'success': False, 'msg': 'not authorized to edit this item', 'code': 403}, status=403)
import_item.imported = True
i = 0
for feature in import_item.geofeatures:
match feature['type'].lower():
case 'point':
c = GeoPoint(**feature)
case 'linestring':
c = GeoLineString(**feature)
case 'polygon':
c = GeoPolygon(**feature)
case _:
continue
data = json.loads(c.model_dump_json())
feature = FeatureStore.objects.create(geojson=data, source=import_item, user=request.user)
feature.save()
i += 1
# Erase the geofeatures column
import_item.geofeatures = []
import_item.save()
return JsonResponse({'success': True, 'msg': f'Successfully imported {i} items'})
def _hash_kml(b: str):
if not isinstance(b, bytes):
b = b.encode()

2
src/geo-backend/dev-server.sh Executable file
View File

@ -0,0 +1,2 @@
#!/bin/bash
# Development convenience script: run the Django dev server on the default port.
./manage.py runserver

2
src/geo-backend/dev-workers.sh Executable file
View File

@ -0,0 +1,2 @@
#!/bin/bash
# Development convenience script: run the background worker daemon
# (starts the importer thread — see daemon.py).
python3 ./daemon.py

View File

@ -4,31 +4,42 @@ import redis
from redis.exceptions import LockError
def _get_lock_name(table_name: str, primary_key: str):
return f'database_lock_{table_name}:{primary_key}'
class DBLockManager:
_redis = redis.Redis(host='localhost', port=6379, db=0)
locks = {}
locks_lock = threading.Lock()
_locks = {}
def __init__(self, worker_id):
def __init__(self, worker_id=None):
self.worker_id = worker_id
self._read_only = worker_id is None
def lock_row(self, table_name: str, primary_key):
lock = self._redis.lock(f'database_lock_{table_name}:{primary_key}')
if self._read_only:
raise Exception('Cannot lock row in read-only mode')
lock_name = _get_lock_name(table_name, primary_key)
lock = self._redis.lock(lock_name)
if lock.acquire(blocking=False):
with self.locks_lock:
self.locks[f'{table_name}:{primary_key}'] = lock
self._locks[lock_name] = lock
return True
else:
return False
return False
def unlock_row(self, table_name: str, primary_key):
with self.locks_lock:
lock = self.locks.get(f'{table_name}:{primary_key}')
if lock:
try:
lock.release()
return True
except LockError:
return False
else:
if self._read_only:
raise Exception('Cannot unlock row in read-only mode')
lock_name = _get_lock_name(table_name, primary_key)
lock = self._locks.get(lock_name)
if lock is None:
return False
try:
lock.release()
del self._locks[lock_name]
return True
except LockError:
return False
def is_locked(self, table_name: str, primary_key):
return self._redis.lock(_get_lock_name(table_name, primary_key)).locked()

View File

@ -8,12 +8,14 @@ from psycopg2.extras import RealDictCursor
from geo_lib.daemon.database.connection import CursorFromConnectionFromPool
from geo_lib.daemon.database.locking import DBLockManager
from geo_lib.daemon.workers.workers_lib.importer.kml import kml_to_geojson
from geo_lib.daemon.workers.workers_lib.importer.logging import ImportLog
from geo_lib.logging.database import log_to_db, DatabaseLogLevel, DatabaseLogSource
from geo_lib.spatial.kml import kml_to_geojson
from geo_lib.time import get_time_ms
from geo_lib.types.feature import geojson_to_geofeature
_SQL_GET_UNPROCESSED_ITEMS = "SELECT * FROM public.data_importqueue WHERE geojson = '{}' ORDER BY id ASC"
_SQL_INSERT_PROCESSED_ITEM = "UPDATE public.data_importqueue SET geojson = %s WHERE id = %s"
_SQL_GET_UNPROCESSED_ITEMS = "SELECT * FROM public.data_importqueue WHERE geofeatures = '[]'::jsonb AND imported = false ORDER BY id ASC"
_SQL_INSERT_PROCESSED_ITEM = "UPDATE public.data_importqueue SET geofeatures = %s, log = %s WHERE id = %s"
_SQL_DELETE_ITEM = "DELETE FROM public.data_importqueue WHERE id = %s"
_logger = logging.getLogger("DAEMON").getChild("IMPORTER")
@ -23,15 +25,15 @@ _logger = logging.getLogger("DAEMON").getChild("IMPORTER")
def import_worker():
worker_id = str(uuid4())
lock_manager = DBLockManager(worker_id=worker_id)
lock_manager = DBLockManager(worker_id)
while True:
queue = []
with CursorFromConnectionFromPool(cursor_factory=RealDictCursor) as cursor:
cursor.execute(_SQL_GET_UNPROCESSED_ITEMS)
import_queue_items = cursor.fetchall()
for item in import_queue_items:
if lock_manager.lock_row('data_importqueue', item['id']):
queue.append(item)
for item in import_queue_items:
if lock_manager.lock_row('data_importqueue', item['id']):
queue.append(item)
if len(queue):
_logger.info(f'processing {len(import_queue_items)} items -- {worker_id}')
@ -39,8 +41,14 @@ def import_worker():
for item in queue:
start = get_time_ms()
success = False
geofetures = []
import_log = ImportLog()
import_log.add('Processing start')
try:
geojson_data, messages = kml_to_geojson(item['raw_kml'])
geojson_data, kml_conv_messages = kml_to_geojson(item['raw_kml'])
import_log.extend(kml_conv_messages)
geofetures, typing_messages = geojson_to_geofeature(geojson_data)
import_log.extend(typing_messages)
success = True
except Exception as e:
err_name = e.__class__.__name__
@ -48,18 +56,19 @@ def import_worker():
if hasattr(e, 'message'):
err_msg = e.message
msg = f'Failed to import item #{item["id"]} "{item["original_filename"]}", encountered {err_name}. {err_msg}'
import_log.add(f'{err_name}: {err_msg}')
log_to_db(msg, level=DatabaseLogLevel.ERROR, user_id=item['user_id'], source=DatabaseLogSource.IMPORT)
traceback.print_exc()
with CursorFromConnectionFromPool(cursor_factory=RealDictCursor) as cursor:
cursor.execute(_SQL_DELETE_ITEM, (item['id'],))
features = [] # dummy data
if success:
with CursorFromConnectionFromPool(cursor_factory=RealDictCursor) as cursor:
cursor.execute(_SQL_INSERT_PROCESSED_ITEM, (json.dumps(geojson_data, sort_keys=True), item['id']))
_logger.info(f'IMPORT: processed #{item["id"]} in {round((get_time_ms() - start) / 1000, 2)} seconds -- {worker_id}')
features = [json.loads(x.model_dump_json()) for x in geofetures]
import_log.add(f'Processing finished {"un" if not success else ""}successfully')
with CursorFromConnectionFromPool(cursor_factory=RealDictCursor) as cursor:
data = json.dumps(features)
cursor.execute(_SQL_INSERT_PROCESSED_ITEM, (data, import_log.json(), item['id']))
lock_manager.unlock_row('data_importqueue', item['id'])
_logger.info(f'IMPORT: processed #{item["id"]} in {round((get_time_ms() - start) / 1000, 2)} seconds -- {worker_id}')
if not len(queue):
# Only sleep if there were no items last time we checked.
time.sleep(5)
# def _process_item_data(item)

View File

@ -6,8 +6,9 @@ from typing import Union, Tuple
import geojson
import kml2geojson
from dateparser import parse
from geojson import FeatureCollection, Point, LineString, Polygon
from geojson import Point, LineString, Polygon, FeatureCollection
from geo_lib.daemon.workers.workers_lib.importer.logging import ImportLog
from geo_lib.types.geojson import GeojsonRawProperty
@ -25,35 +26,33 @@ def kmz_to_kml(kml_bytes: Union[str, bytes]) -> str:
return kml_bytes.decode('utf-8')
def kml_to_geojson(kml_bytes) -> Tuple[dict, list]:
def kml_to_geojson(kml_bytes) -> Tuple[dict, ImportLog]:
# TODO: preserve KML object styling, such as color and opacity
doc = kmz_to_kml(kml_bytes)
converted_kml = kml2geojson.main.convert(io.BytesIO(doc.encode('utf-8')))
features, messages = process_feature(converted_kml)
features, import_log = process_feature(converted_kml)
data = {
'type': 'FeatureCollection',
'features': features
}
return load_geojson_type(data), messages
return load_geojson_type(data), import_log
def process_feature(converted_kml):
def process_feature(converted_kml) -> Tuple[list, ImportLog]:
features = []
messages = []
import_log = ImportLog()
for feature in converted_kml[0]['features']:
if feature['geometry']['type'] in ['Point', 'LineString', 'Polygon']:
if feature['properties'].get('times'):
for i, timestamp_str in enumerate(feature['properties']['times']):
timestamp = int(parse(timestamp_str).timestamp() * 1000)
feature['geometry']['coordinates'][i].append(timestamp)
feature['properties'] = GeojsonRawProperty(**feature['properties']).dict()
feature['properties'] = GeojsonRawProperty(**feature['properties']).model_dump()
features.append(feature)
else:
# Log the error
messages.append(f'Feature type {feature["properties"]["type"]} not supported')
return features, messages
import_log.add(f'Feature type {feature["properties"]["type"]} not supported')
return features, import_log
def load_geojson_type(data: dict) -> dict:
@ -79,5 +78,4 @@ def load_geojson_type(data: dict) -> dict:
'coordinates': item.pop('coordinates'),
}
item['type'] = 'Feature'
item['properties']['title'] = item['properties'].pop('name')
return geojson_dict

View File

@ -0,0 +1,30 @@
import datetime
import json
from typing import List
from typing import Optional
from pydantic import BaseModel, Field
class ImportLogMsg(BaseModel):
    """A single import-log entry: a message plus its creation time."""
    # ISO-8601 UTC timestamp, filled in automatically when the entry is created.
    timestamp: Optional[str] = Field(default_factory=lambda: datetime.datetime.now(datetime.timezone.utc).isoformat())
    # The log message text.
    msg: str
class ImportLog:
    """Ordered, timestamped collection of messages produced while importing an item.

    Entries are appended chronologically and can be serialized to a JSON
    array (stored in the import queue's ``log`` column).
    """

    def __init__(self):
        # Chronological list of entries; append-only via add()/extend().
        self._messages: List[ImportLogMsg] = []

    def add(self, msg: str):
        """Append one message string as a new timestamped entry.

        Raises:
            TypeError: if ``msg`` is not a string.
        """
        # Raise instead of assert: asserts are stripped under `python -O`,
        # which would silently let non-string payloads into the log.
        if not isinstance(msg, str):
            raise TypeError(f'msg must be str, got {type(msg).__name__}')
        self._messages.append(ImportLogMsg(msg=msg))

    def extend(self, msgs: 'ImportLog'):
        """Append every entry from another ImportLog, preserving order."""
        self._messages.extend(msgs.get())

    def get(self) -> List[ImportLogMsg]:
        """Return a shallow copy of the entries so callers cannot mutate our list."""
        return self._messages.copy()

    def json(self) -> str:
        """Serialize all entries to a JSON array string."""
        return json.dumps([x.model_dump() for x in self._messages])

View File

@ -0,0 +1,15 @@
from datetime import datetime
from typing import List
from geo_lib.types.feature import GeoFeatureSupported
def generate_auto_tags(feature: GeoFeatureSupported) -> List[str]:
    """Build the automatic tags for a feature: its geometry type plus the
    year and month (full name) at the time of import.

    Returns a list of plain strings, e.g. ["type:point", "import-year:2024",
    "import-month:January"].
    """
    now = datetime.now()
    generated = [
        f'type:{feature.type.value.lower()}',
        f'import-year:{now.year}',
        f'import-month:{now.strftime("%B")}',
    ]
    # Defensively coerce each entry to str before returning.
    return [str(tag) for tag in generated]

View File

@ -0,0 +1,2 @@
# Software identity constants.
# NOTE(review): presumably embedded in exported feature metadata
# (the frontend types carry software/software_version fields) — confirm.
SOFTWARE_NAME = 'geo-backend'
SOFTWARE_VERSION = '0.0.0'  # TODO: bump on release

View File

@ -0,0 +1,101 @@
import json
from enum import Enum
from typing import List, Tuple, Optional
from typing import Union
from pydantic import BaseModel, Field
from geo_lib.daemon.workers.workers_lib.importer.logging import ImportLog
class GeoFeatureType(str, Enum):
    """Supported GeoJSON geometry type names (values use GeoJSON spelling)."""
    POINT = 'Point'
    LINESTRING = 'LineString'
    POLYGON = 'Polygon'
class Rendering(BaseModel):
    """Styling hints for drawing a feature; camelCase aliases match the frontend JSON."""
    stroke_width: int = Field(2, alias='strokeWidth')
    # RGBA: three 0-255 int channels plus a 0.0-1.0 float opacity.
    stroke_color: Tuple[int, int, int, float] = Field((255, 0, 0, 1.0), alias='strokeColor')
    # Fill is meaningful for polygons only; it is deleted for points/lines
    # downstream (see geojson_to_geofeature).
    fill_color: Optional[Tuple[int, int, int, float]] = Field((255, 0, 0, 0.2), alias='fillColor')
class Properties(BaseModel):
    """Non-geometry attributes carried by every feature."""
    # Display name (required).
    name: str
    # Placeholder; set to -1 until the feature is persisted to the main store.
    id: Optional[int] = -1
    description: Optional[str] = None
    # Free-form tags, e.g. the auto-generated "type:point" / "import-year:2024".
    tags: Optional[List[str]] = Field(default_factory=list)
    # Per-feature styling; falls back to Rendering's defaults.
    rendering: Optional[Rendering] = Field(default_factory=Rendering)
class PointFeatureGeometry(BaseModel):
    """Point geometry: one (lon, lat) or (lon, lat, elevation) coordinate."""
    type: GeoFeatureType = GeoFeatureType.POINT
    coordinates: Union[Tuple[float, float], Tuple[float, float, float]]


class LineStringGeometry(BaseModel):
    """LineString geometry: a sequence of coordinates.

    The optional 4th int element is a per-point timestamp in ms
    (appended by the KML converter for tracks with time data).
    """
    type: GeoFeatureType = GeoFeatureType.LINESTRING
    coordinates: List[Union[Tuple[float, float], Tuple[float, float, float], Tuple[float, float, float, int]]]


class PolygonGeometry(BaseModel):
    """Polygon geometry: a list of linear rings, each a list of coordinates."""
    type: GeoFeatureType = GeoFeatureType.POLYGON
    coordinates: List[List[Union[Tuple[float, float], Tuple[float, float, float]]]]
class Feature(BaseModel):
    """Base GeoJSON feature: geometry plus properties."""
    # GeoJSON requires the literal string 'Feature' here.
    type: str = 'Feature'
    geometry: Union[PointFeatureGeometry, LineStringGeometry, PolygonGeometry]
    properties: Properties


class PointFeature(Feature):
    # Narrow the geometry to points only.
    geometry: PointFeatureGeometry


class LineStringFeature(Feature):
    # Narrow the geometry to line strings only.
    geometry: LineStringGeometry


class PolygonFeature(Feature):
    # Narrow the geometry to polygons only.
    geometry: PolygonGeometry


# Any feature type the importer can handle.
GeoFeatureSupported = Union[PointFeature, LineStringFeature, PolygonFeature]
def geojson_to_geofeature(geojson: dict) -> Tuple[List[GeoFeatureSupported], ImportLog]:
    """Convert a GeoJSON FeatureCollection dict into typed feature models.

    Unsupported geometry types are skipped and noted in the returned
    ImportLog rather than raising.

    :param geojson: dict with a 'features' list of GeoJSON feature dicts.
    :return: (typed features, log of skipped/odd items)
    """
    result = []
    import_log = ImportLog()
    for item in geojson['features']:
        match item['geometry']['type'].lower():
            case 'point':
                c = PointFeature
            case 'linestring':
                c = LineStringFeature
            case 'polygon':
                c = PolygonFeature
            case _:
                # NOTE(review): assumes the upstream KML converter renamed
                # properties['name'] to 'title' — confirm, else KeyError here.
                import_log.add(f'Feature named "{item["properties"]["title"]}" had unsupported type "{item["geometry"]["type"]}".')
                continue
        f = c(**item)
        if isinstance(f, (PointFeature, LineStringFeature)):
            # Fill color is meaningless for points/lines.
            # NOTE(review): `del` on a pydantic model attribute is
            # version-sensitive — verify against the pinned pydantic release.
            del f.properties.rendering.fill_color
        # TODO: assign the real ID once the feature is persisted.
        f.properties.id = -1  # This will be updated after it's added to the main data store.
        result.append(f)
    return result, import_log
def geofeature_to_geojson(feature: Union[GeoFeatureSupported, list]) -> str:
    """Serialize one geo-feature, or a list of them, to a GeoJSON string.

    A list is wrapped in a FeatureCollection; a single feature is emitted
    as-is. Field aliases (e.g. strokeWidth) are used in the output.
    """
    if not isinstance(feature, list):
        # Single feature: pydantic produces the JSON string directly.
        return feature.model_dump_json(by_alias=True)
    members = [json.loads(item.model_dump_json(by_alias=True)) for item in feature]
    collection = {
        'type': 'FeatureCollection',
        'features': members
    }
    return json.dumps(collection)

View File

@ -1,9 +1,10 @@
from typing import Optional
from typing import Optional, List
from pydantic import BaseModel
from pydantic import BaseModel, Field
class GeojsonRawProperty(BaseModel):
# Whitelist these properties.
# A class to whitelist these properties.
name: str
description: Optional[str] = None
tags: List[str] = Field(default_factory=list, alias='feature_tags') # kml2geojson calls this field `feature_tags`

View File

@ -6,7 +6,7 @@ import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'geo_backend.settings')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'website.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:

View File

@ -9,3 +9,4 @@ pydantic==2.7.3
sqlalchemy==2.0.30
redis==5.0.5
async_timeout==4.0.3
pytz

View File

@ -0,0 +1,19 @@
"""CLI helper: convert a KML file to typed GeoJSON and print it to stdout.

Usage: python <script> <path-to-kml>
"""
import argparse
import sys
from pathlib import Path

# Make the project root importable when running this script directly.
# (Path.parents is indexable, so the intermediate list() was unnecessary.)
sys.path.append(str(Path(__file__).parents[1]))

from geo_lib.daemon.workers.workers_lib.importer.kml import kml_to_geojson
from geo_lib.types.feature import geojson_to_geofeature, geofeature_to_geojson

parser = argparse.ArgumentParser(description='Convert a KML file to typed GeoJSON.')
parser.add_argument('kml_path')
args = parser.parse_args()

# resolve() already returns an absolute path, so absolute() was redundant.
raw_kml = Path(args.kml_path).expanduser().resolve().read_text()

geojson_data, kml_conv_messages = kml_to_geojson(raw_kml)
# Fixed typo: geofetures -> geofeatures (local variable only).
geofeatures, typing_messages = geojson_to_geofeature(geojson_data)
print(geofeature_to_geojson(geofeatures))

1
src/geo-backend/todo.txt Normal file
View File

@ -0,0 +1 @@
- For tracks, set the created date to the timestamp of the first point in the track

View File

@ -1,5 +1,5 @@
from django.contrib.auth import login
from django.shortcuts import render
from django.shortcuts import render, redirect
from users.forms import CustomUserCreationForm
@ -15,3 +15,6 @@ def register(request):
if form.is_valid():
user = form.save()
login(request, user)
return redirect('/account/login/')
else:
return render(request, "users/register.html", {"form": form}) # return the form with errors

View File

View File

@ -1,5 +1,5 @@
"""
ASGI config for geo_backend project.
ASGI config for website project.
It exposes the ASGI callable as a module-level variable named ``application``.
@ -11,6 +11,6 @@ import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'geo_backend.settings')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'website.settings')
application = get_asgi_application()

View File

@ -1,5 +1,5 @@
"""
Django settings for geo_backend project.
Django settings for website project.
Generated by 'django-admin startproject' using Django 5.0.6.
@ -48,10 +48,10 @@ MIDDLEWARE = [
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'geo_backend.middleware.CustomHeaderMiddleware',
'website.middleware.CustomHeaderMiddleware',
]
ROOT_URLCONF = 'geo_backend.urls'
ROOT_URLCONF = 'website.urls'
TEMPLATES = [
{
@ -69,7 +69,7 @@ TEMPLATES = [
},
]
WSGI_APPLICATION = 'geo_backend.wsgi.application'
WSGI_APPLICATION = 'website.wsgi.application'
# Database
# https://docs.djangoproject.com/en/5.0/ref/settings/#databases
@ -134,4 +134,8 @@ STATICFILES_DIRS = [
os.path.join(BASE_DIR, '../geo-frontend/dist/static'),
]
APPEND_SLASH = False
APPEND_SLASH = True
LOGIN_URL = '/account/login'
CSRF_TRUSTED_ORIGINS = ['http://localhost:5173']

View File

@ -1,5 +1,5 @@
"""
URL configuration for geo_backend project.
URL configuration for website project.
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/5.0/topics/http/urls/
@ -18,7 +18,7 @@ from django.conf.urls import include
from django.contrib import admin
from django.urls import path, re_path
from geo_backend.views import index
from website.views import index
urlpatterns = [
path('', index),

View File

@ -1,5 +1,7 @@
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
@login_required
def index(request):
return render(request, "index.html")

View File

@ -1,5 +1,5 @@
"""
WSGI config for geo_backend project.
WSGI config for website project.
It exposes the WSGI callable as a module-level variable named ``application``.
@ -11,6 +11,6 @@ import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'geo_backend.settings')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'website.settings')
application = get_wsgi_application()

View File

@ -11,12 +11,15 @@
"@types/geojson": "^7946.0.14",
"axios": "^1.7.2",
"dropzone-vue": "^0.1.11",
"flatpickr": "^4.6.13",
"geojson": "^0.5.0",
"vue": "^3.4.21",
"vue-flatpickr-component": "^11.0.5",
"vue-router": "^4.3.2",
"vuex": "^4.1.0"
},
"devDependencies": {
"@tailwindcss/typography": "^0.5.15",
"@vitejs/plugin-vue": "^5.0.4",
"autoprefixer": "^10.4.19",
"postcss": "^8.4.38",
@ -732,6 +735,36 @@
"win32"
]
},
"node_modules/@tailwindcss/typography": {
"version": "0.5.15",
"resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.15.tgz",
"integrity": "sha512-AqhlCXl+8grUz8uqExv5OTtgpjuVIwFTSXTrh8y9/pw6q2ek7fJ+Y8ZEVw7EB2DCcuCOtEjf9w3+J3rzts01uA==",
"dev": true,
"license": "MIT",
"dependencies": {
"lodash.castarray": "^4.4.0",
"lodash.isplainobject": "^4.0.6",
"lodash.merge": "^4.6.2",
"postcss-selector-parser": "6.0.10"
},
"peerDependencies": {
"tailwindcss": ">=3.0.0 || insiders || >=4.0.0-alpha.20"
}
},
"node_modules/@tailwindcss/typography/node_modules/postcss-selector-parser": {
"version": "6.0.10",
"resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz",
"integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==",
"dev": true,
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
"util-deprecate": "^1.0.2"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@types/estree": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz",
@ -943,9 +976,10 @@
}
},
"node_modules/axios": {
"version": "1.7.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.2.tgz",
"integrity": "sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw==",
"version": "1.7.7",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.7.tgz",
"integrity": "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
@ -1330,6 +1364,12 @@
"node": ">=8"
}
},
"node_modules/flatpickr": {
"version": "4.6.13",
"resolved": "https://registry.npmjs.org/flatpickr/-/flatpickr-4.6.13.tgz",
"integrity": "sha512-97PMG/aywoYpB4IvbvUJi0RQi8vearvU0oov1WW3k0WZPBMrTQVqekSX5CjSG/M4Q3i6A/0FKXC7RyAoAUUSPw==",
"license": "MIT"
},
"node_modules/follow-redirects": {
"version": "1.15.6",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
@ -1579,6 +1619,27 @@
"integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==",
"dev": true
},
"node_modules/lodash.castarray": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/lodash.castarray/-/lodash.castarray-4.4.0.tgz",
"integrity": "sha512-aVx8ztPv7/2ULbArGJ2Y42bG1mEQ5mGjpdvrbJcJFU3TbYybe+QlLS4pst9zV52ymy2in1KpFPiZnAOATxD4+Q==",
"dev": true,
"license": "MIT"
},
"node_modules/lodash.isplainobject": {
"version": "4.0.6",
"resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
"integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==",
"dev": true,
"license": "MIT"
},
"node_modules/lodash.merge": {
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
"dev": true,
"license": "MIT"
},
"node_modules/lru-cache": {
"version": "10.2.2",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz",
@ -1606,10 +1667,11 @@
}
},
"node_modules/micromatch": {
"version": "4.0.7",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz",
"integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==",
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
"dev": true,
"license": "MIT",
"dependencies": {
"braces": "^3.0.3",
"picomatch": "^2.3.1"
@ -2434,6 +2496,21 @@
}
}
},
"node_modules/vue-flatpickr-component": {
"version": "11.0.5",
"resolved": "https://registry.npmjs.org/vue-flatpickr-component/-/vue-flatpickr-component-11.0.5.tgz",
"integrity": "sha512-Vfwg5uVU+sanKkkLzUGC5BUlWd5wlqAMq/UpQ6lI2BCZq0DDrXhOMX7hrevt8bEgglIq2QUv0K2Nl84Me/VnlA==",
"license": "MIT",
"dependencies": {
"flatpickr": "^4.6.13"
},
"engines": {
"node": ">=14.13.0"
},
"peerDependencies": {
"vue": "^3.2.0"
}
},
"node_modules/vue-router": {
"version": "4.3.2",
"resolved": "https://registry.npmjs.org/vue-router/-/vue-router-4.3.2.tgz",

View File

@ -12,12 +12,15 @@
"@types/geojson": "^7946.0.14",
"axios": "^1.7.2",
"dropzone-vue": "^0.1.11",
"flatpickr": "^4.6.13",
"geojson": "^0.5.0",
"vue": "^3.4.21",
"vue-flatpickr-component": "^11.0.5",
"vue-router": "^4.3.2",
"vuex": "^4.1.0"
},
"devDependencies": {
"@tailwindcss/typography": "^0.5.15",
"@vitejs/plugin-vue": "^5.0.4",
"autoprefixer": "^10.4.19",
"postcss": "^8.4.38",

View File

@ -1,4 +1,4 @@
import {UserInfo} from "@/assets/js/store-types.ts";
import {UserInfo} from "@/assets/js/types/store-types";
import {getUserInfo} from "@/assets/js/auth.js";
export const authMixin = {

View File

@ -0,0 +1,2 @@
// REST endpoints for the import workflow.
// Items that were uploaded/parsed but not yet imported (the "queue"):
export const IMPORT_QUEUE_LIST_URL = "/api/data/item/import/get"
// Items whose import has already been performed:
export const IMPORT_HISTORY_URL = "/api/data/item/import/get/history"

View File

@ -1,16 +1,35 @@
import {createStore} from 'vuex'
import {UserInfo} from './store-types'
import {UserInfo} from './types/store-types'
import {ImportQueueItem} from "@/assets/js/types/import-types";
export default createStore({
state: {
userInfo: UserInfo
userInfo: UserInfo,
importQueue: ImportQueueItem,
importQueueRefreshTrigger: false,
}, mutations: {
userInfo(state, payload) {
state.userInfo = payload
}
},
importQueue(state, payload) {
state.importQueue = payload
},
setImportQueue(state, importQueue) {
state.importQueue = importQueue;
},
triggerImportQueueRefresh(state) {
state.importQueueRefreshTrigger = !state.importQueueRefreshTrigger;
},
}, getters: {
// alertExists: (state) => (message) => {
// return state.siteAlerts.includes(message);
// },
}
},
actions: {
refreshImportQueue({commit}) {
commit('triggerImportQueueRefresh');
},
},
})

View File

@ -0,0 +1,5 @@
/**
 * Canonical GeoJSON geometry-type names, used when dispatching a parsed
 * feature to its model class (see geofeature-types).
 */
const POINT = "Point";
const LINE_STRING = "LineString";
const POLYGON = "Polygon";

export const GeoFeatureTypeStrings = {
    Point: POINT,
    LineString: LINE_STRING,
    Polygon: POLYGON,
};

View File

@ -0,0 +1,55 @@
/** GeoJSON geometry kinds the importer understands. */
enum GeoFeatureType {
    POINT = 'Point',
    LINESTRING = 'LineString',
    POLYGON = 'Polygon'
}

/** Metadata carried in a feature's `properties` member. */
interface GeoFeatureProperties {
    created: Date;
    software: string;
    software_version: string;
    tags: string[];
}

/** Constructor payload for GeoFeature and its subclasses. */
interface GeoFeatureProps {
    name: string;
    id: number;
    type: GeoFeatureType;
    description?: string;
    geometry: any[];
    properties: GeoFeatureProperties;
}

/** Base model for a single GeoJSON feature edited in the import UI. */
class GeoFeature {
    name: string;
    id: number;
    type: GeoFeatureType;
    description?: string;
    // NOTE(review): distinct from properties.tags, which is what the import
    // UI renders — confirm whether this top-level list is still needed.
    tags: string[] = [];
    geometry: any[];
    properties: GeoFeatureProperties;

    constructor(props: GeoFeatureProps) {
        this.name = props.name;
        this.id = props.id;
        this.type = props.type;
        this.description = props.description;
        // Fall back to an empty geometry so callers can always iterate it.
        this.geometry = props.geometry || [];
        this.properties = props.properties;
    }
}

// In the subclasses below, `geometry` is re-declared only to narrow its type.
// A plain re-declaration would be a class *field*: under ES2022 class-field
// semantics (tsconfig `useDefineForClassFields`) it is defined as `undefined`
// AFTER the base constructor runs, clobbering the geometry assigned there.
// `declare` narrows the type without emitting a field. `type` deliberately
// keeps its initializer so each subclass forces its own geometry kind.

/** A single coordinate pair: [lon, lat]. */
export class GeoPoint extends GeoFeature {
    type: GeoFeatureType = GeoFeatureType.POINT;
    declare geometry: number[];
}

/** An ordered list of coordinate pairs. */
export class GeoLineString extends GeoFeature {
    type: GeoFeatureType = GeoFeatureType.LINESTRING;
    declare geometry: number[][];
}

/** A list of linear rings, each a list of coordinate pairs. */
export class GeoPolygon extends GeoFeature {
    type: GeoFeatureType = GeoFeatureType.POLYGON;
    declare geometry: number[][][];
}

View File

@ -0,0 +1,21 @@
/**
 * One row of the server's import queue / history, as returned by the
 * import "get" endpoints. Copies only the known fields off the raw
 * response object.
 */
export class ImportQueueItem {
    id: number;
    original_filename: string;
    raw_kml_hash: string;
    data: object;
    log: any[];
    timestamp: string;
    processing: boolean;
    feature_count: number;

    constructor(data: any) {
        const {
            id,
            original_filename,
            raw_kml_hash,
            data: payload,
            log,
            timestamp,
            processing,
            feature_count,
        } = data;
        this.id = id;
        this.original_filename = original_filename;
        this.raw_kml_hash = raw_kml_hash;
        this.data = payload;
        this.log = log;
        this.timestamp = timestamp;
        this.processing = processing;
        this.feature_count = feature_count;
    }
}

View File

@ -1,9 +1,9 @@
import {getCookie} from "./auth.js"
import {getCookie} from "../auth.js"
export class UserInfo {
private username: String;
private id: BigInteger;
private csrftoken: String;
username: String;
id: BigInteger;
csrftoken: String;
constructor(username: String, userId: BigInteger) {
this.username = username

View File

@ -1,5 +1,11 @@
<template>
<p>Home page</p>
<div class="prose">
<h1>Home page</h1>
</div>
<div>
<a href="/#/import">Import</a>
</div>
</template>

View File

@ -1,4 +1,6 @@
<template>
<a href="/#/import">Import</a>
<p>username: {{ userInfo.username }}</p>
<p>id: {{ userInfo.id }}</p>
</template>

View File

@ -1,34 +1,48 @@
<template>
<div class="prose mb-10">
<h1 class="mb-1">Import Data</h1>
</div>
<div class="mb-10">
<div>
<a href="/#/import/upload">Upload Files</a>
</div>
<div>
<button @click="fetchQueueList">Refresh</button>
<a class="text-blue-500 hover:text-blue-700" href="/#/import/upload">Upload Files</a>
</div>
<table>
<div class="prose mt-10">
<h3>Ready to Import</h3>
</div>
<Importqueue/>
<div class="prose mt-10">
<h3>Import History</h3>
</div>
<table class="mt-6 w-full border-collapse">
<thead>
<tr>
<th>ID</th>
<th>File Name</th>
<th>Features</th>
<th></th>
<tr class="bg-gray-100">
<th class="px-4 py-2 text-left w-[50%]">File Name</th>
<th class="px-4 py-2">Date/Time Imported</th>
<th class="px-4 py-2 w-[10%]"></th>
</tr>
</thead>
<tbody>
<tr v-for="(item, index) in processQueue" :key="`item-${index}`">
<td>
<a :href="`/#/import/process/${item.id}`">{{ item.id }}</a>
<tr v-for="(item, index) in history" :key="`history-${index}`" class="border-t">
<td class="px-4 py-2 w-[50%]">
<a :href="`${IMPORT_HISTORY_URL()}/${item.id}`" class="text-blue-500 hover:text-blue-700">{{
item.original_filename
}}</a>
</td>
<td>
<a :href="`/#/import/process/${item.id}`">{{ item.original_filename }}</a>
<td class="px-4 py-2 text-center">
{{ item.timestamp }}
</td>
<td>
{{ item.processing === true ? "processing" : item.feature_count }}
<td class="px-4 py-2 w-[10%]">
</td>
<td>
<button @click="deleteItem(item, index)">Delete</button>
</tr>
<tr v-if="historyIsLoading" class="animate-pulse border-t">
<td class="px-4 py-2 text-left w-[50%]">
<div class="w-32 h-8 bg-gray-200 rounded-s"></div>
</td>
<td class="px-4 py-2 text-center">
<div class="w-32 h-8 bg-gray-200 rounded-s mx-auto"></div>
</td>
</tr>
</tbody>
@ -40,43 +54,33 @@
import {mapState} from "vuex"
import {authMixin} from "@/assets/js/authMixin.js";
import axios from "axios";
import {IMPORT_HISTORY_URL} from "@/assets/js/import/url.js";
import Importqueue from "@/components/import/parts/importqueue.vue";
export default {
computed: {
...mapState(["userInfo"]),
...mapState(["userInfo", "importQueue"]),
},
components: {},
components: {Importqueue},
mixins: [authMixin],
data() {
return {
processQueue: []
history: [],
historyIsLoading: true,
}
},
methods: {
async fetchQueueList() {
const response = await axios.get('/api/data/item/import/get/mine')
this.processQueue = response.data.data
IMPORT_HISTORY_URL() {
return IMPORT_HISTORY_URL
},
async fetchHistory() {
const response = await axios.get(IMPORT_HISTORY_URL)
this.history = response.data.data
this.historyIsLoading = false
},
async deleteItem(item, index) {
if (window.confirm(`Delete "${item.original_filename}" (#${item.id})`))
try {
this.processQueue.splice(index, 1)
// TODO: add a message popup when delete is completed
const response = await axios.delete('/api/data/item/import/delete/' + item.id, {
headers: {
'X-CSRFToken': this.userInfo.csrftoken
}
})
await this.fetchQueueList()
} catch (error) {
alert(`Failed to delete ${item.id}: ${error.message}`)
this.processQueue.splice(index, 0, item)
}
}
},
async created() {
await this.fetchQueueList()
await this.fetchHistory()
},
// async mounted() {
// },

View File

@ -0,0 +1,294 @@
<template>
  <div class="prose mb-10">
    <h1 class="mb-1">Process Import</h1>
    <h2 v-if="originalFilename != null" class="mt-0">{{ originalFilename }}</h2>
    <h2 v-else class="mt-0 invisible">loading...</h2>
  </div>

  <!-- Error banner (set by handleError / failed save/import). -->
  <div v-if="msg !== '' && msg != null">
    <div class="bg-red-500 p-4 rounded">
      <p class="font-bold text-white">{{ msg }}</p>
    </div>
  </div>

  <!-- Worker log accumulated while the upload is parsed server-side. -->
  <div id="importLog" class="w-full my-10 mx-auto bg-white shadow rounded-lg p-4">
    <h2 class="text-lg font-semibold text-gray-700 mb-2">Logs</h2>
    <hr class="mb-4 border-t border-gray-200">
    <div class="h-32 overflow-auto">
      <ul class="space-y-2">
        <li v-for="(item, index) in workerLog" :key="`logitem-${index}`"
            class="border-b border-gray-200 last:border-b-0">
          <p class="text-sm">{{ item.timestamp }} - {{ item.msg }}</p>
        </li>
      </ul>
    </div>
  </div>

  <Loader v-if="originalFilename == null"/>

  <!-- One editable card per parsed feature. -->
  <div>
    <ul class="space-y-4">
      <li v-for="(item, index) in itemsForUser" :key="`item-${index}`" class="bg-white shadow rounded-md p-4">
        <div class="mb-4">
          <label class="block text-gray-700 font-bold mb-2">Name:</label>
          <div class="flex items-center">
            <input v-model="item.name" :placeholder="originalItems[index].name"
                   class="border border-gray-300 rounded-md px-3 py-2 w-full"/>
            <button class="ml-2 bg-gray-200 hover:bg-gray-300 text-gray-700 font-bold py-2 px-4 rounded"
                    @click="resetField(index, 'name')">Reset
            </button>
          </div>
        </div>
        <div class="mb-4">
          <label class="block text-gray-700 font-bold mb-2">Description:</label>
          <div class="flex items-center">
            <input v-model="item.description" :placeholder="originalItems[index].description"
                   class="border border-gray-300 rounded-md px-3 py-2 w-full"/>
            <button class="ml-2 bg-gray-200 hover:bg-gray-300 text-gray-700 font-bold py-2 px-4 rounded"
                    @click="resetField(index, 'description')">Reset
            </button>
          </div>
        </div>
        <div>
          <label class="block text-gray-700 font-bold mb-2">Created:</label>
          <div class="flex items-center">
            <flat-pickr :config="flatpickrConfig" :value="item.properties.created"
                        class="border border-gray-300 rounded-md px-3 py-2 w-full"
                        @on-change="updateDate(index, $event)"></flat-pickr>
            <button class="ml-2 bg-gray-200 hover:bg-gray-300 text-gray-700 font-bold py-2 px-4 rounded"
                    @click="resetNestedField(index, 'properties', 'created')">Reset
            </button>
          </div>
          <div>
            <label class="block text-gray-700 font-bold mb-2">Tags:</label>
            <div v-for="(tag, tagIndex) in item.properties.tags" :key="`tag-${tagIndex}`" class="mb-2">
              <div class="flex items-center">
                <input v-model="item.properties.tags[tagIndex]" :placeholder="getTagPlaceholder(index, tag)"
                       class="border rounded-md px-3 py-2 w-full bg-white"/>
                <button class="ml-2 bg-red-500 hover:bg-red-600 text-white font-bold py-2 px-4 rounded"
                        @click="removeTag(index, tagIndex)">Remove
                </button>
              </div>
            </div>
          </div>
          <div class="flex items-center mt-2">
            <button :class="{ 'opacity-50 cursor-not-allowed': isLastTagEmpty(index) }"
                    :disabled="isLastTagEmpty(index)"
                    class="bg-blue-500 hover:bg-blue-600 text-white font-bold py-2 px-4 rounded"
                    @click="addTag(index)">Add Tag
            </button>
            <button class="ml-2 bg-gray-200 hover:bg-gray-300 text-gray-700 font-bold py-2 px-4 rounded"
                    @click="resetTags(index)">Reset Tags
            </button>
          </div>
        </div>
      </li>
    </ul>
  </div>

  <div v-if="itemsForUser.length > 0">
    <button :disabled="lockButtons"
            class="m-2 bg-green-500 hover:bg-green-600 disabled:bg-green-300 text-white font-bold py-2 px-4 rounded"
            @click="saveChanges">Save
    </button>
    <button :disabled="lockButtons"
            class="m-2 bg-blue-500 hover:bg-blue-600 disabled:bg-blue-300 text-white font-bold py-2 px-4 rounded"
            @click="performImport">Import
    </button>
  </div>

  <div class="hidden">
    <!-- Load the queue to populate it. -->
    <Importqueue/>
  </div>
</template>

<script>
import {mapState} from "vuex";
import {authMixin} from "@/assets/js/authMixin.js";
import axios from "axios";
import {capitalizeFirstLetter} from "@/assets/js/string.js";
import Importqueue from "@/components/import/parts/importqueue.vue";
import {GeoFeatureTypeStrings} from "@/assets/js/types/geofeature-strings";
import {GeoPoint, GeoLineString, GeoPolygon} from "@/assets/js/types/geofeature-types";
import {getCookie} from "@/assets/js/auth.js";
import flatPickr from 'vue-flatpickr-component';
import 'flatpickr/dist/flatpickr.css';
import Loader from "@/components/parts/Loader.vue";

// TODO: for each feature, query the DB and check if there is a duplicate. For points that's duplicate coords, for linestrings and polygons that's duplicate points
// TODO: redo the entire log feature to include local timestamps

/**
 * Edit-and-confirm page for one uploaded file (route /import/process/:id).
 * Polls the backend until parsing finishes, lets the user edit each
 * feature's name/description/created/tags, then saves and/or performs
 * the import.
 */
export default {
  computed: {
    ...mapState(["userInfo"]),
  },
  components: {Loader, Importqueue, flatPickr},
  data() {
    return {
      msg: "",                   // error banner text
      currentId: null,           // id of the queue item currently loaded
      originalFilename: null,    // null until the first successful fetch
      itemsForUser: [],          // editable copies of the parsed features
      originalItems: [],         // pristine snapshot used by the Reset buttons
      workerLog: [],             // server-side parse log entries
      lockButtons: false,        // disables Save/Import while a request runs
      flatpickrConfig: {
        enableTime: true,
        time_24hr: true,
        dateFormat: 'Y-m-d H:i',
      },
    }
  },
  mixins: [authMixin],
  props: ['id'],
  methods: {
    // Normalize a server message into the banner: capitalized, exactly one
    // trailing period. (String.trim() takes no argument, so the old
    // `.trim(".")` never stripped the period and could produce "..".)
    handleError(responseMsg) {
      console.log(responseMsg)
      this.msg = capitalizeFirstLetter(responseMsg).trim().replace(/\.+$/, "") + "."
    },
    // Wrap a raw GeoJSON feature in its typed model class.
    parseGeoJson(item) {
      switch (item.type) {
        case GeoFeatureTypeStrings.Point:
          return new GeoPoint(item);
        case GeoFeatureTypeStrings.LineString:
          return new GeoLineString(item);
        case GeoFeatureTypeStrings.Polygon:
          return new GeoPolygon(item);
        default:
          throw new Error(`Invalid feature type: ${item.type}`);
      }
    },
    resetField(index, fieldName) {
      this.itemsForUser[index][fieldName] = this.originalItems[index][fieldName];
    },
    resetNestedField(index, nestedField, fieldName) {
      this.itemsForUser[index][nestedField][fieldName] = this.originalItems[index][nestedField][fieldName];
    },
    // The tag editors below all operate on item.properties.tags, which is
    // what the template renders and binds. (They previously mutated the
    // unrelated top-level item.tags, so Add/Remove/Reset never appeared
    // in the UI.)
    addTag(index) {
      if (!this.isLastTagEmpty(index)) {
        this.itemsForUser[index].properties.tags.push('');
      }
    },
    getTagPlaceholder(index, tag) {
      const originalTagIndex = this.originalItems[index].properties.tags.indexOf(tag);
      return originalTagIndex !== -1 ? this.originalItems[index].properties.tags[originalTagIndex] : '';
    },
    // True while the most recently added tag is still blank, to stop the
    // user from stacking up empty tag inputs.
    isLastTagEmpty(index) {
      const tags = this.itemsForUser[index].properties.tags;
      return tags.length > 0 && tags[tags.length - 1].trim().length === 0;
    },
    resetTags(index) {
      this.itemsForUser[index].properties.tags = [...this.originalItems[index].properties.tags];
    },
    removeTag(index, tagIndex) {
      this.itemsForUser[index].properties.tags.splice(tagIndex, 1);
    },
    updateDate(index, selectedDates) {
      this.itemsForUser[index].properties.created = selectedDates[0];
    },
    saveChanges() {
      this.lockButtons = true
      const csrftoken = getCookie('csrftoken')
      axios.put('/api/data/item/import/update/' + this.id, this.itemsForUser, {
        headers: {
          'X-CSRFToken': csrftoken
        }
      }).then(response => {
        if (response.data.success) {
          window.alert(response.data.msg);
        } else {
          this.msg = 'Error saving changes: ' + response.data.msg;
          window.alert(this.msg);
        }
        this.lockButtons = false
      }).catch(error => {
        this.msg = 'Error saving changes: ' + error.message;
        window.alert(this.msg);
        // Unlock on failure too, otherwise the buttons stay dead.
        this.lockButtons = false
      });
    },
    async performImport() {
      this.lockButtons = true
      const csrftoken = getCookie('csrftoken')
      // Persist any pending edits first so the import uses the latest data.
      // Guarded so a failed save doesn't leave the buttons locked with an
      // unhandled rejection.
      try {
        await axios.put('/api/data/item/import/update/' + this.id, this.itemsForUser, {
          headers: {
            'X-CSRFToken': csrftoken
          }
        })
      } catch (error) {
        this.msg = 'Error saving changes: ' + error.message;
        window.alert(this.msg);
        this.lockButtons = false
        return
      }
      axios.post('/api/data/item/import/perform/' + this.id, [], {
        headers: {
          'X-CSRFToken': csrftoken
        }
      }).then(response => {
        if (response.data.success) {
          this.$store.dispatch('refreshImportQueue')
          window.alert(response.data.msg);
        } else {
          this.msg = 'Error performing import: ' + response.data.msg;
          window.alert(this.msg);
        }
        this.lockButtons = false
      }).catch(error => {
        this.msg = 'Error performing import: ' + error.message;
        window.alert(this.msg);
        this.lockButtons = false
      });
    },
  },
  // Reset state and poll the backend (1s interval) until the uploaded file
  // has finished processing, then populate the editable feature list.
  beforeRouteEnter(to, from, next) {
    const now = new Date().toISOString()
    let ready = false
    next(async vm => {
      if (vm.currentId !== vm.id) {
        vm.msg = ""
        vm.currentId = null
        vm.originalFilename = null
        vm.itemsForUser = []
        vm.originalItems = []
        vm.workerLog = []
        vm.lockButtons = false
        while (!ready) {
          try {
            const response = await axios.get('/api/data/item/import/get/' + vm.id)
            if (!response.data.success) {
              vm.handleError(response.data.msg)
            } else {
              vm.currentId = vm.id
              if (Object.keys(response.data).length > 0) {
                vm.originalFilename = response.data.original_filename
                response.data.geofeatures.forEach((item) => {
                  vm.itemsForUser.push(vm.parseGeoJson(item))
                })
                // Deep copy so Reset buttons have a pristine snapshot.
                vm.originalItems = JSON.parse(JSON.stringify(vm.itemsForUser))
              }
              if (!response.data.processing) {
                vm.workerLog = vm.workerLog.concat(response.data.log)
                if (response.data.msg != null && response.data.msg.length > 0) {
                  vm.workerLog.push({timestamp: now, msg: response.data.msg})
                }
                ready = true
              } else {
                vm.workerLog = [{timestamp: now, msg: "uploaded data still processing"}]
                await new Promise(r => setTimeout(r, 1000));
              }
            }
          } catch (error) {
            vm.handleError(error.message)
          }
        }
      }
    })
  },
}
</script>

<style scoped>
</style>

View File

@ -0,0 +1,152 @@
<template>
  <div class="prose mb-10">
    <h1 class="mb-1">Upload Data</h1>
  </div>

  <div class="mb-10">
    <p class="mb-2">Only KML/KMZ files supported.</p>
    <p class="">
      Be careful not to upload duplicate files of the opposite type. For example, do not upload both
      <kbd class="bg-gray-200 text-gray-800 px-2 py-1 rounded">example.kml</kbd>
      and <kbd class="bg-gray-200 text-gray-800 px-2 py-1 rounded">example.kmz</kbd>. Currently, the system can't detect
      duplicate cross-file types.
    </p>
  </div>

  <div class="relative w-[90%] mx-auto">
    <div class="flex items-center">
      <input id="uploadInput" :disabled="disableUpload" class="mr-4 px-4 py-2 border border-gray-300 rounded"
             type="file"
             @change="onFileChange">
      <button :disabled="disableUpload"
              class="px-4 py-2 bg-blue-500 text-white rounded hover:bg-blue-600 disabled:bg-gray-400 disabled:cursor-not-allowed"
              @click="upload">
        Upload
      </button>
    </div>
    <!-- Upload progress bar; hidden (but space-reserving) until a transfer starts. -->
    <div :class="{invisible: uploadProgress <= 0}" class="mt-4">
      <div class="w-full bg-gray-200 rounded-full h-2.5">
        <div :style="{ width: uploadProgress + '%' }" class="bg-blue-600 h-2.5 rounded-full"></div>
      </div>
      <div class="text-center mt-2">{{ uploadProgress }}%</div>
    </div>
    <div v-if="uploadMsg !== ''" class="max-h-40 overflow-y-auto bg-gray-200 rounded-s p-5">
      <!-- <strong>Message from Server:</strong><br>-->
      {{ uploadMsg }}
    </div>
    <div class="prose mt-5" v-html="uploadResponse"></div>
  </div>

  <div class="prose mt-10">
    <h3 class="inline">Ready to Import</h3>
    <span v-if="loadingQueueList" class="italic mr-3">
      Loading...
    </span>
  </div>
  <Importqueue/>
</template>

<script>
import {mapState} from "vuex"
import {authMixin} from "@/assets/js/authMixin.js";
import axios from "axios";
import {capitalizeFirstLetter} from "@/assets/js/string.js";
import {IMPORT_QUEUE_LIST_URL} from "@/assets/js/import/url.js";
import {ImportQueueItem} from "@/assets/js/types/import-types"
import Importqueue from "@/components/import/parts/importqueue.vue";
import {getCookie} from "@/assets/js/auth.js";

// TODO: after import, don't disable the upload, instead add the new item to a table at the button and then prompt the user to continue

/**
 * Upload page for KML/KMZ files (route /import/upload). Posts the file,
 * reports progress, then refreshes the shared import queue in the store.
 */
export default {
  computed: {
    ...mapState(["userInfo", "importQueue"]),
  },
  components: {Importqueue},
  mixins: [authMixin],
  data() {
    return {
      file: null,                // file selected in the input, null when none/invalid
      disableUpload: false,      // disables the form while a request is in flight
      uploadMsg: "",             // status/error text shown under the progress bar
      uploadProgress: 0,         // 0-100 upload percentage
      loadingQueueList: false,
      uploadResponse: ""         // HTML "continue" link returned after success
    }
  },
  methods: {
    // Reload the shared import queue into the Vuex store.
    async fetchQueueList() {
      this.loadingQueueList = true
      const response = await axios.get(IMPORT_QUEUE_LIST_URL)
      const ourImportQueue = response.data.data.map((item) => new ImportQueueItem(item))
      this.$store.commit('importQueue', ourImportQueue)
      this.loadingQueueList = false
    },
    onFileChange(e) {
      this.file = e.target.files[0]
      const fileType = this.file.name.split('.').pop().toLowerCase()
      if (fileType !== 'kmz' && fileType !== 'kml') {
        alert('Invalid file type. Only KMZ and KML files are allowed.')
        e.target.value = "" // Reset the input value
        // Also drop the stored file, otherwise upload() would still send it.
        this.file = null
      }
    },
    async upload() {
      this.uploadProgress = 0
      this.uploadMsg = ""
      if (this.file == null) {
        return
      }
      let formData = new FormData()
      formData.append('file', this.file)
      try {
        this.disableUpload = true
        const response = await axios.post('/api/data/item/import/upload', formData, {
          headers: {
            'Content-Type': 'multipart/form-data',
            'X-CSRFToken': getCookie('csrftoken')
          },
          onUploadProgress: (progressEvent) => {
            this.uploadProgress = Math.round((progressEvent.loaded * 100) / progressEvent.total)
            if (this.uploadProgress === 100) {
              this.uploadMsg = "Processing..."
            }
          },
        })
        // Normalize to one trailing period. (String.trim() takes no
        // argument, so the old `.trim(".")` never stripped the period.)
        this.uploadMsg = capitalizeFirstLetter(response.data.msg).trim().replace(/\.+$/, "") + "."
        this.uploadResponse = `<a href="/#/import/process/${response.data.id}">Continue to Import</a>`
        await this.fetchQueueList()
        this.file = null
        document.getElementById("uploadInput").value = ""
      } catch (error) {
        this.handleError(error)
      }
      this.disableUpload = false
    },
    handleError(error) {
      console.error("Upload failed:", error)
      // error.response is undefined for network-level failures; guard it.
      if (error.response?.data?.msg != null) {
        this.uploadMsg = error.response.data.msg
      }
    },
  },
  async created() {
  },
  async mounted() {
  },
  // Reset transient state on every navigation to this route.
  beforeRouteEnter(to, from, next) {
    next(async vm => {
      vm.file = null
      vm.disableUpload = false
      vm.uploadMsg = ""
      vm.uploadProgress = 0
      vm.uploadResponse = ""
    })
  },
  watch: {},
}
</script>

<style scoped>
</style>

View File

@ -1,69 +0,0 @@
<template>
<div v-if="msg !== ''">
<p class="font-bold">{{ msg }}</p>
</div>
<!-- TODO: loading indicator -->
<div>
<li v-for="(item, index) in geoJsonData" :key="`item-${index}`">
<pre>
{{ parseGeoJson(item) }}
</pre>
</li>
</div>
</template>
<script>
import {mapState} from "vuex";
import {authMixin} from "@/assets/js/authMixin.js";
import axios from "axios";
import {capitalizeFirstLetter} from "@/assets/js/string.js";
// TODO: for each feature, query the DB and check if there is a duplicate. For points that's duplicate coords, for linestrings and polygons that's duplicate points
// TODO: auto-refresh if still processing
export default {
computed: {
...mapState(["userInfo"]),
},
components: {},
data() {
return {
msg: "",
geoJsonData: {},
}
},
mixins: [authMixin],
props: ['id'],
methods: {
handleError(responseMsg) {
console.log(responseMsg)
this.msg = capitalizeFirstLetter(responseMsg).trim(".") + "."
},
parseGeoJson(item) {
return item
}
},
beforeRouteEnter(to, from, next) {
next(async vm => {
axios.get('/api/data/item/import/get/' + vm.id).then(response => {
if (!response.data.success) {
vm.handleError(response.data.msg)
} else {
if (Object.keys(response.data.geojson).length > 0) {
vm.geoJsonData = response.data.geojson
}
vm.msg = response.data.msg
}
}).catch(error => {
vm.handleError(error.message)
});
})
},
};
</script>
<style scoped>
</style>

View File

@ -1,103 +0,0 @@
<template>
<div class="mb-10">
<p>import data</p>
<p>Only KML/KMZ files supported.</p>
<p>Be careful not to upload duplicate files of the opposite type. For example, do not upload both
<kbd>example.kml</kbd>
and <kbd>example.kmz</kbd>. Currently, the system can't detect duplicate cross-file types.</p>
</div>
<div class="relative w-[90%] m-auto">
<div>
<input id="uploadInput" :disabled="disableUpload" type="file" @change="onFileChange">
<button :disabled="disableUpload" @click="upload">Upload</button>
</div>
</div>
<div v-if="uploadMsg !== ''" class="w-[90%] m-auto mt-10" v-html="uploadMsg"></div>
</template>
<script>
import {mapState} from "vuex"
import {authMixin} from "@/assets/js/authMixin.js";
import axios from "axios";
import {capitalizeFirstLetter} from "@/assets/js/string.js";
// TODO: after import, don't disable the upload, instead add the new item to a table at the button and then prompt the user to continue
export default {
computed: {
...mapState(["userInfo"]),
},
components: {},
mixins: [authMixin],
data() {
return {
file: null,
disableUpload: false,
uploadMsg: "",
processQueue: []
}
},
methods: {
onFileChange(e) {
this.file = e.target.files[0]
const fileType = this.file.name.split('.').pop().toLowerCase()
if (fileType !== 'kmz' && fileType !== 'kml') {
alert('Invalid file type. Only KMZ and KML files are allowed.') // TODO: have this be a message on the page?
e.target.value = "" // Reset the input value
}
},
async upload() {
this.uploadMsg = ""
if (this.file == null) {
return
}
let formData = new FormData()
formData.append('file', this.file)
try {
this.disableUpload = true
const response = await axios.post('/api/data/item/import/upload/', formData, {
headers: {
'Content-Type': 'multipart/form-data',
'X-CSRFToken': this.userInfo.csrftoken
}
})
this.uploadMsg = `<p>${capitalizeFirstLetter(response.data.msg).trim(".")}.</p><p><a href="/#/import/process/${response.data.id}">Continue to Import</a>`
await this.fetchQueueList()
this.file = null
document.getElementById("uploadInput").value = ""
this.disableUpload = false
} catch (error) {
this.handleError(error)
}
},
handleError(error) {
console.error("Upload failed:", error)
if (error.response.data.msg != null) {
this.uploadMsg = error.response.data.msg
}
},
async fetchQueueList() {
const response = await axios.get('/api/data/item/import/get/mine')
this.processQueue = response.data.data
},
},
async created() {
},
async mounted() {
},
beforeRouteEnter(to, from, next) {
next(async vm => {
vm.file = null
vm.disableUpload = false
vm.uploadMsg = ""
await vm.fetchQueueList()
})
},
watch: {},
}
</script>
<style scoped>
</style>

View File

@ -0,0 +1,85 @@
<!DOCTYPE html>
<!-- Standalone OpenLayers demo page: renders a hard-coded GeoJSON
     FeatureCollection on an OSM basemap, styling each feature from its
     `rendering` property (strokeColor/strokeWidth/fillColor). -->
<html style="width: 100%; height: 100%;">
<head>
    <title>OpenLayers LineString Example</title>
    <link href="https://openlayers.org/en/v6.5.0/css/ol.css" rel="stylesheet" type="text/css">
    <script src="https://openlayers.org/en/v6.5.0/build/ol.js"></script>
</head>
<body style="width: 100%; height: 100%; margin:0">
<div id="map" style="width: 100%; height: 100%;"></div>
<script>
    // Data to display; paste exported GeoJSON features into this array.
    const geojsonData = {
        "type": "FeatureCollection",
        "features": []
    };

    // Parse features, reprojecting from lon/lat (EPSG:4326) to the
    // Web-Mercator projection the map view uses (EPSG:3857).
    const features = new ol.format.GeoJSON().readFeatures(geojsonData, {
        featureProjection: 'EPSG:3857',
        dataProjection: 'EPSG:4326'
    });

    const vectorSource = new ol.source.Vector({
        features: features
    });

    // Style callback: each feature is expected to carry a `rendering`
    // property object. NOTE(review): a feature without `rendering` would
    // throw here — fine for a demo, confirm before reusing.
    const vectorLayer = new ol.layer.Vector({
        source: vectorSource,
        style: function (feature) {
            const rendering = feature.get('rendering');
            const geometryType = feature.getGeometry().getType();
            if (geometryType === 'Point') {
                // Points render as a filled circle in the stroke color.
                return new ol.style.Style({
                    image: new ol.style.Circle({
                        radius: 6,
                        fill: new ol.style.Fill({
                            color: rendering.strokeColor
                        }),
                        stroke: new ol.style.Stroke({
                            color: rendering.strokeColor,
                            width: rendering.strokeWidth
                        })
                    })
                });
            } else if (geometryType === 'LineString') {
                return new ol.style.Style({
                    stroke: new ol.style.Stroke({
                        color: rendering.strokeColor,
                        width: rendering.strokeWidth
                    })
                });
            } else if (geometryType === 'Polygon') {
                return new ol.style.Style({
                    stroke: new ol.style.Stroke({
                        color: rendering.strokeColor,
                        width: rendering.strokeWidth
                    }),
                    fill: new ol.style.Fill({
                        color: rendering.fillColor
                    })
                });
            }
        }
    });

    // OSM tiles underneath, vector data on top.
    const map = new ol.Map({
        target: 'map',
        layers: [
            new ol.layer.Tile({
                source: new ol.source.OSM()
            }),
            vectorLayer
        ],
        view: new ol.View({
            center: ol.proj.fromLonLat([-104.692626, 38.881215]),
            zoom: 10
        })
    });
</script>
<style>
    /* Hide the OSM attribution control for this internal demo. */
    .ol-attribution {
        display: none;
    }
</style>
</body>
</html>

View File

@ -0,0 +1,110 @@
<template>
  <!-- Shared table of not-yet-imported uploads, backed by store.importQueue. -->
  <table class="mt-6 w-full border-collapse">
    <thead>
    <tr class="bg-gray-100">
      <th class="px-4 py-2 text-left w-[50%]">File Name</th>
      <th class="px-4 py-2 text-center">Features</th>
      <th class="px-4 py-2 w-[10%]"></th>
    </tr>
    </thead>
    <tbody>
    <tr v-for="(item, index) in importQueue" :key="`item-${index}`" class="border-t">
      <td class="px-4 py-2 w-[50%]">
        <a :href="`/#/import/process/${item.id}`" class="text-blue-500 hover:text-blue-700">{{
            item.original_filename
          }}</a>
      </td>
      <td class="px-4 py-2 text-center">
        {{ item.processing === true ? "processing" : item.feature_count }}
      </td>
      <td class="px-4 py-2 w-[10%]">
        <button class="px-4 py-2 bg-red-500 text-white rounded hover:bg-red-600" @click="deleteItem(item, index)">
          Delete
        </button>
      </td>
    </tr>
    <!-- Skeleton row shown during the initial load only. -->
    <tr v-if="isLoading && importQueue.length === 0" class="animate-pulse border-t">
      <td class="px-4 py-2 text-left w-[50%]">
        <div class="w-32 h-8 bg-gray-200 rounded-s"></div>
      </td>
      <td class="px-4 py-2 text-center">
        <div class="w-32 h-8 bg-gray-200 rounded-s mx-auto"></div>
      </td>
      <td class="px-4 py-2 invisible w-[10%]">
        <button class="px-4 py-2 bg-red-500 text-white rounded hover:bg-red-600">
          Delete
        </button>
      </td>
    </tr>
    </tbody>
  </table>
</template>

<script>
import {mapState} from "vuex";
import {authMixin} from "@/assets/js/authMixin.js";
import axios from "axios";
import {IMPORT_QUEUE_LIST_URL} from "@/assets/js/import/url.js";
import {ImportQueueItem} from "@/assets/js/types/import-types";
import {getCookie} from "@/assets/js/auth.js";

/**
 * Import-queue table. Loads the queue into the Vuex store on creation and
 * re-fetches whenever the `triggerImportQueueRefresh` mutation fires.
 */
export default {
  computed: {
    ...mapState(["userInfo", "importQueue"]),
  },
  components: {},
  mixins: [authMixin],
  data() {
    return {
      isLoading: true,
      // Unsubscribe handle returned by $store.subscribe (see beforeUnmount).
      unsubscribeFromStore: null,
    }
  },
  methods: {
    subscribeToRefreshMutation() {
      // Keep the handle so the subscription can be torn down on unmount;
      // discarding it leaked a watcher (firing on dead components) every
      // time this component was created.
      this.unsubscribeFromStore = this.$store.subscribe((mutation, state) => {
        if (mutation.type === 'triggerImportQueueRefresh') {
          this.refreshData();
        }
      });
    },
    async refreshData() {
      await this.fetchQueueList()
    },
    async fetchQueueList() {
      this.isLoading = true
      const response = await axios.get(IMPORT_QUEUE_LIST_URL)
      const ourImportQueue = response.data.data.map((item) => new ImportQueueItem(item))
      this.$store.commit('setImportQueue', ourImportQueue)
      this.isLoading = false
    },
    async deleteItem(item, index) {
      if (window.confirm(`Delete "${item.original_filename}" (#${item.id})`)) {
        try {
          // Optimistic removal; the row is restored below if the delete fails.
          this.importQueue.splice(index, 1);
          const response = await axios.delete('/api/data/item/import/delete/' + item.id, {
            headers: {
              'X-CSRFToken': getCookie('csrftoken')
            }
          });
          if (!response.data.success) {
            throw new Error("server reported failure");
          }
          await this.refreshData(); // Refresh the data after deleting an item
        } catch (error) {
          alert(`Failed to delete ${item.id}: ${error.message}`);
          this.importQueue.splice(index, 0, item);
        }
      }
    },
  },
  async created() {
    await this.fetchQueueList()
    this.subscribeToRefreshMutation()
  },
  beforeUnmount() {
    // Tear down the store subscription registered in created().
    if (this.unsubscribeFromStore) {
      this.unsubscribeFromStore()
    }
  },
}
</script>

<style scoped>
</style>

View File

@ -0,0 +1,22 @@
<template>
  <!-- Presentational spinner: two overlaid SVG arcs, rotated by Tailwind's
       animate-spin. No props, no script; `role="status"` plus the sr-only
       text make it announce as "Loading..." to screen readers. -->
  <div role="status">
    <svg aria-hidden="true" class="h-8 text-gray-200 animate-spin dark:text-gray-600 fill-blue-600 w-full"
         fill="none" viewBox="0 0 100 101" xmlns="http://www.w3.org/2000/svg">
      <path
          d="M100 50.5908C100 78.2051 77.6142 100.591 50 100.591C22.3858 100.591 0 78.2051 0 50.5908C0 22.9766 22.3858 0.59082 50 0.59082C77.6142 0.59082 100 22.9766 100 50.5908ZM9.08144 50.5908C9.08144 73.1895 27.4013 91.5094 50 91.5094C72.5987 91.5094 90.9186 73.1895 90.9186 50.5908C90.9186 27.9921 72.5987 9.67226 50 9.67226C27.4013 9.67226 9.08144 27.9921 9.08144 50.5908Z"
          fill="currentColor"/>
      <path
          d="M93.9676 39.0409C96.393 38.4038 97.8624 35.9116 97.0079 33.5539C95.2932 28.8227 92.871 24.3692 89.8167 20.348C85.8452 15.1192 80.8826 10.7238 75.2124 7.41289C69.5422 4.10194 63.2754 1.94025 56.7698 1.05124C51.7666 0.367541 46.6976 0.446843 41.7345 1.27873C39.2613 1.69328 37.813 4.19778 38.4501 6.62326C39.0873 9.04874 41.5694 10.4717 44.0505 10.1071C47.8511 9.54855 51.7191 9.52689 55.5402 10.0491C60.8642 10.7766 65.9928 12.5457 70.6331 15.2552C75.2735 17.9648 79.3347 21.5619 82.5849 25.841C84.9175 28.9121 86.7997 32.2913 88.1811 35.8758C89.083 38.2158 91.5421 39.6781 93.9676 39.0409Z"
          fill="currentFill"/>
    </svg>
    <span class="sr-only">Loading...</span>
  </div>
</template>

<script setup>
</script>

<style scoped>
</style>

View File

@ -19,12 +19,12 @@ const routes = [
{
path: '/import/upload',
name: 'Import Data',
component: () => import('./components/import/Upload.vue'),
component: () => import('./components/import/ImportUpload.vue'),
},
{
path: '/import/process/:id',
name: 'Process Data',
component: () => import('./components/import/Process.vue'),
component: () => import('./components/import/ImportProcess.vue'),
props: true
}
]

View File

@ -4,6 +4,8 @@ export default {
theme: {
extend: {},
},
plugins: [],
plugins: [
require('@tailwindcss/typography'),
],
}

View File

@ -22,6 +22,11 @@ export default defineConfig({
changeOrigin: true,
secure: false,
},
'/account': {
target: 'http://127.0.0.1:8000',
changeOrigin: true,
secure: false,
},
},
},
})