improve import logging, get django and npm dev working well, fix login url

Cyberes 2024-09-28 14:34:42 -06:00
parent 4be332aff6
commit 82fbcea183
8 changed files with 74 additions and 36 deletions

View File

@@ -9,7 +9,7 @@ from psycopg2.extras import RealDictCursor
from geo_lib.daemon.database.connection import CursorFromConnectionFromPool
from geo_lib.daemon.database.locking import DBLockManager
from geo_lib.daemon.workers.workers_lib.importer.kml import kml_to_geojson
from geo_lib.daemon.workers.workers_lib.importer.logging import create_import_log_msg
from geo_lib.daemon.workers.workers_lib.importer.logging import ImportLog
from geo_lib.logging.database import log_to_db, DatabaseLogLevel, DatabaseLogSource
from geo_lib.time import get_time_ms
from geo_lib.types.feature import geojson_to_geofeature
@@ -42,12 +42,13 @@ def import_worker():
start = get_time_ms()
success = False
geofetures = []
messages = []
import_log = ImportLog()
import_log.add('Processing start')
try:
geojson_data, kml_conv_messages = kml_to_geojson(item['raw_kml'])
messages.extend(kml_conv_messages)
import_log.extend(kml_conv_messages)
geofetures, typing_messages = geojson_to_geofeature(geojson_data)
messages.extend(typing_messages)
import_log.extend(typing_messages)
success = True
except Exception as e:
err_name = e.__class__.__name__
@@ -55,15 +56,17 @@ def import_worker():
if hasattr(e, 'message'):
err_msg = e.message
msg = f'Failed to import item #{item["id"]} "{item["original_filename"]}", encountered {err_name}. {err_msg}'
messages.append(create_import_log_msg(f'{err_name}: {err_msg}'))
import_log.add(f'{err_name}: {err_msg}')
log_to_db(msg, level=DatabaseLogLevel.ERROR, user_id=item['user_id'], source=DatabaseLogSource.IMPORT)
traceback.print_exc()
features = [] # dummy data
if success:
features = [json.loads(x.model_dump_json()) for x in geofetures]
time.sleep(1)
import_log.add(f'Processing finished {"un" if not success else ""}successfully')
with CursorFromConnectionFromPool(cursor_factory=RealDictCursor) as cursor:
data = json.dumps(features)
cursor.execute(_SQL_INSERT_PROCESSED_ITEM, (data, json.dumps(messages), item['id']))
cursor.execute(_SQL_INSERT_PROCESSED_ITEM, (data, import_log.json(), item['id']))
lock_manager.unlock_row('data_importqueue', item['id'])
_logger.info(f'IMPORT: processed #{item["id"]} in {round((get_time_ms() - start) / 1000, 2)} seconds -- {worker_id}')

View File

@@ -8,7 +8,7 @@ import kml2geojson
from dateparser import parse
from geojson import Point, LineString, Polygon, FeatureCollection
from geo_lib.daemon.workers.workers_lib.importer.logging import create_import_log_msg
from geo_lib.daemon.workers.workers_lib.importer.logging import ImportLog
from geo_lib.types.geojson import GeojsonRawProperty
@@ -26,23 +26,21 @@ def kmz_to_kml(kml_bytes: Union[str, bytes]) -> str:
return kml_bytes.decode('utf-8')
def kml_to_geojson(kml_bytes) -> Tuple[dict, list]:
def kml_to_geojson(kml_bytes) -> Tuple[dict, ImportLog]:
# TODO: preserve KML object styling, such as color and opacity
doc = kmz_to_kml(kml_bytes)
converted_kml = kml2geojson.main.convert(io.BytesIO(doc.encode('utf-8')))
features, messages = process_feature(converted_kml)
features, import_log = process_feature(converted_kml)
data = {
'type': 'FeatureCollection',
'features': features
}
return load_geojson_type(data), messages
return load_geojson_type(data), import_log
def process_feature(converted_kml):
def process_feature(converted_kml) -> Tuple[list, ImportLog]:
features = []
messages = []
import_log = ImportLog()
for feature in converted_kml[0]['features']:
if feature['geometry']['type'] in ['Point', 'LineString', 'Polygon']:
if feature['properties'].get('times'):
@@ -53,8 +51,8 @@ def process_feature(converted_kml):
features.append(feature)
else:
# Log the error
messages.append(create_import_log_msg(f'Feature type {feature["properties"]["type"]} not supported'))
return features, messages
import_log.add(f'Feature type {feature["properties"]["type"]} not supported')
return features, import_log
def load_geojson_type(data: dict) -> dict:
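(A quick sketch of the updated entry point, which now returns an ImportLog, defined in the logging module below, instead of a plain list of messages; 'example.kml' is a made-up filename:)

# Hypothetical caller, assuming raw KML upload bytes like the worker's item['raw_kml']:
with open('example.kml', 'rb') as f:
    geojson_data, import_log = kml_to_geojson(f.read())
print(len(geojson_data['features']), 'features converted')
for entry in import_log.get():  # ImportLogMsg entries, each with a timestamp and msg
    print(entry.timestamp, entry.msg)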

View File

@@ -1,5 +1,29 @@
from datetime import datetime
import datetime
import json
from typing import List, Optional
from pydantic import BaseModel
def create_import_log_msg(msg: str):
return datetime.now().isoformat(), msg
class ImportLogMsg(BaseModel):
timestamp: Optional[str] = datetime.datetime.now(datetime.timezone.utc).isoformat()
msg: str
class ImportLog:
def __init__(self):
self._messages: List[ImportLogMsg] = []
def add(self, msg: str):
assert isinstance(msg, str)
self._messages.append(ImportLogMsg(msg=msg))
def extend(self, msgs: 'ImportLog'):
for msg in msgs.get():
self._messages.append(msg)
def get(self) -> List[ImportLogMsg]:
return self._messages.copy()
def json(self) -> str:
return json.dumps([x.model_dump() for x in self._messages])
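(A minimal usage sketch of the new ImportLog class, assuming the definitions above; the messages are invented examples, not real importer output:)

# Build a log, merge another log into it, and serialize it, mirroring how import_worker()
# combines its own messages with the logs returned by kml_to_geojson() and geojson_to_geofeature().
log = ImportLog()
log.add('Processing start')

other = ImportLog()
other.add('Feature type GeometryCollection not supported')  # example message only
log.extend(other)

print(log.json())
# -> '[{"timestamp": "...", "msg": "Processing start"}, {"timestamp": "...", "msg": "Feature type GeometryCollection not supported"}]'

The worker stores this json() string in the processed-item row, and the import page below renders each entry as "timestamp - msg".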

View File

@@ -5,7 +5,7 @@ from typing import Optional, List, Union, Tuple
import pytz
from pydantic import Field, BaseModel
from geo_lib.daemon.workers.workers_lib.importer.logging import create_import_log_msg
from geo_lib.daemon.workers.workers_lib.importer.logging import ImportLog
from geo_lib.geo_backend import SOFTWARE_NAME, SOFTWARE_VERSION
@@ -53,9 +53,9 @@ class GeoPolygon(GeoFeature):
GeoFeatureSupported = Union[GeoPoint, GeoLineString, GeoPolygon]
def geojson_to_geofeature(geojson: dict) -> Tuple[List[GeoFeatureSupported], List[str]]:
def geojson_to_geofeature(geojson: dict) -> Tuple[List[GeoFeatureSupported], ImportLog]:
result = []
log = []
import_log = ImportLog()
for item in geojson['features']:
match item['geometry']['type'].lower():
case 'point':
@@ -65,7 +65,7 @@ def geojson_to_geofeature(geojson: dict) -> Tuple[List[GeoFeatureSupported], Lis
case 'polygon':
c = GeoPolygon
case _:
log.append(create_import_log_msg(f'Feature named "{item["properties"]["title"]}" had unsupported type "{item["geometry"]["type"]}".'))
import_log.add(f'Feature named "{item["properties"]["title"]}" had unsupported type "{item["geometry"]["type"]}".')
continue
result.append(c(
name=item['properties']['title'],
@@ -75,4 +75,4 @@ def geojson_to_geofeature(geojson: dict) -> Tuple[List[GeoFeatureSupported], Lis
geometry=item['geometry']['coordinates']
))
return result, log
return result, import_log

View File

@@ -135,3 +135,7 @@ STATICFILES_DIRS = [
]
APPEND_SLASH = True
LOGIN_URL = '/account/login'
CSRF_TRUSTED_ORIGINS = ['http://localhost:5173']
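(For context: LOGIN_URL is where Django redirects unauthenticated requests, and CSRF_TRUSTED_ORIGINS trusts the Vite dev server origin so posts made during npm dev pass the CSRF origin check. A minimal sketch of the redirect behaviour, using a hypothetical view name:)

from django.contrib.auth.decorators import login_required

@login_required  # anonymous requests are redirected to LOGIN_URL, e.g. /account/login?next=/my-data/
def my_data(request):  # hypothetical view, for illustration only
    ...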

View File

@@ -22,7 +22,7 @@ from website.views import index
urlpatterns = [
path('', index),
re_path(r"^website/", include("django.contrib.auth.urls")),
re_path(r"^account/", include("django.contrib.auth.urls")),
path('admin/', admin.site.urls),
path('', include("users.urls")),
path('api/data/', include("data.urls"))
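(The rename from ^website/ to ^account/ is what the new LOGIN_URL points at: django.contrib.auth.urls provides the stock login, logout and password views, so mounting it under ^account/ puts the login page at /account/login/, and APPEND_SLASH lets the slash-less LOGIN_URL resolve there too. A small sketch of the resulting named URLs, assuming the urlconf above:)

from django.urls import reverse

reverse('login')           # -> '/account/login/'
reverse('logout')          # -> '/account/logout/'
reverse('password_reset')  # -> '/account/password_reset/'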

View File

@@ -11,18 +11,19 @@
</div>
<div id="importLog"
class="w-full my-10 mx-auto overflow-auto h-32 bg-white shadow rounded-lg p-4">
<div id="importLog" class="w-full my-10 mx-auto bg-white shadow rounded-lg p-4">
<h2 class="text-lg font-semibold text-gray-700 mb-2">Logs</h2>
<hr class="mb-4 border-t border-gray-200">
<ul class="space-y-2">
<li v-for="(item, index) in workerLog" :key="`item-${index}`" class="border-b border-gray-200 last:border-b-0">
<p class="text-sm">{{ item }}</p>
</li>
</ul>
<div class="h-32 overflow-auto">
<ul class="space-y-2">
<li v-for="(item, index) in workerLog" :key="`logitem-${index}`"
class="border-b border-gray-200 last:border-b-0">
<p class="text-sm">{{ item.timestamp }} - {{ item.msg }}</p>
</li>
</ul>
</div>
</div>
<Loader v-if="originalFilename == null"/>
@@ -204,6 +205,7 @@ export default {
}
,
beforeRouteEnter(to, from, next) {
const now = new Date().toISOString()
let ready = false
next(async vm => {
if (vm.currentId !== vm.id) {
@@ -227,11 +229,13 @@ export default {
vm.originalItems = JSON.parse(JSON.stringify(vm.itemsForUser))
}
if (!response.data.processing) {
vm.workerLog.push(response.data.msg)
vm.workerLog.concat(response.data.log)
vm.workerLog = vm.workerLog.concat(response.data.log)
if (response.data.msg != null && response.data.msg.length > 0) {
vm.workerLog.push({timestamp: now, msg: response.data.msg})
}
ready = true
} else {
vm.workerLog = [`${new Date().toISOString()} -- uploaded data still processing`]
vm.workerLog = [{timestamp: now, msg: "uploaded data still processing"}]
await new Promise(r => setTimeout(r, 1000));
}
}

View File

@@ -22,6 +22,11 @@ export default defineConfig({
changeOrigin: true,
secure: false,
},
'/account': {
target: 'http://127.0.0.1:8000',
changeOrigin: true,
secure: false,
},
},
},
})