Merge pull request 'Merge development into master' (#1) from development into master
Reviewed-on: http://git.masasana.ai/StadtMG/DrohnenKI/pulls/1
This commit is contained in:
commit
97c580a933
21
README.md
21
README.md
|
|
@ -24,6 +24,27 @@ auszuführen.
|
|||
|
||||
Dieser Abschnitt geht davon aus, dass die Umgebung korrekt installiert ist.
|
||||
|
||||
### Vorverarbeitung der GIS-Daten
|
||||
Da die Daten aus dem stadteigenen GIS nicht direkt von der KI erstanden werden, benötigt es einer Vorverarbeitung.
|
||||
Diese passiert in der Datei [`belag.py`](preprocessing/belag.py).
|
||||
|
||||
Voraussetzung hierfür ist, dass eine Datei `Fleachenbelaege.json` und eine `Referenzpunkte.csv` vorhanden sind.
|
||||
Sollten die Namen oder Pfade abweichen, kann dies in der Datei `belag.py` angepasst werden.
|
||||
|
||||
In der Datei `Flaechenbelaege.json` werden Flächendefinitionen im [GeoJSON](https://geojson.org/)-Format erwartet.
|
||||
Diese Datei enthält alle Flächen, die zum Training verwendet werden sollen.
|
||||
|
||||
Die Datei `Referenzpunkte.csv` gibt in den ersten 4 Reihen die Eckpunkte des zu verarbeitenden Bildes aus.
|
||||
Diese werden verwendet, um die Koordinaten aus der Flächenbeschreibung den Pixel-Koordinaten im Bild zuzuweisen.
|
||||
|
||||
Sobald alle Dateien vorhanden sind, kann das Skript mit folgendem Befehl ausgeführt werden:
|
||||
```shell
|
||||
python belag.py
|
||||
```
|
||||
|
||||
Das Resultat daraus ist zum einen ein Ordner mit allen 1000x1000 Pixel großen Bildern und zum anderen eine Datei `belaege.json`.
|
||||
Diese beiden Dateien können anschließend zum Training genutzt werden.
|
||||
|
||||
### Erkennung
|
||||
|
||||
Zur Erkennung einzelner Bilder kann die Datei `predict.py` verwendet werden.
|
||||
|
|
|
|||
|
|
@ -0,0 +1,72 @@
|
|||
.travis.yaml
|
||||
.openapi-generator-ignore
|
||||
README.md
|
||||
tox.ini
|
||||
git_push.sh
|
||||
test-requirements.txt
|
||||
setup.py
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*,cover
|
||||
.hypothesis/
|
||||
venv/
|
||||
.python-version
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
#Ipython Notebook
|
||||
.ipynb_checkpoints
|
||||
|
|
@ -0,0 +1,66 @@
|
|||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*,cover
|
||||
.hypothesis/
|
||||
venv/
|
||||
.venv/
|
||||
.python-version
|
||||
.pytest_cache
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
#Ipython Notebook
|
||||
.ipynb_checkpoints
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
# OpenAPI Generator Ignore
|
||||
# Generated by openapi-generator https://github.com/openapitools/openapi-generator
|
||||
|
||||
# Use this file to prevent files from being overwritten by the generator.
|
||||
# The patterns follow closely to .gitignore or .dockerignore.
|
||||
|
||||
# As an example, the C# client generator defines ApiClient.cs.
|
||||
# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
|
||||
#ApiClient.cs
|
||||
|
||||
# You can match any string of characters against a directory, file or extension with a single asterisk (*):
|
||||
#foo/*/qux
|
||||
# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
|
||||
|
||||
# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
|
||||
#foo/**/qux
|
||||
# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
|
||||
|
||||
# You can also negate patterns with an exclamation (!).
|
||||
# For example, you can ignore all files in a docs folder with the file extension .md:
|
||||
#docs/*.md
|
||||
# Then explicitly reverse the ignore rule for a single file:
|
||||
#!docs/README.md
|
||||
|
||||
|
||||
openapi_server/controllers/drohne_controller.py
|
||||
requirements.txt
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
.dockerignore
|
||||
.gitignore
|
||||
.travis.yml
|
||||
Dockerfile
|
||||
README.md
|
||||
git_push.sh
|
||||
openapi_server/__init__.py
|
||||
openapi_server/__main__.py
|
||||
openapi_server/controllers/__init__.py
|
||||
openapi_server/controllers/security_controller_.py
|
||||
openapi_server/encoder.py
|
||||
openapi_server/models/__init__.py
|
||||
openapi_server/models/base_model_.py
|
||||
openapi_server/openapi/openapi.yaml
|
||||
openapi_server/test/__init__.py
|
||||
openapi_server/typing_utils.py
|
||||
openapi_server/util.py
|
||||
requirements.txt
|
||||
setup.py
|
||||
test-requirements.txt
|
||||
tox.ini
|
||||
|
|
@ -0,0 +1 @@
|
|||
5.0.0
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
# ref: https://docs.travis-ci.com/user/languages/python
|
||||
language: python
|
||||
python:
|
||||
- "3.2"
|
||||
- "3.3"
|
||||
- "3.4"
|
||||
- "3.5"
|
||||
- "3.6"
|
||||
- "3.7"
|
||||
- "3.8"
|
||||
# command to install dependencies
|
||||
install: "pip install -r requirements.txt"
|
||||
# command to run tests
|
||||
script: nosetests
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
FROM python:3-alpine
|
||||
|
||||
RUN mkdir -p /usr/src/app
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
COPY requirements.txt /usr/src/app/
|
||||
|
||||
RUN pip3 install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY . /usr/src/app
|
||||
|
||||
EXPOSE 8080
|
||||
|
||||
ENTRYPOINT ["python3"]
|
||||
|
||||
CMD ["-m", "openapi_server"]
|
||||
|
|
@ -0,0 +1,49 @@
|
|||
# OpenAPI generated server
|
||||
|
||||
## Overview
|
||||
This server was generated by the [OpenAPI Generator](https://openapi-generator.tech) project. By using the
|
||||
[OpenAPI-Spec](https://openapis.org) from a remote server, you can easily generate a server stub. This
|
||||
is an example of building a OpenAPI-enabled Flask server.
|
||||
|
||||
This example uses the [Connexion](https://github.com/zalando/connexion) library on top of Flask.
|
||||
|
||||
## Requirements
|
||||
Python 3.5.2+
|
||||
|
||||
## Usage
|
||||
To run the server, please execute the following from the root directory:
|
||||
|
||||
```
|
||||
pip3 install -r requirements.txt
|
||||
python3 -m openapi_server
|
||||
```
|
||||
|
||||
and open your browser to here:
|
||||
|
||||
```
|
||||
http://localhost:8080/ui/
|
||||
```
|
||||
|
||||
Your OpenAPI definition lives here:
|
||||
|
||||
```
|
||||
http://localhost:8080/openapi.json
|
||||
```
|
||||
|
||||
To launch the integration tests, use tox:
|
||||
```
|
||||
sudo pip install tox
|
||||
tox
|
||||
```
|
||||
|
||||
## Running with Docker
|
||||
|
||||
To run the server on a Docker container, please execute the following from the root directory:
|
||||
|
||||
```bash
|
||||
# building the image
|
||||
docker build -t openapi_server .
|
||||
|
||||
# starting up a container
|
||||
docker run -p 8080:8080 openapi_server
|
||||
```
|
||||
|
|
@ -0,0 +1,58 @@
|
|||
#!/bin/sh
|
||||
# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/
|
||||
#
|
||||
# Usage example: /bin/sh ./git_push.sh wing328 openapi-pestore-perl "minor update" "gitlab.com"
|
||||
|
||||
git_user_id=$1
|
||||
git_repo_id=$2
|
||||
release_note=$3
|
||||
git_host=$4
|
||||
|
||||
if [ "$git_host" = "" ]; then
|
||||
git_host="github.com"
|
||||
echo "[INFO] No command line input provided. Set \$git_host to $git_host"
|
||||
fi
|
||||
|
||||
if [ "$git_user_id" = "" ]; then
|
||||
git_user_id="GIT_USER_ID"
|
||||
echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id"
|
||||
fi
|
||||
|
||||
if [ "$git_repo_id" = "" ]; then
|
||||
git_repo_id="GIT_REPO_ID"
|
||||
echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id"
|
||||
fi
|
||||
|
||||
if [ "$release_note" = "" ]; then
|
||||
release_note="Minor update"
|
||||
echo "[INFO] No command line input provided. Set \$release_note to $release_note"
|
||||
fi
|
||||
|
||||
# Initialize the local directory as a Git repository
|
||||
git init
|
||||
|
||||
# Adds the files in the local repository and stages them for commit.
|
||||
git add .
|
||||
|
||||
# Commits the tracked changes and prepares them to be pushed to a remote repository.
|
||||
git commit -m "$release_note"
|
||||
|
||||
# Sets the new remote
|
||||
git_remote=`git remote`
|
||||
if [ "$git_remote" = "" ]; then # git remote not defined
|
||||
|
||||
if [ "$GIT_TOKEN" = "" ]; then
|
||||
echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment."
|
||||
git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git
|
||||
else
|
||||
git remote add origin https://${git_user_id}:${GIT_TOKEN}@${git_host}/${git_user_id}/${git_repo_id}.git
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
git pull origin master
|
||||
|
||||
# Pushes (Forces) the changes in the local repository up to the remote repository
|
||||
echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git"
|
||||
git push origin master 2>&1 | grep -v 'To https'
|
||||
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import connexion
|
||||
|
||||
from openapi_server import encoder
|
||||
|
||||
|
||||
def main():
|
||||
app = connexion.App(__name__, specification_dir='./openapi/')
|
||||
app.app.json_encoder = encoder.JSONEncoder
|
||||
app.add_api('openapi.yaml',
|
||||
arguments={'title': 'Stadt MG - Drohne'},
|
||||
pythonic_params=True)
|
||||
app.run(port=8080)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
@ -0,0 +1,112 @@
|
|||
import logging
|
||||
from uuid import uuid4
|
||||
|
||||
import connexion
|
||||
import six
|
||||
|
||||
from openapi_server import util
|
||||
from pymongo import MongoClient
|
||||
from flask import Response
|
||||
|
||||
from kubernetes import client, config
|
||||
|
||||
logging.basicConfig(format='[%(asctime)s] [%(levelname)s] [%(name)s] %(message)s')
|
||||
logger = logging.getLogger("API")
|
||||
logger.setLevel(logging.INFO)
|
||||
logger.info("Hello there")
|
||||
|
||||
config.load_incluster_config() # or config.load_kube_config()
|
||||
collection = MongoClient("mongo").get_database("stadtmg").get_collection("predictions")
|
||||
|
||||
with client.ApiClient() as api_client:
|
||||
app_api_instance = client.AppsV1Api(api_client)
|
||||
core_api_instance = client.CoreV1Api(api_client)
|
||||
|
||||
|
||||
def detect_post(body=None): # noqa: E501
|
||||
"""detect_post
|
||||
|
||||
# noqa: E501
|
||||
|
||||
:param body:
|
||||
:type body: str
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
image_id = str(uuid4())
|
||||
logger.debug(f"Processing image '{image_id}'")
|
||||
while collection.find_one({"id": image_id}) is not None:
|
||||
image_id = str(uuid4())
|
||||
|
||||
collection.insert_one({
|
||||
"id": image_id,
|
||||
"input": body,
|
||||
})
|
||||
|
||||
job_name = f"bodenerkennung-{image_id}"
|
||||
metadata = client.V1ObjectMeta(
|
||||
name=job_name,
|
||||
labels={
|
||||
"io.kompose.service": job_name,
|
||||
},
|
||||
namespace="stadtmg",
|
||||
)
|
||||
spec = client.V1JobSpec(
|
||||
backoff_limit=0,
|
||||
ttl_seconds_after_finished=500,
|
||||
template=dict(
|
||||
spec=dict(
|
||||
containers=[
|
||||
dict(
|
||||
name="bodenerkennung",
|
||||
image="masasana.azurecr.io/stadt_mg_bodenerkennung:1.1.1",
|
||||
imagePullPolicy="Always",
|
||||
command=["python", "predict.py"],
|
||||
args=[
|
||||
"--source", "mongo://mongo",
|
||||
"--image_id", image_id,
|
||||
"--category_json", "",
|
||||
]
|
||||
)
|
||||
],
|
||||
imagePullSecrets=[{"name": "acr-secret"}],
|
||||
restartPolicy="Never",
|
||||
)
|
||||
),
|
||||
)
|
||||
logger.debug(metadata)
|
||||
logger.debug(spec)
|
||||
|
||||
job = client.V1Job(
|
||||
api_version="batch/v1",
|
||||
kind="Job",
|
||||
metadata=metadata,
|
||||
spec=spec,
|
||||
)
|
||||
logger.debug(job)
|
||||
|
||||
batch_api = client.BatchV1Api()
|
||||
batch_api.create_namespaced_job("stadtmg", job)
|
||||
|
||||
return Response(image_id, status=200)
|
||||
|
||||
|
||||
def image_image_id_get(image_id): # noqa: E501
|
||||
"""image_image_id_get
|
||||
|
||||
# noqa: E501
|
||||
|
||||
:param image_id:
|
||||
:type image_id:
|
||||
|
||||
:rtype: file
|
||||
"""
|
||||
db_object = collection.find_one({"id": image_id})
|
||||
if db_object is None:
|
||||
return Response(f"Image with id '{image_id}' not found", status=404)
|
||||
|
||||
image = db_object.get("output")
|
||||
if image is None:
|
||||
return Response(status=204)
|
||||
|
||||
return Response(image, status=200, mimetype="image/png")
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
from typing import List
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
from connexion.apps.flask_app import FlaskJSONEncoder
|
||||
import six
|
||||
|
||||
from openapi_server.models.base_model_ import Model
|
||||
|
||||
|
||||
class JSONEncoder(FlaskJSONEncoder):
|
||||
include_nulls = False
|
||||
|
||||
def default(self, o):
|
||||
if isinstance(o, Model):
|
||||
dikt = {}
|
||||
for attr, _ in six.iteritems(o.openapi_types):
|
||||
value = getattr(o, attr)
|
||||
if value is None and not self.include_nulls:
|
||||
continue
|
||||
attr = o.attribute_map[attr]
|
||||
dikt[attr] = value
|
||||
return dikt
|
||||
return FlaskJSONEncoder.default(self, o)
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
# coding: utf-8
|
||||
|
||||
# flake8: noqa
|
||||
from __future__ import absolute_import
|
||||
# import models into model package
|
||||
|
|
@ -0,0 +1,69 @@
|
|||
import pprint
|
||||
|
||||
import six
|
||||
import typing
|
||||
|
||||
from openapi_server import util
|
||||
|
||||
T = typing.TypeVar('T')
|
||||
|
||||
|
||||
class Model(object):
|
||||
# openapiTypes: The key is attribute name and the
|
||||
# value is attribute type.
|
||||
openapi_types = {}
|
||||
|
||||
# attributeMap: The key is attribute name and the
|
||||
# value is json key in definition.
|
||||
attribute_map = {}
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls: typing.Type[T], dikt) -> T:
|
||||
"""Returns the dict as a model"""
|
||||
return util.deserialize_model(dikt, cls)
|
||||
|
||||
def to_dict(self):
|
||||
"""Returns the model properties as a dict
|
||||
|
||||
:rtype: dict
|
||||
"""
|
||||
result = {}
|
||||
|
||||
for attr, _ in six.iteritems(self.openapi_types):
|
||||
value = getattr(self, attr)
|
||||
if isinstance(value, list):
|
||||
result[attr] = list(map(
|
||||
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
|
||||
value
|
||||
))
|
||||
elif hasattr(value, "to_dict"):
|
||||
result[attr] = value.to_dict()
|
||||
elif isinstance(value, dict):
|
||||
result[attr] = dict(map(
|
||||
lambda item: (item[0], item[1].to_dict())
|
||||
if hasattr(item[1], "to_dict") else item,
|
||||
value.items()
|
||||
))
|
||||
else:
|
||||
result[attr] = value
|
||||
|
||||
return result
|
||||
|
||||
def to_str(self):
|
||||
"""Returns the string representation of the model
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
return pprint.pformat(self.to_dict())
|
||||
|
||||
def __repr__(self):
|
||||
"""For `print` and `pprint`"""
|
||||
return self.to_str()
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Returns true if both objects are equal"""
|
||||
return self.__dict__ == other.__dict__
|
||||
|
||||
def __ne__(self, other):
|
||||
"""Returns true if both objects are not equal"""
|
||||
return not self == other
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
openapi: 3.0.3
|
||||
info:
|
||||
description: Stadt MG - Drohne
|
||||
title: Stadt MG - Drohne
|
||||
version: 1.0.0
|
||||
servers:
|
||||
- url: https://drohne.masasana.ai
|
||||
tags:
|
||||
- name: Drohne
|
||||
paths:
|
||||
/detect:
|
||||
post:
|
||||
operationId: detect_post
|
||||
requestBody:
|
||||
content:
|
||||
image/*:
|
||||
schema:
|
||||
format: binary
|
||||
type: string
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
format: uuid
|
||||
type: string
|
||||
description: ok
|
||||
tags:
|
||||
- Drohne
|
||||
x-openapi-router-controller: openapi_server.controllers.drohne_controller
|
||||
/image/{image_id}:
|
||||
get:
|
||||
operationId: image_image_id_get
|
||||
parameters:
|
||||
- explode: false
|
||||
in: path
|
||||
name: image_id
|
||||
required: true
|
||||
schema:
|
||||
format: uuid
|
||||
type: string
|
||||
style: simple
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
image/*:
|
||||
schema:
|
||||
format: binary
|
||||
type: string
|
||||
description: ok
|
||||
"204":
|
||||
description: The image is still in processing and no content can be provided
|
||||
just yet.
|
||||
"404":
|
||||
description: This id doesn't exist
|
||||
tags:
|
||||
- Drohne
|
||||
x-openapi-router-controller: openapi_server.controllers.drohne_controller
|
||||
components:
|
||||
schemas: {}
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
import logging
|
||||
|
||||
import connexion
|
||||
from flask_testing import TestCase
|
||||
|
||||
from openapi_server.encoder import JSONEncoder
|
||||
|
||||
|
||||
class BaseTestCase(TestCase):
|
||||
|
||||
def create_app(self):
|
||||
logging.getLogger('connexion.operation').setLevel('ERROR')
|
||||
app = connexion.App(__name__, specification_dir='../openapi/')
|
||||
app.app.json_encoder = JSONEncoder
|
||||
app.add_api('openapi.yaml', pythonic_params=True)
|
||||
return app.app
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
# coding: utf-8
|
||||
|
||||
from __future__ import absolute_import
|
||||
import unittest
|
||||
|
||||
from flask import json
|
||||
from six import BytesIO
|
||||
|
||||
from openapi_server.test import BaseTestCase
|
||||
|
||||
|
||||
class TestDrohneController(BaseTestCase):
|
||||
"""DrohneController integration test stubs"""
|
||||
|
||||
@unittest.skip("image/* not supported by Connexion")
|
||||
def test_detect_post(self):
|
||||
"""Test case for detect_post
|
||||
|
||||
|
||||
"""
|
||||
body = (BytesIO(b'some file data'), 'file.txt')
|
||||
headers = {
|
||||
'Accept': 'application/json',
|
||||
'Content-Type': 'image/*',
|
||||
}
|
||||
response = self.client.open(
|
||||
'/detect',
|
||||
method='POST',
|
||||
headers=headers,
|
||||
data=json.dumps(body),
|
||||
content_type='image/*')
|
||||
self.assert200(response,
|
||||
'Response body is : ' + response.data.decode('utf-8'))
|
||||
|
||||
def test_image_image_id_get(self):
|
||||
"""Test case for image_image_id_get
|
||||
|
||||
|
||||
"""
|
||||
headers = {
|
||||
'Accept': 'image/*',
|
||||
}
|
||||
response = self.client.open(
|
||||
'/image/<image_id>'.format(image_id='image_id_example'),
|
||||
method='GET',
|
||||
headers=headers)
|
||||
self.assert200(response,
|
||||
'Response body is : ' + response.data.decode('utf-8'))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
# coding: utf-8
|
||||
|
||||
import sys
|
||||
|
||||
if sys.version_info < (3, 7):
|
||||
import typing
|
||||
|
||||
def is_generic(klass):
|
||||
""" Determine whether klass is a generic class """
|
||||
return type(klass) == typing.GenericMeta
|
||||
|
||||
def is_dict(klass):
|
||||
""" Determine whether klass is a Dict """
|
||||
return klass.__extra__ == dict
|
||||
|
||||
def is_list(klass):
|
||||
""" Determine whether klass is a List """
|
||||
return klass.__extra__ == list
|
||||
|
||||
else:
|
||||
|
||||
def is_generic(klass):
|
||||
""" Determine whether klass is a generic class """
|
||||
return hasattr(klass, '__origin__')
|
||||
|
||||
def is_dict(klass):
|
||||
""" Determine whether klass is a Dict """
|
||||
return klass.__origin__ == dict
|
||||
|
||||
def is_list(klass):
|
||||
""" Determine whether klass is a List """
|
||||
return klass.__origin__ == list
|
||||
|
|
@ -0,0 +1,142 @@
|
|||
import datetime
|
||||
|
||||
import six
|
||||
import typing
|
||||
from openapi_server import typing_utils
|
||||
|
||||
|
||||
def _deserialize(data, klass):
|
||||
"""Deserializes dict, list, str into an object.
|
||||
|
||||
:param data: dict, list or str.
|
||||
:param klass: class literal, or string of class name.
|
||||
|
||||
:return: object.
|
||||
"""
|
||||
if data is None:
|
||||
return None
|
||||
|
||||
if klass in six.integer_types or klass in (float, str, bool, bytearray):
|
||||
return _deserialize_primitive(data, klass)
|
||||
elif klass == object:
|
||||
return _deserialize_object(data)
|
||||
elif klass == datetime.date:
|
||||
return deserialize_date(data)
|
||||
elif klass == datetime.datetime:
|
||||
return deserialize_datetime(data)
|
||||
elif typing_utils.is_generic(klass):
|
||||
if typing_utils.is_list(klass):
|
||||
return _deserialize_list(data, klass.__args__[0])
|
||||
if typing_utils.is_dict(klass):
|
||||
return _deserialize_dict(data, klass.__args__[1])
|
||||
else:
|
||||
return deserialize_model(data, klass)
|
||||
|
||||
|
||||
def _deserialize_primitive(data, klass):
|
||||
"""Deserializes to primitive type.
|
||||
|
||||
:param data: data to deserialize.
|
||||
:param klass: class literal.
|
||||
|
||||
:return: int, long, float, str, bool.
|
||||
:rtype: int | long | float | str | bool
|
||||
"""
|
||||
try:
|
||||
value = klass(data)
|
||||
except UnicodeEncodeError:
|
||||
value = six.u(data)
|
||||
except TypeError:
|
||||
value = data
|
||||
return value
|
||||
|
||||
|
||||
def _deserialize_object(value):
|
||||
"""Return an original value.
|
||||
|
||||
:return: object.
|
||||
"""
|
||||
return value
|
||||
|
||||
|
||||
def deserialize_date(string):
|
||||
"""Deserializes string to date.
|
||||
|
||||
:param string: str.
|
||||
:type string: str
|
||||
:return: date.
|
||||
:rtype: date
|
||||
"""
|
||||
try:
|
||||
from dateutil.parser import parse
|
||||
return parse(string).date()
|
||||
except ImportError:
|
||||
return string
|
||||
|
||||
|
||||
def deserialize_datetime(string):
|
||||
"""Deserializes string to datetime.
|
||||
|
||||
The string should be in iso8601 datetime format.
|
||||
|
||||
:param string: str.
|
||||
:type string: str
|
||||
:return: datetime.
|
||||
:rtype: datetime
|
||||
"""
|
||||
try:
|
||||
from dateutil.parser import parse
|
||||
return parse(string)
|
||||
except ImportError:
|
||||
return string
|
||||
|
||||
|
||||
def deserialize_model(data, klass):
|
||||
"""Deserializes list or dict to model.
|
||||
|
||||
:param data: dict, list.
|
||||
:type data: dict | list
|
||||
:param klass: class literal.
|
||||
:return: model object.
|
||||
"""
|
||||
instance = klass()
|
||||
|
||||
if not instance.openapi_types:
|
||||
return data
|
||||
|
||||
for attr, attr_type in six.iteritems(instance.openapi_types):
|
||||
if data is not None \
|
||||
and instance.attribute_map[attr] in data \
|
||||
and isinstance(data, (list, dict)):
|
||||
value = data[instance.attribute_map[attr]]
|
||||
setattr(instance, attr, _deserialize(value, attr_type))
|
||||
|
||||
return instance
|
||||
|
||||
|
||||
def _deserialize_list(data, boxed_type):
|
||||
"""Deserializes a list and its elements.
|
||||
|
||||
:param data: list to deserialize.
|
||||
:type data: list
|
||||
:param boxed_type: class literal.
|
||||
|
||||
:return: deserialized list.
|
||||
:rtype: list
|
||||
"""
|
||||
return [_deserialize(sub_data, boxed_type)
|
||||
for sub_data in data]
|
||||
|
||||
|
||||
def _deserialize_dict(data, boxed_type):
|
||||
"""Deserializes a dict and its elements.
|
||||
|
||||
:param data: dict to deserialize.
|
||||
:type data: dict
|
||||
:param boxed_type: class literal.
|
||||
|
||||
:return: deserialized dict.
|
||||
:rtype: dict
|
||||
"""
|
||||
return {k: _deserialize(v, boxed_type)
|
||||
for k, v in six.iteritems(data)}
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
connexion[swagger-ui] >= 2.6.0; python_version>="3.6"
|
||||
# 2.3 is the last version that supports python 3.4-3.5
|
||||
connexion[swagger-ui] <= 2.3.0; python_version=="3.5" or python_version=="3.4"
|
||||
# connexion requires werkzeug but connexion < 2.4.0 does not install werkzeug
|
||||
# we must peg werkzeug versions below to fix connexion
|
||||
# https://github.com/zalando/connexion/pull/1044
|
||||
werkzeug == 0.16.1; python_version=="3.5" or python_version=="3.4"
|
||||
swagger-ui-bundle >= 0.0.2
|
||||
python_dateutil >= 2.6.0
|
||||
setuptools >= 21.0.0
|
||||
|
||||
|
||||
pymongo==4.2.0
|
||||
kubernetes==24.2.0
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
# coding: utf-8
|
||||
|
||||
import sys
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
NAME = "openapi_server"
|
||||
VERSION = "1.0.0"
|
||||
|
||||
# To install the library, run the following
|
||||
#
|
||||
# python setup.py install
|
||||
#
|
||||
# prerequisite: setuptools
|
||||
# http://pypi.python.org/pypi/setuptools
|
||||
|
||||
REQUIRES = [
|
||||
"connexion>=2.0.2",
|
||||
"swagger-ui-bundle>=0.0.2",
|
||||
"python_dateutil>=2.6.0"
|
||||
]
|
||||
|
||||
setup(
|
||||
name=NAME,
|
||||
version=VERSION,
|
||||
description="Stadt MG - Drohne",
|
||||
author_email="",
|
||||
url="",
|
||||
keywords=["OpenAPI", "Stadt MG - Drohne"],
|
||||
install_requires=REQUIRES,
|
||||
packages=find_packages(),
|
||||
package_data={'': ['openapi/openapi.yaml']},
|
||||
include_package_data=True,
|
||||
entry_points={
|
||||
'console_scripts': ['openapi_server=openapi_server.__main__:main']},
|
||||
long_description="""\
|
||||
Stadt MG - Drohne
|
||||
"""
|
||||
)
|
||||
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
pytest~=4.6.7 # needed for python 2.7+3.4
|
||||
pytest-cov>=2.8.1
|
||||
pytest-randomly==1.2.3 # needed for python 2.7+3.4
|
||||
Flask-Testing==0.8.0
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
[tox]
|
||||
envlist = py3
|
||||
skipsdist=True
|
||||
|
||||
[testenv]
|
||||
deps=-r{toxinidir}/requirements.txt
|
||||
-r{toxinidir}/test-requirements.txt
|
||||
{toxinidir}
|
||||
|
||||
commands=
|
||||
pytest --cov=openapi_server
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: api
|
||||
namespace: stadtmg
|
||||
labels:
|
||||
io.kompose.service: detection_api
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
io.kompose.service: detection_api
|
||||
replicas: 1
|
||||
strategy: {}
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
io.kompose.service: detection_api
|
||||
spec:
|
||||
containers:
|
||||
- name: api
|
||||
image: masasana.azurecr.io/stadt_mg_detection_api
|
||||
resources: {}
|
||||
restartPolicy: Always
|
||||
imagePullSecrets:
|
||||
- name: acr-secret
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
annotations:
|
||||
kubernetes.io/ingress.class: "nginx"
|
||||
nginx.ingress.kubernetes.io/rewrite-target: /
|
||||
cert-manager.io/cluster-issuer: "letsencrypt-staging"
|
||||
name: api
|
||||
namespace: stadtmg
|
||||
spec:
|
||||
tls:
|
||||
- hosts:
|
||||
- drohne.masasana.ai
|
||||
secretName: stadtmg-drohne-key
|
||||
rules:
|
||||
- host: drohne.masasana.ai
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: api-service
|
||||
port:
|
||||
number: 80
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: api-service
|
||||
namespace: stadtmg
|
||||
labels:
|
||||
io.kompose.service: detection_api
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
io.kompose.service: detection_api
|
||||
ports:
|
||||
- protocol: TCP
|
||||
name: http
|
||||
port: 80
|
||||
targetPort: 8080
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
apiVersion: batch/v1
|
||||
kind: Job
|
||||
metadata:
|
||||
labels:
|
||||
io.kompose.service: bodenerkennung
|
||||
name: bodenerkennung
|
||||
namespace: stadtmg
|
||||
spec:
|
||||
ttlSecondsAfterFinished: 100
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: bodenerkennung
|
||||
image: masasana.azurecr.io/stadt_mg_bodenerkennung:1.1.1
|
||||
imagePullPolicy: Always
|
||||
command:
|
||||
- python
|
||||
- predict.py
|
||||
args:
|
||||
- --output_dir=mongo://mongo
|
||||
imagePullSecrets:
|
||||
- name: acr-secret
|
||||
restartPolicy: Never
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: mongo-pv-claim
|
||||
namespace: stadtmg
|
||||
labels:
|
||||
app: mongo
|
||||
spec:
|
||||
storageClassName: microk8s-hostpath
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
resources:
|
||||
requests:
|
||||
storage: 10Gi
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
labels:
|
||||
io.kompose.service: mongo
|
||||
name: mongo
|
||||
namespace: stadtmg
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
io.kompose.service: mongo
|
||||
strategy:
|
||||
type: Recreate
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
io.kompose.service: mongo
|
||||
spec:
|
||||
containers:
|
||||
- image: mongo:5.0.3
|
||||
name: mongo
|
||||
resources: {}
|
||||
volumeMounts:
|
||||
- mountPath: /data/db
|
||||
name: mongo-data
|
||||
restartPolicy: Always
|
||||
volumes:
|
||||
- name: mongo-data
|
||||
persistentVolumeClaim:
|
||||
claimName: mongo-pv-claim
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: mongo
|
||||
namespace: stadtmg
|
||||
labels:
|
||||
io.kompose.service: mongo
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
io.kompose.service: mongo
|
||||
ports:
|
||||
- protocol: TCP
|
||||
name: http
|
||||
port: 27017
|
||||
targetPort: 27017
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
apiVersion: v1
|
||||
kind: Namespace
|
||||
metadata:
|
||||
name: stadtmg
|
||||
namespace: stadtmg
|
||||
labels:
|
||||
app.kubernetes.io/name: stadtmg
|
||||
app.kubernetes.io/instance: stadtmg
|
||||
annotations:
|
||||
linkerd.io/inject: enabled
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
# Deployment for the "bodenerkennung" (ground-surface detection) service.
apiVersion: apps/v1
kind: Deployment
metadata:
  labels:
    io.kompose.service: bodenerkennung
  name: bodenerkennung
  namespace: stadtmg
spec:
  replicas: 1
  selector:
    matchLabels:
      io.kompose.service: bodenerkennung
  strategy: { }  # empty: falls back to the default RollingUpdate strategy
  template:
    metadata:
      labels:
        io.kompose.service: bodenerkennung
    spec:
      containers:
        - image: masasana.azurecr.io/stadt_mg_bodenerkennung:1.1.1  # private ACR image
          name: bodenerkennung
          resources: { }  # NOTE(review): no requests/limits set — confirm intended
          # NOTE(review): each element bundles flag and value in a single
          # string ("--output_dir mongo://mongo"); argv-based parsers such
          # as argparse need them split ("--output_dir", "mongo://mongo")
          # or joined with "=". Also "--category_json m" looks truncated.
          # Verify against the container's entrypoint.
          args: [ "--output_dir mongo://mongo" , "--image_id testimage" , "--category_json m" ]
          env:
            # Expose the pod's own name to the process (downward API).
            - name: POD_ID
              valueFrom:
                fieldRef:
                  fieldPath: metadata.name
      imagePullSecrets:
        - name: acr-secret  # pull credentials for masasana.azurecr.io
      restartPolicy: Always
||||
|
|
@ -0,0 +1,13 @@
|
|||
# Grants the "job-creator" Role to the default service account of the
# stadtmg namespace, so pods running under it can manage batch Jobs.
kind: RoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
  name: job-creator
  namespace: stadtmg
subjects:
  - kind: ServiceAccount
    name: default  # the namespace's default service account
    namespace: stadtmg
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: Role
  name: job-creator  # the referenced Role must exist in the stadtmg namespace
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
# Namespaced Role allowing full lifecycle management of batch Jobs.
kind: Role
apiVersion: rbac.authorization.k8s.io/v1
metadata:
  name: job-creator
  # Added: without an explicit namespace this (namespaced) Role would be
  # created in whatever namespace the kubectl context happens to use,
  # while the job-creator RoleBinding references it in "stadtmg".
  namespace: stadtmg
rules:
  - apiGroups:
      - "batch"  # Jobs live in the batch API group
    resources:
      - jobs
    verbs:
      - get
      - list
      - watch
      - create
      - delete
|
||||
|
|
@ -0,0 +1,180 @@
|
|||
import json
|
||||
import os
|
||||
from pprint import pprint
|
||||
|
||||
from itertools import product
|
||||
from typing import List
|
||||
|
||||
import cv2
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
from PIL import Image, ImageDraw
|
||||
from detectron2.structures import BoxMode
|
||||
from shapely.geometry import Polygon, box, MultiPolygon
|
||||
import geopandas as gpd
|
||||
|
||||
# Pillow refuses to open very large images by default (decompression-bomb
# guard); the orthophoto is far larger than that limit, so disable the check.
Image.MAX_IMAGE_PIXELS = None

# --- Input data -------------------------------------------------------------
# GeoJSON feature collection with one polygon per surface area ("Belag").
flaechenbelaege = gpd.read_file("../data/GeoJson/Flaechenbelaege.json")

# CSV whose first four rows are the corner reference points of the orthophoto.
reference_points = pd.read_csv("../data/Referenzpunkte.csv")

dir_in = "../data/images/"
main_image_filename = "WestendDOP2.tif"

name, ext = os.path.splitext(main_image_filename)
img = Image.open(os.path.join(dir_in, main_image_filename))
w, h = img.size

# Derive the geo-to-pixel transform from the four corner reference points.
corners = reference_points.head(4)
left = min(corners["X"])
right = max(corners["X"])
bottom = min(corners["Y"])
top = max(corners["Y"])
x_scale = (right - left) / w
# Negative on purpose: pixel rows grow downwards while the map's Y axis grows
# upwards, so dividing (Y - top) by this negative scale yields a positive row.
y_scale = (bottom - top) / h

print(left, top)
print(right, bottom)

print("Creating canvas...")
draw = ImageDraw.Draw(img)
d = 1000  # tile edge length in pixels
# Top-left (row, col) corners of all full d x d tiles; the partial tiles at
# the right/bottom border are intentionally dropped (h - h % d, w - w % d).
grid = product(range(0, h - h % d, d), range(0, w - w % d, d))
|
||||
|
||||
# COCO-style image records collected over all tiles; written out at the end.
images = []

# Raw GIS surface types ("Art") grouped by the coarser category the model is
# trained on; inverted below into the flat raw-name -> category lookup.
_SURFACE_GROUPS = {
    "Festweg": ("Asphalt", "Beton", "Sand", "unbefestigt"),
    "Baumbestand": ("Bepflanzte_Flaechen",),
    "Pflaster": (
        "Noppenpflaster",
        "Pflaster",
        "Platten",
        "Rasengittersteine",
        "Rippenpflaster",
    ),
    "Wiese": ("Rasen",),
}
category_translations = {
    raw_name: coarse
    for coarse, raw_names in _SURFACE_GROUPS.items()
    for raw_name in raw_names
}

# Category list; a category's position doubles as its numeric id.
categories = [
    "Baumbestand",
    "Festweg",
    "Pflaster",
    "Wiese",
    "Wasser",
    "Gullydeckel",
]
|
||||
|
||||
# Pre-compute the pixel-space polygon and category of every GIS shape once,
# instead of re-projecting every shape again for each tile of the grid
# (previously O(tiles x shapes) re-projections).
shapes = []
for _, row in flaechenbelaege.iterrows():
    kind = row["Art"]
    # An unknown surface type used to crash with categories.index("");
    # fall back to the raw name and skip anything that is still unmapped.
    category_name = category_translations.get(kind, kind)
    if category_name not in categories:
        print(f"Skipping unmapped surface type: {kind!r}")
        continue
    category_id = categories.index(category_name)

    geometry: Polygon = row["geometry"]
    # Map world coordinates to pixel coordinates. y_scale is negative, which
    # flips the axis so rows grow downwards. Interior rings (holes) are
    # ignored, as in the original export — TODO confirm that is intended.
    scaled = [
        ((x - left) / x_scale, (y - top) / y_scale)
        for x, y in geometry.exterior.coords
    ]
    shapes.append((kind, category_name, category_id, Polygon(scaled)))

for i, j in grid:
    # Pixel bounding box of the current d x d tile: (minx, miny, maxx, maxy).
    view_box = (j, i, j + d, i + d)
    tile_box = box(*view_box)
    annotations = []
    image = dict(
        image_id=len(images),
        width=d,
        height=d,
        file_name=f"data/images/cropped/WestendDOP2_{i}_{j}.tif",
        annotations=annotations,
    )

    for kind, category_name, category_id, belag in shapes:
        intersection = belag.intersection(tile_box)
        # is_empty is the robust emptiness test; the previous
        # "if not intersection.bounds" relied on shapely returning () here.
        if intersection.is_empty:
            continue

        # The clipped shape may be a Polygon, a MultiPolygon, or — when only
        # edges/corners touch — lines and points. Keep polygons only instead
        # of asserting (the assert crashed on degenerate intersections).
        parts = getattr(intersection, "geoms", [intersection])
        for intersect in parts:
            if not isinstance(intersect, Polygon):
                continue

            global_segmentation = list(intersect.exterior.coords)
            global_boundaries = intersect.bounds

            # Debug overlay on the full canvas: one category-id bit per
            # colour channel. (The original used 255 * (category_id % 1),
            # which is always 0, and % 4, which can exceed 255.)
            draw.polygon(
                global_segmentation,
                fill=(
                    255 * ((category_id >> 0) & 1),
                    255 * ((category_id >> 1) & 1),
                    255 * ((category_id >> 2) & 1),
                ),
                outline="#f00",
            )

            # Flatten [(x, y), ...] to [x0, y0, x1, y1, ...] and shift into
            # tile-local pixel coordinates (x is offset by j, y by i).
            local_segmentation = [[
                coord - (i if axis % 2 else j)
                for axis, coord in enumerate(
                    c for xy in global_segmentation for c in xy
                )
            ]]
            local_boundaries = [
                global_boundaries[0] - j,
                global_boundaries[1] - i,
                global_boundaries[2] - j,
                global_boundaries[3] - i,
            ]

            annotations.append(dict(
                category_id=category_id,
                category_name=f"{category_name} ({kind})",
                ignore=0,
                iscrowd=0,
                bbox=local_boundaries,
                bbox_mode=BoxMode.XYXY_ABS,
                segmentation=local_segmentation,
            ))

    # Only keep tiles that actually contain at least one annotation.
    if annotations:
        images.append(image)
# end of grid loop

# NOTE(review): the README promises a folder of 1000x1000 cropped tiles, but
# the img.crop(...).save(...) step was commented out here — confirm whether
# cropping happens elsewhere or needs to be re-enabled (cropping after the
# debug draw.polygon() calls would bake the overlay into the tiles).

# Write the COCO-style annotation records for all non-empty tiles.
with open("../data/json/belaege.json", "w") as file:
    json.dump(images, file, indent=2)
|
||||
Loading…
Reference in New Issue