Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions preprocessors/autour/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ RUN pip3 install --upgrade pip && \

COPY /schemas /app/schemas
COPY /config /app/config
COPY /utils /app/utils
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Does it make sense to copy only /utils/validation to the preprocessors that don't use other utilities to reduce the Docker image size?

COPY /preprocessors/autour/ /app

EXPOSE 5000
Expand Down
91 changes: 16 additions & 75 deletions preprocessors/autour/autour.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,56 +15,36 @@
# <https://github.com/Shared-Reality-Lab/IMAGE-server/blob/main/LICENSE>.

import os
import json
import time
import logging
import jsonschema
import requests
from flask import Flask, request, jsonify
from datetime import datetime
from config.logging_utils import configure_logging
from utils.validation import Validator

configure_logging()

app = Flask(__name__)
logging.basicConfig(level=logging.DEBUG)

# Initialize shared validator
VALIDATOR = Validator(data_schema='./schemas/preprocessors/autour.schema.json')


@app.route('/preprocessor', methods=['POST', 'GET'])
def get_map_data():
"""
Gets data on locations nearby a map from the Autour API
"""
logging.debug("Received request")
# Load schemas
with open('./schemas/preprocessors/autour.schema.json') as jsonfile:
data_schema = json.load(jsonfile)
with open('./schemas/preprocessor-response.schema.json') as jsonfile:
schema = json.load(jsonfile)
with open('./schemas/definitions.json') as jsonfile:
definition_schema = json.load(jsonfile)
schema_store = {
data_schema['$id']: data_schema,
schema['$id']: schema,
definition_schema['$id']: definition_schema
}

content = request.get_json()

with open('./schemas/request.schema.json') as jsonfile:
request_schema = json.load(jsonfile)
# Validate incoming request
resolver = jsonschema.RefResolver.from_schema(
request_schema, store=schema_store)

validated = validate(
schema=request_schema,
data=content,
resolver=resolver,
json_message="Invalid Request JSON format",
error_code=400)

if validated is not None:
return validated
ok, _ = VALIDATOR.check_request(content)
if not ok:
return jsonify("Invalid Request JSON format"), 400
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We never access the error message, so I'm not sure we need it at all.

Does it make sense to modify the check_* functions to return the Boolean values (instead of tuples)?
Example:

def check_request(self, data):
        """
        Validate final response envelope; return (ok, err).
        Logs on failure via validate_response().
        """
        try:
            self.validate_request(data)
            return True
        except jsonschema.exceptions.ValidationError:
            return False

Then we can simplify the check in the preprocessors to

if not VALIDATOR.check_request(content):
    return jsonify("Invalid Request JSON format"), 400

Results:

  1. Validation errors are logged with details in the development environment by the module.
  2. Validation errors are logged with generic messages in the production environment by the module.
  3. The component doesn't receive error messages at all.

Let me know if I'm missing an obvious reason not to do that!


# Check if request is for a map
if 'coordinates' not in content and 'placeID' not in content:
Expand Down Expand Up @@ -113,19 +93,10 @@ def get_map_data():
'places': results,
}

# Use response schema to validate response
resolver = jsonschema.RefResolver.from_schema(
schema, store=schema_store)

validated = validate(
schema=data_schema,
data=data,
resolver=resolver,
json_message='Invalid Preprocessor JSON format',
error_code=500)

if validated is not None:
return validated
# Validate preprocessor data against its schema
ok, _ = VALIDATOR.check_data(data)
if not ok:
return jsonify('Invalid Preprocessor JSON format'), 500

response = {
'request_uuid': request_uuid,
Expand All @@ -134,45 +105,15 @@ def get_map_data():
'data': data
}

validated = validate(
schema=schema,
data=response,
resolver=resolver,
json_message='Invalid Preprocessor JSON format',
error_code=500)

if validated is not None:
return validated
# Validate full response
ok, _ = VALIDATOR.check_response(response)
if not ok:
return jsonify('Invalid Preprocessor JSON format'), 500

logging.debug("Sending response")
return response


def validate(schema, data, resolver, json_message, error_code):
    """
    Check a piece of data against a JSON schema.

    Args:
        schema: the JSON schema to validate against
        data: the payload to validate
        resolver: a JSON schema reference resolver
        json_message: the message to jsonify and return on failure
        error_code: the HTTP status code to return on failure

    Returns:
        None when the data is valid, otherwise a
        Tuple[flask.Response, int] error response.
    """
    try:
        checker = jsonschema.Draft7Validator(schema, resolver=resolver)
        checker.validate(data)
    except jsonschema.exceptions.ValidationError as error:
        # Generic message in the normal log; the detailed schema error may
        # contain user data, so it goes through the PII-aware channel only.
        logging.error("Validation error occurred")
        logging.pii(f"Validation error: {error.message}")
        return jsonify(json_message), error_code
    return None


def get_coordinates(content):
"""
Retrieve the coordinates of a map from the
Expand Down
1 change: 1 addition & 0 deletions preprocessors/celebrity-detector/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ RUN pip3 install --upgrade pip && \
pip3 install -r /app/requirements.txt

COPY /schemas /app/schemas
COPY /utils /app/utils
COPY /preprocessors/celebrity-detector /app

EXPOSE 5000
Expand Down
56 changes: 20 additions & 36 deletions preprocessors/celebrity-detector/celebrity-detector.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,21 +19,23 @@
import operator

import os
import json
import time
import jsonschema
import logging
import base64
from flask import Flask, request, jsonify
import cv2
import numpy as np
from datetime import datetime
from utils.validation import Validator

app = Flask(__name__)

# extract the required results from the API returned values
# Initialize shared validator once
VALIDATOR = Validator(
data_schema='./schemas/preprocessors/celebrity.schema.json')


# extract the required results from the API returned values
def process_results(response, labels):
logging.debug(response)
if not response["categories"]:
Expand Down Expand Up @@ -120,30 +122,14 @@ def categorise():
logging.debug("Received request")
# load the schema
labels = ["other", "indoor", "outdoor", "people"]
with open('./schemas/preprocessors/celebrity.schema.json') \
as jsonfile:
data_schema = json.load(jsonfile)
with open('./schemas/preprocessor-response.schema.json') \
as jsonfile:
schema = json.load(jsonfile)
with open('./schemas/definitions.json') as jsonfile:
definitionSchema = json.load(jsonfile)
with open('./schemas/request.schema.json') as jsonfile:
first_schema = json.load(jsonfile)
schema_store = {
schema['$id']: schema,
definitionSchema['$id']: definitionSchema
}
resolver = jsonschema.RefResolver.from_schema(
schema, store=schema_store)

content = request.get_json()
try:
validator = jsonschema.Draft7Validator(first_schema, resolver=resolver)
validator.validate(content)
except jsonschema.exceptions.ValidationError as e:
logging.error(e)

# request schema validation
ok, _ = VALIDATOR.check_request(content)
if not ok:
return jsonify("Invalid Preprocessor JSON format"), 400

request_uuid = content["request_uuid"]
timestamp = time.time()
preprocessor_name = "ca.mcgill.a11y.image.preprocessor.celebrityDetector"
Expand Down Expand Up @@ -195,26 +181,24 @@ def categorise():
}
final_data.append(celebrities)
data = {"celebrities": final_data}
try:
validator = jsonschema.Draft7Validator(data_schema)
validator.validate(data)
except jsonschema.exceptions.ValidationError as e:
logging.error(e)

# data schema validation
ok, _ = VALIDATOR.check_data(data)
if not ok:
return jsonify("Invalid Preprocessor JSON format"), 500

response = {
"request_uuid": request_uuid,
"timestamp": int(timestamp),
"name": preprocessor_name,
"data": data
}
# validate the results to check if they are in correct format
try:
validator = jsonschema.Draft7Validator(schema,
resolver=resolver)
validator.validate(response)
except jsonschema.exceptions.ValidationError as e:
logging.error(e)

# response validation
ok, _ = VALIDATOR.check_response(response)
if not ok:
return jsonify("Invalid Preprocessor JSON format"), 500

logging.debug("Detected " + str(len(final_data)) +
"celebrities out of " + str(len(objects)) + "objects")
return response
Expand Down
2 changes: 2 additions & 0 deletions preprocessors/clothes-detector/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ RUN pip3 install --upgrade pip && \
COPY /schemas /app/schemas
# COPY ./schemas /app/schemas

COPY /utils /app/utils

# good practice to remove the archive
RUN wget https://image.a11y.mcgill.ca/models/clothesDetector/yolo.zip && \
unzip yolo.zip && \
Expand Down
54 changes: 19 additions & 35 deletions preprocessors/clothes-detector/clothes.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,7 @@
# If not, see
# <https://github.com/Shared-Reality-Lab/IMAGE-server/blob/main/LICENSE>.

import json
import time
import jsonschema
import logging
import base64
from flask import Flask, request, jsonify
Expand All @@ -35,10 +33,15 @@
from yolo.utils.utils import load_classes
from predictors.YOLOv3 import YOLOv3Predictor
from datetime import datetime
from utils.validation import Validator

app = Flask(__name__)
logging.basicConfig(level=logging.NOTSET)

# Initialize shared validator
VALIDATOR = Validator(
data_schema='./schemas/preprocessors/clothes.schema.json')

# code referred from
# https://medium.com/codex/rgb-to-color-names-in-python-the-robust-way-ec4a9d97a01f

Expand Down Expand Up @@ -102,30 +105,14 @@ def get_clothes(img):
def categorise():
final_data = []
logging.debug("Received request")
with open('./schemas/preprocessors/clothes.schema.json') \
as jsonfile:
data_schema = json.load(jsonfile)
with open('./schemas/preprocessor-response.schema.json') \
as jsonfile:
schema = json.load(jsonfile)
with open('./schemas/definitions.json') as jsonfile:
definitionSchema = json.load(jsonfile)
with open('./schemas/request.schema.json') as jsonfile:
first_schema = json.load(jsonfile)
schema_store = {
schema['$id']: schema,
definitionSchema['$id']: definitionSchema
}
resolver = jsonschema.RefResolver.from_schema(
schema, store=schema_store)

content = request.get_json()
try:
validator = jsonschema.Draft7Validator(first_schema, resolver=resolver)
validator.validate(content)
except jsonschema.exceptions.ValidationError as e:
logging.error(e)

# request schema validation
ok, _ = VALIDATOR.check_request(content)
if not ok:
return jsonify("Invalid Preprocessor JSON format"), 400

request_uuid = content["request_uuid"]
timestamp = time.time()
preprocessor_name = "ca.mcgill.a11y.image.preprocessor.clothesDetector"
Expand Down Expand Up @@ -172,26 +159,23 @@ def categorise():
final_data.append(clothes)
logging.info(final_data)
data = {"clothes": final_data}
try:
validator = jsonschema.Draft7Validator(data_schema)
validator.validate(data)
except jsonschema.exceptions.ValidationError as e:
logging.error(e)

# data schema validation
ok, _ = VALIDATOR.check_data(data)
if not ok:
return jsonify("Invalid Preprocessor JSON format"), 500

response = {
"request_uuid": request_uuid,
"timestamp": int(timestamp),
"name": preprocessor_name,
"data": data
}
# validate the results to check if they are in correct format
try:
validator = jsonschema.Draft7Validator(schema,
resolver=resolver)
validator.validate(response)
except jsonschema.exceptions.ValidationError as e:
logging.error(e)
# response validation
ok, _ = VALIDATOR.check_response(response)
if not ok:
return jsonify("Invalid Preprocessor JSON format"), 500

logging.debug("Sending response")
return response

Expand Down
1 change: 1 addition & 0 deletions preprocessors/collage-detector/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ RUN pip3 install --upgrade pip && \

COPY /schemas /app/schemas
COPY /config /app/config
COPY /utils /app/utils
COPY /preprocessors/collage-detector/ /app

EXPOSE 5000
Expand Down
Loading