from werkzeug.datastructures import FileStorage

from backend import api_bp, dataset_path, service
from backend.classification.dto import ClassificationResultDto
from backend.dataset.dto import DatasetUploadDto
from backend.dataset.model import DatasetParams
from backend.dto import ClassificationDto, RegressionDto
from backend.regression.dto import RegressionResultDto
from backend.tree.model import DecisionTreeParams
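
# POST /regression: accepts a dataset file upload plus dataset and
# decision-tree options passed as query parameters, then delegates the
# work to the regression service.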
@api_bp.post("/regression")
@api_bp.input(DatasetUploadDto, location="files")
@api_bp.input(RegressionDto, location="query")
@api_bp.output(RegressionResultDto)
def regression(files_data, query_data):
    uploaded_file: FileStorage = files_data["dataset"]
    schema = RegressionDto()
    dataset_params: DatasetParams = schema.get_dataset_params(query_data)
    tree_params: DecisionTreeParams = schema.get_tree_params(query_data)
    return service.run_regression(
        dataset_path, uploaded_file, dataset_params, tree_params
    )
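
# POST /classification: same flow as the regression endpoint, but the
# query parameters are validated with ClassificationDto and the work is
# delegated to the classification service.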
@api_bp.post("/classification")
@api_bp.input(DatasetUploadDto, location="files")
@api_bp.input(ClassificationDto, location="query")
@api_bp.output(ClassificationResultDto)
def classification(files_data, query_data):
    uploaded_file: FileStorage = files_data["dataset"]
    schema = ClassificationDto()
    dataset_params: DatasetParams = schema.get_dataset_params(query_data)
    tree_params: DecisionTreeParams = schema.get_tree_params(query_data)
    return service.run_classification(
        dataset_path, uploaded_file, dataset_params, tree_params
    )
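
# Example request sketch. Assumptions not defined in this module: the
# blueprint is mounted at the app root, and the RegressionDto query schema
# accepts a "max_depth" field (the "dataset" file field comes from
# DatasetUploadDto above).
#
#   curl -X POST "http://localhost:5000/regression?max_depth=3" \
#        -F "dataset=@data.csv"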