diff --git a/.gitignore b/.gitignore
index 977065f4e4..591b14f571 100644
--- a/.gitignore
+++ b/.gitignore
@@ -41,3 +41,9 @@ _history
 # Institution specific config
 config/*_setting.rb
 !config/no_institution_setting.rb
+
+# Ignore TorchServe logs and config
+ml_services/logs/*
+
+# Ignore TorchServe auto-generated key file
+ml_services/key_file.json
diff --git a/Gemfile.lock b/Gemfile.lock
index 6bb903f2a6..0a935f1e2a 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -174,6 +174,7 @@ GEM
     faraday-net_http (3.4.0)
       net-http (>= 0.5.0)
     ffi (1.17.1-aarch64-linux-gnu)
+    ffi (1.17.1-arm64-darwin)
     ffi (1.17.1-x86_64-linux-gnu)
     fugit (1.11.1)
       et-orbi (~> 1, >= 1.2.11)
@@ -281,6 +282,8 @@ GEM
     nio4r (2.7.4)
     nokogiri (1.18.7-aarch64-linux-gnu)
       racc (~> 1.4)
+    nokogiri (1.18.7-arm64-darwin)
+      racc (~> 1.4)
     nokogiri (1.18.7-x86_64-linux-gnu)
       racc (~> 1.4)
     numerizer (0.1.1)
@@ -559,6 +562,7 @@ GEM
 
 PLATFORMS
   aarch64-linux
+  arm64-darwin-23
   x86_64-linux
 
 DEPENDENCIES
diff --git a/app/api/tasks_api.rb b/app/api/tasks_api.rb
index a810354367..fec32c49bf 100644
--- a/app/api/tasks_api.rb
+++ b/app/api/tasks_api.rb
@@ -519,4 +519,15 @@ class TasksApi < Grape::API
     true
   end
 
+  # Effort prediction endpoint for a task
+  desc 'Predict effort for a task'
+  params do
+    requires :features, type: Array[Float], desc: 'Feature values'
+  end
+  post :predict_effort do
+    features = params[:features]
+    prediction = EffortPredictionService.predict(features)
+    { predicted_effort: prediction }
+  end
+
 end
diff --git a/app/controllers/task_downloads_controller.rb b/app/controllers/task_downloads_controller.rb
index 2a0c8d0763..072c66240b 100644
--- a/app/controllers/task_downloads_controller.rb
+++ b/app/controllers/task_downloads_controller.rb
@@ -49,4 +49,28 @@ def index
   rescue MyException => e
     render json: e.message, status: e.status
   end
+
+  # prediction effort function
+
+  protect_from_forgery with: :null_session # allow API POST without CSRF token
+  # skip_before_action :verify_authenticity_token, only: [:predict_effort]
+
+  # POST /tasks/predict_effort
+  def predict_effort
+    features = params[:features]
+
+    if features.blank?
+      render json: { error: "Features parameter is required" }, status: :bad_request
+      return
+    end
+
+    prediction_value = EffortPredictionService.predicted_effort(features)
+
+    if prediction_value
+      render json: { predicted_effort: prediction_value }
+    else
+      render json: { error: "Prediction failed" }, status: :internal_server_error
+    end
+  end
+
 end
diff --git a/app/services/effort_prediction_service.rb b/app/services/effort_prediction_service.rb
new file mode 100644
index 0000000000..8530fcea74
--- /dev/null
+++ b/app/services/effort_prediction_service.rb
@@ -0,0 +1,45 @@
+# app/services/effort_prediction_service.rb
+
+require 'net/http'
+require 'json'
+
+class EffortPredictionService
+  TORCHSERVE_URL = ENV.fetch("TORCHSERVE_URL", "http://effort-predictor:8080/predictions/effort-predictor")
+
+  def self.predict(features)
+    uri = URI(TORCHSERVE_URL)
+    headers = {
+      "Content-Type" => "application/json",
+      "Authorization" => "Bearer #{ENV.fetch('TORCHSERVE_INFERENCE_KEY', nil)}"
+    }
+    body = { features: features }.to_json
+
+    response = Net::HTTP.post(uri, body, headers)
+
+    if response.is_a?(Net::HTTPSuccess)
+      parsed = begin
+        JSON.parse(response.body)
+      rescue StandardError
+        response.body
+      end
+      parsed.is_a?(Hash) ? parsed["predicted_effort"] || parsed.values.first : parsed
+    else
+      Rails.logger.error("TorchServe error: #{response.code} #{response.body}")
+      nil
+    end
+  end
+
+  # Added a new helper method
+  def self.predicted_effort(features)
+    result = predict(features)
+    case result
+    when Array
+      result.first
+    when Hash
+      result["predicted_effort"] || result.values.first
+    else
+      result
+    end
+  end
+
+end
diff --git a/config/routes.rb b/config/routes.rb
index ea52a79000..dc98fcaf84 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -5,6 +5,7 @@
   get 'api/submission/unit/:id/task_definitions/:task_def_id/download_submissions', to: 'task_downloads#index'
   get 'api/submission/unit/:id/task_definitions/:task_def_id/student_pdfs', to: 'task_submission_pdfs#index'
   get 'api/units/:id/all_resources', to: 'lecture_resource_downloads#index'
+  post 'tasks/predict_effort', to: 'task_downloads#predict_effort'
 
   mount ApiRoot => '/'
   mount GrapeSwaggerRails::Engine => '/api/docs'
diff --git a/ml_services/dummy_model.py b/ml_services/dummy_model.py
new file mode 100644
index 0000000000..d505bdd4ab
--- /dev/null
+++ b/ml_services/dummy_model.py
@@ -0,0 +1,16 @@
+import torch
+import torch.nn as nn
+
+class EffortPredictor(nn.Module):
+    def __init__(self, input_dim):
+        super(EffortPredictor, self).__init__()
+        self.fc = nn.Linear(input_dim, 1)
+
+    def forward(self, x):
+        return self.fc(x)
+
+# Create a dummy model with 10 input features
+model = EffortPredictor(input_dim=10)
+
+# Save its state dict as effort_model.pth
+torch.save(model.state_dict(), "effort_model.pth")
diff --git a/ml_services/effort_model.pth b/ml_services/effort_model.pth
new file mode 100644
index 0000000000..be08bddf8a
Binary files /dev/null and b/ml_services/effort_model.pth differ
diff --git a/ml_services/handlers/effort_regression_handler.py b/ml_services/handlers/effort_regression_handler.py
new file mode 100644
index 0000000000..f53fb735e1
--- /dev/null
+++ b/ml_services/handlers/effort_regression_handler.py
@@ -0,0 +1,31 @@
+
+import json
+import torch
+from ts.torch_handler.base_handler import BaseHandler
+
+class EffortRegressionHandler(BaseHandler):
+    def postprocess(self, data):
+        if isinstance(data, torch.Tensor):
+            return {"predicted_effort": float(data.item())}
+        return {"predicted_effort": data}
+
+    def handle(self, data, context):
+        try:
+            # Extract request body
+            body = data[0].get("body")
+
+            features = body["features"]  # no json.loads
+
+            # Convert to tensor
+            tensor = torch.tensor(features).float().unsqueeze(0)
+
+            # Run model
+
+            output = max(0.0, self.model(tensor).item())  # to have positive output
+
+
+            # Return JSON response
+            return [json.dumps({"predicted_effort": output})]
+
+        except Exception as e:
+            return [json.dumps({"error": str(e)})]
diff --git a/ml_services/key_file.json b/ml_services/key_file.json
new file mode 100644
index 0000000000..903a04b42b
--- /dev/null
+++ b/ml_services/key_file.json
@@ -0,0 +1,13 @@
+{
+  "management": {
+    "key": "REDACTED",
+    "expiration time": "2026-04-13T15:16:21.380749386Z"
+  },
+  "inference": {
+    "key": "REDACTED",
+    "expiration time": "2026-04-13T15:16:21.380728803Z"
+  },
+  "API": {
+    "key": "REDACTED"
+  }
+}
\ No newline at end of file
diff --git a/ml_services/model_store/effort-predictor.mar b/ml_services/model_store/effort-predictor.mar
new file mode 100644
index 0000000000..5cf81964da
Binary files /dev/null and b/ml_services/model_store/effort-predictor.mar differ
diff --git a/ml_services/models/effort_model.py b/ml_services/models/effort_model.py
new file mode 100644
index 0000000000..43baa3fb3b
--- /dev/null
+++ b/ml_services/models/effort_model.py
@@ -0,0 +1,16 @@
+import torch
+import torch.nn as nn
+
+class EffortPredictor(nn.Module):
+    def __init__(self, input_dim=10):  # initialise with a default value
+        super(EffortPredictor, self).__init__()
+        self.fc = nn.Linear(input_dim, 1)
+
+    def forward(self, x):
+        return self.fc(x)
+
+def get_model():
+    model = EffortPredictor(input_dim=10)  # adjust to your features
+    model.load_state_dict(torch.load("effort_model.pth"))
+    model.eval()
+    return model
diff --git a/ml_services/scripts/build_mar.sh b/ml_services/scripts/build_mar.sh
new file mode 100644
index 0000000000..fdbe265ead
--- /dev/null
+++ b/ml_services/scripts/build_mar.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+torch-model-archiver \
+  --model-name effort-predictor \
+  --version 1.0 \
+  --model-file models/effort_model.py \
+  --serialized-file effort_model.pth \
+  --handler handlers/effort_regression_handler.py \
+  --export-path model_store \
+  --force
diff --git a/texlive.Dockerfile b/texlive.Dockerfile
index c68816ce24..968abf3506 100644
--- a/texlive.Dockerfile
+++ b/texlive.Dockerfile
@@ -31,7 +31,8 @@ RUN apt-get update && \
 ENV PATH=$PATH:/opt/texlive/bin/x86_64-linux:/opt/texlive/bin/aarch64-linux
 
 # Install required TeX Live packages for lualatex compilation
-RUN tlmgr install \
+RUN tlmgr update --self && tlmgr update --all && tlmgr install \
+#RUN tlmgr install \
   catchfile \
   csvsimple \
   environ \
@@ -53,7 +54,7 @@ RUN tlmgr install \
   paralist \
   pdfcol \
   pdflscape \
-  pdfmanagement-testphase \
+  #pdfmanagement-testphase \
   pdfpages \
   tagpdf \
   tcolorbox \