# Import necessary libraries
import numpy as np
import joblib  # For loading the serialized model
import pandas as pd  # For data manipulation
from flask import Flask, request, jsonify  # For creating the Flask API
from pathlib import Path  # For using a robust, absolute path

# Define the base directory of the script
BASE_DIR = Path(__file__).resolve().parent

# Define the full path to your model file
MODEL_PATH = BASE_DIR / "xgb_tuned.joblib"

# Initialize the Flask application
superkart_api = Flask("SuperKart Sales Predictor")

# Load the trained machine learning model once at startup
model = joblib.load(MODEL_PATH)

# Define a route for the home page (GET request)
@superkart_api.route("/")
def home():
    """
    This function handles GET requests to the root URL ('/') of the API.
    It returns a simple welcome message.
    """
    return "Welcome to the SuperKart Sales Predictor API!"

# Define an endpoint to predict for a single observation (POST request)
@superkart_api.route("/v1/predict", methods=["POST"])
def predict_sales():
    """
    This function handles POST requests to the '/v1/predict' endpoint.
    It expects a JSON payload containing product and store details and
    returns the predicted sales as a JSON response.
    """
    # Get JSON data from the request
    data = request.get_json()

    # Extract the relevant product and store features from the input data.
    # The order of the column names matters.
    sample = {
        'Product_Weight': data['Product_Weight'],
        'Product_MRP': data['Product_MRP'],
        'Product_Allocated_Area': data['Product_Allocated_Area'],
        'Product_Sugar_Content': data['Product_Sugar_Content'],
        'Store_Size': data['Store_Size'],
        'Store_Location_City_Type': data['Store_Location_City_Type'],
        'Store_Type': data['Store_Type'],
        'Store_Age_Years': data['Store_Age_Years'],
        'Product_Id_prefix': data['Product_Id_prefix'],
        'Product_FD_perishable': data['Product_FD_perishable'],
    }

    # Convert the extracted data into a DataFrame
    input_data = pd.DataFrame([sample])

    # Make a store sales prediction using the trained model
    prediction = model.predict(input_data).tolist()[0]

    # Return the prediction as a JSON response
    return jsonify({'Sales': prediction})
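
# A minimal sketch of the JSON payload that '/v1/predict' expects. The keys
# match the features extracted above; the values below are illustrative
# placeholders only -- the valid numeric ranges and category labels depend
# on the data the model was trained with.
#
# {
#     "Product_Weight": 12.5,
#     "Product_MRP": 120.0,
#     "Product_Allocated_Area": 0.05,
#     "Product_Sugar_Content": "Low Sugar",
#     "Store_Size": "Medium",
#     "Store_Location_City_Type": "Tier 2",
#     "Store_Type": "Supermarket Type2",
#     "Store_Age_Years": 15,
#     "Product_Id_prefix": "FD",
#     "Product_FD_perishable": 1
# }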

# Define an endpoint for batch prediction (POST request)
@superkart_api.route("/v1/batch", methods=["POST"])
def predict_sales_batch():
    """
    This function handles POST requests to the '/v1/batch' endpoint.
    It expects an uploaded CSV file containing a 'Product_Id' column plus the
    same feature columns used by '/v1/predict', and returns the predicted
    sales for each product as a dictionary in the JSON response.
    """
    # Get the uploaded CSV file from the request
    file = request.files['file']

    # Read the CSV file into a Pandas DataFrame
    input_data = pd.read_csv(file)
    # Make predictions for all products in the DataFrame. The 'Product_Id'
    # column is only an identifier, so it is dropped before the data is
    # passed to the model.
    features = input_data.drop(columns=['Product_Id'])
    predicted_sales = model.predict(features).tolist()

    # Create a dictionary of predictions with product IDs as keys
    product_ids = input_data['Product_Id'].tolist()
    output_dict = dict(zip(product_ids, predicted_sales))

    # Return the predictions dictionary as a JSON response
    return jsonify(output_dict)

# Run the Flask app in debug mode
if __name__ == '__main__':
    superkart_api.run(debug=True)
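
# ---------------------------------------------------------------------------
# Example client usage: a minimal sketch, not part of the API itself. It
# assumes the app is running locally on Flask's default port 5000, that
# `sample_payload` is a dict like the example payload shown above, and that
# "products.csv" is a hypothetical file with a 'Product_Id' column plus the
# feature columns listed above. Run it from a separate script or shell.
#
# import requests
#
# # Single prediction
# response = requests.post("http://127.0.0.1:5000/v1/predict", json=sample_payload)
# print(response.json())
#
# # Batch prediction
# with open("products.csv", "rb") as f:
#     response = requests.post("http://127.0.0.1:5000/v1/batch", files={"file": f})
# print(response.json())
# ---------------------------------------------------------------------------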