Did not do part 2, but must complete it even if it is of no use.

2024-03-17 22:16:52 -07:00
parent 56c521ae2e
commit 582233502d
1273 changed files with 9072 additions and 0 deletions


@@ -0,0 +1,54 @@
# CSE546 workload_generator
This repository contains the workload generator and examples showing how to use it.
Usage:
```
usage: workload_generator.py [-h] [--num_request NUM_REQUEST] [--url URL] [--image_folder IMAGE_FOLDER] [--prediction_file PREDICTION_FILE]
Upload images
options:
-h, --help show this help message and exit
--num_request NUM_REQUEST
one image per request
--url URL URL to the backend server, e.g. http://3.86.108.221:8000/
--image_folder IMAGE_FOLDER
the path of the folder where images are saved
--prediction_file PREDICTION_FILE
the path of the classification results file
```
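Based on how `workload_generator.py` reads it, the `--prediction_file` CSV is expected to have an `Image` column (the image name without its extension) and a `Results` column (the ground-truth label). An illustrative snippet, using the `test_411`/`Emily` pair from the sample output below:
```
Image,Results
test_411,Emily
```
Additional rows follow the same `Image,Results` pattern, one row per image that will be uploaded.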
Examples:
The following command sends three requests to a PHP backend:
```
python3 workload_generator.py \
--num_request 3 \
--url 'http://your_host_ip/php_server.php' \
--image_folder "your_local_image_folder" \
--prediction_file "ground_truth_prediction_file"
```
The following command sends three requests to a Node.js or Python backend:
```
python3 workload_generator.py \
--num_request 3 \
--url 'http://<server_ip>:<server_port>' \
--image_folder "your_local_image_folder" \
--prediction_file "ground_truth_prediction_file"
```
**Note**: The workload generator expects absolute paths for the `--image_folder` and `--prediction_file` command-line arguments.
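For example (the folder and file paths below are placeholders, not files shipped with this repository):
```
python3 workload_generator.py \
    --num_request 3 \
    --url 'http://<server_ip>:<server_port>' \
    --image_folder "/home/ubuntu/face_images" \
    --prediction_file "/home/ubuntu/classification_results.csv"
```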
Sample output:
```
test_411.jpg uploaded!
Classification result: test_411:Emily
Waiting ...
.
+++++ Test Result Statistics +++++
Total number of requests: 1000
Total number of requests completed successfully: 1000
Total number of failed requests: 0
Total number of correct predictions: 1000
Total number of wrong predictions: 0
Total Test Duration: 2.104762077331543 (seconds)
```
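The generator POSTs each image as a multipart upload under the form key `inputFile` and expects the response body to be plain text of the form `<image_name>:<prediction>` (see `send_one_request` in `workload_generator.py` and the sample output above). Below is a minimal sketch of a backend that satisfies this contract, assuming Flask; Flask is not part of this repository, the file name `minimal_backend.py` is hypothetical, and `classify()` is a placeholder for a real model call.
```
# minimal_backend.py -- illustrative sketch only; assumes Flask is installed
# and that classify() is replaced with a real face-recognition model.
import os
from flask import Flask, request

app = Flask(__name__)

def classify(image_bytes):
    # Placeholder: return a fixed label instead of running a model.
    return "Emily"

@app.route("/", methods=["POST"])
def handle_upload():
    # The workload generator sends the image under the key "inputFile".
    uploaded = request.files["inputFile"]
    name = os.path.splitext(uploaded.filename)[0]
    result = classify(uploaded.read())
    # Respond with "<image_name>:<prediction>", which the generator parses.
    return f"{name}:{result}"

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000)
```
A PHP or Node.js handler works the same way, as long as it reads the `inputFile` upload and returns the same plain-text format.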


@@ -0,0 +1,95 @@
__copyright__ = "Copyright 2024, VISA Lab"
__license__ = "MIT"

import os
import time
import argparse
import threading

import requests
import pandas as pd
from concurrent.futures import ThreadPoolExecutor
parser = argparse.ArgumentParser(description='Upload images')
parser.add_argument('--num_request', type=int, help='one image per request')
parser.add_argument('--url', type=str, help='URL to the backend server, e.g. http://3.86.108.221:8000/')
parser.add_argument('--image_folder', type=str, help='the path of the folder where images are saved')
parser.add_argument('--prediction_file', type=str, help='the path of the classification results file')
args = parser.parse_args()

num_request = args.num_request
url = args.url
image_folder = args.image_folder
prediction_file = args.prediction_file

# Ground-truth labels: a CSV with an 'Image' column (image name without
# extension) and a 'Results' column (expected classification result).
prediction_df = pd.read_csv(prediction_file)

# Counters shared across worker threads; guarded by counter_lock.
responses = 0
err_responses = 0
correct_predictions = 0
wrong_predictions = 0
ex_requests = []  # requests that raised an exception and will be retried
counter_lock = threading.Lock()
def send_one_request(image_path):
    global responses, err_responses, correct_predictions, wrong_predictions

    filename = os.path.basename(image_path)
    try:
        # "inputFile" is the key of the multipart HTTP payload.
        with open(image_path, 'rb') as f:
            response = requests.post(url, files={"inputFile": f})

        if response.status_code != 200:
            # Print an error message if the request failed.
            print('sendErr: ' + response.url)
            with counter_lock:
                err_responses += 1
        else:
            print(filename + ' uploaded!\n' + 'Classification result: ' + response.text)
            # Compare the returned label against the ground truth for this image.
            correct_result = prediction_df.loc[prediction_df['Image'] == filename.split('.')[0], 'Results'].iloc[0]
            is_correct = correct_result.strip() == response.text.split(':')[1].strip()
            with counter_lock:
                responses += 1
                if is_correct:
                    correct_predictions += 1
                else:
                    wrong_predictions += 1
    except requests.exceptions.RequestException as errex:
        # Network-level failures are queued for another attempt.
        print("Exception:", errex)
        ex_requests.append(image_path)
num_max_workers = 100

# Collect up to num_request image paths from the image folder.
image_path_list = []
test_start_time = time.time()
for i, name in enumerate(os.listdir(image_folder)):
    if i == num_request:
        break
    image_path_list.append(os.path.join(image_folder, name))

# Send all requests concurrently, one image per request.
with ThreadPoolExecutor(max_workers=num_max_workers) as executor:
    executor.map(send_one_request, image_path_list)
print(f"Attempt-1 {responses}/{num_request} requests successful.")

# Keep retrying the requests that raised exceptions until none remain.
retry_attempt = 2
while ex_requests:
    retry_requests = ex_requests.copy()
    ex_requests.clear()
    with ThreadPoolExecutor(max_workers=num_max_workers) as executor:
        executor.map(send_one_request, retry_requests)
    print(f"Attempt-{retry_attempt} {responses}/{num_request} requests successful.")
    retry_attempt += 1

test_duration = time.time() - test_start_time
print("All requests have been processed or retried.")

# Print the summary only once every request has either succeeded or failed.
if num_request == (responses + err_responses):
    print("+++++ Test Result Statistics +++++")
    print(f"Total number of requests: {num_request}")
    print(f"Total number of requests completed successfully: {responses}")
    print(f"Total number of failed requests: {err_responses}")
    print(f"Total number of correct predictions: {correct_predictions}")
    print(f"Total number of wrong predictions: {wrong_predictions}")
    print(f"Total Test Duration: {test_duration} (seconds)")
    print("++++++++++++++++++++++++++++++++++++")