forked from BerriAI/proxy_load_tester_2
-
Notifications
You must be signed in to change notification settings - Fork 0
/
run_test.sh
executable file
·57 lines (40 loc) · 1.7 KB
/
run_test.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
#!/bin/bash
# Function to run Locust test
# Run the all-cache-hits Locust scenario: 20 users, spawn rate 20/s, 300s.
# Target host defaults to the LiteLLM proxy ELB but can be overridden via
# the LOAD_TEST_HOST env var. Results are written to load_test*.csv
# (overwritten on each run; interpret_load_test.py reads them afterwards).
run_all_cache_hits_locust_test() {
  locust -f all_cache_hits.py --headless -u 20 -r 20 \
    -H "${LOAD_TEST_HOST:-http://a472dc7c273fd47fd9a20434f463afd1-393291597.us-west-2.elb.amazonaws.com:4000/}" \
    -t 300 --csv load_test
}
# Run the no-cache-hits Locust scenario: 20 users, spawn rate 20/s, 300s.
# Target host defaults to the LiteLLM proxy ELB but can be overridden via
# the LOAD_TEST_HOST env var. Results are written to load_test*.csv
# (overwritten on each run; interpret_load_test.py reads them afterwards).
run_no_cache_hits_locust_test() {
  locust -f no_cache_hits.py --headless -u 20 -r 20 \
    -H "${LOAD_TEST_HOST:-http://a472dc7c273fd47fd9a20434f463afd1-393291597.us-west-2.elb.amazonaws.com:4000/}" \
    -t 300 --csv load_test
}
# Run the cache-disabled Locust scenario: 20 users, spawn rate 20/s, 300s.
# Target host defaults to the LiteLLM proxy ELB but can be overridden via
# the LOAD_TEST_HOST env var. Results are written to load_test*.csv
# (overwritten on each run; interpret_load_test.py reads them afterwards).
run_cache_off_locust_test() {
  locust -f no_cache.py --headless -u 20 -r 20 \
    -H "${LOAD_TEST_HOST:-http://a472dc7c273fd47fd9a20434f463afd1-393291597.us-west-2.elb.amazonaws.com:4000/}" \
    -t 300 --csv load_test
}
# Run the no-cache-hits scenario against the simple OpenAI proxy baseline:
# 20 users, spawn rate 20/s, 300s. Target defaults to the Railway deployment
# but can be overridden via the SIMPLE_PROXY_HOST env var. Results are
# written to load_test*.csv (overwritten on each run).
run_simple_openai_proxy_locust_test() {
  locust -f no_cache_hits.py --headless -u 20 -r 20 \
    -H "${SIMPLE_PROXY_HOST:-https://simplelitellmproxy-production.up.railway.app/openai/}" \
    -t 300 --csv load_test
}
# Deploy your project (assuming deployment commands are here)
# Replace the following line with your deployment commands
echo "Deploying your project..."

# Show what is present in the working directory before testing starts.
ls -lAh

# Loop forever: run each load-test scenario, then summarize its CSV output
# with interpret_load_test.py (labelled per scenario).
while :; do
  echo "Running tests..."

  # Scenario 1: every request is a cache hit.
  run_all_cache_hits_locust_test
  python3 interpret_load_test.py all_cache_hits

  # # Wait for 20 seconds
  # echo "Waiting for 20 seconds..."
  # sleep 20

  # Scenario 2: no cache hits.
  run_no_cache_hits_locust_test
  python3 interpret_load_test.py no_cache_hits

  # Scenario 3: caching switched off entirely.
  run_cache_off_locust_test
  python3 interpret_load_test.py cache_off_test

  # Scenario 4: simple OpenAI proxy baseline.
  run_simple_openai_proxy_locust_test
  python3 interpret_load_test.py simple_openai_proxy
done