cold_start_inference.sh
#!/bin/bash
#TODO: Replace with the full path of the dataset
DATA_FILE="/data/SUSY.csv"
#TODO: Replace this with the absolute path of the directory in which the packed model is located.
ORIG_PACK_DIR="/data/"
#TODO: Replace with the number of copies to make of the model. Each inference runs on a different copy of the model in a cyclical manner to force a cold start; this is only needed for the benchmarks, to ensure cold-start latency.
NUM_FILES=1
#TODO: Replace with the desired location of the logs (inference latency and block sizes). Note: the log directory must exist; see the optional mkdir sketch below.
LOG_DIR='/data/logs_cifar_eightthread/'
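# Optional convenience (not part of the original script): create the log directory
# automatically instead of by hand. Uses only the LOG_DIR variable defined above;
# uncomment if desired.
#mkdir -p "$LOG_DIR"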
#TODO: Replace with the column containing the label in the test data
LAB_COL='0'
#TODO: Replace with the block size
BLOCKSIZE=128
#TODO: Replace with the batch size, i.e. the number of inference samples per batch (for benchmark purposes)
BATCHSIZE=1
#TODO: Change to "gradientboost" for gradient boosted trees
ALGORITHM="randomforest"
#TODO: Change to "regression" for regression
TASK="classification"
declare -a layoutArray=("binstatdfs")
for val in "${layoutArray[@]}"; do
#TODO: You can replace this with the full absolute path of the packed model and metadata
PACK_FILE="packedmodel${val}"
META_FILE="metadata${val}.txt"
#python3 scripts/copy_files_single_dir.py $NUM_FILES $ORIG_PACK_DIR $PACK_FILE
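#Hedged alternative (an assumption, not the repository's helper): if
#scripts/copy_files_single_dir.py is unavailable, the copy step can be
#approximated in plain bash. The "${PACK_FILE}${i}" naming below is a guess at
#the copy-naming convention the binary expects with --numfiles; check
#copy_files_single_dir.py for the actual scheme before relying on it.
#for ((i = 0; i < NUM_FILES; i++)); do
#  cp "${ORIG_PACK_DIR}/${PACK_FILE}" "${ORIG_PACK_DIR}/${PACK_FILE}${i}"
#done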
./exe --batchsize "$BATCHSIZE" --blocksize "$BLOCKSIZE" --mode inference --logdir "$LOG_DIR" --format binary --metadatafilename "${ORIG_PACK_DIR}/${META_FILE}" --labelcol "$LAB_COL" --layout "${val}" --intertwine 4 --modelfilename "${ORIG_PACK_DIR}/${PACK_FILE}" --numfiles "$NUM_FILES" --datafilename "$DATA_FILE" --numthreads 1 --package sklearn --algorithm "$ALGORITHM" --task "$TASK"
done
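# Usage note (an assumption about the repository layout, which is not shown
# here): fill in the TODO values above, make sure ./exe and the packed
# model/metadata files exist, then run the script from the directory that
# contains ./exe, e.g.:
#   bash cold_start_inference.sh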