#!/bin/bash
#
# exp1-many-files benchmark: generate one large file of random data with dd,
# then split it into many equal pieces. All work happens in a freshly
# recreated directory $HOME/exp1-many-files.
#
# Usage: $0 <size> <count>
#   $1  size of each piece (dd bs=, e.g. 10k)
#   $2  number of final files (dd count= and split -n)

set -u

readonly EXP1NAME=exp1-many-files

# Validate parameters. NOTE(review): the original did `echo … && exit`, which
# exits with status 0 on failure (echo succeeded) — callers could not detect
# the error. Also, `[ -a/-o ]` is obsolescent; use [[ … || … ]].
if [[ -z "${1:-}" || -z "${2:-}" ]]; then
  echo "FAILURE need 2 parameters: param1 is size of each file (e.g. 10k), param2 is number of final files" >&2
  exit 1
fi

EXP1SIZE=$1
EXP1COUNT=$2

echo "parameters are all good"
echo "EXP1SIZE = $EXP1SIZE"
echo "EXP1COUNT = $EXP1COUNT"

# Recreate the working directory under $HOME. The cd's are guarded so the
# destructive rm -rf / dd never run in an unexpected directory.
cd "$HOME" || exit 1
rm -rf -- "${EXP1NAME}"
mkdir -- "${EXP1NAME}" || exit 1
cd "${EXP1NAME}" || exit 1

echo 'START'

# Generate 1 big file with dd, then split it.
# Random data is used to make sure data is actually written (not sparse).
echo "Generating one file by adding ${EXP1COUNT} blocks of ${EXP1SIZE} with random content from /dev/urandom"
time dd if=/dev/urandom of=random.img count="${EXP1COUNT}" bs="${EXP1SIZE}" > /dev/null 2>&1

echo "splitting the file into ${EXP1COUNT} pieces"
time split -n "${EXP1COUNT}" random.img

sync
echo 'END'