#!/usr/bin/env python3
"""Benchmark sketch: does parallelization make copying faster?

Scenarios to cover:
1. Sources or destination on: HDD, SSD, network share
2. All sources and dest are on same mount (physical drive, net share)
3. All sources are on same mount, but dest on another drive
4. All sources are on different drives
5. Sources are several large files (= number of jobs)
6. Sources are lots of small files
"""
from itertools import chain

# (label, mount path) pairs — each entry is one device class under test.
DIRS = [
    ('HDD1', '/mnt/data1/tmp'),
    ('HDD2', '/mnt/data2/tmp'),
    ('SSD', '/home/nik/tmp'),
    ('SSH', '/home/nik/acid/tmp'),
    ('MEM', '/tmp/bench'),
]

# File sizes in bytes: lots of small files / medium / few large files.
small = 100
med = 100_000
large = 50_000_000


def src_same_as(dest):
    """Yield source-dir groups on the same mount as *dest*.

    TODO(review): stub — yields nothing until the real layout logic
    (scenario 2 above) is implemented.
    """
    return iter(())


def single_src_except(dest):
    """Yield single-source groups on mounts other than *dest*'s.

    TODO(review): stub — yields nothing until scenario 3 is implemented.
    """
    return iter(())


def multiple_src_except(dest):
    """Yield multi-source groups spread across drives other than *dest*'s.

    TODO(review): stub — yields nothing until scenario 4 is implemented.
    """
    return iter(())


# Main benchmark grid: every destination x every source layout x every
# file size.  Body is still being designed — see the plan comments below.
for dest in DIRS:
    for src_dirs in chain(src_same_as(dest), single_src_except(dest), multiple_src_except(dest)):
        for file_size in (small, med, large):
            # device_type = {hdd, ssd, net, mem}
            #
            # for dest in dest_dirs:
            #     for src_dirs in same_src_as(dest), sundry_src_single(dest), sundry_src_all(dest):
            #         for file_size in (small, med, large):
            #             sources = make_sources([src_dirs], file_size=N)
            #             for jobs in range(1, 12):
            #                 run_test(dest_dir, sources, jobs=jobs)
            pass