Ask Your Question

Revision history [back]

click to hide/show revision 1
initial version

The code by Ebahr worked for multiple files with outfile=outfile instead of outfile='' in xlsout().

A version based on apply_async (below) allows for specifying a pool delay to minimize the data collisions noticed while saving data to disk:

import os, sys
import multiprocessing as mp
import math
import time

# The following may be different depending on PSSE version.
# NOTE: inside a raw string '\\' stays as TWO literal backslashes, so the
# original triple-quoted value mixed doubled and single separators; a plain
# raw string with single backslashes is consistent (no trailing backslash,
# which a raw string cannot end with and which sys.path does not need).
PSSE33BINPATH = r"C:\Program Files (x86)\PTI\PSSE33\PSSBIN"
sys.path.insert(0, PSSE33BINPATH)
import dyntools

# Results collected by the apply_async callback (main process only).
# The misspelled name 'resulst' is kept as-is because call_back() refers to it.
resulst = []

def build_xls(args):
    """Export channel data from one PSSE .out file to an Excel workbook.

    args : (outfile, signals) tuple.
        outfile -- name of the simulation .out file; also passed to xlsout()
                   so each input file gets its own workbook.
        signals -- list of channel numbers to export; an empty/falsy list
                   exports ALL channels.
    Returns True so the pool callback can record completion.

    Note: the original ``def build_xls((outfile, signals)):`` used
    tuple-parameter unpacking, which is Python-2-only syntax (removed by
    PEP 3113); unpacking explicitly keeps the same call signature while
    working on both Python 2 and 3.
    """
    outfile, signals = args
    if signals:
        print('{0} {1}'.format(outfile, signals))
    else:
        print('{0} [ALL]'.format(outfile))
    # show=False suppresses the interactive Excel window.
    dyntools.CHNF(outfile).xlsout(channels=signals, show=False, outfile=outfile)
    return True

def call_back(value):
    """Completion callback for Pool.apply_async.

    Invoked in the main process (never in a pool worker) each time
    build_xls finishes; stores the returned flag in the module-level
    ``resulst`` list.
    """
    resulst.append(value)

def main():
    """Export every .out file to .xls in parallel, one pool job per file."""
    pooldelay  = 3      # delay between job submissions, sec (reduces disk contention)
    out_files  = ['savnw_cct2.out', 'savnw_cct3.out', 'savnw_cct4.out',
                  'savnw_cct5.out', 'savnw_cct6.out']
    signals    = [1, 2, 4, 5, 7, 8]   # some channels
    #signals   = []                   # to export ALL channels

    # mp.cpu_count() already returns an int, so the original
    # int(math.ceil(...)) wrapper was a no-op; min() replaces the
    # cap-to-file-count if-statement -- never start more workers than
    # there are files to process.
    number_of_threads = min(mp.cpu_count(), len(out_files))

    # Submit one job per .out file, pausing between submissions so the
    # workers do not all hit the disk at the same moment.
    pool = mp.Pool(processes=number_of_threads)
    for out_file in out_files:
        pool.apply_async(build_xls, args=((out_file, signals),),
                         callback=call_back)
        time.sleep(pooldelay)
    pool.close()
    pool.join()  # Wait until all workers are finished

# Run only when executed as a script, not when imported.
if __name__ == '__main__':
    main()