Commit dc81ab71 authored by LAMBERT Jean-charles's avatar LAMBERT Jean-charles

merge inplace to master

parent b41aa060
[Dolphin]
SortFoldersFirst=false
SortOrder=1
SortRole=date
Timestamp=2017,1,24,16,43,17
Version=3
ViewMode=1
#
# Pool-based test: 5 workers map f() over 30 values while draining a shared queue
import numpy as np
import os
from multiprocessing import Pool
import Queue
import multiprocessing

zz = 20

# queue created and filled at import time, inherited by the forked Pool workers
q = multiprocessing.Queue()
for i in np.arange(30):
    q.put(i)

def f(x):
    global zz
    try:
        a = q.get()      # get one more element
        zz = zz + 1      # each worker increments its own copy; the parent's zz stays 20
        print x*x, os.getpid(), x, zz, " >> a= ", a
        return x*x
    except Queue.Empty:  # never raised here, since get() blocks without a timeout
        print "queue empty"

if __name__ == '__main__':
    p = Pool(5)
    a = p.map(f, np.arange(30))
    print ">>>", a
    print "zz=", zz
#!/usr/bin/env python
import numpy as np
import os,time
import sys
from multiprocessing import Process
import Queue
import multiprocessing
import argparse
sys.path=['/home/jcl/works/GIT/uns_projects/py/modules/','/home/jcl/works/GIT/uns_projects/py/modules/simulations']+sys.path
from simulations.cuns_analysis import *
#
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# commandLine, parse the command line
def commandLine():
    dbname = None
    ncores = None
    # help
    parser = argparse.ArgumentParser(description="Parallel pipeline analysis program",
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # options
    parser.add_argument('simname', help='Simulation name')
    parser.add_argument('script', help='Analysis script')
    parser.add_argument('--ncores', help='Use ncores, None means all', default=ncores, type=int)
    parser.add_argument('--dbname', help='UNS database file name', default=dbname)
    parser.add_argument('--verbose', help='verbose mode', default=False)
    # parse
    args = parser.parse_args()
    # start main function
    process(args)

# -----------------------------------------------------
# process, is the core function
def process(args):
    analysis = CUnsAnalysis(simname=args.simname, script=args.script)
    analysis.compute(args.ncores)

# -----------------------------------------------------
# main program
if __name__ == '__main__':
    commandLine()
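For reference, a hedged sketch of driving the core function directly, bypassing the command-line parsing; the simulation name, script file name, and core count below are placeholders, not values from this commit:

# hedged sketch, placeholder values: call process() with a hand-built namespace
import argparse
args = argparse.Namespace(simname="mdf001", script="my_analysis.py",
                          ncores=4, dbname=None, verbose=False)
process(args)   # builds CUnsAnalysis(simname=..., script=...) and runs compute(ncores)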
#!/usr/bin/env python
# Process-based test: one worker per core drains a shared queue until it is empty
import numpy as np
import os,time
from multiprocessing import Process
import Queue
import multiprocessing

# detect #cores
nprocs = multiprocessing.cpu_count()
zz = 20

def myFunc(q):
    global zz
    stop = False
    while not stop:
        try:
            time.sleep(.5)          # slow down the loop (demo only)
            x = q.get(True, 0.01)   # block at most 0.01s, so an empty queue raises Queue.Empty
            zz = zz + 1
            print "%04d" % (x*x), os.getpid(), x, zz
        except Queue.Empty:
            print "queue empty"
            stop = True

if __name__ == '__main__':
    # create a Queue to store the jobs to do
    q = multiprocessing.Queue()
    # fill queue with jobs to do
    for i in np.arange(30):
        q.put(i)
    # list to store the processes which will be created
    processes = []
    # create one parallel process per detected core
    for i in range(nprocs):
        p = Process(target=myFunc, args=(q,)) # create process
        print "start process #", i
        p.start()                             # start process in parallel
        processes.append(p)                   # keep it in the list, used for joining
    # wait for all processes to complete
    try:
        for p in processes:
            print "waiting..", p
            p.join()
    except KeyboardInterrupt:                 # allow to interrupt all workers with CTRL+C
        for p in processes:
            print "Terminating..", p
            p.terminate()
            p.join()
        # drain any jobs left in the queue
        while not q.empty():
            q.get(block=False)
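The worker above stops when the short get() timeout raises Queue.Empty; an alternative sketch (an assumption, not part of this commit) stops each worker on an explicit None sentinel instead, which avoids quitting early if the queue is only momentarily empty:

# hedged sketch, not from this commit: sentinel-based shutdown of queue workers
import os
import multiprocessing
from multiprocessing import Process, Queue

def worker(q):
    while True:
        x = q.get()
        if x is None:            # sentinel: no more work for this worker
            break
        print "%04d" % (x*x), os.getpid()

if __name__ == '__main__':
    q = Queue()
    for i in range(30):
        q.put(i)
    nprocs = multiprocessing.cpu_count()
    for i in range(nprocs):
        q.put(None)              # one sentinel per worker, queued after the real jobs
    processes = [Process(target=worker, args=(q,)) for i in range(nprocs)]
    for p in processes:
        p.start()
    for p in processes:
        p.join()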
#!/usr/bin/env python
# analysis script fragment: `data` is expected to be provided by the calling
# framework (CUnsAnalysis) when this script is run for a snapshot
def pp(data):
    ok, time = data.uns_snap.getData("time")
    print ("ok, time", ok, time)
    data.first = False

#print (">>",self.simname)
print ("<<FIRST=", data.first)
pp(data)
print (">>FIRST=", data.first)