@@ -55,8 +55,8 @@ def partition(ls, n_block):
def future_generator(iterable, n_jobs, dict_task):
# iterable is generated by delayed function, after converting to a list, the format is [function, (arg1, arg2, ... ,)]
- print("iterable type is ", type(iterable))
- print("iterable is", iterable)
+ #print("iterable type is ", type(iterable))
+ #print("iterable is", iterable)
# get the number of block
if n_jobs<-cpu_count()-1 or n_jobs>cpu_count():
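(Illustration, not part of the patch.) The comment at the top of future_generator() above describes the shape of its input: a delayed(...) call reduces, once converted to a list, to a two-element list holding the function and its argument tuple. A minimal sketch, using an invented helper:

    # Hypothetical example only; 'scale' and its arguments are made up.
    def scale(block, factor):
        return [x * factor for x in block]

    iterable = [scale, ([0, 1, 2, 3], 2)]  # iterable[0]: function, iterable[1]: argument tuple

future_generator() then appears to split the array-like entries of iterable[1] into one block per job and submit one StarPU task per block, which is what the next hunk touches.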
@@ -77,8 +77,10 @@ def future_generator(iterable, n_jobs, dict_task):
L_args.append(arr_split[i])
else:
L_args.append(iterable[1][j])
- print("L_args is", L_args)
- fut=starpu.task_submit()(iterable[0], *L_args)
+ #print("L_args is", L_args)
+ fut=starpu.task_submit(name=dict_task['name'], synchronous=dict_task['synchronous'], priority=dict_task['priority'],\
+ color=dict_task['color'], flops=dict_task['flops'])\
+ (iterable[0], *L_args)
L_fut.append(fut)
return L_fut
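(Illustration, not part of the patch.) The hunk above switches from the bare starpu.task_submit() call to the keyword form driven by dict_task. A minimal sketch of the dictionary the patched call site reads; only the keys come from the patch, the values are placeholders rather than the module's real defaults:

    # Hypothetical values; the keys mirror the keyword arguments used above.
    dict_task = {
        'name': None,        # task name, e.g. for traces
        'synchronous': 0,    # 0 = asynchronous submission
        'priority': 0,       # scheduler priority hint
        'color': None,       # color used by visualization tools
        'flops': None,       # expected amount of computation
    }
    # task_submit(...) returns a callable that is then applied to the task
    # function and its per-block arguments, as in the hunk above:
    #   fut = starpu.task_submit(**dict_task)(iterable[0], *L_args)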
@@ -142,7 +144,8 @@ class Parallel(object):
self._backend=backend
def print_progress(self):
- pass
+ #pass
+ print("", starpupy.task_nsubmitted())
def __call__(self,iterable):
#generate the dictionary of task_submit
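(Illustration, not part of the patch.) print_progress() now reports starpupy.task_nsubmitted(); assuming this returns the number of submitted tasks that have not completed yet, like starpu_task_nsubmitted() in the C API, a caller could poll progress roughly like this:

    # Hypothetical polling loop; starpupy is the binding this module already imports.
    import time
    while starpupy.task_nsubmitted() > 0:
        print("tasks still in flight:", starpupy.task_nsubmitted())
        time.sleep(0.1)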
@@ -154,12 +157,11 @@ class Parallel(object):
async def asy_main():
L_fut=future_generator(iterable, self.n_jobs, dict_task)
res=[]
- print(L_fut)
for i in range(len(L_fut)):
L_res=await L_fut[i]
- print(L_res)
res.extend(L_res)
- print(res)
+ #print(res)
+ #print("type of result is", type(res))
return res
asyncio.run(asy_main())
retVal=asy_main