forked from lijiext/lammps
Fixes a memory leak when using OpenCL
The GPU package uses OpenCL events for measuring time. These events must be released to free up memory. I removed the clReleaseEvent() calls from the clear() method because in some cases the events have not been created yet, and I could not find a reliable way to test for a valid event (clRetainEvent did not work). This at least fixes the massive leak during simulations. See issue #1006.
This commit is contained in:
parent
fa73fab5df
commit
2eb8d779e8
|
@ -49,8 +49,6 @@ class UCL_Timer {
|
|||
inline void clear() {
|
||||
if (_initialized) {
|
||||
CL_DESTRUCT_CALL(clReleaseCommandQueue(_cq));
|
||||
clReleaseEvent(start_event);
|
||||
clReleaseEvent(stop_event);
|
||||
_initialized=false;
|
||||
_total_time=0.0;
|
||||
}
|
||||
|
@ -107,6 +105,8 @@ class UCL_Timer {
|
|||
CL_SAFE_CALL(clGetEventProfilingInfo(start_event,
|
||||
CL_PROFILING_COMMAND_END,
|
||||
sizeof(cl_ulong), &tstart, NULL));
|
||||
clReleaseEvent(start_event);
|
||||
clReleaseEvent(stop_event);
|
||||
return (tend-tstart)*t_factor;
|
||||
}
|
||||
|
||||
|
|
Loading…
Reference in New Issue