[phobos] State of std.parallelism unit tests

David Simcha dsimcha at gmail.com
Wed Jun 8 07:15:34 PDT 2011


??? This doesn't appear to use std.parallelism anywhere.
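
For reference, a stress test that actually exercised std.parallelism would
drive taskPool rather than spinning up core.thread Fibers by hand.  Here is a
minimal sketch (illustrative only; the names and counts are made up, not taken
from any existing unit test):

    import std.parallelism;
    import std.stdio;

    void main()
    {
        auto data = new int[1_000];

        // Parallel foreach: the loop body is distributed across the
        // worker threads of the shared taskPool.
        foreach( i, ref x; taskPool.parallel( data ) )
            x = cast(int) i * 2;

        writeln( "last element: ", data[$ - 1] );
    }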

On Wed, Jun 8, 2011 at 10:12 AM, SK <sk at metrokings.com> wrote:

> On Wed, Jun 8, 2011 at 5:54 AM, David Simcha <dsimcha at gmail.com> wrote:
>
>> On 6/7/2011 11:04 PM, SK wrote:
>>
>>> I have a test (~100 loc) that fails immediately on 64-bit Linux without
>>> resorting to fiber migration.  Using the FiberFixes branch allows it to
>>> pass.  If you're interested I'd be happy to provide it.
>>> Also, I asked before but never got an answer:  Is there any plan to
>>> merge FiberFixes with the trunk?
>>>
>> Yes, please provide.  I know there is still an outstanding issue with
>> std.parallelism, but I can't reproduce it so there's not much I can do about
>> it.  I'll take any help I can get.
>>
>>
>>
>
> This test launches 10 threads that each launch 100 fibers that each yield
> 1000 times.  I compile with /usr/bin/dmd -w -wi -gc.
> HTH,
> -steve
>
> //import std.stdio;
> import core.thread;
> import std.stdio;
> import std.exception;
> shared uint join_done = 0;
> version( Windows ) { import core.sys.windows.windows; }
> class fiber_worker_t
> {
>     this( uint yield_count )
>     {
>         m_done = false;
>         m_yield_count = yield_count;
>         m_fiber = new Fiber( &func );
>     }
>     bool call()
>     {
>         m_fiber.call();
>         return is_term();
>     }
>     bool is_term() { return( m_fiber.state() == m_fiber.State.TERM ); }
>     bool is_done() { return( m_done ); }
> protected:
>     uint m_yield_count;
>     Fiber m_fiber;
>     bool m_done;
>     // Runs in the fiber's context: yields back to the driver loop
>     // (m_yield_count - 1 times, since --i predecrements), then flags done.
>     void func()
>     {
>         uint i = m_yield_count;
>         while( --i )
>             m_fiber.yield();
>         m_done = true;
>     }
> }
> class thread_worker_t
> {
>     this( uint fiber_worker_count, uint fiber_yield_count )
>     {
>         m_fiber_worker_count = fiber_worker_count;
>         m_fiber_yield_count = fiber_yield_count;
>         m_thread = new Thread( &func );
> /*
>         // I moved this to the thread itself
>         m_fib_array = new fiber_worker_t[fiber_worker_count];
>         foreach( ref f; m_fib_array )
>             f = new fiber_worker_t(fiber_yield_count);
> */
>     }
>     void start()
>     {
>         m_thread.start();
>     }
> protected:
>     uint m_fiber_worker_count;
>     uint m_fiber_yield_count;
>     Thread m_thread;
>     // func() executes in each thread's context
>     void func()
>     {
>         fiber_worker_t[] m_fib_array = new fiber_worker_t[m_fiber_worker_count];
>         foreach( ref f; m_fib_array )
>             f = new fiber_worker_t(m_fiber_yield_count);
>         // fibers are cooperative and need a driver loop
>         bool done;
>         do
>         {
>             done = true;
>             foreach( f; m_fib_array )
>             {
>                 done &= f.call();
>                 // writeln( &this, " ", f, " ", &f );
>             }
>         } while( !done );
>         // verify that fibers are really done
>         foreach( f; m_fib_array )
>             enforce( f.is_done() );
>     }
> }
> void thread_fiber_test( const uint thread_count, const uint fiber_count,
>                         const uint fiber_yield_count )
> {
>     thread_worker_t[] thread_worker_array = new thread_worker_t[thread_count];
>     foreach( ref t; thread_worker_array )
>         t = new thread_worker_t(fiber_count, fiber_yield_count);
>     foreach( t; thread_worker_array ) t.start();
>     thread_joinAll();
>     join_done = 1;
> }
> int main()
> {
>     const uint thread_count = 10;
>     const uint fiber_count = 100;
>     const uint fiber_yield_count = 1000;
>     thread_fiber_test( thread_count, fiber_count, fiber_yield_count );
>     return 0;
> }
>
>
> _______________________________________________
> phobos mailing list
> phobos at puremagic.com
> http://lists.puremagic.com/mailman/listinfo/phobos
>

