require 'concurrent-edge'                # => true

# This slightly more complicated, commented example aims to demonstrate
# some of the capabilities of concurrent-ruby's new abstractions.

# It is a concurrent processing pipeline which on one side has several web crawlers.
# They search the web for data and fill a buffer.
# On the other side there are data processors which pop the data from the buffer.
# They process the data and store the results into a DB
# which has a limited concurrency level.

# Some of the parts like Web and DB are just stubs.
# Each part logs and increments counters to keep some stats about the pipeline.
# A periodical readout of the stats into the log is also scheduled.

# Schema of the pipeline:

# web-crawlers -> buffer -> data-processing -> DB
#            \____________________________\_____\___> logging

# TODO (pitr-ch 10-Mar-2019): replace with a better, more realistic example using
# * actors for limited concurrency with state - local DB connection
# * throttled futures for REST API - limiting server load

# The central logger is defined first.
# It has state (the logger instance), therefore an actor is used.

# It is better to exactly define the communication protocol of the logging actor.
# It will only understand these messages.
Log      = Concurrent::ImmutableStruct.new :severity, :message # => Log
SetLevel = Concurrent::ImmutableStruct.new :level              # => SetLevel

require 'logger'   # => false
require 'stringio' # => false

# Including actor constants so this scope understands ANY etc.
include Concurrent::ErlangActor::EnvironmentConstants
# => Object
# The logger does not need a dedicated thread, let's run it on a pool.
LOGGING = Concurrent::ErlangActor.spawn Logger::FATAL,
                                        type: :on_pool,
                                        name: 'logger' do |level|
  # a Logger instance with nicer formatting is created
  @logger           = Logger.new($captured_out)
  @logger.level     = level
  @logger.formatter = lambda do |severity, datetime, progname, msg|
    formatted_message = case msg
                        when String
                          msg
                        when Exception
                          format "%s (%s)\n%s",
                                 msg.message, msg.class, (msg.backtrace || []).join("\n")
                        else
                          msg.inspect
                        end
    format "[%s] %5s -- %s: %s\n",
           datetime.strftime('%Y-%m-%d %H:%M:%S.%L'),
           severity,
           progname,
           formatted_message
  end

  # definition of the logging actor behaviour
  receive(
      # log messages
      on(Log) { |message| @logger.log message.severity, message.message },
      # change level
      on(SetLevel) { |message| @logger.level = message.level },
      # It is a good practice to read and log bad messages,
      # otherwise they would accumulate in the inbox.
      on(ANY) { |message| @logger.error bad_message: message },
      # The logger has static behaviour, therefore keep can be used, and the actor
      # will handle each received message the same way, as defined above.
      keep: true)
end
# => #<Concurrent::ErlangActor::Pid:0x000002 logger running>
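# A quick aside on the protocol structs themselves (an illustrative sketch only,
# not part of the pipeline): Concurrent::ImmutableStruct behaves like a plain
# Struct without setters, so a message cannot be mutated after it is sent.
sample_log = Log[Logger::WARN, 'disk almost full']
sample_log.severity                # reads back Logger::WARN (2)
sample_log.message                 # reads back "disk almost full"
sample_log.respond_to?(:message=)  # false, no setters are generated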
# test that the logger works as expected
LOGGING.tell Log[Logger::FATAL, :tornado]
# => #<Concurrent::ErlangActor::Pid:0x000002 logger running>
LOGGING.tell Log[Logger::INFO, :wind]
# => #<Concurrent::ErlangActor::Pid:0x000002 logger running>
LOGGING.tell SetLevel[Logger::DEBUG]
# => #<Concurrent::ErlangActor::Pid:0x000002 logger running>
LOGGING.tell Log[Logger::INFO, :breeze]
# => #<Concurrent::ErlangActor::Pid:0x000002 logger running>
sleep 0.05 # the logging is asynchronous, we need to wait a bit until it's written
get_captured_output
# => "[2024-10-19 18:19:02.621] FATAL -- : :tornado\n" +
#    "[2024-10-19 18:19:02.622] INFO -- : :breeze\n"

# the logging can be wrapped in a helper method
def log(severity, message)
  LOGGING.tell Log[severity, message]
  true
end                      # => :log

include Logger::Severity # => Object
log INFO, 'alive'        # => true
sleep 0.05               # => 0
get_captured_output
# => "[2024-10-19 18:19:02.671] INFO -- : alive\n"

# The stub which will represent the web
module Web
  @counter = Concurrent::AtomicFixnum.new

  def self.search
    sleep 0.01
    @counter.increment.to_s(16)
  end
end

# The cancellation which will be used to cancel the whole processing pipeline.
@cancellation, origin = Concurrent::Cancellation.new
# => #<Concurrent::Cancellation:0x000003 pending>
# Buffer for work
buffer_capacity = 10 # => 10
@buffer = Concurrent::Promises::Channel.new buffer_capacity
# => #<Concurrent::Promises::Channel:0x000004 capacity taken 0 of 10>
web_crawler_count = 4 # => 4
# Track the amount of data provided by each crawler
crawler_data_counter = Array.new(web_crawler_count) do |i|
  # this is accessed by multiple threads so it should be a thread-safe counter
  Concurrent::AtomicFixnum.new
end
# The array is frozen which makes it immutable,
# therefore it is safe to use when accessed concurrently.
# Otherwise, if it were being modified, it would have to be a Concurrent::Array to be safe.
crawler_data_counter.freeze
# => [#<Concurrent::AtomicFixnum:0x000005 value:0>,
#     #<Concurrent::AtomicFixnum:0x000006 value:0>,
#     #<Concurrent::AtomicFixnum:0x000007 value:0>,
#     #<Concurrent::AtomicFixnum:0x000008 value:0>]

# The web crawlers are defined directly with threads to keep the start of the example simple.
# They search the web and, as soon as they find something, they push
# the data into the buffer.
# The push will block if the buffer is full,
# regulating how fast the work is being found.
# This is called backpressure; a minimal sketch of the mechanism follows below.
crawlers = Array.new web_crawler_count do |i|
  Thread.new do
    while true
      # crawl the web until cancelled
      break if @cancellation.canceled?
      # will block and slow down the crawler if the buffer is full
      data = Web.search
      until @buffer.push data, 0.1
        # It is a good practice to use timeouts on all blocking operations.
        # If the pipeline is cancelled and the data processors finish
        # before taking data from the buffer, a crawler could get stuck on this push.
        break if @cancellation.canceled?
      end
      # it pushed data, increment its counter
      crawler_data_counter[i].increment
      log DEBUG, "crawler #{i} found #{data}"
    end
  end
end.freeze
# => [#<Thread:0x000009@medium-example.in.rb:130 run>,
#     #<Thread:0x00000a@medium-example.in.rb:130 run>,
#     #<Thread:0x00000b@medium-example.in.rb:130 run>,
#     #<Thread:0x00000c@medium-example.in.rb:130 run>]
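# A minimal sketch of that backpressure behaviour in isolation (illustrative only,
# `tiny_buffer` is not part of the pipeline): a channel with capacity 1 accepts the
# first push, while a second push blocks until the timeout elapses or a consumer pops.
tiny_buffer = Concurrent::Promises::Channel.new 1
tiny_buffer.push :first, 0.1   # there is room, the push succeeds
tiny_buffer.push :second, 0.1  # buffer full, times out and returns false
tiny_buffer.pop                # a consumer frees the slot by taking :first
tiny_buffer.push :second, 0.1  # now the push succeeds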
# So far only the crawlers looking for data are defined,
# pushing the data into the buffer.
# The data-processing definition follows.
# Threads are not used directly again; instead the data processing
# is defined using Futures.
# Even though that makes the definition a bit more complicated,
# it has the big advantage that the data processors do not require a Thread each;
# they all share and run on a thread pool.
# That removes an important limitation on the total number of threads a process can have,
# which can be an issue in larger systems.
# This example would also work with Threads,
# however it would not demonstrate the more advanced usage.

# The data processing stores results in a DB,
# therefore the stub definition of a database precedes the data processing.
module DB
  @data = Concurrent::Map.new

  # increment a counter for char
  def self.add(char, count)
    @data.compute char do |old|
      (old || 0) + count
    end
    true
  end

  # return the stored data as a Hash
  def self.data
    @data.each_pair.reduce({}) { |h, (k, v)| h.update k => v }
  end
end # => :data

# Let's assume that instead of having this DB
# we have a limited number of connections
# and therefore there is a limit on
# how many threads can communicate with the DB at the same time.
# The throttle is created to limit the number of concurrent accesses to the DB.
@db_throttle = Concurrent::Throttle.new 4
# => #<Concurrent::Throttle:0x00000d capacity available 4 of 4>
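# How the throttle bounds concurrency, shown in isolation (an illustrative sketch;
# `simulated_queries` is not part of the pipeline): of these 8 futures at most 4
# run their block at any moment, the rest wait for a free slot in the throttle.
simulated_queries = Array.new(8) do |i|
  Concurrent::Promises.future_on(@db_throttle.on(:io)) do
    sleep 0.01 # stands in for a DB round-trip
    i
  end
end
Concurrent::Promises.zip(*simulated_queries).wait # at most 4 of them ran concurrently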
# The data processing definition follows
data_processing_count = 20 # this could actually be thousands if required

# track the amount of data received by each data processor
@data_processing_counters = Array.new data_processing_count do
  Concurrent::AtomicFixnum.new
end.freeze
# => [#<Concurrent::AtomicFixnum:0x00000e value:0>,
#     #<Concurrent::AtomicFixnum:0x00000f value:0>,
#     #<Concurrent::AtomicFixnum:0x000010 value:0>,
#     #<Concurrent::AtomicFixnum:0x000011 value:0>,
#     #<Concurrent::AtomicFixnum:0x000012 value:0>,
#     #<Concurrent::AtomicFixnum:0x000013 value:0>,
#     #<Concurrent::AtomicFixnum:0x000014 value:0>,
#     #<Concurrent::AtomicFixnum:0x000015 value:0>,
#     #<Concurrent::AtomicFixnum:0x000016 value:0>,
#     #<Concurrent::AtomicFixnum:0x000017 value:0>,
#     #<Concurrent::AtomicFixnum:0x000018 value:0>,
#     #<Concurrent::AtomicFixnum:0x000019 value:0>,
#     #<Concurrent::AtomicFixnum:0x00001a value:0>,
#     #<Concurrent::AtomicFixnum:0x00001b value:0>,
#     #<Concurrent::AtomicFixnum:0x00001c value:0>,
#     #<Concurrent::AtomicFixnum:0x00001d value:0>,
#     #<Concurrent::AtomicFixnum:0x00001e value:0>,
#     #<Concurrent::AtomicFixnum:0x00001f value:0>,
#     #<Concurrent::AtomicFixnum:0x000020 value:0>,
#     #<Concurrent::AtomicFixnum:0x000021 value:0>]

def data_processing(i)
  # pop_op returns a future which is fulfilled with a message from the buffer
  # when a message is available.
  @buffer.pop_op.then_on(:fast) do |data|
    # then we process the message on the :fast pool since this part has no blocking
    log DEBUG, "data-processor #{i} got #{data}"
    @data_processing_counters[i].increment
    sleep 0.1 # simulate it actually doing something which takes some time
    # find the most frequent char
    data.chars.
        group_by { |v| v }.
        map { |ch, arr| [ch, arr.size] }.
        max_by { |ch, size| size }
  end.then_on(@db_throttle.on(:io)) do |char, count|
    # the DB access has to be limited, therefore the db_throttle is used
    # DBs use IO, therefore this part is executed on the global thread pool for :io
    DB.add char, count
  end.then_on(:fast) do |_|
    # The last section executes back on the :fast executor.
    # It checks whether the pipeline was cancelled;
    # if not, it calls itself recursively,
    # which in combination with #run turns this into infinite data processing
    # (until cancelled).
    # #run will keep flattening to the inner future as long as the value is a future.
    if @cancellation.canceled?
      # return something other than a future and #run will stop executing
      :done
    else
      # continue with the future returned by data_processing
      data_processing i
    end
  end
end

# create the data processors
data_processors = Array.new data_processing_count do |i|
  data_processing(i).run
end
# => [#<Concurrent::Promises::Future:0x000022 pending>,
#     #<Concurrent::Promises::Future:0x000023 pending>,
#     #<Concurrent::Promises::Future:0x000024 pending>,
#     #<Concurrent::Promises::Future:0x000025 pending>,
#     #<Concurrent::Promises::Future:0x000026 pending>,
#     #<Concurrent::Promises::Future:0x000027 pending>,
#     #<Concurrent::Promises::Future:0x000028 pending>,
#     #<Concurrent::Promises::Future:0x000029 pending>,
#     #<Concurrent::Promises::Future:0x00002a pending>,
#     #<Concurrent::Promises::Future:0x00002b pending>,
#     #<Concurrent::Promises::Future:0x00002c pending>,
#     #<Concurrent::Promises::Future:0x00002d pending>,
#     #<Concurrent::Promises::Future:0x00002e pending>,
#     #<Concurrent::Promises::Future:0x00002f pending>,
#     #<Concurrent::Promises::Future:0x000030 pending>,
#     #<Concurrent::Promises::Future:0x000031 pending>,
#     #<Concurrent::Promises::Future:0x000032 pending>,
#     #<Concurrent::Promises::Future:0x000033 pending>,
#     #<Concurrent::Promises::Future:0x000034 pending>,
#     #<Concurrent::Promises::Future:0x000035 pending>]
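# The looping technique used by data_processing above, reduced to a minimal sketch
# (illustrative only; `countdown` is not used by the pipeline): as long as a future
# resolves to another future, #run keeps following it; any other value stops the loop.
def countdown(n)
  Concurrent::Promises.future_on(:fast, n) do |value|
    value.zero? ? :done : countdown(value - 1)
  end
end
countdown(3).run.value! # resolves to :done after chaining through four futures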
# Some statistics are collected in crawler_data_counter
# and @data_processing_counters.
# Schedule a periodical readout to the log.
def readout(crawler_data_counter)
  # schedule the readout in 0.4 sec or on cancellation, whichever comes first
  (@cancellation.origin | Concurrent::Promises.schedule(0.4)).then do
    log INFO,
        "\ncrawlers found: #{crawler_data_counter.map(&:value).join(', ')}\n" +
            "data processors consumed: #{@data_processing_counters.map(&:value).join(', ')}"
  end.then do
    # reschedule if not cancelled
    readout crawler_data_counter unless @cancellation.canceled?
  end
end # => :readout

# start the periodical readouts
readouts = readout(crawler_data_counter).run
# => #<Concurrent::Promises::Future:0x000036 pending>

sleep 2 # let the whole processing pipeline work
# cancel everything
origin.resolve
# => #<Concurrent::Promises::ResolvableEvent:0x000037 resolved>

# wait for everything to stop
crawlers.each(&:join)
# => [#<Thread:0x000009@medium-example.in.rb:130 dead>,
#     #<Thread:0x00000a@medium-example.in.rb:130 dead>,
#     #<Thread:0x00000b@medium-example.in.rb:130 dead>,
#     #<Thread:0x00000c@medium-example.in.rb:130 dead>]
data_processors.each(&:wait!)[0..10]
# => [#<Concurrent::Promises::Future:0x000022 fulfilled with :done>,
#     #<Concurrent::Promises::Future:0x000023 fulfilled with :done>,
#     #<Concurrent::Promises::Future:0x000024 fulfilled with :done>,
#     #<Concurrent::Promises::Future:0x000025 fulfilled with :done>,
#     #<Concurrent::Promises::Future:0x000026 fulfilled with :done>,
#     #<Concurrent::Promises::Future:0x000027 fulfilled with :done>,
#     #<Concurrent::Promises::Future:0x000028 fulfilled with :done>,
#     #<Concurrent::Promises::Future:0x000029 fulfilled with :done>,
#     #<Concurrent::Promises::Future:0x00002a fulfilled with :done>,
#     #<Concurrent::Promises::Future:0x00002b fulfilled with :done>,
#     #<Concurrent::Promises::Future:0x00002c fulfilled with :done>]
readouts.wait!
# => #<Concurrent::Promises::Future:0x000036 fulfilled with nil>

# terminate the logger
Concurrent::ErlangActor.terminate LOGGING, :cancelled # => true
LOGGING.terminated.wait
# => #<Concurrent::Promises::Future:0x000038 rejected with :cancelled>

# inspect the collected char frequencies
DB.data
# => {"1"=>18,
#     "2"=>18,
#     "3"=>18,
#     "4"=>18,
#     "5"=>6,
#     "6"=>1,
#     "7"=>1,
#     "8"=>1,
#     "9"=>1,
#     "a"=>1,
#     "b"=>1,
#     "c"=>1,
#     "d"=>1,
#     "e"=>1,
#     "f"=>1}

# see the logger output
get_captured_output
# => "[2024-10-19 18:19:02.736] DEBUG -- : crawler 1 found 1\n" +
#    "[2024-10-19 18:19:02.737] DEBUG -- : crawler 0 found 2\n" +
#    "[2024-10-19 18:19:02.737] DEBUG -- : data-processor 1 got 2\n" +
#    "[2024-10-19 18:19:02.737] DEBUG -- : data-processor 0 got 1\n" +
#    "[2024-10-19 18:19:02.737] DEBUG -- : crawler 2 found 3\n" +
#    "[2024-10-19 18:19:02.738] DEBUG -- : crawler 3 found 4\n" +
#    "[2024-10-19 18:19:02.738] DEBUG -- : data-processor 2 got 3\n" +
#    "[2024-10-19 18:19:02.738] DEBUG -- : data-processor 3 got 4\n" +
#    "[2024-10-19 18:19:02.746] DEBUG -- : crawler 1 found 5\n" +
#    "[2024-10-19 18:19:02.746] DEBUG -- : crawler 0 found 6\n" +
#    "[2024-10-19 18:19:02.747] DEBUG -- : crawler 2 found 7\n" +
#    "[2024-10-19 18:19:02.747] DEBUG -- : crawler 3 found 8\n" +
#    "[2024-10-19 18:19:02.756] DEBUG -- : crawler 1 found 9\n" +
#    "[2024-10-19 18:19:02.757] DEBUG -- : crawler 0 found a\n" +
#    "[2024-10-19 18:19:02.757] DEBUG -- : crawler 2 found b\n" +
#    "[2024-10-19 18:19:02.757] DEBUG -- : crawler 3 found c\n" +
#    "[2024-10-19 18:19:02.766] DEBUG -- : crawler 1 found d\n" +
#    "[2024-10-19 18:19:02.767] DEBUG -- : crawler 0 found e\n" +
#    "[2024-10-19 18:19:02.767] DEBUG -- : crawler 2 found f\n" +
#    "[2024-10-19 18:19:02.767] DEBUG -- : crawler 3 found 10\n" +
#    "[2024-10-19 18:19:02.777] DEBUG -- : crawler 1 found 11\n" +
#    "[2024-10-19 18:19:02.777] DEBUG -- : crawler 0 found 12\n" +
#    "[2024-10-19 18:19:02.777] DEBUG -- : crawler 2 found 13\n" +
#    "[2024-10-19 18:19:02.777] DEBUG -- : crawler 3 found 14\n" +
#    "[2024-10-19 18:19:02.787] DEBUG -- : crawler 1 found 15\n" +
#    "[2024-10-19 18:19:02.787] DEBUG -- : crawler 0 found 16\n" +
#    "[2024-10-19 18:19:02.787] DEBUG -- : crawler 2 found 17\n" +
#    "[2024-10-19 18:19:02.788] DEBUG -- : crawler 3 found 18\n" +
#    "[2024-10-19 18:19:02.797] DEBUG -- : crawler 1 found 19\n" +
#    "[2024-10-19 18:19:02.797] DEBUG -- : crawler 0 found 1a\n" +
#    "[2024-10-19 18:19:02.797] DEBUG -- : crawler 2 found 1b\n" +
#    "[2024-10-19 18:19:02.798] DEBUG -- : crawler 3 found 1c\n" +
#    "[2024-10-19 18:19:02.807] DEBUG -- : crawler 1 found 1d\n" +
#    "[2024-10-19 18:19:02.807] DEBUG -- : crawler 0 found 1e\n" +
#    "[2024-10-19 18:19:02.837] DEBUG -- : data-processor 4 got 5\n" +
#    "[2024-10-19 18:19:02.837] DEBUG -- : data-processor 5 got 6\n" +
#    "[2024-10-19 18:19:02.837] DEBUG -- : data-processor 6 got 7\n" +
#    "[2024-10-19 18:19:02.838] DEBUG -- : data-processor 7 got 8\n" +
#    "[2024-10-19 18:19:02.937] DEBUG -- : data-processor 8 got 9\n" +
#    "[2024-10-19 18:19:02.937] DEBUG -- : data-processor 9 got a\n" +
#    "[2024-10-19 18:19:02.938] DEBUG -- : data-processor 10 got b\n" +
#    "[2024-10-19 18:19:02.938] DEBUG -- : data-processor 11 got c\n" +
#    "[2024-10-19 18:19:03.037] DEBUG -- : data-processor 12 got d\n" +
#    "[2024-10-19 18:19:03.038] DEBUG -- : data-processor 13 got e\n" +
#    "[2024-10-19 18:19:03.038] DEBUG -- : data-processor 14 got f\n" +
#    "[2024-10-19 18:19:03.038] DEBUG -- : data-processor 15 got 10\n" +
#    "[2024-10-19 18:19:03.126] INFO -- : \n" +
#    "crawlers found: 8, 8, 7, 7\n" +
#    "data processors consumed: 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0\n" +
#    "[2024-10-19 18:19:03.137] DEBUG -- : data-processor 16 got 11\n" +
#    "[2024-10-19 18:19:03.138] DEBUG -- : data-processor 17 got 12\n" +
#    "[2024-10-19 18:19:03.138] DEBUG -- : data-processor 18 got 13\n" +
#    "[2024-10-19 18:19:03.138] DEBUG -- : data-processor 19 got 14\n" +
#    "[2024-10-19 18:19:03.240] DEBUG -- : data-processor 0 got 15\n" +
#    "[2024-10-19 18:19:03.240] DEBUG -- : crawler 2 found 1f\n" +
#    "[2024-10-19 18:19:03.240] DEBUG -- : crawler 3 found 20\n" +
#    "[2024-10-19 18:19:03.240] DEBUG -- : data-processor 1 got 16\n" +
#    "[2024-10-19 18:19:03.240] DEBUG -- : crawler 1 found 21\n" +
#    "[2024-10-19 18:19:03.240] DEBUG -- : crawler 0 found 22\n" +
#    "[2024-10-19 18:19:03.241] DEBUG -- : data-processor 2 got 17\n" +
#    "[2024-10-19 18:19:03.241] DEBUG -- : data-processor 3 got 18\n" +
#    "[2024-10-19 18:19:03.249] DEBUG -- : crawler 2 found 23\n" +
#    "[2024-10-19 18:19:03.249] DEBUG -- : crawler 3 found 24\n" +
#    "[2024-10-19 18:19:03.249] DEBUG -- : crawler 1 found 25\n" +
#    "[2024-10-19 18:19:03.250] DEBUG -- : crawler 0 found 26\n" +
#    "[2024-10-19 18:19:03.259] DEBUG -- : crawler 2 found 27\n" +
#    "[2024-10-19 18:19:03.259] DEBUG -- : crawler 3 found 28\n" +
#    "[2024-10-19 18:19:03.260] DEBUG -- : crawler 1 found 29\n" +
#    "[2024-10-19 18:19:03.260] DEBUG -- : crawler 0 found 2a\n" +
#    "[2024-10-19 18:19:03.269] DEBUG -- : crawler 2 found 2b\n" +
#    "[2024-10-19 18:19:03.269] DEBUG -- : crawler 3 found 2c\n" +
#    "[2024-10-19 18:19:03.270] DEBUG -- : crawler 1 found 2d\n" +
#    "[2024-10-19 18:19:03.270] DEBUG -- : crawler 0 found 2e\n" +
#    "[2024-10-19 18:19:03.339] DEBUG -- : data-processor 4 got 19\n" +
#    "[2024-10-19 18:19:03.340] DEBUG -- : data-processor 5 got 1a\n" +
#    "[2024-10-19 18:19:03.340] DEBUG -- : data-processor 6 got 1b\n" +
#    "[2024-10-19 18:19:03.340] DEBUG -- : data-processor 7 got 1c\n" +
#    "[2024-10-19 18:19:03.439] DEBUG -- : data-processor 8 got 1d\n" +
#    "[2024-10-19 18:19:03.440] DEBUG -- : data-processor 9 got 1e\n" +
#    "[2024-10-19 18:19:03.440] DEBUG -- : data-processor 10 got 1f\n" +
#    "[2024-10-19 18:19:03.441] DEBUG -- : data-processor 11 got 20\n" +
#    "[2024-10-19 18:19:03.526] INFO -- : \n" +
#    "crawlers found: 12, 12, 11, 11\n" +
#    "data processors consumed: 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1\n" +
#    "[2024-10-19 18:19:03.540] DEBUG -- : data-processor 12 got 21\n" +
#    "[2024-10-19 18:19:03.541] DEBUG -- : data-processor 13 got 22\n" +
#    "[2024-10-19 18:19:03.541] DEBUG -- : data-processor 14 got 23\n" +
#    "[2024-10-19 18:19:03.541] DEBUG -- : data-processor 15 got 24\n" +
#    "[2024-10-19 18:19:03.541] DEBUG -- : crawler 2 found 2f\n" +
#    "[2024-10-19 18:19:03.542] DEBUG -- : crawler 3 found 30\n" +
#    "[2024-10-19 18:19:03.542] DEBUG -- : crawler 1 found 31\n" +
#    "[2024-10-19 18:19:03.551] DEBUG -- : crawler 1 found 33\n" +
#    "[2024-10-19 18:19:03.641] DEBUG -- : data-processor 16 got 25\n" +
#    "[2024-10-19 18:19:03.641] DEBUG -- : crawler 3 found 35\n" +
#    "[2024-10-19 18:19:03.642] DEBUG -- : data-processor 17 got 26\n" +
#    "[2024-10-19 18:19:03.642] DEBUG -- : crawler 2 found 34\n" +
#    "[2024-10-19 18:19:03.642] DEBUG -- : crawler 1 found 36\n" +
#    "[2024-10-19 18:19:03.644] DEBUG -- : crawler 0 found 32\n" +
#    "[2024-10-19 18:19:03.644] DEBUG -- : data-processor 18 got 27\n" +
#    "[2024-10-19 18:19:03.645] DEBUG -- : data-processor 19 got 28\n" +
#    "[2024-10-19 18:19:03.651] DEBUG -- : crawler 3 found 37\n" +
#    "[2024-10-19 18:19:03.651] DEBUG -- : crawler 2 found 38\n" +
#    "[2024-10-19 18:19:03.652] DEBUG -- : crawler 1 found 39\n" +
#    "[2024-10-19 18:19:03.652] DEBUG -- : crawler 0 found 3a\n" +
#    "[2024-10-19 18:19:03.661] DEBUG -- : crawler 3 found 3b\n" +
#    "[2024-10-19 18:19:03.662] DEBUG -- : crawler 2 found 3c\n" +
#    "[2024-10-19 18:19:03.742] DEBUG -- : data-processor 0 got 29\n" +
#    "[2024-10-19 18:19:03.742] DEBUG -- : crawler 1 found 3d\n" +
#    "[2024-10-19 18:19:03.742] DEBUG -- : data-processor 1 got 2a\n" +
#    "[2024-10-19 18:19:03.742] DEBUG -- : crawler 0 found 3e\n" +
#    "[2024-10-19 18:19:03.743] DEBUG -- : data-processor 2 got 2b\n" +
#    "[2024-10-19 18:19:03.743] DEBUG -- : data-processor 3 got 2c\n" +
#    "[2024-10-19 18:19:03.842] DEBUG -- : data-processor 4 got 2d\n" +
#    "[2024-10-19 18:19:03.842] DEBUG -- : data-processor 5 got 2e\n" +
#    "[2024-10-19 18:19:03.842] DEBUG -- : data-processor 6 got 2f\n" +
#    "[2024-10-19 18:19:03.842] DEBUG -- : data-processor 7 got 30\n" +
#    "[2024-10-19 18:19:03.927] INFO -- : \n" +
#    "crawlers found: 15, 17, 15, 15\n" +
#    "data processors consumed: 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2\n" +
#    "[2024-10-19 18:19:03.942] DEBUG -- : data-processor 8 got 31\n" +
#    "[2024-10-19 18:19:03.943] DEBUG -- : data-processor 9 got 33\n" +
#    "[2024-10-19 18:19:03.943] DEBUG -- : data-processor 10 got 35\n" +
#    "[2024-10-19 18:19:03.943] DEBUG -- : data-processor 11 got 34\n" +
#    "[2024-10-19 18:19:03.943] DEBUG -- : crawler 1 found 41\n" +
#    "[2024-10-19 18:19:03.943] DEBUG -- : crawler 3 found 3f\n" +
#    "[2024-10-19 18:19:03.944] DEBUG -- : crawler 2 found 40\n" +
#    "[2024-10-19 18:19:03.944] DEBUG -- : crawler 0 found 42\n" +
#    "[2024-10-19 18:19:04.043] DEBUG -- : data-processor 12 got 36\n" +
#    "[2024-10-19 18:19:04.043] DEBUG -- : crawler 2 found 43\n" +
#    "[2024-10-19 18:19:04.043] DEBUG -- : data-processor 13 got 32\n" +
#    "[2024-10-19 18:19:04.044] DEBUG -- : crawler 1 found 44\n" +
#    "[2024-10-19 18:19:04.044] DEBUG -- : crawler 0 found 45\n" +
#    "[2024-10-19 18:19:04.044] DEBUG -- : crawler 3 found 46\n" +
#    "[2024-10-19 18:19:04.044] DEBUG -- : data-processor 14 got 37\n" +
#    "[2024-10-19 18:19:04.044] DEBUG -- : data-processor 15 got 38\n" +
#    "[2024-10-19 18:19:04.053] DEBUG -- : crawler 2 found 47\n" +
#    "[2024-10-19 18:19:04.053] DEBUG -- : crawler 1 found 48\n" +
#    "[2024-10-19 18:19:04.053] DEBUG -- : crawler 0 found 49\n" +
#    "[2024-10-19 18:19:04.054] DEBUG -- : crawler 3 found 4a\n" +
#    "[2024-10-19 18:19:04.063] DEBUG -- : crawler 2 found 4b\n" +
#    "[2024-10-19 18:19:04.063] DEBUG -- : crawler 1 found 4c\n" +
#    "[2024-10-19 18:19:04.144] DEBUG -- : data-processor 16 got 39\n" +
#    "[2024-10-19 18:19:04.144] DEBUG -- : crawler 0 found 4d\n" +
#    "[2024-10-19 18:19:04.144] DEBUG -- : data-processor 17 got 3a\n" +
#    "[2024-10-19 18:19:04.144] DEBUG -- : crawler 3 found 4e\n" +
#    "[2024-10-19 18:19:04.144] DEBUG -- : data-processor 18 got 3b\n" +
#    "[2024-10-19 18:19:04.145] DEBUG -- : data-processor 19 got 3c\n" +
#    "[2024-10-19 18:19:04.244] DEBUG -- : data-processor 0 got 3d\n" +
#    "[2024-10-19 18:19:04.244] DEBUG -- : data-processor 1 got 3e\n" +
#    "[2024-10-19 18:19:04.245] DEBUG -- : data-processor 2 got 41\n" +
#    "[2024-10-19 18:19:04.245] DEBUG -- : data-processor 3 got 3f\n" +
#    "[2024-10-19 18:19:04.328] INFO -- : \n" +
#    "crawlers found: 19, 21, 19, 19\n" +
#    "data processors consumed: 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3\n" +
#    "[2024-10-19 18:19:04.344] DEBUG -- : data-processor 4 got 40\n" +
#    "[2024-10-19 18:19:04.346] DEBUG -- : data-processor 5 got 42\n" +
#    "[2024-10-19 18:19:04.346] DEBUG -- : data-processor 6 got 43\n" +
#    "[2024-10-19 18:19:04.346] DEBUG -- : crawler 0 found 51\n" +
#    "[2024-10-19 18:19:04.347] DEBUG -- : data-processor 7 got 44\n" +
#    "[2024-10-19 18:19:04.347] DEBUG -- : crawler 2 found 4f\n" +
#    "[2024-10-19 18:19:04.347] DEBUG -- : crawler 1 found 50\n" +
#    "[2024-10-19 18:19:04.347] DEBUG -- : crawler 3 found 52\n" +
#    "[2024-10-19 18:19:04.446] DEBUG -- : data-processor 8 got 45\n" +
#    "[2024-10-19 18:19:04.447] DEBUG -- : crawler 0 found 53\n" +
#    "[2024-10-19 18:19:04.447] DEBUG -- : data-processor 9 got 46\n" +
#    "[2024-10-19 18:19:04.447] DEBUG -- : crawler 2 found 54\n" +
#    "[2024-10-19 18:19:04.448] DEBUG -- : crawler 1 found 55\n" +
#    "[2024-10-19 18:19:04.448] DEBUG -- : crawler 3 found 56\n" +
#    "[2024-10-19 18:19:04.448] DEBUG -- : data-processor 10 got 47\n" +
#    "[2024-10-19 18:19:04.449] DEBUG -- : data-processor 11 got 48\n" +
#    "[2024-10-19 18:19:04.456] DEBUG -- : crawler 0 found 57\n" +
#    "[2024-10-19 18:19:04.457] DEBUG -- : crawler 2 found 58\n" +
#    "[2024-10-19 18:19:04.457] DEBUG -- : crawler 1 found 59\n" +
#    "[2024-10-19 18:19:04.457] DEBUG -- : crawler 3 found 5a\n" +
#    "[2024-10-19 18:19:04.467] DEBUG -- : crawler 0 found 5b\n" +
#    "[2024-10-19 18:19:04.467] DEBUG -- : crawler 2 found 5c\n" +
#    "[2024-10-19 18:19:04.547] DEBUG -- : data-processor 12 got 49\n" +
#    "[2024-10-19 18:19:04.548] DEBUG -- : crawler 1 found 5d\n" +
#    "[2024-10-19 18:19:04.548] DEBUG -- : data-processor 13 got 4a\n" +
#    "[2024-10-19 18:19:04.548] DEBUG -- : crawler 3 found 5e\n" +
#    "[2024-10-19 18:19:04.548] DEBUG -- : data-processor 14 got 4b\n" +
#    "[2024-10-19 18:19:04.549] DEBUG -- : data-processor 15 got 4c\n" +
#    "[2024-10-19 18:19:04.647] DEBUG -- : data-processor 16 got 4d\n" +
#    "[2024-10-19 18:19:04.648] DEBUG -- : data-processor 18 got 4e\n" +
#    "[2024-10-19 18:19:04.648] DEBUG -- : data-processor 19 got 51\n" +
#    "[2024-10-19 18:19:04.648] DEBUG -- : data-processor 17 got 4f\n" +
#    "[2024-10-19 18:19:04.729] INFO -- : \n" +
#    "crawlers found: 23, 25, 23, 23\n" +
#    "data processors consumed: 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4\n" +
#    "[2024-10-19 18:19:04.747] DEBUG -- : data-processor 0 got 50\n" +
#    "[2024-10-19 18:19:04.748] DEBUG -- : data-processor 1 got 52\n" +
#    "[2024-10-19 18:19:04.749] DEBUG -- : data-processor 2 got 53\n" +
#    "[2024-10-19 18:19:04.749] DEBUG -- : data-processor 3 got 54\n" +
#    "[2024-10-19 18:19:04.757] DEBUG -- : crawler 1 found 61\n" +
#    "[2024-10-19 18:19:04.758] DEBUG -- : crawler 3 found 62\n" +
#    "[2024-10-19 18:19:04.777] DEBUG -- : crawler 0 found 5f\n" +
#    "[2024-10-19 18:19:04.778] DEBUG -- : crawler 2 found 60\n"