flow = "tokenizer" docs = "DESCRIPTION.md" [[process]] # Read a single line of inputs text into a stringn source = "context://stdio/readline" [[process]] # Split a string on the space (" ") separator source = "lib://flowstdlib/data/split" input.separator = { always = " " } [[connection]] # Take the input string and pass it to splitter from = "readline/string" to = "split/string" [[connection]] # Loop back partial splits to be split further from = "split/partial" to = "split/string" [[process]] # Adder to track pending work alias = "work-counter" source = "lib://flowstdlib/math/add" input.i2 = { once = 1 } [[process]] # Adder to count tokens alias = "token-counter" source = "lib://flowstdlib/math/add" input.i2 = { once = 0 } [[connection]] # Pass the change in amount of work pending to the work counter from = "split/delta" to = "work-counter/i1" [[connection]] # Loop back the sum to accumulate the pending work count name = "pending-work" from = "work-counter" to = "work-counter/i2" [[connection]] # Count the tokens passing thru 'count' name = "tokens" from = "split/token-count" to = "token-counter/i1" [[connection]] # Loop back to count token total from = "token-counter" to = "token-counter/i2" [[process]] # A Tap to limit output of count until we are done source = "lib://flowstdlib/control/tap" [[connection]] # Pass the token count to the tap name = "token-count" from = "token-counter" to = "tap/data" [[process]] # compare pending work to zero source = "lib://flowstdlib/math/compare" input.left = { always = 0 } [[connection]] # compare pending work to zero name = "pending-work" from = "work-counter" to = "compare/right" [[connection]] # Open tap when we are done! name = "done" from = "compare/equal" to = "tap/control" [[process]] # stdout for showing text source = "context://stdio/stdout" [[connection]] # output the last token count when done name = "last-token-count" from = "tap" to = "stdout"