about summary refs log tree commit diff stats
path: root/source/serial/tests/proof_of_concepts
diff options
context:
space:
mode:
Diffstat (limited to 'source/serial/tests/proof_of_concepts')
-rw-r--r--  source/serial/tests/proof_of_concepts/mdc2250.cc  1
-rw-r--r--  source/serial/tests/proof_of_concepts/python_serial_test.py  15
-rw-r--r--  source/serial/tests/proof_of_concepts/tokenizer.cc  31
3 files changed, 47 insertions, 0 deletions
diff --git a/source/serial/tests/proof_of_concepts/mdc2250.cc b/source/serial/tests/proof_of_concepts/mdc2250.cc
new file mode 100644
index 0000000..ff7ec1b
--- /dev/null
+++ b/source/serial/tests/proof_of_concepts/mdc2250.cc
@@ -0,0 +1 @@
+#include "" \ No newline at end of file
diff --git a/source/serial/tests/proof_of_concepts/python_serial_test.py b/source/serial/tests/proof_of_concepts/python_serial_test.py
new file mode 100644
index 0000000..6f92b84
--- /dev/null
+++ b/source/serial/tests/proof_of_concepts/python_serial_test.py
@@ -0,0 +1,15 @@
#!/usr/bin/env python
"""Serial-port proof of concept: repeatedly write 8 bytes and print what comes back."""

import sys

import serial

if len(sys.argv) != 2:
    # print() works identically for a single argument under Python 2 and 3,
    # whereas the original "print ..." statement is a SyntaxError on Python 3.
    print("python: Usage_serial_test <port name like: /dev/ttyUSB0>")
    sys.exit(1)

sio = serial.Serial(sys.argv[1], 115200)
# NOTE(review): pyserial timeouts are expressed in SECONDS; 250 looks like it
# was intended to be milliseconds -- confirm the desired read timeout.
sio.timeout = 250

while True:
    # A bytes literal is required by pyserial on Python 3 and is identical to
    # the original str literal on Python 2.
    sio.write(b"Testing.")
    print(sio.read(8))
diff --git a/source/serial/tests/proof_of_concepts/tokenizer.cc b/source/serial/tests/proof_of_concepts/tokenizer.cc
new file mode 100644
index 0000000..da15a09
--- /dev/null
+++ b/source/serial/tests/proof_of_concepts/tokenizer.cc
@@ -0,0 +1,31 @@
+#include <iostream>
+#include <string>
+#include <vector>
+
+#include <boost/bind.hpp>
+#include <boost/function.hpp>
+#include <boost/algorithm/string.hpp>
+#include <boost/foreach.hpp>
+
// Split `data` into `tokens` at every character contained in `delimeter`.
// Semantics match boost::split(tokens, data, boost::is_any_of(delimeter)):
//   - adjacent or trailing delimiter characters produce empty tokens,
//   - an empty input yields exactly one empty token,
//   - any previous contents of `tokens` are discarded.
// The delimiter is now taken by const reference (the original copied the
// string on every call); callers are unaffected.  The public name keeps the
// file's existing "delimeter" spelling so existing call sites still link.
void
_delimeter_tokenizer (std::string &data, std::vector<std::string> &tokens,
                      const std::string &delimeter)
{
  tokens.clear();
  std::string::size_type start = 0;
  for (;;) {
    const std::string::size_type pos = data.find_first_of(delimeter, start);
    if (pos == std::string::npos) {
      // Final token: everything after the last delimiter (possibly empty).
      tokens.push_back(data.substr(start));
      break;
    }
    tokens.push_back(data.substr(start, pos - start));
    start = pos + 1;  // resume scanning just past the delimiter character
  }
}
+
+typedef boost::function<void(std::string&,std::vector<std::string>&)> TokenizerType;
+
+int main(void) {
+ std::string data = "a\rb\rc\r";
+ std::vector<std::string> tokens;
+ std::string delimeter = "\r";
+
+ TokenizerType f = boost::bind(_delimeter_tokenizer, _1, _2, delimeter);
+ f(data, tokens);
+
+ BOOST_FOREACH(std::string token, tokens)
+ std::cout << token << std::endl;
+
+ return 0;
+} \ No newline at end of file