author     Kelly Rauchenberger <fefferburbia@gmail.com>  2017-02-04 10:32:55 -0500
committer  Kelly Rauchenberger <fefferburbia@gmail.com>  2017-02-04 10:32:55 -0500
commit     8c2e7c646f2a549ea9b4db831d8088f57b3287ae
tree       fa9070e88a34284c6089b2939ea1d1ca6bef147e
parent     cb77fded0b9a8a9034ace592be04176c8778ddca
Updated verbly (new API)
Notably, the bot should not be able to use ethnic slurs now.

sentence.cpp is basically just copied from advice.
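
For context while reading the diff: main() now touches the generator through only two calls, the sentence constructor and generate(). The outline below is a hypothetical sketch of the interface those calls assume; the real declaration lives in sentence.h (copied from the advice bot) and is not part of this diff.

// Hypothetical sketch inferred from the two calls made in main();
// the actual declaration in sentence.h may differ.
#include <random>
#include <string>

namespace verbly { class database; }  // provided by the verbly library

class sentence {
public:
  // main() constructs the generator from the verbly database and the
  // std::mt19937 engine it already creates.
  sentence(verbly::database& database, std::mt19937& rng);

  // Produces one utterance; main() truncates the result to 140 characters.
  std::string generate();
};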
Diffstat (limited to 'furries.cpp')
-rw-r--r--  furries.cpp | 460
1 file changed, 16 insertions(+), 444 deletions(-)
diff --git a/furries.cpp b/furries.cpp
index de2fa02..3f9c76d 100644
--- a/furries.cpp
+++ b/furries.cpp
@@ -6,429 +6,7 @@
 #include <chrono>
 #include <thread>
 #include <random>
-
+#include "sentence.h"
-class fill_blanks {
-  private:
-    verbly::data& database;
-
-  public:
-    fill_blanks(verbly::data& database) : database(database)
-    {
-
-    }
-
-    verbly::filter<verbly::noun> parse_selrestrs(verbly::frame::selrestr selrestr)
-    {
-      switch (selrestr.get_type())
-      {
-        case verbly::frame::selrestr::type::empty:
-        {
-          return verbly::filter<verbly::noun>{};
-        }
-
-        case verbly::frame::selrestr::type::singleton:
-        {
-          verbly::noun n;
-
-          if (selrestr.get_restriction() == "concrete")
-          {
-            n = database.nouns().with_singular_form("physical entity").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "time")
-          {
-            n = database.nouns().with_singular_form("time").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "state")
-          {
-            n = database.nouns().with_singular_form("state").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "abstract")
-          {
-            n = database.nouns().with_singular_form("abstract entity").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "time")
-          {
-            n = database.nouns().with_singular_form("time").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "scalar")
-          {
-            n = database.nouns().with_singular_form("number").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "currency")
-          {
-            auto nn2 = database.nouns().with_singular_form("currency").limit(2).run();
-            std::vector<verbly::noun> nn(std::begin(nn2), std::end(nn2));
-            n = nn[1];
-          } else if (selrestr.get_restriction() == "location")
-          {
-            n = database.nouns().with_singular_form("location").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "organization")
-          {
-            n = database.nouns().with_singular_form("organization").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "int_control")
-          {
-            n = database.nouns().with_singular_form("causal agent").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "natural")
-          {
-            n = database.nouns().with_singular_form("natural object").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "phys_obj")
-          {
-            n = database.nouns().with_singular_form("physical object").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "solid")
-          {
-            n = database.nouns().with_singular_form("solid").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "shape")
-          {
-            n = database.nouns().with_singular_form("shape").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "substance")
-          {
-            n = database.nouns().with_singular_form("substance").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "idea")
-          {
-            n = database.nouns().with_singular_form("idea").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "sound")
-          {
-            auto nn2 = database.nouns().with_singular_form("sound").limit(4).run();
-            std::vector<verbly::noun> nn(std::begin(nn2), std::end(nn2));
-            n = nn[3];
-          } else if (selrestr.get_restriction() == "communication")
-          {
-            n = database.nouns().with_singular_form("communication").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "region")
-          {
-            n = database.nouns().with_singular_form("region").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "place")
-          {
-            n = database.nouns().with_singular_form("place").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "machine")
-          {
-            n = database.nouns().with_singular_form("machine").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "animate")
-          {
-            n = database.nouns().with_singular_form("animate being").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "plant")
-          {
-            auto nn2 = database.nouns().with_singular_form("plant").limit(2).run();
-            std::vector<verbly::noun> nn(std::begin(nn2), std::end(nn2));
-            n = nn[1];
-          } else if (selrestr.get_restriction() == "comestible")
-          {
-            n = database.nouns().with_singular_form("food").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "artifact")
-          {
-            n = database.nouns().with_singular_form("artifact").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "vehicle")
-          {
-            n = database.nouns().with_singular_form("vehicle").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "human")
-          {
-            n = database.nouns().with_singular_form("person").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "animal")
-          {
-            n = database.nouns().with_singular_form("animal").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "body_part")
-          {
-            n = database.nouns().with_singular_form("body part").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "garment")
-          {
-            n = database.nouns().with_singular_form("clothing").limit(1).run().front();
-          } else if (selrestr.get_restriction() == "tool")
-          {
-            n = database.nouns().with_singular_form("tool").limit(1).run().front();
-          } else {
-            return verbly::filter<verbly::noun>{};
-          }
-
-          return verbly::filter<verbly::noun>{n, !selrestr.get_pos()};
-        }
-
-        case verbly::frame::selrestr::type::group:
-        {
-          verbly::filter<verbly::noun> ret;
-          ret.set_orlogic(selrestr.get_orlogic());
-
-          std::transform(std::begin(selrestr), std::end(selrestr), std::back_inserter(ret), [&] (verbly::frame::selrestr sr) {
-            return parse_selrestrs(sr);
-          });
-
-          return ret;
-        }
-      }
-    }
-
-    template <typename RNG>
-    void visit(verbly::token& it, RNG&& rng)
-    {
-      switch (it.get_type())
-      {
-        case verbly::token::type::utterance:
-        {
-          for (auto& tkn : it)
-          {
-            if (!tkn.is_complete())
-            {
-              visit(tkn, rng);
-
-              break;
-            }
-          }
-
-          break;
-        }
-
-        case verbly::token::type::fillin:
-        {
-          switch (it.get_fillin_type())
-          {
-            case verbly::token::fillin_type::participle_phrase:
-            {
-              for (;;)
-              {
-                verbly::verb v = database.verbs().has_frames().random().limit(1).run().front();
-                auto frames = v.frames().run();
-                std::vector<verbly::frame> filtered;
-                std::remove_copy_if(std::begin(frames), std::end(frames), std::back_inserter(filtered), [] (verbly::frame& f) {
-                  if (f.parts().size() < 2)
-                  {
-                    return true;
-                  }
-
-                  if (f.parts()[0].get_type() != verbly::frame::part::type::noun_phrase)
-                  {
-                    return true;
-                  }
-
-                  if (f.parts()[0].get_role() != "Agent")
-                  {
-                    return true;
-                  }
-
-                  if (f.parts()[1].get_type() != verbly::frame::part::type::verb)
-                  {
-                    return true;
-                  }
-
-                  return false;
-                });
-
-                if (filtered.empty())
-                {
-                  continue;
-                }
-
-                int fr_i = std::uniform_int_distribution<int>(0, filtered.size()-1)(rng);
-                verbly::frame fr = filtered[fr_i];
-                verbly::token utter;
-                for (auto part : fr.parts())
-                {
-                  switch (part.get_type())
-                  {
-                    case verbly::frame::part::type::noun_phrase:
-                    {
-                      if (part.get_role() == "Agent")
-                      {
-                        continue;
-                      }
-
-                      if (part.get_synrestrs().count("adjp") == 1)
-                      {
-                        utter << verbly::token{verbly::token::fillin_type::adjective_phrase};
-
-                        continue;
-                      } else if ((part.get_synrestrs().count("be_sc_ing") == 1)
-                        || (part.get_synrestrs().count("ac_ing") == 1)
-                        || (part.get_synrestrs().count("sc_ing") == 1)
-                        || (part.get_synrestrs().count("np_omit_ing") == 1)
-                        || (part.get_synrestrs().count("oc_ing") == 1))
-                      {
-                        utter << verbly::token{verbly::token::fillin_type::participle_phrase};
-
-                        continue;
-                      } else if ((part.get_synrestrs().count("poss_ing") == 1)
-                        || (part.get_synrestrs().count("possing") == 1)
-                        || (part.get_synrestrs().count("pos_ing") == 1))
-                      {
-                        utter << verbly::token{"their"};
-                        utter << verbly::token{verbly::token::fillin_type::participle_phrase};
-
-                        continue;
-                      } else if (part.get_synrestrs().count("genitive") == 1)
-                      {
-                        utter << verbly::token{"their"};
-
-                        continue;
-                      } else if (part.get_synrestrs().count("adv_loc") == 1)
-                      {
-                        if (std::bernoulli_distribution(1.0/2.0)(rng))
-                        {
-                          utter << verbly::token{"here"};
-                        } else {
-                          utter << verbly::token{"there"};
-                        }
-
-                        continue;
-                      } else if (part.get_synrestrs().count("refl") == 1)
-                      {
-                        utter << verbly::token{"themselves"};
-
-                        continue;
-                      } else if ((part.get_synrestrs().count("sc_to_inf") == 1)
-                        || (part.get_synrestrs().count("ac_to_inf") == 1)
-                        || (part.get_synrestrs().count("vc_to_inf") == 1)
-                        || (part.get_synrestrs().count("rs_to_inf") == 1)
-                        || (part.get_synrestrs().count("oc_to_inf") == 1))
-                      {
-                        utter << verbly::token{verbly::token::fillin_type::infinitive_phrase};
-
-                        continue;
-                      } else if (part.get_synrestrs().count("oc_bare_inf") == 1)
-                      {
-                        verbly::token tkn{verbly::token::fillin_type::infinitive_phrase};
-                        tkn.set_extra(1);
-
-                        utter << tkn;
-
-                        continue;
-                      }
-
-                      auto selrestrs = fr.roles()[part.get_role()];
-                      auto query = database.nouns().limit(1).random().is_not_proper().full_hyponym_of(parse_selrestrs(selrestrs));
-                      verbly::noun n = query.run().front();
-                      if ((std::bernoulli_distribution(1.0/2.0)(rng)) && (part.get_synrestrs().count("definite") == 0))
-                      {
-                        utter << verbly::token{"the"};
-                      } else {
-                        if (n.starts_with_vowel_sound())
-                        {
-                          utter << verbly::token{"an"};
-                        } else {
-                          utter << verbly::token{"a"};
-                        }
-                      }
-
-                      if (part.get_synrestrs().count("plural") == 1)
-                      {
-                        utter << verbly::token{n, verbly::token::noun_inflection::plural};
-                      } else {
-                        utter << verbly::token{n};
-                      }
-
-                      if (part.get_synrestrs().count("acc_ing") == 1)
-                      {
-                        utter << verbly::token{verbly::token::fillin_type::participle_phrase};
-                      }
-
-                      break;
-                    }
-
-                    case verbly::frame::part::type::verb:
-                    {
-                      utter << verbly::token{v, verbly::token::verb_inflection::ing_form};
-
-                      break;
-                    }
-
-                    case verbly::frame::part::type::literal_preposition:
-                    {
-                      int ch_i = std::uniform_int_distribution<int>(0, part.get_choices().size()-1)(rng);
-                      utter << verbly::token{part.get_choices()[ch_i]};
-
-                      break;
-                    }
-
-                    case verbly::frame::part::type::selection_preposition:
-                    {
-                      auto query = database.prepositions();
-                      for (auto preprestr : part.get_preprestrs())
-                      {
-                        query.in_group(preprestr);
-                      }
-                      utter << verbly::token{query.random().limit(1).run().front()};
-
-                      break;
-                    }
-
-                    case verbly::frame::part::type::adjective:
-                    {
-                      utter << verbly::token{verbly::token::fillin_type::adjective_phrase};
-
-                      break;
-                    }
-
-                    case verbly::frame::part::type::adverb:
-                    {
-                      utter << verbly::token{verbly::token::fillin_type::adverb_phrase};
-
-                      break;
-                    }
-
-                    case verbly::frame::part::type::literal:
-                    {
-                      utter << verbly::token{part.get_literal()};
-
-                      break;
-                    }
-                  }
-                }
-
-                it = utter;
-
-                break;
-              }
-
-              break;
-            }
-
-            case verbly::token::fillin_type::adjective_phrase:
-            {
-              verbly::token phrase;
-
-              if (std::bernoulli_distribution(1.0/4.0)(rng))
-              {
-                phrase << verbly::token{verbly::token::fillin_type::adverb_phrase};
-              }
-
-              if (std::bernoulli_distribution(1.0/2.0)(rng))
-              {
-                phrase << verbly::token{verbly::token::fillin_type::participle_phrase};
-              } else {
-                phrase << verbly::token{database.adjectives().random().limit(1).run().front()};
-              }
-
-              it = phrase;
-
-              break;
-            }
-
-            case verbly::token::fillin_type::adverb_phrase:
-            {
-              it = verbly::token{database.adverbs().random().limit(1).run().front()};
-
-              break;
-            }
-
-            case verbly::token::fillin_type::infinitive_phrase:
-            {
-              verbly::token utter;
-              if (it.get_extra() != 1)
-              {
-                utter << verbly::token{"to"};
-              }
-
-              utter << verbly::token{database.verbs().random().limit(1).run().front()};
-
-              it = utter;
-
-              break;
-            }
-
-            default:
-            {
-              it = verbly::token{"*the reality of the situation*"};
-
-              break;
-            }
-          }
-
-          break;
-        }
-      }
-    }
-};
 
 int main(int argc, char** argv)
 {
@@ -437,51 +15,45 @@ int main(int argc, char** argv)
     std::cout << "usage: furries [configfile]" << std::endl;
     return -1;
   }
 
   std::string configfile(argv[1]);
   YAML::Node config = YAML::LoadFile(configfile);
 
   std::random_device random_device;
   std::mt19937 random_engine{random_device()};
 
   twitter::auth auth;
   auth.setConsumerKey(config["consumer_key"].as<std::string>());
   auth.setConsumerSecret(config["consumer_secret"].as<std::string>());
   auth.setAccessKey(config["access_key"].as<std::string>());
   auth.setAccessSecret(config["access_secret"].as<std::string>());
 
   twitter::client client(auth);
-  verbly::data database {config["verbly_datafile"].as<std::string>()};
+  verbly::database database(config["verbly_datafile"].as<std::string>());
+  sentence generator(database, random_engine);
 
   for (;;)
   {
     std::cout << "Generating tweet..." << std::endl;
 
-    fill_blanks yeah {database};
-    verbly::token action{
-      {"the furries are"},
-      {verbly::token::fillin_type::adjective_phrase}
-    };
-    while (!action.is_complete())
-    {
-      yeah.visit(action, random_engine);
-    }
-
-    std::string result = action.compile();
+    std::string result = generator.generate();
     result.resize(140);
 
+    std::cout << result << std::endl;
+
     try
     {
       client.updateStatus(result);
 
       std::cout << "Tweeted!" << std::endl;
+      std::cout << "Waiting..." << std::endl;
+
+      std::this_thread::sleep_for(std::chrono::hours(1));
+
+      std::cout << std::endl;
     } catch (const twitter::twitter_error& e)
     {
       std::cout << "Twitter error: " << e.what() << std::endl;
     }
-
-    std::cout << "Waiting..." << std::endl;
-
-    std::this_thread::sleep_for(std::chrono::hours(1));
   }
 }