#include <unordered_map>

#include "rapidjson/document.h"
#include "rapidjson/error/en.h"

#include <sys/types.h>
bool debug_flag = false;

void set_debug_flag() {
    debug_flag = true;
}
string getOSMBaseURL() const {
    if (sourceType == "local")
        return "http://localhost:3000/";
    return "http://bridges-data-server-osm.bridgesuncc.org/";
}
string getElevationBaseURL() const {
    if (sourceType == "local")
        return "http://localhost:3000/";
    return "http://bridges-data-server-elevation.bridgesuncc.org/";
}
string getGutenbergBaseURL() const {
    if (sourceType == "local")
        return "http://localhost:3000/";
    if (sourceType == "testing")
        return "http://bridges-data-server-gutenberg-t.bridgesuncc.org/";
    return "http://bridges-data-server-gutenberg.bridgesuncc.org/";
}
string getRedditURL() {
    if (sourceType == "testing")
        return "http://bridges-data-server-reddit-t.bridgesuncc.org";
    else if (sourceType == "local")
        return "http://localhost:9999";
    return "http://bridges-data-server-reddit.bridgesuncc.org";
}
string sourceType = "live";
string getUSCitiesURL() {
    return "http://bridgesdata.herokuapp.com/api/us_cities";
}
string getWorldCitiesURL() {
    return "http://bridgesdata.herokuapp.com/api/world_cities";
}
string getUSStateCountiesURL() {
    return "http://bridgesdata.herokuapp.com/api/us_map?state=";
}
void defaultDebug() {
    char* force = getenv("FORCE_BRIDGES_DATADEBUG");
    if (force != nullptr)
        set_debug_flag();
}
DataSource(bridges::Bridges* br = nullptr)
    : bridges_inst(br), my_cache(120) {
    defaultDebug();
}
void setSourceType(string type) {
    if (!((type == "live") || (type == "testing") || (type == "local")))
        throw "Incorrect data server type. Must be live, testing or local";
    if ((type == "testing") || (type == "local"))
        debug_flag = true;

    sourceType = type;
}
vector<City> getUSCities(unordered_map<string, string> params) {
    // legal filter keys for this query
    keys.insert("state");
    keys.insert("country");
    keys.insert("min_elev");
    keys.insert("max_elev");
    keys.insert("min_pop");
    keys.insert("max_pop");
    keys.insert("min_long");
    keys.insert("max_long");
    keys.insert("min_lat");
    keys.insert("max_lat");
    keys.insert("limit");
    keys.insert("time_zone");

    // validate the user-supplied parameters
    unordered_map<string, string>::iterator it;
    for (it = params.begin(); it != params.end(); it++) {
        if (keys.find(it->first) == keys.end())
            throw std::invalid_argument("\n\nKey value : " + it->first +
                " incorrect\n\n Legal key values: \n 'city', 'state', 'country', 'min_lat', 'max_lat', 'min_long', 'max_long', 'min_pop', 'max_pop', 'time_zone', 'limit' ");
    }
    string url = getUSCitiesURL() + "?";
    if (params.find("city") != params.end())
        url += "city=" + params["city"] + "&";
    if (params.find("state") != params.end())
        url += "state=" + params["state"] + "&";
    if (params.find("country") != params.end())
        url += "country=" + params["country"] + "&";
    if (params.find("min_lat") != params.end())
        url += "minLat=" + params["min_lat"] + "&";
    if (params.find("max_lat") != params.end())
        url += "maxLat=" + params["max_lat"] + "&";
    if (params.find("min_long") != params.end())
        url += "minLong=" + params["min_long"] + "&";
    if (params.find("max_long") != params.end())
        url += "maxLong=" + params["max_long"] + "&";
    if (params.find("min_elev") != params.end())
        url += "minElevation=" + params["min_elev"] + "&";
    if (params.find("max_elev") != params.end())
        url += "maxElevation=" + params["max_elev"] + "&";
    if (params.find("min_pop") != params.end())
        url += "minPopulation=" + params["min_pop"] + "&";
    if (params.find("max_pop") != params.end())
        url += "maxPopulation=" + params["max_pop"] + "&";
    if (params.find("limit") != params.end())
        url += "limit=" + params["limit"] + "&";

    url = url.substr(0, url.length() - 1);
    using namespace rapidjson;

    Document doc;
    doc.Parse(
        ServerComm::makeRequest(url, {"Accept: application/json"}).c_str()
    );

    const Value& city_json = doc["data"];
    vector<City> us_cities;
    for (SizeType i = 0; i < city_json.Size(); i++) {
        const Value& val = city_json[i];
        us_cities.push_back(
            City(
                val["city"].GetString(),
                val["state"].GetString(),
                val["country"].GetString(),
                val["timezone"].GetString(),
                val["elevation"].GetInt(),
                val["population"].GetInt(),
                val["lat"].GetDouble(),
                val["lon"].GetDouble()
            )
        );
    }
    return us_cities;
}
const vector<string> all_states = {
    "Alabama", "Alaska", "Arizona", "Arkansas", "California",
    "Colorado", "Connecticut", "Delaware", "Florida", "Georgia",
    "Hawaii", "Idaho", "Illinois", "Indiana", "Iowa",
    "Kansas", "Kentucky", "Louisiana", "Maine", "Maryland",
    "Massachusetts", "Michigan", "Minnesota", "Mississippi", "Missouri",
    "Montana", "Nebraska", "Nevada", "New Hampshire", "New Jersey",
    "New Mexico", "New York", "North Carolina", "North Dakota", "Ohio",
    "Oklahoma", "Oregon", "Pennsylvania", "Rhode Island", "South Carolina",
    "South Dakota", "Tennessee", "Texas", "Utah", "Vermont",
    "Virginia", "Washington", "West Virginia", "Wisconsin", "Wyoming"};
vector<USState> getUSMapData() {
    return getUSMapCountyData(all_states, false);
}

vector<USState> getUSMapCountyData() {
    return getUSMapCountyData(all_states, true);
}

vector<USState> getUSMapCountyData(vector<string> state_names,
        bool view_counties = true) {
    string url = getUSStateCountiesURL();
    for (auto& k : state_names)
        url += ServerComm::encodeURLPart(k) + ',';

    // remove the trailing comma
    url = url.substr(0, url.size() - 1);

    std::cerr << "Hitting: " << url << std::endl;

    using namespace rapidjson;
    Document doc;
    doc.Parse(
        ServerComm::makeRequest(url, {"Accept: application/json"}).c_str()
    );

    vector<USState> states;
    const Value& state_data = doc["data"];
    for (SizeType i = 0; i < state_names.size(); i++) {
        const Value& st = state_data[i];
        const Value& county_data = st["counties"];
        const Value& st_name = st["_id"]["input"];

        states.push_back(USState(st_name.GetString()));
        unordered_map<string, USCounty> counties = states[i].getCounties();
        states[i].setViewCountiesFlag(view_counties);

        for (SizeType j = 0; j < county_data.Size(); j++) {
            const Value& val = county_data[j];

            string geoid = (val["properties"]["GEOID"]).GetString();
            counties[geoid] = USCounty(
                (val["properties"]["FIPS_CODE"]).GetString(),
                (val["properties"]["COUNTY_STATE_CODE"]).GetString(),
                (val["properties"]["COUNTY_STATE_NAME"]).GetString()
            );
        }
        states[i].setCounties(counties);
    }
    return states;
}
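// Usage sketch for the US map calls: request a few states with county
// boundaries, or all 50 states without counties. The state names are
// illustrative; any entry of all_states above is valid. `br` is a
// bridges::Bridges instance created elsewhere.
//
//     DataSource ds(&br);
//     vector<string> states = {"North Carolina", "Virginia"};
//     vector<USState> with_counties = ds.getUSMapCountyData(states, true);
//     vector<USState> outlines_only = ds.getUSMapData();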
vector<Game> getGameData() {
    using namespace rapidjson;

    Document d;
    d.Parse(
        ServerComm::makeRequest("http://bridgesdata.herokuapp.com/api/games",
            {"Accept: application/json"}).c_str());

    const Value& D = d["data"];
    vector<Game> wrapper;
    for (SizeType i = 0; i < D.Size(); i++) {
        const Value& V = D[i];
        const Value& G = V["genre"];

        vector<string> genre;
        for (SizeType j = 0; j < G.Size(); j++) {
            genre.push_back(G[j].GetString());
        }
        wrapper.push_back(
            Game(V["game"].GetString(),
                V["platform"].GetString(),
                V["rating"].GetDouble(),
                genre));
    }
    return wrapper;
}
vector<ActorMovieIMDB> getActorMovieIMDBData(int number = 0) {
    using namespace rapidjson;

    Document d;
    vector<ActorMovieIMDB> wrapper;
    string url = "http://bridgesdata.herokuapp.com/api/imdb?limit=" +
        to_string(number);

    d.Parse(ServerComm::makeRequest(url, {"Accept: application/json"}).c_str());

    const Value& D = d["data"];
    for (SizeType i = 0; i < D.Size(); i++) {
        const Value& V = D[i];
        wrapper.push_back(
            ActorMovieIMDB(
                V["actor"].GetString(),
                V["movie"].GetString()));
    }
    return wrapper;
}
vector<ActorMovieIMDB> getActorMovieIMDBData2() {
    using namespace rapidjson;

    Document d;
    vector<ActorMovieIMDB> wrapper;
    string url = "http://bridgesdata.herokuapp.com/api/imdb2";

    d.Parse(ServerComm::makeRequest(url, {"Accept: application/json"}).c_str());

    const Value& D = d["data"];
    for (SizeType i = 0; i < D.Size(); i++) {
        const Value& V = D[i];
        string actor = V["actor"].GetString();
        string movie = V["movie"].GetString();
        double rating = V["rating"].GetDouble();
        const Value& G = V["genres"];
        vector<string> genres;
        for (SizeType j = 0; j < G.Size(); j++) {
            genres.push_back(G[j].GetString());
        }
        wrapper.push_back(ActorMovieIMDB(actor, movie, (float) rating, genres));
    }
    return wrapper;
}
vector<EarthquakeUSGS> getEarthquakeUSGSData(int number = 0) {
    using namespace rapidjson;

    Document d;
    vector<EarthquakeUSGS> wrapper;
    if (number <= 0) {
        // no count given: retrieve the full quake feed
        d.Parse(ServerComm::makeRequest("http://earthquakes-uncc.herokuapp.com/eq",
            {"Accept: application/json"}).c_str());
        for (SizeType i = 0; i < d.Size(); i++) {
            const Value& V = d[i]["properties"];
            const Value& G = d[i]["geometry"]["coordinates"];
            wrapper.push_back(
                EarthquakeUSGS(
                    V["mag"].GetDouble(),
                    G[0].GetDouble(),
                    G[1].GetDouble(),
                    V["place"].GetString(),
                    V["title"].GetString(),
                    V["url"].GetString(),
                    V["time"].GetString()));
        }
    }
    else {
        // retrieve only the latest `number` quakes
        d.Parse(ServerComm::makeRequest("http://earthquakes-uncc.herokuapp.com/eq/latest/" +
            to_string(number), {"Accept: application/json"}).c_str());

        const Value& D = d["Earthquakes"];
        for (SizeType i = 0; i < D.Size(); i++) {
            const Value& V = D[i]["properties"];
            const Value& G = D[i]["geometry"]["coordinates"];
            wrapper.push_back(
                EarthquakeUSGS(
                    V["mag"].GetDouble(),
                    G[0].GetDouble(),
                    G[1].GetDouble(),
                    V["place"].GetString(),
                    V["title"].GetString(),
                    V["url"].GetString(),
                    V["time"].GetString()));
        }
    }
    return wrapper;
}
vector<Shakespeare> getShakespeareData(string type = "",
        bool textonly = false) {
    using namespace rapidjson;

    Document d;
    vector<Shakespeare> wrapper;

    string url = "http://bridgesdata.herokuapp.com/api/shakespeare/";

    if (type == "plays" || type == "poems")
        url += type;
    if (textonly)
        url += "?format=simple";

    d.Parse(ServerComm::makeRequest(url, {"Accept: application/json"}).c_str());

    const Value& D = d["data"];
    for (SizeType i = 0; i < D.Size(); i++) {
        const Value& V = D[i];
        wrapper.push_back(
            Shakespeare(
                V["title"].GetString(),
                V["type"].GetString(),
                V["text"].GetString()));
    }
    return wrapper;
}
Song getSong(string songTitle, string artistName = "") {
    using namespace rapidjson;

    Document d;
    vector<Song> wrapper;
    string url = "http://bridgesdata.herokuapp.com/api/songs/find/";

    if (songTitle.size() > 0)
        url += songTitle;
    else
        throw "Incorrect use of getSong. songTitle should be given.";

    if (artistName.size() > 0)
        url += "?artistName=" + artistName;

    // replace spaces with their URL encoding
    string::size_type n = 0;
    while ((n = url.find(" ", n)) != string::npos) {
        url.replace(n, 1, "%20");
    }

    std::cerr << "url: " << url << "\n";

    d.Parse(ServerComm::makeRequest(url, {"Accept: application/json"}).c_str());

    string artist = (d.HasMember("artist")) ?
        d["artist"].GetString() : string();
    string song = (d.HasMember("song")) ?
        d["song"].GetString() : string();
    string album = (d.HasMember("album")) ?
        d["album"].GetString() : string();
    string lyrics = (d.HasMember("lyrics")) ?
        d["lyrics"].GetString() : string();
    string release_date = (d.HasMember("release_date")) ?
        d["release_date"].GetString() : string();

    return Song(artist, song, album, lyrics, release_date);
}
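// Usage sketch for getSong(): the title is required, the artist name is an
// optional disambiguator; both values here are illustrative. `br` is a
// bridges::Bridges instance created elsewhere.
//
//     DataSource ds(&br);
//     Song tune = ds.getSong("Imagine", "John Lennon");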
vector<Song> getSongData() {
    using namespace rapidjson;

    Document d;
    vector<Song> all_songs;

    string url = "http://bridgesdata.herokuapp.com/api/songs/";

    d.Parse(ServerComm::makeRequest(url, {"Accept: application/json"}).c_str());

    const Value& D = d["data"];

    for (SizeType i = 0; i < D.Size(); i++) {
        const Value& v = D[i];

        // some fields may be missing; default them to empty strings
        string artist = (v.HasMember("artist")) ? v["artist"].GetString() : string();
        string song = (v.HasMember("song")) ? v["song"].GetString() : string();
        string album = (v.HasMember("album")) ? v["album"].GetString() : string();
        string lyrics = (v.HasMember("lyrics")) ? v["lyrics"].GetString() : string();
        string release_date = (v.HasMember("release_date")) ?
            v["release_date"].GetString() : string();

        all_songs.push_back(Song(artist, song, album, lyrics, release_date));
    }
    return all_songs;
}
GutenbergBook getAGutenbergBookMetaData(const rapidjson::Value& V) {
    using namespace rapidjson;

    const string s_id = V["id"].GetString();
    const int id = std::stoi(s_id);

    string title = V["title"].GetString();

    const Value& A = V["authors"];
    vector<string> authors;
    for (SizeType j = 0; j < A.Size(); j++)
        authors.push_back(A[j].GetString());

    const Value& L = V["lang"];
    string lang = L.GetString();

    const Value& da = V["date_added"];
    string data_added = da.GetString();

    const Value& G = V["genres"];
    vector<string> genres;
    for (SizeType j = 0; j < G.Size(); j++)
        genres.push_back(G[j].GetString());
}
GutenbergBook getGutenbergBookMetaData(int id = 0) {
    using namespace rapidjson;

    // data server query for one book's metadata
    string url = getGutenbergBaseURL() + "/meta?id=" + std::to_string(id);

    Document d;
    d.Parse(ServerComm::makeRequest(url, {"Accept: application/json"}).c_str());

    return getAGutenbergBookMetaData(d["book_list"][0]);
}
vector<GutenbergBook> getGutenbergBookMetaData(string term, string category) {
    using namespace rapidjson;

    // search the Gutenberg collection by term and category
    string url = getGutenbergBaseURL() + "/search?search=" +
        ServerComm::encodeURLPart(term) + "&type="
        + ServerComm::encodeURLPart(category);

    Document d;
    d.Parse(ServerComm::makeRequest(url, {"Accept: application/json"}).c_str());

    vector<GutenbergBook> book_list;
    int size = d["book_list"].Size();

    for (int k = 0; k < size; k++)
        book_list.push_back(getAGutenbergBookMetaData(d["book_list"][k]));

    return book_list;
}
string getGutenbergBookText(int id = 0) {
    using namespace rapidjson;

    // data URL and the key under which the text is cached locally
    string data_url = getGutenbergBaseURL() + "/book?id=" + std::to_string(id);
    string hash_value = "gutenberg" + std::to_string(id);

    // retrieve the book text (from the cache, if available)
    string book_data = getDataSetJSON(data_url, hash_value, "gutenberg");

    Document d;
    d.Parse(book_data.c_str());

    string s = std::to_string(id);
    return d[s.c_str()].GetString();
}
vector<CancerIncidence> getCancerIncidenceData(int num = 0) {
    using namespace rapidjson;

    Document d;
    vector<CancerIncidence> wrapper;
    string url = "http://bridgesdata.herokuapp.com/api/cancer/withlocations";
    if (num > 0)
        url += "?limit=" + to_string(num);

    d.Parse(ServerComm::makeRequest(url, {"Accept: application/json"}).c_str());

    const Value& D = d["data"];

    CancerIncidence c;
    for (SizeType i = 0; i < D.Size(); i++) {
        const Value& v = D[i];
        const Value& age = v["Age"];

        c.setAgeAdjustedRate(age["Age Adjusted Rate"].GetDouble());
        c.setAgeAdjustedCI_Lower(age["Age Adjusted CI Lower"].GetDouble());
        c.setAgeAdjustedCI_Upper(age["Age Adjusted CI Upper"].GetDouble());

        c.setYear(v["Year"].GetInt());

        const Value& data = v["Data"];
        c.setCrudeRate(data["Crude Rate"].GetDouble());
        c.setCrudeRate_CI_Lower(data["Crude CI Lower"].GetDouble());
        c.setCrudeRate_CI_Upper(data["Crude CI Upper"].GetDouble());
        c.setRace(data["Race"].GetString());
        c.setPopulation(data["Population"].GetInt());
        c.setEventType(data["Event Type"].GetString());
        c.setCount(data["Count"].GetInt());

        c.setAffectedArea(v["Area"].GetString());

        const Value& loc = v["loc"];
        c.setLocationX(loc[0].GetDouble());
        c.setLocationY(loc[1].GetDouble());

        wrapper.push_back(c);
    }
    return wrapper;
}
OSMData getOSMDataFromJSON(const string& osm_json) {
    using namespace rapidjson;

    Document osm_data;
    osm_data.Parse(osm_json.c_str());

    OSMData osm;

    if (osm_data.HasMember("nodes")) {
        vector<OSMVertex> vertices;
        Value& nodes = osm_data["nodes"];

        vector<long> vertex_ids;
        // get the vertices: [id, lat, lon] triples
        for (SizeType i = 0; i < nodes.Size(); i++) {
            const Value& node = nodes[i];
            OSMVertex::OSMVertexID id = node[0].GetInt64();

            vertex_ids.push_back(id);
            double lat = node[1].GetDouble(), longit = node[2].GetDouble();
            vertices.push_back(OSMVertex(id, lat, longit));
        }
        osm.setVertices(vertices);
    }
    if (osm_data.HasMember("edges")) {
        vector<OSMEdge> edges;
        Value& links = osm_data["edges"];

        // get the edges: [source, destination, distance]
        for (SizeType i = 0; i < links.Size(); i++) {
            const Value& link = links[i];
            OSMVertex::OSMVertexID id1 = link[0].GetInt64(),
                                   id2 = link[1].GetInt64();
            double dist = link[2].GetDouble();

            edges.push_back(OSMEdge(id1, id2, dist));
        }
        osm.setEdges(edges);
    }
    if (osm_data.HasMember("meta")) {
        // get the lat/long range of the dataset
        Value& meta = osm_data["meta"];
        double lat_min = meta["lat_min"].GetDouble();
        double lat_max = meta["lat_max"].GetDouble();
        double longit_min = meta["lon_min"].GetDouble();
        double longit_max = meta["lon_max"].GetDouble();

        double lat_range[] = {lat_min, lat_max};
        double longit_range[] = {longit_min, longit_max};
        osm.setLatLongRange(lat_range, longit_range);

        osm.setName(meta["name"].GetString());
    }
    return osm;
}
OSMData getOSMData(double lat_min, double long_min,
        double lat_max, double long_max, string level = "default") {

    // URL to get the hash code of this dataset (used as the cache key)
    string hash_url = getOSMBaseURL() +
        "hash?minLon=" + std::to_string(long_min) +
        "&minLat=" + std::to_string(lat_min) +
        "&maxLon=" + std::to_string(long_max) +
        "&maxLat=" + std::to_string(lat_max) +
        "&level=" + ServerComm::encodeURLPart(level);

    // URL to get the OSM data itself
    string osm_url =
        getOSMBaseURL() + "coords?minLon=" + std::to_string(long_min) +
        "&minLat=" + std::to_string(lat_min) +
        "&maxLon=" + std::to_string(long_max) +
        "&maxLat=" + std::to_string(lat_max) +
        "&level=" + ServerComm::encodeURLPart(level);

    // get the JSON (from the cache or the server) and parse it
    string osm_json = getDataSetJSON(osm_url, hash_url, "osm");

    return getOSMDataFromJSON(osm_json);
}
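// Usage sketch for getOSMData() with a bounding box: the four coordinates
// are (lat_min, long_min, lat_max, long_max) and the last argument is the
// detail level; the box below (roughly uptown Charlotte, NC) is illustrative.
// `br` is a bridges::Bridges instance created elsewhere.
//
//     DataSource ds(&br);
//     OSMData osm = ds.getOSMData(35.21, -80.86, 35.24, -80.82, "default");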
vector<Amenity> getAmenityData(double minLat, double minLon,
        double maxLat, double maxLon, std::string amenity) {

    std::string amenity_url = getOSMBaseURL() + "amenity?minLon=" +
        ServerComm::encodeURLPart(std::to_string(minLon)) +
        "&minLat=" + ServerComm::encodeURLPart(std::to_string(minLat)) +
        "&maxLon=" + ServerComm::encodeURLPart(std::to_string(maxLon)) +
        "&maxLat=" + ServerComm::encodeURLPart(std::to_string(maxLat)) +
        "&amenity=" + ServerComm::encodeURLPart(amenity);

    std::string hash_url = getOSMBaseURL() + "hash?minLon=" +
        ServerComm::encodeURLPart(std::to_string(minLon)) +
        "&minLat=" + ServerComm::encodeURLPart(std::to_string(minLat)) +
        "&maxLon=" + ServerComm::encodeURLPart(std::to_string(maxLon)) +
        "&maxLat=" + ServerComm::encodeURLPart(std::to_string(maxLat)) +
        "&amenity=" + ServerComm::encodeURLPart(amenity);

    // get the JSON and parse it into a list of Amenity objects
    string amenity_json = getDataSetJSON(amenity_url, hash_url, "amenity");

    return parseAmenityData(amenity_json);
}
vector<Amenity> getAmenityData(const std::string& location,
        const std::string& amenity) {
    std::string amenity_url = getOSMBaseURL() + "amenity?location=" +
        ServerComm::encodeURLPart(location) +
        "&amenity=" + ServerComm::encodeURLPart(amenity);

    std::string hash_url = getOSMBaseURL() + "hash?location=" +
        ServerComm::encodeURLPart(location) +
        "&amenity=" + ServerComm::encodeURLPart(amenity);

    // get the JSON and parse it into a list of Amenity objects
    string amenity_json = getDataSetJSON(amenity_url, hash_url, "amenity");

    return parseAmenityData(amenity_json);
}
vector<Amenity> parseAmenityData(string amenity_json) {
    using namespace rapidjson;

    vector<Amenity> amenities;
    Document amenity_content;

    amenity_content.Parse(amenity_json.c_str());
    if (amenity_content.HasMember("nodes")) {
        const Value& nodes = amenity_content["nodes"];
        if (amenity_content.HasMember("meta")) {
            const Value& meta = amenity_content["meta"];

            // each node is [id, lat, lon, name]
            for (SizeType i = 0; i < nodes.Size(); i++) {
                Amenity amen;
                const Value& node = nodes[i];
                amen.setId(node[0].GetInt64());
                amen.setLat(node[1].GetDouble());
                amen.setLon(node[2].GetDouble());
                amen.setName(node[3].GetString());

                amenities.push_back(amen);
            }
        }
        else
            cout << "meta data not found!\n";
    }
    else
        cout << "nodes data not found!\n";

    return amenities;
}
OSMData getOSMData(string location, string level = "default") {

    // cache key (hash) URL and data URL for a named location
    string hash_url = getOSMBaseURL() + "hash?location=" +
        ServerComm::encodeURLPart(location) +
        "&level=" + ServerComm::encodeURLPart(level);

    string osm_url = getOSMBaseURL() +
        "loc?location=" + ServerComm::encodeURLPart(location) +
        "&level=" + ServerComm::encodeURLPart(level);

    string osm_json = getDataSetJSON(osm_url, hash_url, "osm");

    return getOSMDataFromJSON(osm_json);
}
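// Usage sketch for the location-based variants: request a named region and,
// separately, amenities within it. The location and amenity strings are
// illustrative and must be names the OSM data server recognizes. `br` is a
// bridges::Bridges instance created elsewhere.
//
//     DataSource ds(&br);
//     OSMData charlotte = ds.getOSMData("Charlotte, North Carolina", "default");
//     vector<Amenity> food = ds.getAmenityData("Charlotte, North Carolina", "food");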
bridges::GraphAdjList<int, std::string> getGraphFromAssignment(
        const std::string& user,
        int assignment,
        int subassignment = 0) {

    bridges::GraphAdjList<int, std::string> ret;

    std::string s = this->getAssignment(user, assignment, subassignment);

    rapidjson::Document doc;
    doc.Parse(s.c_str());
    if (doc.HasParseError())
        throw "Malformed JSON";

    // check that the assignment carries a graph data structure
    const auto& assjson = doc.FindMember("assignmentJSON");

    if (assjson == doc.MemberEnd())
        throw "Malformed GraphAdjacencyList JSON: no assignmentJSON";

    const auto& dataArray = assjson->value.FindMember("data");

    if (dataArray == assjson->value.MemberEnd()
        || dataArray->value.IsArray() == false)
        throw "Malformed GraphAdjacencyList JSON: No data";

    const auto& data = dataArray->value.GetArray()[0];

    // check the visualization type
    const auto& dataVisual = data.FindMember("visual");

    if (dataVisual == data.MemberEnd() ||
        dataVisual->value.IsString() == false)
        throw "Malformed GraphAdjacencyList JSON";

    std::string assignment_type = dataVisual->value.GetString();

    if (assignment_type != "GraphAdjacencyList")
        throw "Malformed GraphAdjacencyList JSON: Not a GraphAdjacencyList";

    // vertices
    const auto& nodes = data.FindMember("nodes");
    if (nodes == data.MemberEnd() ||
        nodes->value.IsArray() == false)
        throw "Malformed GraphAdjacencyList JSON: malformed nodes";

    const auto& nodeArray = nodes->value.GetArray();
    int nbVertex = nodeArray.Size();
    for (int i = 0; i < nbVertex; ++i) {
        std::string name;

        const auto& vertexJSONstr = nodeArray[i];

        const auto& nameJSON = vertexJSONstr.FindMember("name");
        if (nameJSON != vertexJSONstr.MemberEnd()
            && nameJSON->value.IsString()) {
            name = nameJSON->value.GetString();
        }
        ret.addVertex(i, name);
    }

    // edges
    const auto& links = data.FindMember("links");
    if (links == data.MemberEnd() ||
        links->value.IsArray() == false)
        throw "Malformed GraphAdjacencyList JSON: malformed links";

    const auto& linkArray = links->value.GetArray();
    int nbLink = linkArray.Size();
    for (int i = 0; i < nbLink; ++i) {
        std::string name;
        int src, dest, wgt;

        const auto& linkJSONstr = linkArray[i];

        // edge label
        const auto& nameJSON = linkJSONstr.FindMember("label");
        if (nameJSON != linkJSONstr.MemberEnd()
            && nameJSON->value.IsString()) {
            name = nameJSON->value.GetString();
        }

        // source vertex
        const auto& srcJSON = linkJSONstr.FindMember("source");
        if (srcJSON == linkJSONstr.MemberEnd()
            || srcJSON->value.IsInt() == false) {
            throw "Malformed GraphAdjacencyList JSON: malformed link";
        }
        src = srcJSON->value.GetInt();

        // destination vertex
        const auto& dstJSON = linkJSONstr.FindMember("target");
        if (dstJSON == linkJSONstr.MemberEnd()
            || dstJSON->value.IsInt() == false) {
            throw "Malformed GraphAdjacencyList JSON: malformed link";
        }
        dest = dstJSON->value.GetInt();

        // weight
        const auto& wgtJSON = linkJSONstr.FindMember("weight");
        if (wgtJSON == linkJSONstr.MemberEnd()
            || wgtJSON->value.IsInt() == false) {
            throw "Malformed GraphAdjacencyList JSON: malformed link";
        }
        wgt = wgtJSON->value.GetInt();

        ret.addEdge(src, dest, name);
    }

    return ret;
}
bridges::ColorGrid getColorGridFromAssignment(const std::string& user,
        int assignment,
        int subassignment = 0) {

    std::string s = this->getAssignment(user, assignment, subassignment);

    rapidjson::Document doc;
    doc.Parse(s.c_str());
    if (doc.HasParseError())
        throw "Malformed JSON";

    if (!doc.HasMember("assignment_type") || !doc.HasMember("data"))
        throw "Malformed JSON: Not a Bridges assignment?";

    // check that the assignment is a ColorGrid
    std::string assignment_type = doc["assignment_type"].GetString();

    if (assignment_type != "ColorGrid")
        throw "Malformed ColorGrid JSON: Not a ColorGrid";

    auto& data = doc["data"][0];

    std::string encoding = data["encoding"].GetString();
    if (encoding != "RAW" && encoding != "RLE")
        throw "Malformed ColorGrid JSON: encoding not supported";

    // get the dimensions of the color grid
    const auto& dimensions = data["dimensions"];
    int dimx = dimensions[0].GetInt();
    int dimy = dimensions[1].GetInt();

    std::cerr << "Dimensions: " << dimx << "x" << dimy << std::endl;

    // decode the base64-packed pixel data
    std::string base64_encoded_assignment = data["nodes"][0].GetString();
    std::vector<BYTE> decoded = base64::decode(base64_encoded_assignment);

    bridges::ColorGrid cg(dimx, dimy);

    if (encoding == "RAW") {
        // RAW: 4 bytes (RGBA) per pixel
        std::cerr << "decoding RAW" << std::endl;
        std::cerr << "length: " << decoded.size() << std::endl;
        if (decoded.size() < dimx * dimy * 4)
            throw "Malformed ColorGrid JSON: nodes is smaller than expected";

        size_t base = 0;
        for (int x = 0; x < dimx; ++x) {
            for (int y = 0; y < dimy; ++y) {
                bridges::Color c(
                    (int)decoded[base],
                    (int)decoded[base + 1],
                    (int)decoded[base + 2],
                    (int)decoded[base + 3]);
                cg.set(x, y, c);
                base += 4;
            }
        }
    }
    else if (encoding == "RLE") {
        // RLE: 5-byte runs of (repeat count, R, G, B, A)
        std::cerr << "Decoding RLE" << std::endl;

        int currentInDecoded = 0;
        int currentInCG = 0;
        while (currentInDecoded != decoded.size()) {
            if (currentInDecoded + 5 > decoded.size())
                throw "Malformed ColorGrid JSON: nodes is not a multiple of 5";

            int repeat = (BYTE) decoded[currentInDecoded++];
            int r = (BYTE) decoded[currentInDecoded++];
            int g = (BYTE) decoded[currentInDecoded++];
            int b = (BYTE) decoded[currentInDecoded++];
            int a = (BYTE) decoded[currentInDecoded++];

            std::cerr << "indecoded: " << currentInDecoded
                << " repeat: " << (int)repeat
                << " color(" << (int)r << "," << (int)g << "," << (int)b << "," << (int)a << ")"
                << std::endl;

            bridges::Color c(r, g, b, a);

            while (repeat >= 0) {
                int posX = currentInCG / dimy;
                int posY = currentInCG % dimy;
                if (posX >= dimx || posY >= dimy) {
                    std::cerr << posX << " " << dimx << " " << posY << " " << dimy << std::endl;
                    throw "Malformed ColorGrid JSON: Too much data in nodes";
                }
                cg.set(posX, posY, c);
                currentInCG++;
                repeat--;
            }
        }
        std::cerr << "written " << currentInCG << " pixels" << std::endl;
        if (currentInCG != dimx * dimy)
            throw "Malformed ColorGrid JSON: Not enough data in nodes";
    }
    else
        throw "Malformed ColorGrid JSON";

    return cg;
}
std::string getAssignment(std::string user,
        int assignment,
        int subassignment = 0) {
    std::vector<std::string> headers;

    std::stringstream ss;

    if (bridges_inst)
        ss << bridges_inst->getServerURL();
    else
        ss << bridges::Bridges::getDefaultServerURL();
    ss << "/assignmentJSON/"
        << assignment << ".";
    ss << std::setfill('0') << std::setw(2) << subassignment;

    std::string url = ss.str();

    std::string s = bridges::ServerComm::makeRequest(url, headers);

    return s;
}
void removeFirstOccurence(std::string& str, const std::string& toRemove) {
    size_t pos = str.find(toRemove);
    if (pos != std::string::npos) {
        str.erase(pos, toRemove.length());
    }
}
void getWikidataActorMovieDirect(int yearbegin, int yearend,
        std::vector<MovieActorWikidata>& vout) {
    // results are cached per (yearbegin, yearend) pair
    std::string codename = "wikidata-actormovie-" + std::to_string(yearbegin)
        + "-" + std::to_string(yearend);
    std::string json;
    bool from_cache = false;
    try {
        if (my_cache.inCache(codename)) {
            json = my_cache.getDoc(codename);
            from_cache = true;
        }
    }
    catch (CacheException& ce) {
        // if the cache is unusable, fall back to a live query
        std::cout << "Exception while reading from cache. Ignoring cache and continue.\n( What was:" << ce.what() << ")" << std::endl;
    }

    if (!from_cache) {
        std::vector<std::string> http_headers;
        http_headers.push_back("User-Agent: bridges-cxx");
        http_headers.push_back("Accept: application/json");

        string url = "https://query.wikidata.org/sparql?";

        // SPARQL query: English-language films (wd:Q11424) and their cast,
        // released between yearbegin and yearend
        std::string sparqlquery =
            "SELECT ?movie ?movieLabel ?actor ?actorLabel WHERE \
{\
  ?movie wdt:P31 wd:Q11424.\
  ?movie wdt:P161 ?actor.\
  ?movie wdt:P364 wd:Q1860.\
  ?movie wdt:P577 ?date.\
  FILTER(YEAR(?date) >= " + std::to_string(yearbegin) +
            " && YEAR(?date) <= " + std::to_string(yearend) + ").\
  SERVICE wikibase:label { bd:serviceParam wikibase:language \"en\". } \
}";

        url += "query=" + ServerComm::encodeURLPart(sparqlquery);
        url += "&format=json";

        std::cout << "URL: " << url << "\n";

        json = ServerComm::makeRequest(url, http_headers);

        try {
            my_cache.putDoc(codename, json);
        }
        catch (CacheException& ce) {
            std::cerr << "Exception while storing in cache. Weird but not critical. (What was: " << ce.what() << " )" << std::endl;
        }
    }

    // parse the SPARQL result bindings into MovieActorWikidata objects
    using namespace rapidjson;
    rapidjson::Document doc;
    doc.Parse(json.c_str());
    if (doc.HasParseError())
        throw "Malformed JSON";

    if (!doc.HasMember("results") || !doc["results"].HasMember("bindings"))
        throw "Malformed JSON: Not from wikidata?";

    const auto& resultsArray = doc["results"]["bindings"].GetArray();

    for (auto& mak_json : resultsArray) {
        MovieActorWikidata mak;

        std::string actoruri = mak_json["actor"]["value"].GetString();
        std::string movieuri = mak_json["movie"]["value"].GetString();
        removeFirstOccurence(actoruri, "http://www.wikidata.org/entity/");
        removeFirstOccurence(movieuri, "http://www.wikidata.org/entity/");

        mak.setActorURI(actoruri);
        mak.setMovieURI(movieuri);
        mak.setActorName(mak_json["actorLabel"]["value"].GetString());
        mak.setMovieName(mak_json["movieLabel"]["value"].GetString());
        vout.push_back(mak);
    }
}
std::vector<MovieActorWikidata> getWikidataActorMovie(int yearbegin, int yearend) {
    // the cache works a year at a time, so issue one request per year
    std::vector<MovieActorWikidata> ret;
    for (int y = yearbegin; y <= yearend; ++y) {
        getWikidataActorMovieDirect(y, y, ret);
    }
    return ret;
}
ElevationData getElevationData(
        double minLat, double minLon,
        double maxLat, double maxLon, double res = 0.0166) {

    // data request URL
    std::string elev_url = getElevationBaseURL() +
        "elevation?minLon=" + ServerComm::encodeURLPart(std::to_string(minLon)) +
        "&minLat=" + ServerComm::encodeURLPart(std::to_string(minLat)) +
        "&maxLon=" + ServerComm::encodeURLPart(std::to_string(maxLon)) +
        "&maxLat=" + ServerComm::encodeURLPart(std::to_string(maxLat)) +
        "&resX=" + ServerComm::encodeURLPart(std::to_string(res)) +
        "&resY=" + ServerComm::encodeURLPart(std::to_string(res));

    cout << "Elevation URL:" << elev_url << "\n";

    // hash URL (cache key) for the same request
    std::string hash_url = getElevationBaseURL() +
        "hash?minLon=" + ServerComm::encodeURLPart(std::to_string(minLon)) +
        "&minLat=" + ServerComm::encodeURLPart(std::to_string(minLat)) +
        "&maxLon=" + ServerComm::encodeURLPart(std::to_string(maxLon)) +
        "&maxLat=" + ServerComm::encodeURLPart(std::to_string(maxLat)) +
        "&resX=" + ServerComm::encodeURLPart(std::to_string(res)) +
        "&resY=" + ServerComm::encodeURLPart(std::to_string(res));

    cout << "Hash URL:" << hash_url << "\n";

    string elev_json = getDataSetJSON(elev_url, hash_url, "elevation");

    return parseElevationData(elev_json);
}
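// Usage sketch for getElevationData(): the four coordinates are the bounding
// box (minLat, minLon, maxLat, maxLon) and res is the grid resolution in
// degrees; the values here are illustrative. `br` is a bridges::Bridges
// instance created elsewhere.
//
//     DataSource ds(&br);
//     ElevationData elev = ds.getElevationData(35.0, -81.0, 35.5, -80.5, 0.01);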
ElevationData parseElevationData(string elev_json) {

    // the elevation response is an ASCII grid: a small header
    // (ncols, nrows, lower-left corner, cell size) followed by
    // rows of integer elevation values
    stringstream ss(elev_json);

    int rows, cols, elev_val;
    double ll_x, ll_y, cell_size;
    string tmp;

    ss >> tmp >> cols >> tmp >> rows >>
        tmp >> ll_x >> tmp >> ll_y >>
        tmp >> cell_size;

    if (ss.fail())
        throw "Parse Error";

    ElevationData elev_data(rows, cols);
    elev_data.setxll(ll_x);
    elev_data.setyll(ll_y);
    elev_data.setCellSize(cell_size);

    // read the grid values
    for (int i = 0; i < rows; i++) {
        for (int j = 0; j < cols; j++) {
            ss >> elev_val;
            elev_data.setVal(i, j, elev_val);
        }
    }
    if (ss.fail())
        throw "Parse Error";

    return elev_data;
}
std::vector<std::string> getAvailableSubreddits() {
    string base_url = getRedditURL();
    string url = base_url + "/listJSON";

    std::cout << "hitting url: " << url << "\n";

    using namespace rapidjson;
    Document doc;
    std::string s = ServerComm::makeRequest(url, {"Accept: application/json"});

    std::cout << "Returned JSON:" << s << "\n";

    try {
        doc.Parse(s.c_str());
    }
    catch (rapidjson_exception& re) {
        std::cerr << "malformed subreddit list" << "\n";
        std::cerr << "Original exception: " << (std::string)re << "\n";
    }

    std::vector<std::string> subreddits;
    try {
        for (auto& m : doc.GetArray()) {
            std::string subred = m.GetString();
            subreddits.push_back(subred);
        }
    }
    catch (rapidjson_exception& re) {
        std::cerr << "malformed subreddit list" << "\n";
        std::cerr << "Original exception: " << (std::string)re << "\n";
    }

    return subreddits;
}
vector<Reddit> getRedditData(string subreddit, int time_request = -9999) {
    string base_url = getRedditURL();

    cout << "reddit base url:" << base_url << "\n";

    string url = base_url + "/cache?subreddit=" + subreddit +
        "&time_request=" + std::to_string(time_request);

    cout << "reddit url:" << url << "\n";

    using namespace rapidjson;
    Document doc;
    std::string s = ServerComm::makeRequest(url, {"Accept: application/json"});

    std::cout << "Returned JSON:" << s << "\n";

    doc.Parse(s.c_str());

    vector<Reddit> reddit_posts;
    try {
        for (auto& m : doc.GetObject()) {

            std::cout << m.name.GetString() << "\n";

            auto& postJSON = m.value;

            std::string id = postJSON["id"].GetString();
            std::string title = postJSON["title"].GetString();
            std::string author = postJSON["author"].GetString();
            int score = postJSON["score"].GetInt();
            float vote_ratio = postJSON["vote_ratio"].GetDouble();
            int comment_count = postJSON["comment_count"].GetInt();
            std::string subreddit = postJSON["subreddit"].GetString();
            int posttime = postJSON["post_time"].GetDouble();
            std::string url = postJSON["url"].GetString();
            std::string text = postJSON["text"].GetString();

            Reddit r;
            r.setAuthor(author);
            r.setVoteRatio(vote_ratio);
            r.setCommentCount(comment_count);
            r.setSubreddit(subreddit);
            r.setPostTime(posttime);

            reddit_posts.push_back(r);
        }
    }
    catch (rapidjson_exception& re) {
        std::cerr << "malformed Reddit post" << "\n";
        std::cerr << "Original exception: " << (std::string)re << "\n";
    }

    return reddit_posts;
}
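// Usage sketch for the Reddit calls: list the available subreddits, then
// pull posts for one of them; time_request is left at its default. The
// subreddit name is illustrative. `br` is a bridges::Bridges instance.
//
//     DataSource ds(&br);
//     std::vector<std::string> subs = ds.getAvailableSubreddits();
//     vector<Reddit> posts = ds.getRedditData("programming");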
string getHashCode(string hash_url, string data_type) {
    string hash_value;
    if (data_type == "osm" || data_type == "amenity" ||
        data_type == "elevation") {
        // these servers expose a hash endpoint that must be queried
        hash_value = ServerComm::makeRequest(hash_url, {"Accept: application/json"});
    }
    else if (data_type == "gutenberg")
        // for Gutenberg data the caller passes the hash value directly
        hash_value = hash_url;

    return hash_value;
}
std::string getDataSetJSON(std::string data_url, std::string hash_url,
        std::string data_type) {

    std::string data_json = "";

    // First check the local cache: obtain the hash code for this dataset
    // and see whether a document under that hash is already stored.
    cerr << "Checking the cache: Hash url: " << hash_url << "\n";

    string hash_value = getHashCode(hash_url, data_type);

    bool dataloaded = false;

    if ((hash_value != "false") && (my_cache.inCache(hash_value) == true)) {
        try {
            data_json = my_cache.getDoc(hash_value);
            dataloaded = true;
        }
        catch (CacheException& ce) {
            std::cout << "Exception while reading from cache. "
                << "Ignoring cache and continuing..\n (What was:" << ce.what() << ")\n";
        }
    }

    if (!dataloaded) {
        // not in the cache: fetch the dataset from the server
        std::cerr << "Hitting data URL: " << data_url << "\n";

        data_json = ServerComm::makeRequest(data_url,
            {"Accept: application/json"});

        // try to store the response in the cache for future use
        try {
            if (hash_value == "false") {
                std::cerr << "Hitting hash URL: " << hash_url << "\n";

                hash_value = getHashCode(hash_url, data_type);
            }
            if (hash_value == "false") {
                std::cerr << "Error while gathering hash value for " << data_type << " dataset..\n"
                    << "Weird but not critical.\n";
            }
            else {
                my_cache.putDoc(hash_value, data_json);
            }
        }
        catch (CacheException& ce) {
            std::cerr << "Exception while storing in cache. " <<
                "Weird but not critical.\n" <<
                "(What was: " << ce.what() << ")\n";
            std::cerr << "Tried to store hash=" << hash_value <<
                " key = " << data_json << std::endl;
        }
    }
    return data_json;
}