Decoding base64 API response throws a FormatException - dart

An API I'm working with returns the following Base64-encoded, UTF-8 data.
H4sIAAAAAAAEAIXS3WvCMBAA8H8l5MmB9Wtf2idtJ86HDrHKNsYeYntrgzUpl2RQpf/7UsExsXZvCcf97ri7A0XQBsUT04y6BzoGRIlAXUrbFDIQkZx+g9D8GBR4+tChjY8NZgvOtGZfEnc2n6Za58rtdk3EOwnoQhqEKqMjRcYFVGb1ldT9sByPf7l+xcWgIuR7LkXVwKDXH5G1PyfByiOvErOY+Cnb5TasUp4rskBQNh1isilIABiBBRwPxJ60Aik0CRm3YTIRAhx/8nJDy/Z51duGqkvJ4tqyrXeJW/tEcGbe8hK9a0DXuGGiXvVTEElsHP+5ptH7OnPQO5p+EWWS+CiVqpMvrIcr1vkEj4OXRmg7Q+LxLZxsk1+Ijw3decEbCU1u7b8N1iDDBmSFLNr+fwMrrpTUpOUB2mtzZtOa7Ywaylxf+YQnGTgBQ80TUZBwPbfyZ1mWP1eCbD1AAwAA
I wrote the following test to convert it to a UTF-8 JSON string.
import 'dart:convert';

import 'package:test/test.dart';

void main() {
  const data = "H4sIAAAAAAAEAIXS3WvCMBAA8H8l5MmB9Wtf2idtJ86HDrHKNsYeYntrgzUpl2RQpf/7UsExsXZvCcf97ri7A0XQBsUT04y6BzoGRIlAXUrbFDIQkZx+g9D8GBR4+tChjY8NZgvOtGZfEnc2n6Za58rtdk3EOwnoQhqEKqMjRcYFVGb1ldT9sByPf7l+xcWgIuR7LkXVwKDXH5G1PyfByiOvErOY+Cnb5TasUp4rskBQNh1isilIABiBBRwPxJ60Aik0CRm3YTIRAhx/8nJDy/Z51duGqkvJ4tqyrXeJW/tEcGbe8hK9a0DXuGGiXvVTEElsHP+5ptH7OnPQO5p+EWWS+CiVqpMvrIcr1vkEj4OXRmg7Q+LxLZxsk1+Ijw3decEbCU1u7b8N1iDDBmSFLNr+fwMrrpTUpOUB2mtzZtOa7Ywaylxf+YQnGTgBQ80TUZBwPbfyZ1mWP1eCbD1AAwAA";
  test('Base64 decoder', () {
    final bytes = base64.decode(data);
    String jsonString;
    try {
      jsonString = utf8.decode(bytes);
    } catch (e) {
      // ignore: avoid_print
      print(e);
    }
    // ignore: avoid_print
    print("$jsonString\n");
    final decoded = json.decode(jsonString);
    // ignore: avoid_print
    print("$decoded\n");
    expect(decoded, isNotNull);
  });
}
The bytes decoded from the Base64 string look good to me, but when I try to utf8.decode() them, I get a FormatException or, if I pass allowMalformed: true, a nonsense string.
How can I decode the JSON string correctly?

Your Base64 input is a gzip-compressed JSON string (the leading H4sI is the Base64 encoding of the gzip magic bytes 1f 8b 08). The following program will work:
import 'dart:convert';
import 'dart:io';

void main() {
  const data = "H4sIAAAAAAAEAIXS3WvCMBAA8H8l5MmB9Wtf2idtJ86HDrHKNsYeYntrgzUpl2RQpf/7UsExsXZvCcf97ri7A0XQBsUT04y6BzoGRIlAXUrbFDIQkZx+g9D8GBR4+tChjY8NZgvOtGZfEnc2n6Za58rtdk3EOwnoQhqEKqMjRcYFVGb1ldT9sByPf7l+xcWgIuR7LkXVwKDXH5G1PyfByiOvErOY+Cnb5TasUp4rskBQNh1isilIABiBBRwPxJ60Aik0CRm3YTIRAhx/8nJDy/Z51duGqkvJ4tqyrXeJW/tEcGbe8hK9a0DXuGGiXvVTEElsHP+5ptH7OnPQO5p+EWWS+CiVqpMvrIcr1vkEj4OXRmg7Q+LxLZxsk1+Ijw3decEbCU1u7b8N1iDDBmSFLNr+fwMrrpTUpOUB2mtzZtOa7Ywaylxf+YQnGTgBQ80TUZBwPbfyZ1mWP1eCbD1AAwAA";
  final bytes = base64.decode(data);
  String jsonString;
  try {
    jsonString = utf8.decode(GZipCodec().decode(bytes));
  } catch (e) {
    // ignore: avoid_print
    print(e);
  }
  // ignore: avoid_print
  print("$jsonString\n");
  final dynamic decoded = json.decode(jsonString);
  // ignore: avoid_print
  print("$decoded\n");
}
Returns:
{"returnData":{"#errore":"","elencoEventi":{"#nrEventi":"8","#urlPiattaforma":"https://uci.getyourevent.online","evento":[{"#idEventi":"1","#descrizione":"2019 UCI MTB World Championships Presented by Mercedes-Benz (Mont Sainte Anne-CAN)"},{"#idEventi":"3","#descrizione":"2019 UCI Road World Championships (Yorkshire-GBR)"},{"#idEventi":"4","#descrizione":"2019 UCI Urban World Championships (Chengdu-CHN)"},{"#idEventi":"5","#descrizione":"2020 UCI Cyclo Cross World Championships"},{"#idEventi":"6","#descrizione":"2020 Mercedes-Benz UCI Mountain Bike World Cup"},{"#idEventi":"7","#descrizione":"2020 UCI BMX Supercross World Cup"},{"#idEventi":"8","#descrizione":"2020 UCI Track World Championships Presented by Tissot (Berlin-GER)"},{"#idEventi":"9","#descrizione":"2020 UCI Road World Championships (Aigle-Martigny SUI)"}]}}}
{returnData: {#errore: , elencoEventi: {#nrEventi: 8, #urlPiattaforma: https://uci.getyourevent.online, evento: [{#idEventi: 1, #descrizione: 2019 UCI MTB World Championships Presented by Mercedes-Benz (Mont Sainte Anne-CAN)}, {#idEventi: 3, #descrizione: 2019 UCI Road World Championships (Yorkshire-GBR)}, {#idEventi: 4, #descrizione: 2019 UCI Urban World Championships (Chengdu-CHN)}, {#idEventi: 5, #descrizione: 2020 UCI Cyclo Cross World Championships}, {#idEventi: 6, #descrizione: 2020 Mercedes-Benz UCI Mountain Bike World Cup}, {#idEventi: 7, #descrizione: 2020 UCI BMX Supercross World Cup}, {#idEventi: 8, #descrizione: 2020 UCI Track World Championships Presented by Tissot (Berlin-GER)}, {#idEventi: 9, #descrizione: 2020 UCI Road World Championships (Aigle-Martigny SUI)}]}}}
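If the API does not always compress its payloads, you can branch on the gzip magic bytes before decompressing. A minimal sketch, assuming the payload is otherwise plain UTF-8 (gzip here is the GZipCodec instance exported by dart:io):

import 'dart:convert';
import 'dart:io';

/// Decodes a Base64 payload that may or may not be gzip-compressed.
String decodePayload(String base64Data) {
  final bytes = base64.decode(base64Data);
  // Every gzip stream starts with the magic bytes 0x1f 0x8b.
  final isGzip = bytes.length > 2 && bytes[0] == 0x1f && bytes[1] == 0x8b;
  return utf8.decode(isGzip ? gzip.decode(bytes) : bytes);
}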

Related

Why does as.ts create new values for the date variable?

I want to create a time series with as.ts().
The problem is that after using as.ts(), the date variable has different values.
Here you can see the date variable before and after applying as.ts():
[1]: https://i.stack.imgur.com/kc4np.png
[2]: https://i.stack.imgur.com/SKMnY.png
sapply(Oct_Apr, class)
# character to date
Oct_Apr$date <- lubridate::ymd(Oct_Apr$date)
# check the classes
sapply(Oct_Apr, class)
Oct_Apr %>% as_tsibble(index = "date")
#################################################################
# Training and test set
# Training set (Oct 2019 - Mar 2020)
Oct_Mar <- Oct_Apr %>% slice(01:183)
# Test set (Apr 2020)
Apr <- Oct_Apr %>% slice(184:213)
#################################################################
#################################################################
Oct_Mar <- as.ts(Oct_Mar)
Apr <- as.ts(Apr)
#################################################################
Subset of the data:
structure(list(date = structure(c(18170, 18171, 18172, 18173,
18174, 18175, 18176, 18177, 18178, 18179, 18180, 18181, 18182,
18183, 18184, 18185, 18186, 18187, 18188, 18189, 18190, 18191,
18192, 18193, 18194, 18195, 18196, 18197, 18198, 18199, 18200,
18201, 18202, 18203, 18204, 18205, 18206, 18207, 18208, 18209,
18210, 18211, 18212, 18213, 18214, 18215, 18216, 18217, 18218,
18219, 18220, 18221, 18222, 18223, 18224, 18225, 18226, 18227,
18228, 18229, 18230, 18231, 18232, 18233, 18234, 18235, 18236,
18237, 18238, 18239, 18240, 18241, 18242, 18243, 18244, 18245,
18246, 18247, 18248, 18249, 18250, 18251, 18252, 18253, 18254,
18255, 18256, 18257, 18258, 18259, 18260, 18261, 18262, 18263,
18264, 18265, 18266, 18267, 18268, 18269, 18270, 18271, 18272,
18273, 18274, 18275, 18276, 18277, 18278, 18279, 18280, 18281,
18282, 18283, 18284, 18285, 18286, 18287, 18288, 18289, 18290,
18291, 18292, 18293, 18294, 18295, 18296, 18297, 18298, 18299,
18300, 18301, 18302, 18303, 18304, 18305, 18306, 18307, 18308,
18309, 18310, 18311, 18312, 18313, 18314, 18315, 18316, 18317,
18318, 18319, 18320, 18321, 18322, 18323, 18324, 18325, 18326,
18327, 18328, 18329, 18330, 18331, 18332, 18333, 18334, 18335,
18336, 18337, 18338, 18339, 18340, 18341, 18342, 18343, 18344,
18345, 18346, 18347, 18348, 18349, 18350, 18351, 18352, 18353,
18354, 18355, 18356, 18357, 18358, 18359, 18360, 18361, 18362,
18363, 18364, 18365, 18366, 18367, 18368, 18369, 18370, 18371,
18372, 18373, 18374, 18375, 18376, 18377, 18378, 18379, 18380,
18381, 18382), class = "Date"), price_view = c(35.79, 180.16,
437.57, 10.3, 74.26, 79.8, 89.84, 121.24, 461.95, 142.06, 241.71,
52, 43.24, 41.16, 167.05, 764.06, 91.64, 189.82, 38.59, 152.64,
86.23, 321.33, 411.83, 256.88, 352.39, 76.32, 360.11, 123.53,
43.41, 149.38, 14.16, 489.07, 1661.74, 1253.07, 25.71, 154.42,
990.89, 1645.93, 144.12, 84.43, 240.25, 148.18, 41.13, 262.56,
168.78, 860.85, 239.31, 372.98, 165.64, 134.32, 20.7, 43.73,
765.76, 51.48, 599.49, 893.79, 155.29, 334.37, 46.82, 1814.72,
196.27, 1302.48, 40.16, 1161.68, 381.48, 184.48, 48.91, 221.11,
434.73, 149.27, 77.22, 882.49, 106.05, 669.23, 282.86, 179.67,
12.97, 460.24, 38.59, 278.26, 243.76, 1904.79, 84.93, 32.18,
25.71, 496.54, 29.6, 1466.83, 164.33, 234.76, 19.95, 308.37,
1130.02, 7.47, 79.8, 65.9, 746.45, 1347.78, 1270.82, 69.42, 231.41,
195.6, 715.33, 208.47, 720.46, 414.68, 24.45, 217.82, 434.45,
483.92, 1500.42, 318.15, 339.29, 267.45, 133.85, 9.03, 11.81,
280.57, 916.74, 58.51, 339.78, 33.98, 263.58, 19.31, 239.88,
489.07, 84.92, 344.9, 95.24, 99.1, 142.58, 480.58, 104.74, 14.83,
252, 1039.41, 28.3, 328.97, 341.55, 278.26, 43.73, 91.35, 102.32,
131.25, 155.15, 77.74, 14.67, 132.63, 1185.36, 291.13, 1106.59,
849.42, 117.63, 171.32, 167.31, 252.23, 248.14, 111.15, 257.15,
27.62, 169.86, 101.89, 282.89, 298.57, 86.49, 196.32, 1415.45,
898.35, 334.6, 17.99, 13.62, 566.27, 60.41, 36.34, 62.04, 308.81,
32.95, 127.44, 836.57, 221.34, 360.34, 159.31, 20.57, 230.38,
563.72, 103.71, 509.67, 125.87, 80.27, 37.58, 14.13, 527.14,
5.15, 567.3, 2316.4, 21.85, 141.06, 25.71, 62.16, 328.68, 15.44,
156.99, 15.42, 54.03, 514.56, 561.63, 97.56, 46.31, 41.16, 32.15,
60.17, 809.26, 1026.26), view = c(1206151L, 1152770L, 1087372L,
1344804L, 1270060L, 1262993L, 1159265L, 1323522L, 1301376L, 1240347L,
1445162L, 1432321L, 1583572L, 1376274L, 1462409L, 1443323L, 1337174L,
1413405L, 1382403L, 1443838L, 1342668L, 1353053L, 1318395L, 1252747L,
1369922L, 1288939L, 1330209L, 1220710L, 1187883L, 1169955L, 1207854L,
1402754L, 1513400L, 1524803L, 1743304L, 1670637L, 1644359L, 1748812L,
1789808L, 1783142L, 1845552L, 1907417L, 1892753L, 1920411L, 2864410L,
5691766L, 5986292L, 5759703L, 1905351L, 1627672L, 1598554L, 1573101L,
1471242L, 1474138L, 1500022L, 1496128L, 1557252L, 1547199L, 1560191L,
1727852L, 1644405L, 1706901L, 1629904L, 1547658L, 1468085L, 1540157L,
1652208L, 1725106L, 1724452L, 1627222L, 1651328L, 1605421L, 1650612L,
1634861L, 1760750L, 2167056L, 2875847L, 2780816L, 2665285L, 2528244L,
2387520L, 2340327L, 2471739L, 2372930L, 2326654L, 2322753L, 2240514L,
2058141L, 2089081L, 2474226L, 2294820L, 1603749L, 1427733L, 1700904L,
1765457L, 1754424L, 1738774L, 1696188L, 1701769L, 1585870L, 1556542L,
1557542L, 1618230L, 1645866L, 1627433L, 1612956L, 1555416L, 1773179L,
1826768L, 2021676L, 2104199L, 1801073L, 1733142L, 1593991L, 1645225L,
1557626L, 1637470L, 1721003L, 1545472L, 1594688L, 1565742L, 1651606L,
1999670L, 2217825L, 1985751L, 1680034L, 1608904L, 1620473L, 1628906L,
1726835L, 1589058L, 1714745L, 1751044L, 1896265L, 2429526L, 2268487L,
1935249L, 1916034L, 2239698L, 1916650L, 1981570L, 1948648L, 1987134L,
1749514L, 1822349L, 1830307L, 1748590L, 1734610L, 1798308L, 162557L,
1000204L, 1257475L, 1770064L, 2416707L, 2477258L, 2487470L, 2457500L,
2210539L, 2377633L, 2026050L, 2301337L, 2218894L, 2012789L, 1700619L,
1481115L, 1562027L, 1560348L, 1338829L, 1244973L, 1142989L, 1260747L,
1316975L, 1387394L, 1319559L, 1440470L, 1451015L, 1439649L, 1390411L,
1336076L, 1369834L, 1255626L, 1244163L, 1283731L, 1461785L, 1448716L,
1476317L, 1437668L, 1486293L, 1436223L, 1452446L, 1659354L, 1739377L,
1871914L, 1970269L, 1956840L, 2068666L, 1946201L, 2340731L, 2511860L,
2471888L, 2415311L, 2149298L, 2274084L, 2299902L, 2354228L, 2406987L,
2503970L, 2600158L, 2592352L, 2402199L, 2487428L, 2542154L, 2526912L
), price_cart = c(29.51, 1415.48, 99.86, 358.57, 617.51, 1052.79,
1747.79, 190.56, 128.28, 252.38, 250.91, 720.48, 33.42, 643,
191.77, 460.11, 408.5, 789.9, 577.94, 49.36, 380.7, 19.56, 994.86,
756.71, 223.66, 437.33, 1684.28, 366.16, 968.34, 1683.07, 550.77,
503.09, 29.09, 179.67, 210.62, 22.66, 131.66, 68.96, 360.06,
494.22, 1023.62, 1569.92, 28.29, 694.97, 127.05, 37.85, 282.89,
178.9, 913.28, 1022.42, 424.7, 573.7, 1029.34, 30.12, 20.82,
17.99, 107.53, 41.19, 85.82, 1002.55, 140.98, 167.03, 231.67,
25.71, 205.64, 30.81, 51.22, 65.9, 7.08, 308.63, 227.79, 16.22,
7.89, 62.52, 48.88, 586.63, 602.07, 1312.26, 128.32, 179.9, 849.42,
100.9, 1284.2, 12.84, 128.42, 59.18, 176.99, 38.02, 48.88, 694.54,
262.3, 1402.84, 1453.18, 3.84, 453.01, 76.93, 7.04, 865.93, 865.4,
40.75, 1423.07, 1534.66, 679.27, 11.25, 102.63, 436.3, 853.93,
694.97, 850.47, 477.49, 1234.97, 10.27, 23.94, 643.23, 89.84,
290.34, 320.99, 6.44, 140.28, 188.89, 56.88, 1326.31, 194.34,
140.28, 771.96, 140.03, 20.21, 1464.39, 59.18, 57.92, 1156.81,
50.43, 300.12, 38.1, 832.71, 57.91, 174.5, 100.36, 248.14, 109.34,
100.7, 242.7, 266.67, 592.01, 242.18, 22.66, 566.04, 38.61, 812.06,
123.92, 168.6, 172.03, 49.91, 16.73, 108.04, 347.47, 97.79, 111.15,
514.79, 126.1, 178.87, 870.03, 529.31, 43.5, 2110.48, 771.94,
15.32, 105.25, 7.14, 312.67, 61.75, 165.51, 48.37, 643.49, 303.48,
35.78, 154.42, 209.71, 76.69, 25.46, 1415.45, 123.53, 602.31,
117.12, 334.35, 455.3, 643.26, 101.16, 245.82, 280.74, 143.89,
114.67, 12.84, 31.89, 32.69, 203.35, 66.9, 208.24, 57.92, 14.32,
400.5, 146.46, 827.35, 30.86, 143.89, 47.29, 426.01, 30.07, 36.28,
108.09, 81.06, 301.91, 434.73), cart = c(16658L, 17268L, 19323L,
43826L, 35493L, 32145L, 18052L, 18442L, 18432L, 18997L, 21450L,
20691L, 24833L, 44821L, 49513L, 45272L, 40368L, 40127L, 39455L,
40533L, 36675L, 36945L, 36407L, 35721L, 36800L, 34776L, 34256L,
17838L, 17455L, 16996L, 16798L, 18911L, 19350L, 20211L, 21960L,
19231L, 19670L, 19446L, 77319L, 70093L, 71585L, 75135L, 69669L,
71613L, 170183L, 481862L, 405584L, 426261L, 83117L, 72450L, 72311L,
75530L, 70171L, 64801L, 68099L, 71405L, 71622L, 71324L, 71504L,
92345L, 81760L, 84473L, 80869L, 70192L, 66718L, 71048L, 83618L,
84231L, 80773L, 80675L, 81420L, 78947L, 80162L, 82360L, 86689L,
109721L, 183764L, 155406L, 146906L, 137487L, 127900L, 124577L,
127381L, 126700L, 124797L, 127554L, 123966L, 120940L, 127769L,
148663L, 148608L, 119062L, 57614L, 71342L, 95608L, 80629L, 78782L,
79099L, 77396L, 74671L, 72772L, 74827L, 72221L, 73406L, 72999L,
71182L, 70235L, 79414L, 104791L, 103481L, 102597L, 94354L, 90666L,
83642L, 83223L, 73075L, 73582L, 73849L, 70067L, 71600L, 72179L,
130757L, 208231L, 169156L, 137970L, 116560L, 104701L, 102836L,
101145L, 101605L, 90864L, 92635L, 95114L, 100283L, 158447L, 131720L,
118661L, 126405L, 132399L, 98277L, 96270L, 95284L, 96886L, 89046L,
91384L, 89585L, 83771L, 83241L, 84151L, 6104L, 40574L, 48944L,
81869L, 146953L, 135144L, 132819L, 134255L, 131648L, 141696L,
122204L, 122752L, 120927L, 112159L, 102239L, 95998L, 97600L,
99032L, 79662L, 76622L, 69585L, 73822L, 74488L, 75621L, 69098L,
73761L, 76429L, 75664L, 77671L, 77090L, 77835L, 68888L, 69091L,
73986L, 84497L, 91080L, 90245L, 82691L, 78239L, 83518L, 80689L,
85564L, 89912L, 101674L, 103058L, 98647L, 111911L, 109854L, 139952L,
138776L, 140779L, 129910L, 114445L, 114822L, 115894L, 121939L,
124642L, 123978L, 120518L, 117401L, 118844L, 118196L, 117715L,
117497L), price_purchase = c(130.76, 419.6, 251.74, 252.88, 64.02,
272.59, 172.72, 88.81, 28.73, 1003.86, 346.47, 130.48, 29.86,
280.11, 358.57, 385.83, 287.61, 22.95, 58.08, 854.08, 28.28,
62.91, 994.86, 51.22, 9.01, 77.21, 244.15, 366.16, 366.8, 213.25,
35.52, 566.3, 35.78, 1106.82, 64.35, 722.18, 131.66, 166.1, 823.9,
138.23, 334.6, 328.19, 243.51, 488.8, 159.57, 106.8, 54.03, 27,
308.63, 1022.42, 463.31, 144.66, 44.53, 25.48, 126.18, 365.52,
133.92, 97.27, 12.84, 1002.55, 107.41, 132.31, 131.2, 789.57,
230.2, 12.36, 229.86, 1386.91, 154.19, 18.19, 76.96, 882.49,
191.55, 46.08, 24.17, 102.65, 326.62, 924.06, 923.73, 88.29,
41.16, 128.42, 326.88, 137.96, 30.68, 108.88, 181.19, 241.34,
128.32, 137.46, 1279.81, 643.23, 1275.16, 717.245, 159.33, 745.37,
288.27, 177.26, 168.58, 66.85, 331.51, 437.31, 643.23, 9.3, 0.85,
436.3, 105.51, 7.7, 79.44, 1321.37, 160.89, 107.21, 172.25, 514.79,
141.06, 900.64, 153.22, 924.4, 176.34, 94.98, 162.6, 1326.25,
39.9, 38.15, 2162.22, 180.95, 153.41, 720.48, 720.48, 15.42,
140.28, 514.02, 720.47, 174.7, 197.69, 411.08, 741.07, 230.12,
501.89, 109.34, 643.26, 23.17, 242.48, 1317.36, 69.76, 178.11,
153.55, 32.18, 812.06, 302.2, 153.34, 172.03, 128.68, 939.54,
108.04, 165.89, 56.63, 43.76, 171.17, 98.59, 21.95, 280.28, 181.47,
730.01, 159.31, 60.75, 31.15, 1412.39, 7.14, 942.84, 321.06,
165.51, 284.95, 169.42, 303.48, 224.3, 416.43, 385.85, 492.08,
334.6, 1415.45, 123.53, 308.55, 60.49, 334.35, 771.96, 126.88,
73.36, 203.84, 164.48, 143.89, 74.36, 23.17, 2.55, 7.84, 168,
935.67, 208.24, 269.22, 14.32, 8.98, 128.94, 40.67, 150.04, 48.65,
309.27, 37.3, 489.79, 82.11, 3.84, 563.44, 918.26, 470.8), purchase = c(19307,
19469, 19255, 27041, 23494, 22171, 21378, 23072, 22748, 21993,
26224, 25373, 29561, 28405, 26372, 31394, 28318, 25850, 24657,
25098, 25167, 25385, 24731, 23999, 23929, 22653, 23403, 21112,
20374, 20817, 20099, 22458, 21864, 22145, 26889, 24875, 25319,
24863, 25714, 22768, 22878, 24931, 22725, 22548, 22124, 45185.5,
68247, 185195, 28537, 24967, 24947, 25266, 24187, 22243, 23163,
24827, 24226, 24443, 24305, 32107, 28178, 28345, 28548, 24358,
24473, 25469, 27505, 27012, 25766, 26802, 27059, 25906, 26044,
26712, 26559, 35077, 63796, 51899, 49578, 48212, 46405, 44255,
44719, 46602, 44917, 44949, 44154, 43081, 45287, 49597, 50729,
38233, 3574, 13975.5, 24377, 28938, 28427, 28875, 27722, 27510,
26492, 27481, 26059, 25869, 27525, 26322, 27121, 29614, 35086,
32884, 32548, 32619, 31698, 30089, 30398, 27579, 26662, 26880,
27052, 26841, 27403, 27612, 33750, 32536, 32308, 28645, 27652,
28276, 28533, 28426, 25379, 26027, 46480, 60013, 102117, 83216,
76048, 72365, 87586, 63260, 36377, 31438, 31258, 29324, 28946,
29017, 27884, 28063, 27809, 27499, 27490, 26316, 31358, 55087,
46356, 45228, 44406, 43521, 45501, 40281, 41091, 41681, 39266,
36268, 34754, 35341, 35943, 28852, 27810, 25186, 25501, 26232,
25775, 23698, 25314, 25960, 26259, 27487, 26966, 25817, 21294,
22704, 23997, 27250, 29639, 28900, 25449, 23845, 25050, 24045,
25544, 26410, 30056, 31179, 29752, 33751, 34105, 41573, 43563,
43421, 39800, 34089, 22, 29, 40618, 41499, 41579, 40945, 39870,
41703, 40669, 40671, 41733), ctr = c(0.0157890561813006, 0.0166396305077271,
0.0173986509381537, 0.0194731497951218, 0.0179954394804347, 0.01711863909483,
0.0181582360570687, 0.0171927115779559, 0.0172358403646591, 0.0174638541971058,
0.0178806664612045, 0.017462347179514, 0.0183790774089859, 0.0199881077619723,
0.0174426987635606, 0.021089685240109, 0.0205569049800296, 0.0177842661874661,
0.0173413941476575, 0.0169081718788632, 0.0182456430344012, 0.0182626162052032,
0.0182543279386951, 0.0186259961442581, 0.0170104683085926, 0.0171132003490177,
0.0171517774365777, 0.0170457664943143, 0.016903142521019, 0.0175382134561578,
0.0164120092891695, 0.0157969704536582, 0.014264557168488, 0.0143332034531726,
0.0152322825367764, 0.0147200846456646, 0.0152154800186776, 0.0140607309566817,
0.0137719608789332, 0.0122855439272407, 0.0119334194687182, 0.0125752060979989,
0.01158007808718, 0.0113191407332442, 0.00729059877222415, 0.00731911608538772,
0.0106771470535411, 0.029937936916542, 0.0143512493034839, 0.0146854166936255,
0.0149305898441825, 0.015325442746133, 0.015691446743994, 0.0144534643673336,
0.0147711815606066, 0.0158382630541111, 0.0148728508159624, 0.015102040564144,
0.0148955533969277, 0.0176392994824187, 0.0163240478169816, 0.0158230497930639,
0.0166871934499785, 0.0150557839107457, 0.0159453688844757, 0.0158074236363467,
0.0158454822084702, 0.0149292254566175, 0.0142730130593139, 0.0156929838274791,
0.0156162350209032, 0.0153802494466767, 0.0150476029799385, 0.015555365325721,
0.0143761174252573, 0.0154064275947974, 0.0208510166815324, 0.0176754346231314,
0.0176296702464377, 0.01808584587117, 0.0184482114318881, 0.0179540460804964,
0.0172054387638893, 0.0186435592467685, 0.0183226179107802, 0.0183442319676677,
0.0186738733252132, 0.0197702609494553, 0.0204285359857455, 0.0189093019186096,
0.0207614056972417, 0.0221922195760301, 0.00240617175649865,
0.00788575626634226, 0.01309841408011, 0.0157695717780358, 0.0156402333683254,
0.0162649757475833, 0.0155814665868539, 0.0165668899473124, 0.0162596037350689,
0.0168350415867981, 0.0154154128099543, 0.0150464847912372, 0.0161870630522126,
0.0156293605393382, 0.0166831626222356, 0.0159851624182969, 0.0181646017543342,
0.0154736802975027, 0.0147489845006063, 0.0172093148404027, 0.0173801189598905,
0.0179353887292393, 0.017586875624838, 0.0169123585500959, 0.0155822266067893,
0.0149761651657073, 0.0167448758587691, 0.0161082597966258, 0.0167303551270177,
0.0154917937591837, 0.015286011465188, 0.0136306070303869, 0.0152129210946259,
0.0159440585908669, 0.0161367409642245, 0.0164079686231546, 0.016492577386447,
0.0155465861608803, 0.0151072490270382, 0.0144004027929932, 0.0251766100192941,
0.0300583807652007, 0.0394582941939502, 0.0346703430162482, 0.0370259651104479,
0.0354306787130485, 0.0369234479028471, 0.0313956783546004, 0.0175071227813499,
0.0153811379243536, 0.0149988963637585, 0.0159494386911496, 0.0151254119566314,
0.0151138709885764, 0.0152175253675449, 0.015437458845637, 0.0147726989007463,
0.163043027137275, 0.0264129334017437, 0.0201436139554002, 0.0169325780144314,
0.0214876387664511, 0.0177445890793224, 0.0172606914733451, 0.017133563936406,
0.0185813515317095, 0.0180607614170281, 0.0187505760492009, 0.0169511102933927,
0.0178137558385877, 0.0184785698285323, 0.0201169476464591, 0.0220364679005246,
0.0212945438945016, 0.021660499704709, 0.0203399246100257, 0.021042755155702,
0.0207706911083365, 0.0191080416224264, 0.0188521002714409, 0.0176177277744931,
0.0170654092407268, 0.0167173964870618, 0.0169957130997929, 0.0173290930652611,
0.018723068602435, 0.0190819762151085, 0.0178334964691514, 0.0160768402598991,
0.0172883539665594, 0.0176745227466401, 0.0176229174238593, 0.0192486537177652,
0.0184480409967815, 0.016738809715337, 0.0152409794110955, 0.0164830717865742,
0.0156835503722764, 0.0146390833265517, 0.014437303236394, 0.0152291157019601,
0.0150381488303582, 0.0144744286877027, 0.0154780133881995, 0.0165875912852526,
0.0167586910540363, 0.016434923542878, 0.016619416098569, 0.0156371489941345,
0.0150586881991463, 9.20923636174885e-06, 1.20043248684906e-05,
0.0164035785954663, 0.0163922122870294, 0.0158218503562475, 0.0150495685631071,
0.0147135181693682, 0.0165419629891279, 0.015608161423137, 0.0152906026574993,
0.0157815980810835)), row.names = c(NA, -213L), class = "data.frame")
as.ts() converts a numeric vector (or matrix) to a ts object; it does not convert from one time-series class to another. When you call it on a data frame, every column is coerced to numeric, so the Date column collapses to its underlying count of days since 1970-01-01 (for example, as.numeric(as.Date("2019-10-01")) is 18170), which is why the values change. To convert between time-series classes you need the tsbox package. In your example, you can convert your data frame to a tsibble and then to a ts object (although why you would want to do that, I have no idea):
Oct_Apr |>
  tsibble::as_tsibble(index = "date") |>
  tsbox::ts_ts()
In general, ts objects are extremely limited; with daily data you would be better off using tsibble objects and the functions provided in the feasts and fable packages.
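For instance, a minimal sketch of that workflow (price_view comes from your data; the choice of an ARIMA model is illustrative, and model(), ARIMA(), and forecast() come from fable/fabletools):

library(dplyr)
library(tsibble)
library(fable)

ts_data <- Oct_Apr |> as_tsibble(index = date)

# Fit on the training window (Oct 2019 - Mar 2020), then forecast April 2020.
fit <- ts_data |>
  filter(date <= as.Date("2020-03-31")) |>
  model(arima = ARIMA(price_view))

fit |> forecast(h = "30 days")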

Issue with converting R script into Shiny app

I need your support converting this R script into a Shiny app. I have been trying for quite some time but I keep getting errors. The code applies stochastic modeling to business valuation.
The inputs are:
the number of desired simulations, i.e. lapply(1:10000),
Sales volume,
Selling price,
Unit cost,
Fixed costs.
The outputs are:
Net profit (= SalesVolume * (SellingPrice - unitcost) - fixedcosts),
a histogram of Net profit.
The script is:
library(triangle)  # provides rtriangle()

mydata <- lapply(1:10000, function(i) {
  DU <- sample(x = 1:3, size = 1, replace = T)
  if (DU == 1) {
    SalesVolume <- 100000
    SellingPrice <- 8
  }
  if (DU == 2) {
    SalesVolume <- 75000
    SellingPrice <- 10
  }
  if (DU == 3) {
    SalesVolume <- 50000
    SellingPrice <- 11
  }
  unitcost <- rtriangle(1, 5.5, 7.5)
  fixedcosts <- 120000
  NetProfit <- SalesVolume * (SellingPrice - unitcost) - fixedcosts  # last value, returned by the function
})
NetProfit <- unlist(mydata)
summary(NetProfit)
par(mfrow = c(1, 1))
hist(NetProfit)
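A minimal sketch of one way to wire this simulation into Shiny (the input set and layout are illustrative, and it assumes the triangle package for rtriangle()):

library(shiny)
library(triangle)

ui <- fluidPage(
  titlePanel("Stochastic business valuation"),
  sidebarLayout(
    sidebarPanel(
      numericInput("nsim", "Number of simulations", 10000, min = 1),
      numericInput("fixedcosts", "Fixed costs", 120000)
    ),
    mainPanel(
      verbatimTextOutput("summary"),
      plotOutput("hist")
    )
  )
)

server <- function(input, output) {
  # Re-runs the Monte Carlo simulation whenever an input changes.
  net_profit <- reactive({
    sapply(seq_len(input$nsim), function(i) {
      DU <- sample(1:3, size = 1)
      SalesVolume  <- c(100000, 75000, 50000)[DU]
      SellingPrice <- c(8, 10, 11)[DU]
      unitcost <- rtriangle(1, 5.5, 7.5)
      SalesVolume * (SellingPrice - unitcost) - input$fixedcosts
    })
  })
  output$summary <- renderPrint(summary(net_profit()))
  output$hist <- renderPlot(hist(net_profit(), main = "Net profit", xlab = "Net profit"))
}

shinyApp(ui = ui, server = server)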

Ignite ML with multiple preprocessing

Using Ignite machine learning, say I have a labeled dataset like this:
IgniteCache<Integer, LabeledVector<Integer>> contents = ignite.createCache(cacheConfiguration);
contents.put(1, new LabeledVector<Integer>(new DenseVector(new Serializable[] { 705.2, "HD", 29.97, 1, 1, 96.13 }), 2));
contents.put(2, new LabeledVector<Integer>(new DenseVector(new Serializable[] { 871.3, "HD", 30, 1, 1, 95.35 }), 3));
contents.put(3, new LabeledVector<Integer>(new DenseVector(new Serializable[] { 2890.2, "SD", 29.97, 1, 1, 95.65 }), 10));
contents.put(4, new LabeledVector<Integer>(new DenseVector(new Serializable[] { 1032, "SD", 29.97, 1, 1, 96.8 }), 4));
How would I use the NormalizationTrainer on features 0 and 5, but the EncoderTrainer on feature 1? I think I'm having difficulty understanding how to chain multiple preprocessors before finally feeding the model trainer.
What I currently have is this (modified Ignite sample):
Vectorizer<Integer, LabeledVector<Integer>, Integer, Integer> vectorizer = new LabeledDummyVectorizer<Integer, Integer>(0, 5);
Preprocessor<Integer, LabeledVector<Integer>> preprocessor1 = new NormalizationTrainer<Integer, LabeledVector<Integer>>().withP(1).fit(ignite, data, vectorizer);
Preprocessor<Integer, LabeledVector<Integer>> preprocessor2 = new EncoderTrainer<Integer, LabeledVector<Integer>>().withEncoderType(EncoderType.STRING_ENCODER).withEncodedFeature(1).fit(ignite, data, preprocessor1);
KNNClassificationTrainer trainer = new KNNClassificationTrainer();
KNNClassificationModel mdl = trainer.fit(ignite, data, preprocessor2);
Do I understand multiple preprocessors correctly? If so, how would I add another BinarizationTrainer on feature 2? I think I'm getting confused about where to specify which feature to apply each preprocessing trainer to. For one trainer (NormalizationTrainer) I have to use the Vectorizer to say which features to use; for the EncoderTrainer I can do this with a method call. How would I then add a BinarizationTrainer with another Vectorizer?
One preprocessor builds on top of another, and coordinates are relative to the preprocessor that comes before, so you chain trainers rather than supplying a second Vectorizer. This example shows how to accomplish what you want to do:
https://github.com/apache/ignite/blob/master/examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_6_KNN.java
Put a breakpoint here to see how the string encoder references coordinates: https://github.com/apache/ignite/blob/eabe50d90d5db2d363da36393cd957ff54a18d90/modules/ml/src/main/java/org/apache/ignite/ml/preprocessing/encoding/EncoderTrainer.java#L93
Then examine the variables:
UpstreamEntry<K, V> entity = upstream.next(); //this is the row from the file
LabeledVector<Double> row = basePreprocessor.apply(entity.getKey(), entity.getValue()); //after the previous preprocessor has been applied
categoryFrequencies = calculateFrequencies(row, categoryFrequencies); //use the given coordinates to calculate results.
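Following the same pattern, here is a sketch of adding a third step on top of preprocessor2. It assumes BinarizationTrainer from org.apache.ignite.ml.preprocessing.binarization and its withThreshold() setter; the threshold value is illustrative, and you should check the trainer's setters to see which features it transforms, since coordinates refer to the row as already transformed by the earlier steps:

// Each fit() receives the previous preprocessor, so the new step sees the
// row after normalization and string encoding have been applied.
Preprocessor<Integer, LabeledVector<Integer>> preprocessor3 =
        new BinarizationTrainer<Integer, LabeledVector<Integer>>()
                .withThreshold(30.0) // illustrative threshold
                .fit(ignite, data, preprocessor2);

KNNClassificationTrainer trainer = new KNNClassificationTrainer();
KNNClassificationModel mdl = trainer.fit(ignite, data, preprocessor3);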
More about preprocessing: https://apacheignite.readme.io/docs/preprocessing
Alternatively, you can use the Pipeline API for a more streamlined approach to preprocessing: https://apacheignite.readme.io/docs/pipeline-api

OCR confidence score from Google Vision API

I am using Google Vision OCR to extract text from images in Python, using the following code snippet.
However, the confidence score always shows 0.0, which is definitely incorrect.
How do I extract the OCR confidence score for an individual character or word from the Google response?
import cv2
from google.cloud import vision
from google.cloud.vision import types

client = vision.ImageAnnotatorClient()

content = cv2.imencode('.jpg', cv2.imread(file_name))[1].tostring()
img = types.Image(content=content)
response1 = client.text_detection(image=img, image_context={"language_hints": ["en"]})
response_annotations = response1.text_annotations
for x in response1.text_annotations:
    print(x)
    print(f'confidence:{x.confidence}')
Example output for one iteration:
description: "Date:"
bounding_poly {
vertices {
x: 127
y: 11
}
vertices {
x: 181
y: 10
}
vertices {
x: 181
y: 29
}
vertices {
x: 127
y: 30
}
}
confidence:0.0
I managed to reproduce your issue. I used the following function and obtained confidence 0.0 for all items.
from google.cloud import vision

def detect_text_uri(uri):
    client = vision.ImageAnnotatorClient()
    image = vision.types.Image()
    image.source.image_uri = uri
    response = client.text_detection(image=image)
    texts = response.text_annotations
    print('Texts:')
    for text in texts:
        print('\n"{}"'.format(text.description))
        vertices = (['({},{})'.format(vertex.x, vertex.y)
                     for vertex in text.bounding_poly.vertices])
        print('bounds: {}'.format(','.join(vertices)))
        print("confidence: {}".format(text.confidence))
    if response.error.message:
        raise Exception(
            '{}\nFor more info on error messages, check: '
            'https://cloud.google.com/apis/design/errors'.format(
                response.error.message))
However, when using the same image with the "Try the API" option in the documentation, I obtained non-zero confidences. The same happened when detecting text from a local image.
One would expect both methods to return the same confidence values. I've opened an issue tracker, check it here.
Here is working code that retrieves the correct confidence values from the Vision OCR response (using document_text_detection() instead of text_detection()):
def detect_document(path):
    """Detects document features in an image."""
    from google.cloud import vision
    import io
    client = vision.ImageAnnotatorClient()

    # [START vision_python_migration_document_text_detection]
    with io.open(path, 'rb') as image_file:
        content = image_file.read()

    image = vision.types.Image(content=content)

    response = client.document_text_detection(image=image)

    for page in response.full_text_annotation.pages:
        for block in page.blocks:
            print('\nBlock confidence: {}\n'.format(block.confidence))

            for paragraph in block.paragraphs:
                print('Paragraph confidence: {}'.format(
                    paragraph.confidence))

                for word in paragraph.words:
                    word_text = ''.join([
                        symbol.text for symbol in word.symbols
                    ])
                    print('Word text: {} (confidence: {})'.format(
                        word_text, word.confidence))

                    for symbol in word.symbols:
                        print('\tSymbol: {} (confidence: {})'.format(
                            symbol.text, symbol.confidence))

    if response.error.message:
        raise Exception(
            '{}\nFor more info on error messages, check: '
            'https://cloud.google.com/apis/design/errors'.format(
                response.error.message))
    # [END vision_python_migration_document_text_detection]
# [END vision_fulltext_detection]

# add your own path
path = "gocr_vision.png"
detect_document(path)

Initialize custom weights in deeplearning4j

I'm trying to implement something like this https://www.youtube.com/watch?v=Fp9kzoAxsA4 which is a GANN (Genetic Algorithm Neural Network), using the DL4J library.
Genetic learning variables:
Genes: the creature's neural network weights.
Fitness: total distance moved.
Neural network layers for every creature:
Input layer: 5 sensors, each 1 if there's a wall in the sensor's direction and 0 if not.
Output layer: linear output that maps to the angle of the creature.
This is my createBrain method for the creature object:
private void createBrain() {
    Layer inputLayer = new DenseLayer.Builder()
            // 5 eye sensors
            .nIn(5)
            .nOut(5)
            // How do I initialize custom weights using creature genes (this.genes)?
            // .weightInit(WeightInit.ZERO)
            .activation(Activation.RELU)
            .build();
    Layer outputLayer = new OutputLayer.Builder()
            .nIn(5)
            .nOut(1)
            .activation(Activation.IDENTITY)
            .lossFunction(LossFunctions.LossFunction.MSE)
            .build();
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(6)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .iterations(1)
            .learningRate(0.006)
            .updater(Updater.NESTEROVS).momentum(0.9)
            .list()
            .layer(0, inputLayer)
            .layer(1, outputLayer)
            .pretrain(false).backprop(true)
            .build();
    this.brain = new MultiLayerNetwork(conf);
    this.brain.init();
}
In case it helps, I have pushed the code to this repo:
https://github.com/kareem3d/GeneticNeuralNetwork
And this is the Creature class:
https://github.com/kareem3d/GeneticNeuralNetwork/blob/master/src/main/java/com/mycompany/gaan/Creature.java
I'm a machine learning student, so if you see any obvious mistakes please let me know. Thanks :)
I don't know whether you can set weights in the layer configuration (I couldn't see it in the API docs), but you can get and set network parameters after initializing the model.
To set them individually per layer, you can follow this example:
Iterator<Map.Entry<String, INDArray>> paramap_iterator =
        convolutionalEncoder.paramTable().entrySet().iterator();
while (paramap_iterator.hasNext()) {
    Map.Entry<String, INDArray> me = paramap_iterator.next();
    System.out.println(me.getKey()); // print key
    System.out.println(Arrays.toString(me.getValue().shape())); // print shape of INDArray
    convolutionalEncoder.setParam(me.getKey(), Nd4j.rand(me.getValue().shape())); // set some random values
}
If you want to set all parameters of the network at once, you can use setParams() and params(), for example:
INDArray all_params = convolutionalEncoder.params();
convolutionalEncoder.setParams(Nd4j.rand(all_params.shape())); // set random values with the same shape
You can check the API for more information:
https://deeplearning4j.org/doc/org/deeplearning4j/nn/api/Model.html#params--
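Applied to the question's GANN setup, a minimal sketch (it assumes this.genes is a flat double[] with one entry per network parameter, laid out in the same order as params(), i.e. weights and biases layer by layer):

// Overwrite every weight and bias in the network with the creature's genes.
if (this.genes.length != this.brain.numParams()) {
    throw new IllegalStateException("Gene count does not match parameter count");
}
this.brain.setParams(Nd4j.create(this.genes)); // flat row vector, same layout as params()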
It worked for me:
int inputNum = 4;
int outputNum = 3;
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .seed(123)
        .layer(new EmbeddingLayer.Builder()
                .nIn(inputNum) // Number of input datapoints.
                .nOut(8) // Number of output datapoints.
                .activation(Activation.RELU) // Activation function.
                .weightInit(WeightInit.XAVIER) // Weight initialization.
                .build())
        .list()
        .layer(new DenseLayer.Builder()
                .nIn(inputNum) // Number of input datapoints.
                .nOut(8) // Number of output datapoints.
                .activation(Activation.RELU) // Activation function.
                .weightInit(WeightInit.XAVIER) // Weight initialization.
                .build())
        .layer(new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                .nIn(8)
                .nOut(outputNum)
                .activation(Activation.SOFTMAX)
                .weightInit(WeightInit.XAVIER)
                .build())
        .pretrain(false).backprop(false)
        .build();

MultiLayerNetwork multiLayerNetwork = new MultiLayerNetwork(conf);
multiLayerNetwork.init();

Map<String, INDArray> paramTable = multiLayerNetwork.paramTable();
Set<String> keys = paramTable.keySet();
Iterator<String> it = keys.iterator();
while (it.hasNext()) {
    String key = it.next();
    INDArray values = paramTable.get(key);
    System.out.print(key + " "); // print keys
    System.out.println(Arrays.toString(values.shape())); // print shape of INDArray
    System.out.println(values);
    multiLayerNetwork.setParam(key, Nd4j.rand(values.shape())); // set some random values
}
