Use case:
I want to automatically test all sitemap URLs (more than 1000) of our website after each new code release, to see whether the update broke any of them.
My problem:
The test fails as soon as a URL returns anything other than status 200 (even with failOnStatus: false). Instead, I need to collect the results (in a sort of callback), check them only at the end, and fail the test then, so that all broken links are reported.
My code:
describe('Validate sitemaps files', () => {
  let urls = [];
  let failed = [];

  it("Should successfully load each url in the sitemap", () => {
    cy.fixture('sitemaps.json').then((data) => {
      for (var index in data) {
        cy.log(data[index].url)
        cy.request({
          //url: Cypress.config().baseUrl + data[index].url, failOnStatusCode: false,
          url: data[index].url, failOnStatus: false,
          headers: {
            "Content-Type": "text/xml; charset=utf-8",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36",
          },
        }).as("sitemap").then((response) => {
          urls = Cypress.$(response.body)
            .find("loc")
            .toArray()
            .map((el) => el.innerText)
        })
      }
      urls.forEach((url) => {
        // check if the resource exists
        cy.request(url).its('status').then(status => {
          if (status !== 200) {
            failed.push(url)
          }
        }).then(() => {
          // check inside then to ensure loop has finished
          cy.log('Failed links: ' + `${failed.join(', ')}`)
          expect(failed.length).to.eq(0)
        })
        cy.wrap('passed').as('ctrl')
      })
    })
  })
})
Fixture (just an example to test my code):
[
  { "url": "https://gooogle.com" },
  { "url": "https://browserstack.com/notfound" },
  { "url": "https://yahoo.com" },
  { "url": "https://browserstack.com" }
]
Test result: (screenshot of the failing run omitted)
I have already seen this answer ==> Google, but with no success so far.
Any help is much appreciated.
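One way to get the behaviour described above is to disable Cypress's built-in status assertion per request (the option is failOnStatusCode, not failOnStatus) and collect failures yourself, asserting only once at the end. A minimal sketch, assuming the flat fixture shape shown above:

    describe('Validate sitemap URLs', () => {
      it('loads every URL in the sitemap without errors', () => {
        const failed = [];

        cy.fixture('sitemaps.json').then((data) => {
          // assumed fixture shape: [{ "url": "https://..." }, ...] as in the example above
          data.forEach((entry) => {
            cy.request({ url: entry.url, failOnStatusCode: false }).then((response) => {
              if (response.status !== 200) {
                failed.push(entry.url + ' -> ' + response.status);
              }
            });
          });
        }).then(() => {
          // runs only after all the cy.request commands queued above have resolved
          cy.log('Failed links: ' + failed.join(', '));
          expect(failed, failed.join(', ')).to.have.length(0);
        });
      });
    });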
I am sending nginx logs to Elasticsearch using Filebeat and Logstash. My logs have the following form:
000.000.000.000 - - [17/Oct/2022:08:25:18 +0000] "OPTIONS /favicon.svg HTTP/1.1" 405 559 "https://example.net/auth/login" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36" "111.111.111.111, 222.222.222.222"
I have the following configuration file for logstash:
input {
  beats {
    port => 5035
  }
}

filter {
  grok {
    match => [ "message" , "%{COMBINEDAPACHELOG}+%{GREEDYDATA:http_x_forwarded_for}" ]
  }
  mutate {
    convert => ["response", "integer"]
    convert => ["bytes", "integer"]
    convert => ["responsetime", "float"]
  }
  geoip {
    source => "clientip"
    target => "geoip"
    add_tag => [ "nginx-geoip" ]
  }
  date {
    match => [ "timestamp" , "dd/MMM/YYYY:HH:mm:ss Z" ]
  }
  useragent {
    source => "message"
  }
}

output {
  elasticsearch {
    hosts => "elasticsearch:9200"
    index => "weblogs-%{+YYYY.MM.dd}"
    document_type => "nginx_logs"
    user => "elastic"
    password => "changeme"
  }
  stdout { codec => rubydebug }
}
This pipeline saves the logs to elasticsearch in the following form:
"response" : 405,
"timestamp" : "17/Oct/2022:08:25:18 +0000",
"os_version" : "10",
"auth" : "-",
"verb" : "OPTIONS",
"clientip" : "000.000.000.000",
"httpversion" : "1.1",
"referrer" : "\"https://example.net/auth/login\"",
"geoip" : { },
"os" : "Windows",
"os_name" : "Windows",
"agent" : {
"version" : "7.17.6",
"hostname" : "0242869f2486",
"type" : "filebeat",
"id" : "4de3a108-35bf-4bd9-8b18-a5d8f9f2bc83",
"ephemeral_id" : "3a5f78b5-bae0-41f6-8d63-eea700df6c3c",
"name" : "0242869f2486"
},
"log" : {
"file" : {
"path" : "/var/log/nginx/access.log"
},
"offset" : 1869518
},
"bytes" : 559,
"ident" : "-",
"http_x_forwarded_for" : " \"111.111.111.111, 222.222.222.222\"",
"os_full" : "Windows 10",
"#timestamp" : "2022-10-17T08:25:18.000Z",
"request" : "/favicon.svg",
"device" : "Spider",
"name" : "favicon",
"input" : {
"type" : "log"
},
"host" : {
"name" : "0242869f2486"
},
"os_major" : "10",
"#version" : "1",
"message" : "000.000.000.000 - - [17/Oct/2022:08:25:18 +0000] \"OPTIONS /favicon.svg HTTP/1.1\" 405 559 \"https://example.net/auth/login\" \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36\" \"111.111.111.111, 222.222.222.222\"",
"tags" : [
"beats_input_codec_plain_applied",
"_geoip_lookup_failure"
]
However, my goal is to parse the first IP from the http_x_forwarded_for field, add a new field called real_client_ip, and save it to the index. Is there a way to achieve that?
You can add one more grok filter to your Logstash pipeline, after the first grok filter.
filter {
  grok {
    match => [ "message" , "%{COMBINEDAPACHELOG}+%{GREEDYDATA:http_x_forwarded_for}" ]
  }
  grok {
    match => [ "http_x_forwarded_for" , "%{IP:real_client_ip}" ]
  }
  mutate {
    convert => ["response", "integer"]
    convert => ["bytes", "integer"]
    convert => ["responsetime", "float"]
  }
  geoip {
    source => "clientip"
    target => "geoip"
    add_tag => [ "nginx-geoip" ]
  }
  date {
    match => [ "timestamp" , "dd/MMM/YYYY:HH:mm:ss Z" ]
  }
  useragent {
    source => "message"
  }
}
PS: I have validated the grok pattern in Kibana, but not by running the Logstash pipeline; it should work for your use case.
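For the sample log line above, the extra grok filter should then add a field like this to the indexed document (the surrounding whitespace and escaped quotes in http_x_forwarded_for stay as captured by the first grok; %{IP} simply grabs the first address it finds):

    "http_x_forwarded_for" : " \"111.111.111.111, 222.222.222.222\"",
    "real_client_ip" : "111.111.111.111"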
I need help fixing a null-safety error in this part of my Dart code:
if (selectedDatum.isNotEmpty) {
  time = selectedDatum.first.datum;
  selectedDatum.forEach((SeriesDatum datumPair) {
    measures[datumPair.series.displayName] = datumPair.datum;
  });
}
And this part, which gives an error about int?:
Series<double, int>(
  id: 'Gasto',
  colorFn: (_, __) => MaterialPalette.blue.shadeDefault,
  domainFn: (value, index) => index,
  measureFn: (value, _) => value,
  data: data,
  strokeWidthPxFn: (_, __) => 4,
)
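Without the exact error messages this is a guess, but with the null-safe charts_flutter API both snippets typically fail because a nullable value is used where a non-null one is expected: series.displayName is a String? and the index passed to domainFn is an int?. A minimal sketch of the usual fixes (the null guard and the ?? 0 fallback are assumptions about what your data allows):

    // 1) displayName is nullable, so guard it before using it as a map key
    //    (this simply skips series without a display name):
    if (selectedDatum.isNotEmpty) {
      time = selectedDatum.first.datum;
      for (final SeriesDatum datumPair in selectedDatum) {
        final name = datumPair.series.displayName;
        if (name != null) {
          measures[name] = datumPair.datum;
        }
      }
    }

    // 2) domainFn receives a nullable index (int?), so give it a non-null
    //    fallback (or use index! if the library always passes an index):
    Series<double, int>(
      id: 'Gasto',
      colorFn: (_, __) => MaterialPalette.blue.shadeDefault,
      domainFn: (value, index) => index ?? 0,
      measureFn: (value, _) => value,
      data: data,
      strokeWidthPxFn: (_, __) => 4,
    )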
I'm struggling a bit trying to integrate OverlappingMarkerSpiderfier (https://github.com/jawj/OverlappingMarkerSpiderfier) with Google-Maps-for-Rails (https://github.com/apneadiving/Google-Maps-for-Rails).
In my map.html.haml:
= gmaps(:markers => {:data => @map, :options => { "rich_marker" => true, :raw => '{ animation: google.maps.Animation.DROP }' } }, :map_options => { :draggable => true, :auto_zoom => false, :zoom => 9, :disableDefaultUI => false, :scrollwheel => true, :disableDoubleClickZoom => true, :custom_infowindow_class => "province" })
- content_for :scripts do
  :javascript
    Gmaps.map.infobox = function(boxText) {
      return {
        content: boxText
        ,disableAutoPan: false
        ,maxWidth: 0
        ,pixelOffset: new google.maps.Size(-140, -50)
        ,alignBottom: true
        ,zIndex: 999
        ,hideCloseButton: false
        ,boxStyle: {
          background: "white"
          ,width: "280px"
          ,padding: "10px"
          ,border: "1px solid #b2b2b2"
          ,arrowStyle: 0
          ,arrowPosition: 50
          ,arrowSize: 20
        }
        ,infoBoxClearance: new google.maps.Size(10, 10)
        ,isHidden: false
        ,pane: "floatPane"
        ,enableEventPropagation: false
      }
    };
This works fine, but how would one integrate the spiderfier (https://github.com/jawj/OverlappingMarkerSpiderfier#how-to-use)?
This is an old question using an old version of Gmaps 4 Rails; however, for anyone interested in working with both libs in their latest versions, here's how to do it.
This example is based on the documentation sample code:
handler = Gmaps.build('Google');
handler.buildMap({ provider: {}, internal: { id: 'map' } }, function() {
  markers = handler.addMarkers([
    {
      "lat": 0,
      "lng": 0,
      "picture": {
        "url": "https://addons.cdn.mozilla.net/img/uploads/addon_icons/13/13028-64.png",
        "width": 36,
        "height": 36
      },
      "infowindow": "hello!"
    }
  ]);
  handler.bounds.extendWith(markers);
  handler.fitMapToBounds();

  // Create an OverlappingMarkerSpiderfier instance
  var oms = new OverlappingMarkerSpiderfier(handler.getMap(), {
    keepSpiderfied: true
    // Other options you need
  });

  // Track each marker with OMS
  _.each(markers, function(marker) {
    oms.addMarker(marker.getServiceObject());
  });
});
The important thing here is to get down to the native Google objects that Gmaps4Rails wraps inside its own classes.
If you need click events on your markers, add a single handler to oms, as explained in the docs.
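For reference, a minimal sketch of such a handler, using a single shared google.maps.InfoWindow (the 'hello!' content is a placeholder; wire in your own per-marker data):

    var iw = new google.maps.InfoWindow();

    // OMS takes over marker clicks: if markers overlap it spiderfies them first,
    // then delivers the click for the marker the user actually picked.
    oms.addListener('click', function(marker) {
      iw.setContent('hello!'); // placeholder content
      iw.open(handler.getMap(), marker);
    });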
I have a Term that may consist of anything: orddicts, lists of orddicts, orddicts of orddicts, lists of lists, proplists of orddicts, and so on, such as:
Term1 = [
  {"AAA", [
    { "K1", "ka1" },
    { "K2", "ka2" },
    { "K3", "ka3" }
  ]},
  {"BBB", "one"},
  {"CCC", [
    { "K1", "kb1" },
    { "K2", "" },
    { "K3", "kb3" }
  ]},
  {"DDD", [
    [
      { "K1", "kc1" },
      { "K2", "kc2" }
    ],
    [
      { "K1", "kd1" },
      { "K2", "kd2" }
    ],
    "CACETA",
    123
  ]},
  {"EEE", [
    { "K1", "kb1" },
    { "K2", 1122 },
    { "K3", "kb3" }
  ]},
  { "T1", "T2", "T3" },
  123,
  { "X" },
  [ 1, 2, 3, { "K5", "V5" } ]
],
I would need to produce a list of all proplists [{K,V},...] such as
[
  { "AAA" , [ ...... ] },
  { "K1" , "ka1" },
  { "K2" , "ka2" },
  ...
  { "BBB" , "one" },
  { "CCC" , [ ... ] },
  { "K1" , "kb1" },
  ...
  { "K5", "V5" }
]
Notice that some keys repeat along the list; their values may be a string, a list, a tuple, a number, anything.
The last items in the example data above, such as { "T1", "T2", "T3" }, should not be in the result, since they are not two-element { K, V } tuples, but the nested { "K5", "V5" } is and should be part of the result.
I looked at this similar solution and tried to tune it up a little bit, but it is getting hard for my novice Erlang brain to make it work with my scenario above.
Here is an example of what I am trying to use to make it work, but there are some pattern-matching-related errors:
extractPropList( [], ResultList ) -> ResultList;
extractPropList( [H|T], ResultList ) -> extractPropList(T, extractPropList(H, ResultList));
extractPropList( {K,V}, ResultList ) -> [ {K,V} | extractPropList(K, extractPropList(V, ResultList)) ].
While testing the above approach, the missing part turned out to be the last clause, which handles a term that matches nothing else (not a list, not a {K,V}):
extractPropLists( [], ResultList ) -> ResultList;
extractPropLists( [H|T], ResultList ) -> extractPropLists(T, extractPropLists(H, ResultList));
extractPropLists( {K,V}, ResultList ) -> [ {K,V} | extractPropLists(K, extractPropLists(V, ResultList)) ];
extractPropLists( T, ResultList ) -> ResultList.
Given data similar to the above, the results I obtained were:
[{"EEE",[{"K1","kb1"},{"K2",1122},{"K3","kb3"}]},
{"K3","kb3"},
{"K2",1122},
{"K1","kb1"},
{"DDD",
[[{"K1","kc1"},{"K2","kc2"}],
[{"K1","kd1"},{"K2","kd2"}],
"CACETA",123]},
{"K2","kd2"},
{"K1","kd1"},
{"K2","kc2"},
{"K1","kc1"},
{"CCC",[{"K1","kb1"},{"K2","kb2"},{"K3","kb3"}]},
{"K3","kb3"},
{"K2","kb2"},
{"K1","kb1"},
{"BBB","one"},
{"AAA",[{"K1","ka1"},{"K2","ka2"},{"K3","ka3"}]},
{"K3","ka3"},
{"K2","ka2"},
{"K1","ka1"}]