How to convert UTC to the client's time zone in Plotly

I have done some digging, but I could not find an answer. Is there a way to have Plotly convert datetimes to the client's time zone (also respecting daylight saving time)?
I am combining data from different sources in my graph; some of it is in UTC format (e.g., 2021-01-06T06:00:00Z or 2021-01-06T06:00:00+00:00) and some is already in the client's time zone (e.g., 2021-01-06 08:00:00).
As a result, the two are not aligned.
Here is an example of what I am sending to Plotly (I have shortened it quite a bit):
[
{
"x": [
"2021-06-16T09:38:00Z",
"2021-06-16T09:40:00Z",
"2021-06-16T09:42:00Z",
"2021-06-16T09:44:00Z",
],
"y": [
11.0725,
9.1375,
3.8775,
16.98625
],
"type": "scatter"
},
{
"x": [
"2021-06-16 11:00:07",
"2021-06-16 11:00:07"
],
"y": [
0.32065714285714289,
20.704228571428574
],
"type": "scatter",
"mode": "lines",
"line": {
"dash": "dot",
"color": "#f62447"
}
},
{
"x": [
"2021-06-16 11:00:07"
],
"y": [
22.334914285714289
],
"type": "scatter",
"mode": "text",
"text": [
"⚠"
],
"textfont": {
"size": "25",
"color": "#f62447"
}
}
]
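One workaround, sketched below under the assumption that the traces are assembled in the browser before being handed to Plotly.newPlot: rewrite each UTC timestamp as a local-time string first and let the browser's Date object apply the client's offset and DST rules (Plotly generally plots the timestamps as given). The helper toLocalPlotlyString and the traces variable are illustrative names, not Plotly APIs.

// `traces` stands for the array shown above, built in the browser before plotting.
declare const traces: Array<{ x: string[] }>;

// Convert an ISO-8601 UTC timestamp (e.g. "2021-06-16T09:38:00Z") into a
// local-time string of the "YYYY-MM-DD HH:mm:ss" form used by the other traces.
// The browser's Date object applies the client's UTC offset and DST rules.
function toLocalPlotlyString(utc: string): string {
  const d = new Date(utc);
  const pad = (n: number) => String(n).padStart(2, "0");
  return `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())} ` +
    `${pad(d.getHours())}:${pad(d.getMinutes())}:${pad(d.getSeconds())}`;
}

// Normalize only the x values that carry an explicit UTC marker ("Z" or an
// offset), leaving the already-local "2021-06-16 11:00:07" values untouched.
for (const trace of traces) {
  trace.x = trace.x.map((v) =>
    /(Z|[+-]\d{2}:\d{2})$/.test(v) ? toLocalPlotlyString(v) : v
  );
}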

Related

convert grib to geojson

I want to convert a grib2 file to a geojson with the following format:
{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "properties": { "ID": 0, "sigwaveht": 1.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 20.5, 77.559374979743737 ], [ 20.756756711040964, 77.5 ], [ 21.0, 77.426829270065582 ], [ 21.5, 77.426829270065582 ] ] } },
{ "type": "Feature", "properties": { "ID": 1, "sigwaveht": 1.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 17.5, 76.879518074163784 ], [ 18.0, 76.840000001907356 ], [ 18.555555592348554, 77.0 ], [ 18.555555592348554, 77.5 ] ] } },
{ "type": "Feature", "properties": { "ID": 2, "sigwaveht": 1.000000 }, "geometry": { "type": "LineString", "coordinates": [ [ 28.5, 76.732142838136269 ], [ 29.0, 76.634146323734484 ], [ 29.937500058207661, 77.0 ], [ 29.937500058207661, 77.5 ] ] } },
I can accomplish this by using ogr2ogr to convert a shapefile to a GeoJSON in this format, but what can I do to convert a GRIB2 to a GeoJSON of this format?
You can't convert a GRIB, which is a raster format, to GeoJSON, which is a vector format.
What do you expect to achieve? Vector data composed of points where each point is one of the pixels of the raster format?
If this is what you want, you will probably have to code it yourself; I don't think there are any standard tools for it. Just loop over the raster pixels and write one point feature for every pixel, as in the sketch below.
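A rough sketch of that loop, assuming the GRIB has already been decoded elsewhere (for example with wgrib2 or a GRIB reading library) into a value grid plus latitude/longitude arrays; the names values, lats, lons and the property sigwaveht are placeholders:

// Placeholder inputs: a decoded raster grid and its coordinate axes.
declare const values: number[][]; // values[row][col], e.g. significant wave height
declare const lats: number[];     // latitude of each row
declare const lons: number[];     // longitude of each column

type PixelFeature = {
  type: "Feature";
  properties: { ID: number; sigwaveht: number };
  geometry: { type: "Point"; coordinates: [number, number] };
};

// One Point feature per raster pixel, mirroring the properties layout above.
const features: PixelFeature[] = [];
let id = 0;
for (let row = 0; row < values.length; row++) {
  for (let col = 0; col < values[row].length; col++) {
    features.push({
      type: "Feature",
      properties: { ID: id++, sigwaveht: values[row][col] },
      geometry: { type: "Point", coordinates: [lons[col], lats[row]] },
    });
  }
}

console.log(JSON.stringify({ type: "FeatureCollection", features }));

Note that this produces one point per pixel rather than the LineString contours shown in the question; deriving contour lines from the raster is a separate step (GDAL's gdal_contour produces output of that shape).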

Twilio IVR Speech Recognition

I'm new to developing IVRs with Twilio Studio, so I started with the basic template, and even that isn't working.
This is the log:
LOG
Split Based On...
DETAIL
Input evaluated to 'Sales.' from '{{widgets.gather_input.SpeechResult}}'
Transitioning to 'say_play_1' because 'Sales.' did not match any expression
The split is set to "Equal to" sales, which then connects the call to a number. It's obviously recognizing the correct speech input, but it still doesn't work. Any ideas?
{
"description": "IVR",
"states": [
{
"name": "Trigger",
"type": "trigger",
"transitions": [
{
"event": "incomingMessage"
},
{
"next": "gather_input",
"event": "incomingCall"
},
{
"event": "incomingRequest"
}
],
"properties": {
"offset": {
"x": 250,
"y": 50
}
}
},
{
"name": "gather_input",
"type": "gather-input-on-call",
"transitions": [
{
"next": "split_key_press",
"event": "keypress"
},
{
"next": "split_speech_result",
"event": "speech"
},
{
"event": "timeout"
}
],
"properties": {
"voice": "alice",
"speech_timeout": "auto",
"offset": {
"x": 290,
"y": 250
},
"loop": 1,
"hints": "support,sales",
"finish_on_key": "",
"say": "Hello, how can we direct your call? Press 1 for sales, or say sales. To reach support, press 2 or say support.",
"language": "en",
"stop_gather": false,
"gather_language": "en-US",
"profanity_filter": "false",
"timeout": 5
}
},
{
"name": "split_key_press",
"type": "split-based-on",
"transitions": [
{
"event": "noMatch"
},
{
"next": "connect_call_to_sales",
"event": "match",
"conditions": [
{
"friendly_name": "1",
"arguments": [
"{{widgets.gather_input.Digits}}"
],
"type": "equal_to",
"value": "1"
}
]
},
{
"next": "connect_call_to_support",
"event": "match",
"conditions": [
{
"friendly_name": "2",
"arguments": [
"{{widgets.gather_input.Digits}}"
],
"type": "equal_to",
"value": "2"
}
]
}
],
"properties": {
"input": "{{widgets.gather_input.Digits}}",
"offset": {
"x": 100,
"y": 510
}
}
},
{
"name": "split_speech_result",
"type": "split-based-on",
"transitions": [
{
"next": "say_play_1",
"event": "noMatch"
},
{
"next": "connect_call_to_sales",
"event": "match",
"conditions": [
{
"friendly_name": "sales",
"arguments": [
"{{widgets.gather_input.SpeechResult}}"
],
"type": "equal_to",
"value": "sales"
}
]
},
{
"next": "connect_call_to_support",
"event": "match",
"conditions": [
{
"friendly_name": "support",
"arguments": [
"{{widgets.gather_input.SpeechResult}}"
],
"type": "equal_to",
"value": "support"
}
]
}
],
"properties": {
"input": "{{widgets.gather_input.SpeechResult}}",
"offset": {
"x": 510,
"y": 510
}
}
},
{
"name": "connect_call_to_sales",
"type": "connect-call-to",
"transitions": [
{
"event": "callCompleted"
}
],
"properties": {
"offset": {
"x": 100,
"y": 750
},
"caller_id": "{{contact.channel.address}}",
"noun": "number",
"to": "12222222",
"timeout": 30
}
},
{
"name": "connect_call_to_support",
"type": "connect-call-to",
"transitions": [
{
"event": "callCompleted"
}
],
"properties": {
"offset": {
"x": 520,
"y": 750
},
"caller_id": "{{contact.channel.address}}",
"noun": "number",
"to": "12222222",
"timeout": 30
}
},
{
"name": "say_play_1",
"type": "say-play",
"transitions": [
{
"next": "gather_input",
"event": "audioComplete"
}
],
"properties": {
"offset": {
"x": 710,
"y": 200
},
"loop": 1,
"say": "not valid choice."
}
}
],
"initial_state": "Trigger",
"flags": {
"allow_concurrent_calls": true
}
}
Twilio developer evangelist here.
That is weird behaviour, mainly because the log says "evaluated to 'Sales.'". Split widget conditions are not case-sensitive and should trim leading and trailing whitespace, yet for some reason this value kept a capital "S" and a full stop.
I would suggest a couple of things. Firstly, raise a ticket with Twilio support to look into why the condition didn't match correctly.
Then, try some of the other conditions. When I generate a new IVR template in Studio, the conditions use "Matches any of" instead of "Equal to". You might also try "Contains".
So, experiment with the different matching operators, but get support involved to drill down into why it didn't work in the first place.
The period is needed after the word for some reason, so I just list both variants, for example:
"1, 1., Uno, Uno., Una, Una., Uno, Uno., Español, Español., Español, Español., Español, Español."

Getting error object field starting or ending with a [.]

While inserting the document below into an Elasticsearch index:
{
"id": "122223334444",
"name": "Mei",
"url": "mei-2019-tamil",
"alternate_urls": [
"mei-2019-tamil",
"sa-baskaran-aishwarya-rajesh-untitled"
],
"type": "Movie",
"poster": "ed3e439b-1ac1-45fe-a915-a5dae60257df",
"poster_url": "//assets.appserver.com/ed3e439b-1ac1-45fe-a915-a5dae60257df",
"alternate_names": [
"Mei",
"SA Baskaran - Aishwarya Rajesh Untitled"
],
"popularity": 0.2,
"info": {
"running_time": 0,
"cpl_types": [
"teaser",
"feature"
],
"has_cpls": true,
"genres": [
"Drama",
"Thriller"
],
"international_release_date": null,
"country_specific_release_dates": {},
"international_film_status": "CS",
"country_specific_film_statuses": {
"IN": "CS",
"CN": "CS",
"": "CS",
"SG": "CS"
},
"country_specific_certifications": {},
"language": "Tamil",
"synopsis": "A thriller film directed by SA Baskaran, starring Aishwarya Rajesh in the lead role.",
"schedules": {
"cities": [],
"countries": []
},
"featured": 0,
"movie_rating": 0,
"cast": [
{
"id": "05ffe715-db60-4947-a45a-99722537571c",
"name": "Aishwarya Rajesh",
"url": "aishwarya-rajesh",
"role": "Actress",
"poster": "65ab15b6-d54a-4965-95d5-38a839cee17d",
"poster_url": "//assets.appserver.com/65ab15b6-d54a-4965-95d5-38a839cee17d",
"type": "Person"
}
],
"crew": [
{
"id": "d9354648-5f48-4bf0-9a00-3de8c4d7a8d0",
"name": "SA Baskaran",
"url": "sa-baskaran",
"role": "Director",
"poster": null,
"poster_url": null,
"type": "Person"
}
]
},
"published": true
}
I'm getting the following message:
Error: object field starting or ending with a [.] makes object
resolution ambiguous:
However, there is no field that starts with a period.
I have no idea which key is causing this issue.
Please help me identify and fix this issue.
ElasticSearch Version: 5.6.14
I am trying to index into ES from a Rails app using the chewy gem.
JSON values may be empty, but if a key is empty, ES throws an error while indexing. Remove the empty key (here the "": "CS" entry under country_specific_film_statuses) from the JSON and index again.
Reference: https://discuss.elastic.co/t/object-field-starting-or-ending-with-a-makes-object-resolution-ambiguous/123351
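The same cleanup can be automated before indexing, whatever language builds the document (the poster's Rails/chewy pipeline would do the equivalent walk on the hash). A minimal sketch, assuming the document is available as a plain object:

// Recursively drop keys that are empty (like the "": "CS" entry under
// country_specific_film_statuses) or that start/end with a dot, both of which
// make field resolution ambiguous for Elasticsearch.
function stripBadKeys(value: unknown): unknown {
  if (Array.isArray(value)) {
    return value.map(stripBadKeys);
  }
  if (value !== null && typeof value === "object") {
    const cleaned: Record<string, unknown> = {};
    for (const [key, child] of Object.entries(value as Record<string, unknown>)) {
      const bad = key.trim() === "" || key.startsWith(".") || key.endsWith(".");
      if (!bad) {
        cleaned[key] = stripBadKeys(child);
      }
    }
    return cleaned;
  }
  return value;
}

// Usage: const safeDoc = stripBadKeys(movieDoc); // movieDoc stands for the JSON above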

CEP-generated measurement/event not in UTC time but in local time

All measurements come with a timestamp (event time) of when the measurement was created. Some of these measurements are artificial, meaning that they are not created by the device itself but by a CEP rule running inside the CoT.
The "normal" measurements have their time encoded as UTC:
[{
"id": "12704547",
"data": {
"data": {
"time": "2016-07-25T15:24:11.000Z",
"id": "1152930",
"self": "http://testTenant.c8y.com/measurement/measurements/1152930",
"source": {
"id": "222812",
"self": "http://testTenant.c8y.com/inventory/managedObjects/222812"
},
"type": "tsystems_cumulocity_energymeter_digital_ping",
"Energieverbrauch": {
"Ping": {
"unit": "Wh",
"value": 1
}
}
},
"realtimeAction": "CREATE"
},
"channel": "/measurements/222812"
}, {
"successful": true,
"channel": "/meta/connect"
}]
But the "artificial" measurements (created by the CEP rule) use a timestamp with local time
[{
"id": "12704578",
"data": {
"data": {
"time": "2016-07-25T17:24:00.952+02:00",
"id": "1152931",
"self": "http://testTenant.c8y.com/measurement/measurements/1152931",
"source": {
"id": "222812",
"self": "http://testTenant.c8y.com/inventory/managedObjects/222812"
},
"type": "tsystems_cumulocity_energymeter_power_consumption",
"Leistung": {
"Aggregation_1min": {
"unit": "W",
"value": 900
}
}
},
"realtimeAction": "CREATE"
},
"channel": "/measurements/222812"
}]
The measurements from one device should always be encoded with the same time zone (UTC preferred), as differing time zones can create problems in clients using that data.
I create the 'time' in the CEP rule with:
current_timestamp().toDate() as time
Please use:
com.cumulocity.model.util.DateTimeUtils.newUTC(current_timestamp().toDate()) as dateTime,
instead of
current_timestamp().toDate() as time
in your CEP rule.
Best regards,
Arkadiusz
Cumulocity Support Team
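On the consuming side, both formats shown above are valid ISO 8601 and can be normalized to UTC regardless of the offset they were stored with. A small sketch; the measurement variable is a placeholder for one entry of the responses above:

// Placeholder for one entry of the realtime responses shown above.
declare const measurement: { data: { data: { time: string } } };

// Both "2016-07-25T15:24:11.000Z" and "2016-07-25T17:24:00.952+02:00" parse
// unambiguously; toISOString() renders the instant in UTC with a trailing "Z",
// e.g. the local-time example above becomes "2016-07-25T15:24:00.952Z".
const utcTime = new Date(measurement.data.data.time).toISOString();
console.log(utcTime);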

Highstock - set extremes at export

I have a Highstock graph with dual pane (top & bottom). I understand that the range selector will read from the xAxis extremes to set the from and to date.
Every time I add a series, whether to the top or bottom pane, I have to invoke chart.xAxis[0].setExtremes(); in order for the range selector to "refresh" the From and To dates (if necessary).
Now, the problem comes when I have to do the same for my export module. How do I formulate the extremes into my options array before I pass it to my local export server? Currently, when I perform the export, the From/To date is occasionally different from what I have set using xAxis.min and xAxis.max.
Any help/advice is appreciated. Thank you.
UPDATE/EDIT:
I am currently using the following options to generate my chart. The chart's xAxis displays according to the xAxis min and max values 1315756800000 (12 Sep 2011) and 1338134400000 (28 May 2012). However, if I copy the following options and export them on http://export.highcharts.com/, the dates become 11 Sep 2011 to 27 May 2012. Any idea why?
{
"series": [
{
"data": [
{
"y": 101.3,
"x": 1315756800000
},
{
"y": 101.4228571428,
"x": 1316361600000
},
{
"y": 101.4657142857,
"x": 1316966400000
},
{
"y": 101.47,
"x": 1317571200000
},
{
"y": 101.4714285714,
"x": 1318176000000
},
{
"y": 101.4628571428,
"x": 1318780800000
},
{
"y": 101.4657142857,
"x": 1319385600000
},
{
"y": 101.4542857142,
"x": 1319990400000
},
{
"y": 101.4628571428,
"x": 1320595200000
},
{
"y": 101.4557142857,
"x": 1321200000000
},
{
"y": 101.4614285714,
"x": 1321804800000
},
{
"y": 101.45,
"x": 1322409600000
},
{
"y": 101.4414285714,
"x ": 1323014400000
},
{
"y": 101.44,
"x": 1323619200000
},
{
"y": 101.4514285714,
"x": 1324224000000
},
{
"y": 101.4328571428,
"x": 1324828800000
},
{
"y": 101.4214285714,
"x": 1325433600000
},
{
"y": 101.4614285714,
"x": 1326038400000
},
{
"y": 101.4257142857,
"x": 1326643200000
},
{
"y": 101.4742857142,
"x": 1327248000000
},
{
"y": 101.4785714285,
"x": 1327852800000
},
{
"y": 101.4357142857,
"x": 1328457600000
},
{
"y": 101.3085714285,
"x": 1329062400000
},
{
"y": 101.3071428571,
"x": 1329667200000
},
{
"y": 101.1742857142,
"x": 1330272000000
},
{
"y": 101.1285714285,
"x": 1330876800000
},
{
"y": 101.3242857142,
"x": 1331481600000
},
{
"y": 101.4042857142,
"x": 1332086400000
},
{
"y": 101.4742857142,
"x": 1332691200000
},
{
"y": 101.4771428571,
"x": 1333296000000
},
{
"y": 101.48,
"x": 1333900800000
},
{
"y": 101.4771428571,
"x": 1334505600000
},
{
"y": 101.4642857142,
"x": 1335110400000
},
{
"y": 101.4628571428,
"x": 1335715200000
},
{
"y": 101.4871428571,
"x": 1336320000000
},
{
"y": 101.4757142857,
"x": 1336924800000
},
{
"y": 101.4785714285,
"x": 1337529600000
},
{
"y": 101.475,
"x": 1338134400000
}
],
"color": "#5b0f00",
"type": "line",
"name": "series 1 (mRL)",
"yAxis": "testAxis",
"id": "testSeries1",
"turboThreshold": 3000
},
{
"name": "series 1 (mRL) Min Max Range",
"type": "errorbar",
"id": "primaryWATERLEVELmRLSeries_ErrorBar",
"linkedTo": "testSeries1",
"data": [
[
1315756800000,
101.3,
101.3
],
[
1316361600000,
101.36,
101.5
],
[
1316966400000,
101.44,
101.5
],
[
1317571200000,
101.45,
101.5
],
[
1318176000000,
101.46,
101.5
],
[
1318780800000,
101.45,
101.48
],
[
1319385600000,
101.45,
101.48
],
[
1319990400000,
101.44,
101.47
],
[
1320595200000,
101.44,
101.48
],
[
1321200000000,
101.44,
101.47
],
[
1321804800000,
101.44,
101.47
],
[
1322409600000,
101.42,
101.47
],
[
1323014400000,
101.43,
101.45
],
[
1323619200000,
101.42,
101.46
],
[
1324224000000,
101.44,
101.47
],
[
1324828800000,
101.42,
101.44
],
[
1325433600000,
101.41,
101.43
],
[
1326038400000,
101.45,
101.47
],
[
1326643200000,
101.4,
101.46
],
[
1327248000000,
101.45,
101.49
],
[
1327852800000,
101.46,
101.5
],
[
1328457600000,
101.38,
101.47
],
[
1329062400000,
101.27,
101.37
],
[
1329667200000,
101.28,
101.33
],
[
1330272000000,
101.14,
101.24
],
[
1330876800000,
101.09,
101.18
],
[
1331481600000,
101.21,
101.43
],
[
1332086400000,
101.37,
101.46
],
[
1332691200000,
101.42,
101.5
],
[
1333296000000,
101.47,
101.49
],
[
1333900800000,
101.47,
101.49
],
[
1334505600000,
101.46,
101.5
],
[
1335110400000,
101.42,
101.49
],
[
1335715200000,
101.43,
101.5
],
[
1336320000000,
101.48,
101.5
],
[
1336924800000,
101.46,
101.49
],
[
1337529600000,
101.47,
101.5
],
[
1338134400000,
101.46,
101.5
]
],
"zIndex": "1000",
"yAxis": "testAxis"
},
{
"color": "",
"type": "line",
"name": "series 2",
"yAxis": "testAxis",
"id": "testSeries",
"turboThreshold": 3000
}
],
"yAxis": [
{
"id": "testAxis",
"labels": {
"style": {
"font": "8px Helvetica",
"fontWeight": "normal"
}
},
"title": {
"enabled": true,
"text": "axis label",
"style": {
"font": "8px Helvetica",
"fontWeight": "normal"
}
},
"top": "0%",
"height": "100%",
"opposite": true,
"onGraph": "primary"
}
],
"legend": {
"enabled": true,
"itemStyle": {
"font": "11px Helvetica",
"fontWeight": "normal"
}
},
"xAxis": {
"min": 1315756800000,
"max": 1338134400000
},
"navigator": {
"enabled": true
},
"credits": {
"enabled": false
}
}
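For the "formulate the extremes into my options" part of the question, here is a minimal sketch assuming the standard exporting module, where chart.exportChart accepts a second chartOptions argument that is merged over the chart's configuration before it is sent to the export server. Whether this also explains the one-day shift is a separate question; one thing worth checking is whether the export server renders with a different useUTC setting than the page, since timestamps at midnight UTC can display as the previous day in a local-time context.

// `chart` stands for the existing Highstock chart instance (placeholder typing).
declare const chart: {
  xAxis: Array<{ setExtremes: (min?: number, max?: number) => void }>;
  exportChart: (exportingOptions?: object, chartOptions?: object) => void;
};

const min = 1315756800000; // 12 Sep 2011
const max = 1338134400000; // 28 May 2012

// Refresh the range selector after series are added, as described above...
chart.xAxis[0].setExtremes(min, max);

// ...and pass the same extremes explicitly in the chartOptions override, so the
// exported chart does not fall back to extremes derived from the series data.
chart.exportChart(undefined, {
  xAxis: [{ min, max }]
});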
